repo_name | path | copies | size | content | license
---|---|---|---|---|---|
coderfi/ansible-modules-extras | net_infrastructure/bigip_monitor_tcp.py | 33 | 16829 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# (c) 2013, serge van Ginderachter <[email protected]>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
DOCUMENTATION = '''
---
module: bigip_monitor_tcp
short_description: "Manages F5 BIG-IP LTM tcp monitors"
description:
- "Manages F5 BIG-IP LTM tcp monitors via iControl SOAP API"
version_added: "1.4"
author: Serge van Ginderachter
notes:
- "Requires BIG-IP software version >= 11"
- "F5 developed module 'bigsuds' required (see http://devcentral.f5.com)"
- "Best run as a local_action in your playbook"
- "Monitor API documentation: https://devcentral.f5.com/wiki/iControl.LocalLB__Monitor.ashx"
requirements:
- bigsuds
options:
server:
description:
- BIG-IP host
required: true
default: null
user:
description:
- BIG-IP username
required: true
default: null
password:
description:
- BIG-IP password
required: true
default: null
state:
description:
- Monitor state
required: false
default: 'present'
choices: ['present', 'absent']
name:
description:
- Monitor name
required: true
default: null
aliases: ['monitor']
partition:
description:
- Partition for the monitor
required: false
default: 'Common'
type:
description:
- The template type of this monitor template
required: false
default: 'tcp'
choices: ['tcp', 'tcp_echo', 'tcp_half_open']
parent:
description:
- The parent template of this monitor template
required: false
default: 'tcp'
choices: [ 'tcp', 'tcp_echo', 'tcp_half_open']
parent_partition:
description:
- Partition for the parent monitor
required: false
default: 'Common'
send:
description:
- The send string for the monitor call
required: false
default: none
receive:
description:
- The receive string for the monitor call
required: false
default: none
ip:
description:
- IP address part of the ipport definition. The default API setting
is "0.0.0.0".
required: false
default: none
port:
description:
- Port address part of the ipport definition. The default API
setting is 0.
required: false
default: none
interval:
description:
- The interval specifying how frequently the monitor instance
of this template will run. By default, this interval is used for up and
down states. The default API setting is 5.
required: false
default: none
timeout:
description:
- The number of seconds in which the node or service must respond to
the monitor request. If the target responds within the set time
period, it is considered up. If the target does not respond within
the set time period, it is considered down. Any value can be used,
but it should be three times the interval plus one second (for
example, 16 for the default interval of 5). The default API setting
is 16.
required: false
default: none
time_until_up:
description:
- Specifies the amount of time in seconds after the first successful
response before a node will be marked up. A value of 0 will cause a
node to be marked up immediately after a valid response is received
from the node. The default API setting is 0.
required: false
default: none
'''
EXAMPLES = '''
- name: BIGIP F5 | Create TCP Monitor
local_action:
module: bigip_monitor_tcp
state: present
server: "{{ f5server }}"
user: "{{ f5user }}"
password: "{{ f5password }}"
name: "{{ item.monitorname }}"
type: tcp
send: "{{ item.send }}"
receive: "{{ item.receive }}"
with_items: f5monitors-tcp
- name: BIGIP F5 | Create TCP half open Monitor
local_action:
module: bigip_monitor_tcp
state: present
server: "{{ f5server }}"
user: "{{ f5user }}"
password: "{{ f5password }}"
name: "{{ item.monitorname }}"
type: tcp_half_open
send: "{{ item.send }}"
receive: "{{ item.receive }}"
with_items: f5monitors-halftcp
- name: BIGIP F5 | Remove TCP Monitor
local_action:
module: bigip_monitor_tcp
state: absent
server: "{{ f5server }}"
user: "{{ f5user }}"
password: "{{ f5password }}"
name: "{{ monitorname }}"
with_flattened:
- f5monitors-tcp
- f5monitors-halftcp
'''
try:
import bigsuds
except ImportError:
bigsuds_found = False
else:
bigsuds_found = True
TEMPLATE_TYPE = DEFAULT_TEMPLATE_TYPE = 'TTYPE_TCP'
TEMPLATE_TYPE_CHOICES = ['tcp', 'tcp_echo', 'tcp_half_open']
DEFAULT_PARENT = DEFAULT_TEMPLATE_TYPE_CHOICE = DEFAULT_TEMPLATE_TYPE.replace('TTYPE_', '').lower()
# ===========================================
# bigip_monitor module generic methods.
# these should be re-useable for other monitor types
#
def bigip_api(bigip, user, password):
api = bigsuds.BIGIP(hostname=bigip, username=user, password=password)
return api
def check_monitor_exists(module, api, monitor, parent):
# hack to determine if monitor exists
result = False
try:
ttype = api.LocalLB.Monitor.get_template_type(template_names=[monitor])[0]
parent2 = api.LocalLB.Monitor.get_parent_template(template_names=[monitor])[0]
if ttype == TEMPLATE_TYPE and parent == parent2:
result = True
else:
module.fail_json(msg='Monitor already exists, but has a different type (%s) or parent(%s)' % (ttype, parent))
except bigsuds.OperationFailed, e:
if "was not found" in str(e):
result = False
else:
# genuine exception
raise
return result
def create_monitor(api, monitor, template_attributes):
try:
api.LocalLB.Monitor.create_template(templates=[{'template_name': monitor, 'template_type': TEMPLATE_TYPE}], template_attributes=[template_attributes])
except bigsuds.OperationFailed, e:
if "already exists" in str(e):
return False
else:
# genuine exception
raise
return True
def delete_monitor(api, monitor):
try:
api.LocalLB.Monitor.delete_template(template_names=[monitor])
except bigsuds.OperationFailed, e:
# maybe it was deleted since we checked
if "was not found" in str(e):
return False
else:
# genuine exception
raise
return True
def check_string_property(api, monitor, str_property):
try:
return str_property == api.LocalLB.Monitor.get_template_string_property([monitor], [str_property['type']])[0]
except bigsuds.OperationFailed, e:
# happens in check mode if not created yet
if "was not found" in str(e):
return True
else:
# genuine exception
raise
return True
def set_string_property(api, monitor, str_property):
api.LocalLB.Monitor.set_template_string_property(template_names=[monitor], values=[str_property])
def check_integer_property(api, monitor, int_property):
try:
return int_property == api.LocalLB.Monitor.get_template_integer_property([monitor], [int_property['type']])[0]
except bigsuds.OperationFailed, e:
# happens in check mode if not created yet
if "was not found" in str(e):
return True
else:
# genuine exception
raise
return True
def set_integer_property(api, monitor, int_property):
api.LocalLB.Monitor.set_template_int_property(template_names=[monitor], values=[int_property])
def update_monitor_properties(api, module, monitor, template_string_properties, template_integer_properties):
changed = False
for str_property in template_string_properties:
if str_property['value'] is not None and not check_string_property(api, monitor, str_property):
if not module.check_mode:
set_string_property(api, monitor, str_property)
changed = True
for int_property in template_integer_properties:
if int_property['value'] is not None and not check_integer_property(api, monitor, int_property):
if not module.check_mode:
set_integer_property(api, monitor, int_property)
changed = True
return changed
def get_ipport(api, monitor):
return api.LocalLB.Monitor.get_template_destination(template_names=[monitor])[0]
def set_ipport(api, monitor, ipport):
try:
api.LocalLB.Monitor.set_template_destination(template_names=[monitor], destinations=[ipport])
return True, ""
except bigsuds.OperationFailed, e:
if "Cannot modify the address type of monitor" in str(e):
return False, "Cannot modify the address type of monitor if already assigned to a pool."
else:
# genuine exception
raise
# ===========================================
# main loop
#
# writing a module for other monitor types should
# only need an updated main() (and monitor specific functions)
def main():
# begin monitor specific stuff
module = AnsibleModule(
argument_spec = dict(
server = dict(required=True),
user = dict(required=True),
password = dict(required=True),
partition = dict(default='Common'),
state = dict(default='present', choices=['present', 'absent']),
name = dict(required=True),
type = dict(default=DEFAULT_TEMPLATE_TYPE_CHOICE, choices=TEMPLATE_TYPE_CHOICES),
parent = dict(default=DEFAULT_PARENT),
parent_partition = dict(default='Common'),
send = dict(required=False),
receive = dict(required=False),
ip = dict(required=False),
port = dict(required=False, type='int'),
interval = dict(required=False, type='int'),
timeout = dict(required=False, type='int'),
time_until_up = dict(required=False, type='int', default=0)
),
supports_check_mode=True
)
server = module.params['server']
user = module.params['user']
password = module.params['password']
partition = module.params['partition']
parent_partition = module.params['parent_partition']
state = module.params['state']
name = module.params['name']
type = 'TTYPE_' + module.params['type'].upper()
parent = "/%s/%s" % (parent_partition, module.params['parent'])
monitor = "/%s/%s" % (partition, name)
send = module.params['send']
receive = module.params['receive']
ip = module.params['ip']
port = module.params['port']
interval = module.params['interval']
timeout = module.params['timeout']
time_until_up = module.params['time_until_up']
# tcp monitor has multiple types, so overrule
global TEMPLATE_TYPE
TEMPLATE_TYPE = type
# end monitor specific stuff
if not bigsuds_found:
module.fail_json(msg="the python bigsuds module is required")
api = bigip_api(server, user, password)
monitor_exists = check_monitor_exists(module, api, monitor, parent)
# ipport is a special setting
if monitor_exists: # make sure to not update current settings if not asked
cur_ipport = get_ipport(api, monitor)
if ip is None:
ip = cur_ipport['ipport']['address']
if port is None:
port = cur_ipport['ipport']['port']
else: # use API defaults if not defined to create it
if interval is None:
interval = 5
if timeout is None:
timeout = 16
if ip is None:
ip = '0.0.0.0'
if port is None:
port = 0
if send is None:
send = ''
if receive is None:
receive = ''
# define and set address type
if ip == '0.0.0.0' and port == 0:
address_type = 'ATYPE_STAR_ADDRESS_STAR_PORT'
elif ip == '0.0.0.0' and port != 0:
address_type = 'ATYPE_STAR_ADDRESS_EXPLICIT_PORT'
elif ip != '0.0.0.0' and port != 0:
address_type = 'ATYPE_EXPLICIT_ADDRESS_EXPLICIT_PORT'
else:
address_type = 'ATYPE_UNSET'
ipport = {'address_type': address_type,
'ipport': {'address': ip,
'port': port}}
template_attributes = {'parent_template': parent,
'interval': interval,
'timeout': timeout,
'dest_ipport': ipport,
'is_read_only': False,
'is_directly_usable': True}
# monitor specific stuff
if type == 'TTYPE_TCP':
template_string_properties = [{'type': 'STYPE_SEND',
'value': send},
{'type': 'STYPE_RECEIVE',
'value': receive}]
else:
template_string_properties = []
template_integer_properties = [{'type': 'ITYPE_INTERVAL',
'value': interval},
{'type': 'ITYPE_TIMEOUT',
'value': timeout},
{'type': 'ITYPE_TIME_UNTIL_UP',
'value': time_until_up}]
# main logic, monitor generic
try:
result = {'changed': False} # default
if state == 'absent':
if monitor_exists:
if not module.check_mode:
# possible race condition if same task
# on other node deleted it first
result['changed'] |= delete_monitor(api, monitor)
else:
result['changed'] |= True
else: # state present
## check for monitor itself
if not monitor_exists: # create it
if not module.check_mode:
# again, check changed status here b/c race conditions
# if other task already created it
result['changed'] |= create_monitor(api, monitor, template_attributes)
else:
result['changed'] |= True
## check for monitor parameters
# whether it already existed, or was just created, now update
# the update functions need to check for check mode but
# cannot update settings if it doesn't exist which happens in check mode
if monitor_exists and not module.check_mode:
result['changed'] |= update_monitor_properties(api, module, monitor,
template_string_properties,
template_integer_properties)
# else assume nothing changed
# we just have to update the ipport if monitor already exists and it's different
if monitor_exists and cur_ipport != ipport:
set_ipport(api, monitor, ipport)
result['changed'] |= True
#else: monitor doesn't exist (check mode) or ipport is already ok
except Exception, e:
module.fail_json(msg="received exception: %s" % e)
module.exit_json(**result)
# import module snippets
from ansible.module_utils.basic import *
main()
| gpl-3.0 |
s0undt3ch/sorbic | sorbic/db.py | 2 | 7598 | '''
Interface to interact on a database level
'''
# Import python libs
import os
import io
import shutil
# Import sorbic libs
import sorbic.ind.hdht
import sorbic.stor.files
import sorbic.utils.traverse
# Import third party libs
import msgpack
DB_OPTS = (
'key_delim',
'hash_limit',
'key_hash',
'fmt',
'fmt_map',
'header_len',
'serial')
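# Illustrative usage (not part of this module). The method names below come
# from this file; the key layout is an assumption:
#
#   db = DB('/tmp/testdb')
#   db.insert('users/alice', {'age': 30})    # store a 'doc' entry
#   doc = db.get('users/alice')              # -> {'age': 30}
#   both = db.get('users/alice', meta=True)  # -> {'data': ..., 'meta': ...}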
class DB(object):
'''
Database interface
'''
def __init__(
self,
root,
key_delim='/',
hash_limit=0xfffff,
key_hash='sha1',
fmt='>KsQH',
fmt_map=None,
header_len=1024,
serial='msgpack'):
self.root = root
self.key_delim = key_delim
self.hash_limit = hash_limit
self.key_hash = key_hash
self.fmt = fmt
self.fmt_map = fmt_map
self.header_len = header_len
self.serial = serial
self._get_db_meta()
self.index = sorbic.ind.hdht.HDHT(
self.root,
self.key_delim,
self.hash_limit,
self.key_hash,
self.fmt,
self.fmt_map,
self.header_len)
self.write_stor_funcs = self.__gen_write_stor_funcs()
self.read_stor_funcs = self.__gen_read_stor_funcs()
def __gen_write_stor_funcs(self):
'''
Return the storage write functions dict mapping to types
'''
return {'doc': self.index.write_doc_stor,
'file': sorbic.stor.files.write}
def __gen_read_stor_funcs(self):
'''
Return the storage read functions dict mapping to types
'''
return {'doc': self.index.read_doc_stor,
'file': sorbic.stor.files.read}
def _get_db_meta(self):
'''
Read in the database metadata to preserve the original behavior
from when the database was created
'''
db_meta = os.path.join(self.root, 'sorbic_db_meta.mp')
meta = {}
if os.path.isfile(db_meta):
with io.open(db_meta, 'rb') as fp_:
meta = msgpack.loads(fp_.read())
for entry in DB_OPTS:
meta[entry] = meta.get(entry, getattr(self, entry))
setattr(self, entry, meta[entry])
if not os.path.isdir(self.root):
os.makedirs(self.root)
with io.open(db_meta, 'w+b') as fp_:
fp_.write(msgpack.dumps(meta))
def _get_storage(self, entries, **kwargs):
stor = self.read_stor_funcs[entries['data']['t']](entries, self.serial, **kwargs)
return stor
def write_stor(self, table_entry, data, serial, type_):
'''
Write to the applicable storage type subsystem
'''
return self.write_stor_funcs[type_](
table_entry,
data,
serial)
def insert(self, key, data, id_=None, type_='doc', serial=None, **kwargs):
'''
Insert a key into the database
'''
c_key = self.index.raw_crypt_key(key)
table_entry = self.index.get_table_entry(key, c_key)
serial = serial if serial else self.serial
kwargs.update(self.write_stor(
table_entry,
data,
serial,
type_))
return self.index.commit(
table_entry,
key,
c_key,
id_,
type_,
**kwargs)
def get_meta(self, key, id_=None, count=None):
'''
Retrieve a meta entry
'''
return self.index.get_index_entry(key, id_, count)
def get(self, key, id_=None, meta=False, count=None, **kwargs):
'''
Retrieve a data entry
'''
entries = self.get_meta(key, id_, count)
if not entries:
return None
if count:
ret = []
for index_entry in entries['data']:
meta_entries = {'table': entries['table'], 'data': index_entry}
stor_ret = self._get_storage(meta_entries, **kwargs)
if meta:
ret.append({'data': stor_ret, 'meta': index_entry})
else:
ret.append(self._get_storage(meta_entries, **kwargs))
return ret
if not meta:
return self._get_storage(entries, **kwargs)
else:
ret = {}
ret['data'] = self._get_storage(entries, **kwargs)
ret['meta'] = entries
return ret
def compress(self, d_key=None, num=None):
'''
Compress a single given index, remove any associated data
'''
fn_root = self.root
if not d_key or d_key == self.key_delim:
pass
else:
fn_root = self.index.entry_root('{0}/blank'.format(d_key))
fn_ = os.path.join(fn_root, 'sorbic_table_{0}'.format(num))
trans_fn = os.path.join(fn_root, 'trans_table_{0}'.format(num))
if os.path.exists(trans_fn):
os.remove(trans_fn)
table = self.index.get_hash_table(fn_)
trans_table = self.index.get_hash_table(trans_fn)
table_entries = []
for entry in self.index._get_table_entries(fn_):
table_entries.append(entry)
for entry in table_entries:
self._compress_entry(entry, table, trans_table)
table['fp'].close()
trans_table['fp'].close()
self.index.tables.pop(fn_)
self.index.tables.pop(trans_fn)
shutil.move(trans_fn, fn_)
def _compress_entry(self, entry, table, trans_table):
'''
Read the table entries that should be kept from the given entry and
write them fresh to the trans table
'''
c_key = self.index.raw_crypt_key(entry['key'])
i_entries = self.index.get_index_entry(entry['key'], count=0xffffffff)
tte = {}
tte['tfn'] = trans_table['fp'].name
tte['key'] = i_entries['table']['key']
tte['prev'] = i_entries['table']['prev']
tte['pos'] = i_entries['table']['pos']
tte['rev'] = 0
keeps = []
for ind in reversed(range(len(i_entries['data']))):
i_entry = i_entries['data'][ind]
if i_entry['_status'] != 'k':
continue
keeps.append(i_entry)
for i_entry in keeps:
serial = i_entry.get('serial', self.serial)
get_entries = {'table': i_entries['table'], 'data': i_entry}
stor = self._get_storage(get_entries)
i_entry.update(self.write_stor(
tte,
stor,
serial,
i_entry.get('type', 'doc')))
kwargs = i_entry
key = kwargs.pop('key')
id_ = kwargs.pop('id')
if 'type' in kwargs:
type_ = kwargs.pop('type')
else:
type_ = 'doc'
self.index.commit(tte, key, c_key, id_, type_, **kwargs)
tte['rev'] += 1
def listdir(self, d_key):
'''
List the contents of a directory
'''
return self.index.listdir(d_key)
def rmdir(self, d_key):
'''
Recursively remove a key directory and all subdirs and subkeys.
THIS OPERATION IS IRREVERSIBLE!!
'''
return self.index.rmdir(d_key)
def rm(self, key, id_=None):
'''
Mark a key for deletion; if the id is omitted then the key itself
and all revs will be removed. THIS OPERATION IS IRREVERSIBLE!!
'''
return self.index.rm_key(key, id_)
| apache-2.0 |
Mj258/weiboapi | srapyDemo/envs/Lib/site-packages/twisted/test/test_rebuild.py | 10 | 7749 | # Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.
import sys, os
import types
from twisted.trial import unittest
from twisted.python import rebuild
import crash_test_dummy
f = crash_test_dummy.foo
class Foo: pass
class Bar(Foo): pass
class Baz(object): pass
class Buz(Bar, Baz): pass
class HashRaisesRuntimeError:
"""
Things that don't hash (raise an Exception) should be ignored by the
rebuilder.
@ivar hashCalled: C{bool} set to True when __hash__ is called.
"""
def __init__(self):
self.hashCalled = False
def __hash__(self):
self.hashCalled = True
raise RuntimeError('not a TypeError!')
unhashableObject = None # set in test_hashException
class RebuildTests(unittest.TestCase):
"""
Simple testcase for rebuilding, to at least exercise the code.
"""
def setUp(self):
self.libPath = self.mktemp()
os.mkdir(self.libPath)
self.fakelibPath = os.path.join(self.libPath, 'twisted_rebuild_fakelib')
os.mkdir(self.fakelibPath)
file(os.path.join(self.fakelibPath, '__init__.py'), 'w').close()
sys.path.insert(0, self.libPath)
def tearDown(self):
sys.path.remove(self.libPath)
def testFileRebuild(self):
from twisted.python.util import sibpath
import shutil, time
shutil.copyfile(sibpath(__file__, "myrebuilder1.py"),
os.path.join(self.fakelibPath, "myrebuilder.py"))
from twisted_rebuild_fakelib import myrebuilder
a = myrebuilder.A()
try:
object
except NameError:
pass
else:
from twisted.test import test_rebuild
b = myrebuilder.B()
class C(myrebuilder.B):
pass
test_rebuild.C = C
C()
i = myrebuilder.Inherit()
self.assertEqual(a.a(), 'a')
# necessary because the file has not "changed" if a second has not gone
# by in unix. This sucks, but it's not often that you'll be doing more
# than one reload per second.
time.sleep(1.1)
shutil.copyfile(sibpath(__file__, "myrebuilder2.py"),
os.path.join(self.fakelibPath, "myrebuilder.py"))
rebuild.rebuild(myrebuilder)
try:
object
except NameError:
pass
else:
b2 = myrebuilder.B()
self.assertEqual(b2.b(), 'c')
self.assertEqual(b.b(), 'c')
self.assertEqual(i.a(), 'd')
self.assertEqual(a.a(), 'b')
# more work to be done on new-style classes
# self.assertEqual(c.b(), 'c')
def testRebuild(self):
"""
Rebuilding an unchanged module.
"""
# This test would actually pass if rebuild was a no-op, but it
# ensures rebuild doesn't break stuff while being a less
# complex test than testFileRebuild.
x = crash_test_dummy.X('a')
rebuild.rebuild(crash_test_dummy, doLog=False)
# Instance rebuilding is triggered by attribute access.
x.do()
self.failUnlessIdentical(x.__class__, crash_test_dummy.X)
self.failUnlessIdentical(f, crash_test_dummy.foo)
def testComponentInteraction(self):
x = crash_test_dummy.XComponent()
x.setAdapter(crash_test_dummy.IX, crash_test_dummy.XA)
x.getComponent(crash_test_dummy.IX)
rebuild.rebuild(crash_test_dummy, 0)
newComponent = x.getComponent(crash_test_dummy.IX)
newComponent.method()
self.assertEqual(newComponent.__class__, crash_test_dummy.XA)
# Test that a duplicate registerAdapter is not allowed
from twisted.python import components
self.failUnlessRaises(ValueError, components.registerAdapter,
crash_test_dummy.XA, crash_test_dummy.X,
crash_test_dummy.IX)
def testUpdateInstance(self):
global Foo, Buz
b = Buz()
class Foo:
def foo(self):
pass
class Buz(Bar, Baz):
x = 10
rebuild.updateInstance(b)
assert hasattr(b, 'foo'), "Missing method on rebuilt instance"
assert hasattr(b, 'x'), "Missing class attribute on rebuilt instance"
def testBananaInteraction(self):
from twisted.python import rebuild
from twisted.spread import banana
rebuild.latestClass(banana.Banana)
def test_hashException(self):
"""
Rebuilding something that has a __hash__ that raises a non-TypeError
shouldn't cause rebuild to die.
"""
global unhashableObject
unhashableObject = HashRaisesRuntimeError()
def _cleanup():
global unhashableObject
unhashableObject = None
self.addCleanup(_cleanup)
rebuild.rebuild(rebuild)
self.assertEqual(unhashableObject.hashCalled, True)
class NewStyleTests(unittest.TestCase):
"""
Tests for rebuilding new-style classes of various sorts.
"""
def setUp(self):
self.m = types.ModuleType('whipping')
sys.modules['whipping'] = self.m
def tearDown(self):
del sys.modules['whipping']
del self.m
def test_slots(self):
"""
Try to rebuild a new style class with slots defined.
"""
classDefinition = (
"class SlottedClass(object):\n"
" __slots__ = ['a']\n")
exec classDefinition in self.m.__dict__
inst = self.m.SlottedClass()
inst.a = 7
exec classDefinition in self.m.__dict__
rebuild.updateInstance(inst)
self.assertEqual(inst.a, 7)
self.assertIdentical(type(inst), self.m.SlottedClass)
if sys.version_info < (2, 6):
test_slots.skip = "__class__ assignment for class with slots is only available starting Python 2.6"
def test_errorSlots(self):
"""
Try to rebuild a new style class with slots defined: this should fail.
"""
classDefinition = (
"class SlottedClass(object):\n"
" __slots__ = ['a']\n")
exec classDefinition in self.m.__dict__
inst = self.m.SlottedClass()
inst.a = 7
exec classDefinition in self.m.__dict__
self.assertRaises(rebuild.RebuildError, rebuild.updateInstance, inst)
if sys.version_info >= (2, 6):
test_errorSlots.skip = "__class__ assignment for class with slots should work starting Python 2.6"
def test_typeSubclass(self):
"""
Try to rebuild a base type subclass.
"""
classDefinition = (
"class ListSubclass(list):\n"
" pass\n")
exec classDefinition in self.m.__dict__
inst = self.m.ListSubclass()
inst.append(2)
exec classDefinition in self.m.__dict__
rebuild.updateInstance(inst)
self.assertEqual(inst[0], 2)
self.assertIdentical(type(inst), self.m.ListSubclass)
def test_instanceSlots(self):
"""
Test that when rebuilding an instance with a __slots__ attribute, it
fails accurately instead of giving a L{rebuild.RebuildError}.
"""
classDefinition = (
"class NotSlottedClass(object):\n"
" pass\n")
exec classDefinition in self.m.__dict__
inst = self.m.NotSlottedClass()
inst.__slots__ = ['a']
classDefinition = (
"class NotSlottedClass:\n"
" pass\n")
exec classDefinition in self.m.__dict__
# Moving from new-style class to old-style should fail.
self.assertRaises(TypeError, rebuild.updateInstance, inst)
| mit |
roubert/python-phonenumbers | python/phonenumbers/data/region_BE.py | 10 | 2457 | """Auto-generated file, do not edit by hand. BE metadata"""
from ..phonemetadata import NumberFormat, PhoneNumberDesc, PhoneMetadata
PHONE_METADATA_BE = PhoneMetadata(id='BE', country_code=32, international_prefix='00',
general_desc=PhoneNumberDesc(national_number_pattern='[1-9]\\d{7,8}', possible_number_pattern='\\d{8,9}'),
fixed_line=PhoneNumberDesc(national_number_pattern='(?:1[0-69]|[23][2-8]|4[23]|5\\d|6[013-57-9]|71|8[1-79]|9[2-4])\\d{6}|80[2-8]\\d{5}', possible_number_pattern='\\d{8}', example_number='12345678'),
mobile=PhoneNumberDesc(national_number_pattern='4(?:6[0135-8]|[79]\\d|8[3-9])\\d{6}', possible_number_pattern='\\d{9}', example_number='470123456'),
toll_free=PhoneNumberDesc(national_number_pattern='800\\d{5}', possible_number_pattern='\\d{8}', example_number='80012345'),
premium_rate=PhoneNumberDesc(national_number_pattern='(?:70[2-467]|90[0-79])\\d{5}', possible_number_pattern='\\d{8}', example_number='90123456'),
shared_cost=PhoneNumberDesc(national_number_pattern='NA', possible_number_pattern='NA'),
personal_number=PhoneNumberDesc(national_number_pattern='NA', possible_number_pattern='NA'),
voip=PhoneNumberDesc(national_number_pattern='NA', possible_number_pattern='NA'),
pager=PhoneNumberDesc(national_number_pattern='NA', possible_number_pattern='NA'),
uan=PhoneNumberDesc(national_number_pattern='78\\d{6}', possible_number_pattern='\\d{8}', example_number='78123456'),
voicemail=PhoneNumberDesc(national_number_pattern='NA', possible_number_pattern='NA'),
no_international_dialling=PhoneNumberDesc(national_number_pattern='NA', possible_number_pattern='NA'),
national_prefix='0',
national_prefix_for_parsing='0',
number_format=[NumberFormat(pattern='(\\d{3})(\\d{2})(\\d{2})(\\d{2})', format='\\1 \\2 \\3 \\4', leading_digits_pattern=['4[6-9]'], national_prefix_formatting_rule='0\\1'),
NumberFormat(pattern='(\\d)(\\d{3})(\\d{2})(\\d{2})', format='\\1 \\2 \\3 \\4', leading_digits_pattern=['[23]|4[23]|9[2-4]'], national_prefix_formatting_rule='0\\1'),
NumberFormat(pattern='(\\d{2})(\\d{2})(\\d{2})(\\d{2})', format='\\1 \\2 \\3 \\4', leading_digits_pattern=['[156]|7[018]|8(?:0[1-9]|[1-79])'], national_prefix_formatting_rule='0\\1'),
NumberFormat(pattern='(\\d{3})(\\d{2})(\\d{3})', format='\\1 \\2 \\3', leading_digits_pattern=['(?:80|9)0'], national_prefix_formatting_rule='0\\1')],
mobile_number_portable_region=True)
| apache-2.0 |
BryanQuigley/sos | sos/report/plugins/buildah.py | 5 | 1873 | # Copyright (C) 2018 Red Hat, Inc., Jake Hunsaker <[email protected]>
# This file is part of the sos project: https://github.com/sosreport/sos
#
# This copyrighted material is made available to anyone wishing to use,
# modify, copy, or redistribute it subject to the terms and conditions of
# version 2 of the GNU General Public License.
#
# See the LICENSE file in the source distribution for further information.
from sos.report.plugins import Plugin, RedHatPlugin
class Buildah(Plugin, RedHatPlugin):
short_desc = 'Buildah container and image builder'
plugin_name = 'buildah'
packages = ('buildah',)
profiles = ('container',)
def setup(self):
subcmds = [
'containers',
'containers --all',
'images',
'images --all',
'version'
]
self.add_cmd_output(["buildah %s" % sub for sub in subcmds])
def make_chowdah(aurdah):
chowdah = self.exec_cmd(aurdah)
chowdah['auutput'] = chowdah.pop('output')
chowdah['is_wicked_pissah'] = chowdah.pop('status') == 0
return chowdah
containahs = make_chowdah('buildah containers -n')
if containahs['is_wicked_pissah']:
for containah in containahs['auutput'].splitlines():
# obligatory Tom Brady
goat = containah.split()[-1]
self.add_cmd_output('buildah inspect -t container %s' % goat,
subdir='containers')
pitchez = make_chowdah('buildah images -n')
if pitchez['is_wicked_pissah']:
for pitchah in pitchez['auutput'].splitlines():
brady = pitchah.split()[1]
self.add_cmd_output('buildah inspect -t image %s' % brady,
subdir='images')
# vim: set et ts=4 sw=4 :
| gpl-2.0 |
lebabouin/CouchPotatoServer-develop | libs/tmdb3/cache_file.py | 10 | 13285 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#-----------------------
# Name: cache_file.py
# Python Library
# Author: Raymond Wagner
# Purpose: Persistant file-backed cache using /tmp/ to share data
# using flock or msvcrt.locking to allow safe concurrent
# access.
#-----------------------
import struct
import errno
import json
import os
import io
from cStringIO import StringIO
from tmdb_exceptions import *
from cache_engine import CacheEngine, CacheObject
####################
# Cache File Format
#------------------
# cache version (2) unsigned short
# slot count (2) unsigned short
# slot 0: timestamp (8) double
# slot 0: lifetime (4) unsigned int
# slot 0: seek point (4) unsigned int
# slot 1: timestamp
# slot 1: lifetime index slots are IDd by their query date and
# slot 1: seek point are filled incrementally forwards. lifetime
# .... is how long after query date before the item
# .... expires, and seek point is the location of the
# slot N-2: timestamp start of data for that entry. 256 empty slots
# slot N-2: lifetime are pre-allocated, allowing fast updates.
# slot N-2: seek point when all slots are filled, the cache file is
# slot N-1: timestamp rewritten from scrach to add more slots.
# slot N-1: lifetime
# slot N-1: seek point
# block 1 (?) ASCII
# block 2
# .... blocks are just simple ASCII text, generated
# .... as independent objects by the JSON encoder
# block N-2
# block N-1
#
####################
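# Illustrative sketch (not part of the original module): given the formats
# defined below (FileEngine._struct = 'HH', FileCacheObject._struct = 'dII'),
# the header of a fresh cache with 256 preallocated slots, plus one slot
# entry, could be packed like this:
#
#   import struct, time
#   header = struct.Struct('HH').pack(2, 256)        # version 2, 256 slots
#   slot = struct.Struct('dII').pack(time.time(),    # query timestamp
#                                    3600,           # lifetime in seconds
#                                    4 + 16 * 256)   # seek point of data
#
# Four bytes of header plus 16 bytes per slot is where the first data block
# begins, matching the '4 + 16*size' arithmetic used in _read()/_write().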
def _donothing(*args, **kwargs):
pass
try:
import fcntl
class Flock( object ):
"""
Context manager to flock file for the duration the object exists.
Referenced file will be automatically unflocked as the interpreter
exits the context.
Supports an optional callback to process the error and optionally
suppress it.
"""
LOCK_EX = fcntl.LOCK_EX
LOCK_SH = fcntl.LOCK_SH
def __init__(self, fileobj, operation, callback=None):
self.fileobj = fileobj
self.operation = operation
self.callback = callback
def __enter__(self):
fcntl.flock(self.fileobj, self.operation)
def __exit__(self, exc_type, exc_value, exc_tb):
suppress = False
if callable(self.callback):
suppress = self.callback(exc_type, exc_value, exc_tb)
fcntl.flock(self.fileobj, fcntl.LOCK_UN)
return suppress
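# Usage sketch (illustrative, not in the original source): the cache file
# methods below hold the lock for the lifetime of a 'with' block, e.g.
#
#   with Flock(fd, Flock.LOCK_SH):   # shared lock for readers
#       data = fd.read()
#
# using LOCK_EX instead when the file is about to be rewritten.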
def parse_filename(filename):
if '$' in filename:
# replace any environmental variables
filename = os.path.expandvars(filename)
if filename.startswith('~'):
# check for home directory
return os.path.expanduser(filename)
elif filename.startswith('/'):
# check for absolute path
return filename
# return path with temp directory prepended
return '/tmp/' + filename
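# For illustration (hypothetical inputs): parse_filename('~/.tmdb3cache')
# expands to the user's home directory, '/var/cache/tmdb3' is returned
# unchanged, and a bare name like 'tmdb3.cache' becomes '/tmp/tmdb3.cache'.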
except ImportError:
import msvcrt
class Flock( object ):
LOCK_EX = msvcrt.LK_LOCK
LOCK_SH = msvcrt.LK_LOCK
def __init__(self, fileobj, operation, callback=None):
self.fileobj = fileobj
self.operation = operation
self.callback = callback
def __enter__(self):
self.size = os.path.getsize(self.fileobj.name)
msvcrt.locking(self.fileobj.fileno(), self.operation, self.size)
def __exit__(self, exc_type, exc_value, exc_tb):
suppress = False
if callable(self.callback):
suppress = self.callback(exc_type, exc_value, exc_tb)
msvcrt.locking(self.fileobj.fileno(), msvcrt.LK_UNLCK, self.size)
return suppress
def parse_filename(filename):
if '%' in filename:
# replace any environmental variables
filename = os.path.expandvars(filename)
if filename.startswith('~'):
# check for home directory
return os.path.expanduser(filename)
elif (ord(filename[0]) in (range(65,91)+range(97,123))) \
and (filename[1:3] == ':\\'):
# check for absolute drive path (e.g. C:\...)
return filename
elif (filename.count('\\') >= 3) and (filename.startswith('\\\\')):
# check for absolute UNC path (e.g. \\server\...)
return filename
# return path with temp directory prepended
return os.path.expandvars(os.path.join('%TEMP%',filename))
class FileCacheObject( CacheObject ):
_struct = struct.Struct('dII') # double and two ints
# timestamp, lifetime, position
@classmethod
def fromFile(cls, fd):
dat = cls._struct.unpack(fd.read(cls._struct.size))
obj = cls(None, None, dat[1], dat[0])
obj.position = dat[2]
return obj
def __init__(self, *args, **kwargs):
self._key = None
self._data = None
self._size = None
self._buff = StringIO()
super(FileCacheObject, self).__init__(*args, **kwargs)
@property
def size(self):
if self._size is None:
self._buff.seek(0,2)
size = self._buff.tell()
if size == 0:
if (self._key is None) or (self._data is None):
raise RuntimeError
json.dump([self.key, self.data], self._buff)
size = self._buff.tell()
self._size = size
return self._size
@size.setter
def size(self, value): self._size = value
@property
def key(self):
if self._key is None:
try:
self._key, self._data = json.loads(self._buff.getvalue())
except:
pass
return self._key
@key.setter
def key(self, value): self._key = value
@property
def data(self):
if self._data is None:
self._key, self._data = json.loads(self._buff.getvalue())
return self._data
@data.setter
def data(self, value): self._data = value
def load(self, fd):
fd.seek(self.position)
self._buff.seek(0)
self._buff.write(fd.read(self.size))
def dumpslot(self, fd):
pos = fd.tell()
fd.write(self._struct.pack(self.creation, self.lifetime, self.position))
def dumpdata(self, fd):
self.size
fd.seek(self.position)
fd.write(self._buff.getvalue())
class FileEngine( CacheEngine ):
"""Simple file-backed engine."""
name = 'file'
_struct = struct.Struct('HH') # two shorts for version and count
_version = 2
def __init__(self, parent):
super(FileEngine, self).__init__(parent)
self.configure(None)
def configure(self, filename, preallocate=256):
self.preallocate = preallocate
self.cachefile = filename
self.size = 0
self.free = 0
self.age = 0
def _init_cache(self):
# only run this once
self._init_cache = _donothing
if self.cachefile is None:
raise TMDBCacheError("No cache filename given.")
self.cachefile = parse_filename(self.cachefile)
try:
# attempt to read existing cache at filename
# handle any errors that occur
self._open('r+b')
# seems to have read fine, make sure we have write access
if not os.access(self.cachefile, os.W_OK):
raise TMDBCacheWriteError(self.cachefile)
except IOError as e:
if e.errno == errno.ENOENT:
# file does not exist, create a new one
try:
self._open('w+b')
self._write([])
except IOError as e:
if e.errno == errno.ENOENT:
# directory does not exist
raise TMDBCacheDirectoryError(self.cachefile)
elif e.errno == errno.EACCES:
# user does not have rights to create new file
raise TMDBCacheWriteError(self.cachefile)
else:
# let the unhandled error continue through
raise
elif e.errno == errno.EACCES:
# file exists, but we do not have permission to access it
raise TMDBCacheReadError(self.cachefile)
else:
# let the unhandled error continue through
raise
def get(self, date):
self._init_cache()
self._open('r+b')
with Flock(self.cachefd, Flock.LOCK_SH): # lock for shared access
# return any new objects in the cache
return self._read(date)
def put(self, key, value, lifetime):
self._init_cache()
self._open('r+b')
with Flock(self.cachefd, Flock.LOCK_EX): # lock for exclusive access
newobjs = self._read(self.age)
newobjs.append(FileCacheObject(key, value, lifetime))
# this will cause a new file object to be opened with the proper
# access mode, however the Flock should keep the old object open
# and properly locked
self._open('r+b')
self._write(newobjs)
return newobjs
def _open(self, mode='r+b'):
# enforce binary operation
try:
if self.cachefd.mode == mode:
# already opened in requested mode, nothing to do
self.cachefd.seek(0)
return
except: pass # catch issue of no cachefile yet opened
self.cachefd = io.open(self.cachefile, mode)
def _read(self, date):
try:
self.cachefd.seek(0)
version, count = self._struct.unpack(\
self.cachefd.read(self._struct.size))
if version != self._version:
# old version, break out and well rewrite when finished
raise Exception
self.size = count
cache = []
while count:
# loop through storage definitions
obj = FileCacheObject.fromFile(self.cachefd)
cache.append(obj)
count -= 1
except:
# failed to read information, so just discard it and return empty
self.size = 0
self.free = 0
return []
# get end of file
self.cachefd.seek(0,2)
position = self.cachefd.tell()
newobjs = []
emptycount = 0
# walk backward through all, collecting new content and populating size
while len(cache):
obj = cache.pop()
if obj.creation == 0:
# unused slot, skip
emptycount += 1
elif obj.expired:
# object has passed expiration date, no sense processing
continue
elif obj.creation > date:
# used slot with new data, process
obj.size, position = position - obj.position, obj.position
newobjs.append(obj)
# update age
self.age = max(self.age, obj.creation)
elif len(newobjs):
# end of new data, break
break
# walk forward and load new content
for obj in newobjs:
obj.load(self.cachefd)
self.free = emptycount
return newobjs
def _write(self, data):
if self.free and (self.size != self.free):
# we only care about the last data point, since the rest are
# already stored in the file
data = data[-1]
# determine write position of data in cache
self.cachefd.seek(0,2)
end = self.cachefd.tell()
data.position = end
# write incremental update to free slot
self.cachefd.seek(4 + 16*(self.size-self.free))
data.dumpslot(self.cachefd)
data.dumpdata(self.cachefd)
else:
# rewrite cache file from scratch
# pull data from parent cache
data.extend(self.parent()._data.values())
data.sort(key=lambda x: x.creation)
# write header
size = len(data) + self.preallocate
self.cachefd.seek(0)
self.cachefd.truncate()
self.cachefd.write(self._struct.pack(self._version, size))
# write storage slot definitions
prev = None
for d in data:
if prev == None:
d.position = 4 + 16*size
else:
d.position = prev.position + prev.size
d.dumpslot(self.cachefd)
prev = d
# fill in allocated slots
for i in range(2**8):
self.cachefd.write(FileCacheObject._struct.pack(0, 0, 0))
# write stored data
for d in data:
d.dumpdata(self.cachefd)
self.cachefd.flush()
def expire(self, key):
pass
| gpl-3.0 |
GoogleCloudPlatform/sap-deployment-automation | third_party/github.com/ansible/awx/awxkit/awxkit/api/pages/organizations.py | 1 | 1807 | from awxkit.api.mixins import HasCreate, HasInstanceGroups, HasNotifications, DSAdapter
from awxkit.utils import random_title, suppress, PseudoNamespace
from awxkit.api.resources import resources
import awxkit.exceptions as exc
from . import base
from . import page
class Organization(HasCreate, HasInstanceGroups, HasNotifications, base.Base):
NATURAL_KEY = ('name',)
def add_admin(self, user):
if isinstance(user, page.Page):
user = user.json
with suppress(exc.NoContent):
self.related.admins.post(user)
def add_user(self, user):
if isinstance(user, page.Page):
user = user.json
with suppress(exc.NoContent):
self.related.users.post(user)
def payload(self, **kwargs):
payload = PseudoNamespace(name=kwargs.get('name') or 'Organization - {}'.format(random_title()),
description=kwargs.get('description') or random_title(10))
return payload
def create_payload(self, name='', description='', **kwargs):
payload = self.payload(name=name, description=description, **kwargs)
payload.ds = DSAdapter(self.__class__.__name__, self._dependency_store)
return payload
def create(self, name='', description='', **kwargs):
payload = self.create_payload(name=name, description=description, **kwargs)
return self.update_identity(Organizations(self.connection).post(payload))
page.register_page([resources.organization,
(resources.organizations, 'post')], Organization)
class Organizations(page.PageList, Organization):
pass
page.register_page([resources.organizations,
resources.user_organizations,
resources.project_organizations], Organizations)
| apache-2.0 |
canaltinova/servo | etc/ci/performance/set_s3_policy.py | 22 | 1108 | #!/usr/bin/env python3
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
import argparse
import boto3
def main():
parser = argparse.ArgumentParser(
description=("Set the policy of the servo-perf bucket. "
"Remember to set your S3 credentials "
"https://github.com/boto/boto3"))
parser.parse_args()
s3 = boto3.resource('s3')
BUCKET = 'servo-perf'
POLICY = """{
"Version":"2012-10-17",
"Statement":[
{
"Effect":"Allow",
"Principal":"*",
"Action":[
"s3:ListBucket",
"s3:GetBucketLocation"
],
"Resource":"arn:aws:s3:::servo-perf"
},
{
"Effect":"Allow",
"Principal":"*",
"Action":[
"s3:GetObject",
"s3:GetObjectAcl"
],
"Resource":"arn:aws:s3:::servo-perf/*"
}
]
}"""
s3.BucketPolicy(BUCKET).put(Policy=POLICY)
print("Done!")
if __name__ == "__main__":
main()
| mpl-2.0 |
xasopheno/audio_visual | audio/venv/lib/python2.7/site-packages/wheel/archive.py | 62 | 2376 | """
Archive tools for wheel.
"""
import os
import os.path
import time
import zipfile
from distutils import log
def archive_wheelfile(base_name, base_dir):
"""Archive all files under `base_dir` in a whl file and name it like
`base_name`.
"""
olddir = os.path.abspath(os.curdir)
base_name = os.path.abspath(base_name)
try:
os.chdir(base_dir)
return make_wheelfile_inner(base_name)
finally:
os.chdir(olddir)
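# Illustrative call (not part of this module); the paths are hypothetical:
#
#   archive_wheelfile('dist/mypkg-1.0-py2.py3-none-any',
#                     'build/bdist.linux-x86_64/wheel')
#   # -> writes dist/mypkg-1.0-py2.py3-none-any.whl from that directory tree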
def make_wheelfile_inner(base_name, base_dir='.'):
"""Create a whl file from all the files under 'base_dir'.
Places .dist-info at the end of the archive."""
zip_filename = base_name + ".whl"
log.info("creating '%s' and adding '%s' to it", zip_filename, base_dir)
# Some applications need reproducible .whl files, but they can't do this
# without forcing the timestamp of the individual ZipInfo objects. See
# issue #143.
timestamp = os.environ.get('SOURCE_DATE_EPOCH')
if timestamp is None:
date_time = None
else:
date_time = time.gmtime(int(timestamp))[0:6]
# XXX support bz2, xz when available
zip = zipfile.ZipFile(zip_filename, "w", compression=zipfile.ZIP_DEFLATED)
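# .dist-info members are deferred and written last, ordered so that RECORD
# (which lists the hashes of every other file) is the final archive entry.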
score = {'WHEEL': 1, 'METADATA': 2, 'RECORD': 3}
deferred = []
def writefile(path, date_time):
st = os.stat(path)
if date_time is None:
mtime = time.gmtime(st.st_mtime)
date_time = mtime[0:6]
zinfo = zipfile.ZipInfo(path, date_time)
zinfo.external_attr = st.st_mode << 16
zinfo.compress_type = zipfile.ZIP_DEFLATED
with open(path, 'rb') as fp:
zip.writestr(zinfo, fp.read())
log.info("adding '%s'" % path)
for dirpath, dirnames, filenames in os.walk(base_dir):
# Sort the directory names so that `os.walk` will walk them in a
# defined order on the next iteration.
dirnames.sort()
for name in sorted(filenames):
path = os.path.normpath(os.path.join(dirpath, name))
if os.path.isfile(path):
if dirpath.endswith('.dist-info'):
deferred.append((score.get(name, 0), path))
else:
writefile(path, date_time)
deferred.sort()
for score, path in deferred:
writefile(path, date_time)
zip.close()
return zip_filename
| mit |
vmturbo/nova | nova/tests/unit/virt/libvirt/storage/test_lvm.py | 8 | 8203 | # Copyright 2012 NTT Data. All Rights Reserved.
# Copyright 2012 Yahoo! Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import mock
from oslo_concurrency import processutils
from oslo_config import cfg
from nova import exception
from nova import test
from nova import utils
from nova.virt.libvirt.storage import lvm
from nova.virt.libvirt import utils as libvirt_utils
CONF = cfg.CONF
class LvmTestCase(test.NoDBTestCase):
def test_get_volume_size(self):
executes = []
def fake_execute(*cmd, **kwargs):
executes.append(cmd)
return 123456789, None
expected_commands = [('blockdev', '--getsize64', '/dev/foo')]
self.stub_out('nova.utils.execute', fake_execute)
size = lvm.get_volume_size('/dev/foo')
self.assertEqual(expected_commands, executes)
self.assertEqual(123456789, size)
@mock.patch.object(utils, 'execute',
side_effect=processutils.ProcessExecutionError(
stderr=('blockdev: cannot open /dev/foo: '
'No such device or address')))
def test_get_volume_size_not_found(self, mock_execute):
self.assertRaises(exception.VolumeBDMPathNotFound,
lvm.get_volume_size, '/dev/foo')
@mock.patch.object(utils, 'execute',
side_effect=processutils.ProcessExecutionError(
stderr=('blockdev: cannot open /dev/foo: '
'No such file or directory')))
def test_get_volume_size_not_found_file(self, mock_execute):
self.assertRaises(exception.VolumeBDMPathNotFound,
lvm.get_volume_size, '/dev/foo')
@mock.patch.object(libvirt_utils, 'path_exists', return_value=True)
@mock.patch.object(utils, 'execute',
side_effect=processutils.ProcessExecutionError(
stderr='blockdev: i am sad in other ways'))
def test_get_volume_size_unexpected_error(self, mock_execute,
mock_path_exists):
self.assertRaises(processutils.ProcessExecutionError,
lvm.get_volume_size, '/dev/foo')
def test_lvm_clear(self):
def fake_lvm_size(path):
return lvm_size
def fake_execute(*cmd, **kwargs):
executes.append(cmd)
self.stub_out('nova.virt.libvirt.storage.lvm.get_volume_size',
fake_lvm_size)
self.stub_out('nova.utils.execute', fake_execute)
# Test the correct dd commands are run for various sizes
lvm_size = 1
executes = []
expected_commands = [('dd', 'bs=1', 'if=/dev/zero', 'of=/dev/v1',
'seek=0', 'count=1', 'conv=fdatasync')]
lvm.clear_volume('/dev/v1')
self.assertEqual(expected_commands, executes)
lvm_size = 1024
executes = []
expected_commands = [('dd', 'bs=1024', 'if=/dev/zero', 'of=/dev/v2',
'seek=0', 'count=1', 'conv=fdatasync')]
lvm.clear_volume('/dev/v2')
self.assertEqual(expected_commands, executes)
lvm_size = 1025
executes = []
expected_commands = [('dd', 'bs=1024', 'if=/dev/zero', 'of=/dev/v3',
'seek=0', 'count=1', 'conv=fdatasync')]
expected_commands += [('dd', 'bs=1', 'if=/dev/zero', 'of=/dev/v3',
'seek=1024', 'count=1', 'conv=fdatasync')]
lvm.clear_volume('/dev/v3')
self.assertEqual(expected_commands, executes)
lvm_size = 1048576
executes = []
expected_commands = [('dd', 'bs=1048576', 'if=/dev/zero', 'of=/dev/v4',
'seek=0', 'count=1', 'oflag=direct')]
lvm.clear_volume('/dev/v4')
self.assertEqual(expected_commands, executes)
lvm_size = 1048577
executes = []
expected_commands = [('dd', 'bs=1048576', 'if=/dev/zero', 'of=/dev/v5',
'seek=0', 'count=1', 'oflag=direct')]
expected_commands += [('dd', 'bs=1', 'if=/dev/zero', 'of=/dev/v5',
'seek=1048576', 'count=1', 'conv=fdatasync')]
lvm.clear_volume('/dev/v5')
self.assertEqual(expected_commands, executes)
lvm_size = 1234567
executes = []
expected_commands = [('dd', 'bs=1048576', 'if=/dev/zero', 'of=/dev/v6',
'seek=0', 'count=1', 'oflag=direct')]
expected_commands += [('dd', 'bs=1024', 'if=/dev/zero', 'of=/dev/v6',
'seek=1024', 'count=181', 'conv=fdatasync')]
expected_commands += [('dd', 'bs=1', 'if=/dev/zero', 'of=/dev/v6',
'seek=1233920', 'count=647', 'conv=fdatasync')]
lvm.clear_volume('/dev/v6')
self.assertEqual(expected_commands, executes)
# Test volume_clear_size limits the size
lvm_size = 10485761
CONF.set_override('volume_clear_size', '1', 'libvirt')
executes = []
expected_commands = [('dd', 'bs=1048576', 'if=/dev/zero', 'of=/dev/v7',
'seek=0', 'count=1', 'oflag=direct')]
lvm.clear_volume('/dev/v7')
self.assertEqual(expected_commands, executes)
CONF.set_override('volume_clear_size', '2', 'libvirt')
lvm_size = 1048576
executes = []
expected_commands = [('dd', 'bs=1048576', 'if=/dev/zero', 'of=/dev/v9',
'seek=0', 'count=1', 'oflag=direct')]
lvm.clear_volume('/dev/v9')
self.assertEqual(expected_commands, executes)
# Test volume_clear=shred
CONF.set_override('volume_clear', 'shred', 'libvirt')
CONF.set_override('volume_clear_size', '0', 'libvirt')
lvm_size = 1048576
executes = []
expected_commands = [('shred', '-n3', '-s1048576', '/dev/va')]
lvm.clear_volume('/dev/va')
self.assertEqual(expected_commands, executes)
CONF.set_override('volume_clear', 'shred', 'libvirt')
CONF.set_override('volume_clear_size', '1', 'libvirt')
lvm_size = 10485761
executes = []
expected_commands = [('shred', '-n3', '-s1048576', '/dev/vb')]
lvm.clear_volume('/dev/vb')
self.assertEqual(expected_commands, executes)
# Test volume_clear=none does nothing
CONF.set_override('volume_clear', 'none', 'libvirt')
executes = []
expected_commands = []
lvm.clear_volume('/dev/vc')
self.assertEqual(expected_commands, executes)
@mock.patch.object(utils, 'execute',
side_effect=processutils.ProcessExecutionError(
stderr=('blockdev: cannot open /dev/foo: '
'No such file or directory')))
def test_lvm_clear_ignore_lvm_not_found(self, mock_execute):
lvm.clear_volume('/dev/foo')
def test_fail_remove_all_logical_volumes(self):
def fake_execute(*args, **kwargs):
if 'vol2' in args:
raise processutils.ProcessExecutionError('Error')
with test.nested(
mock.patch.object(lvm, 'clear_volume'),
mock.patch.object(libvirt_utils, 'execute',
side_effect=fake_execute)) as (mock_clear, mock_execute):
self.assertRaises(exception.VolumesNotRemoved,
lvm.remove_volumes,
['vol1', 'vol2', 'vol3'])
self.assertEqual(3, mock_execute.call_count)
| apache-2.0 |
ain7/www.ain7.org | ain7/annuaire/migrations/0002_auto_20160331_0126.py | 1 | 8973 | # -*- coding: utf-8 -*-
# Generated by Django 1.9.4 on 2016-03-30 23:26
from __future__ import unicode_literals
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
initial = True
dependencies = [
('annuaire', '0001_initial'),
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
('organizations', '0001_initial'),
]
operations = [
migrations.AddField(
model_name='position',
name='office',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='positions', to='organizations.Office', verbose_name='\xe9tablissement'),
),
migrations.AddField(
model_name='phonenumber',
name='last_change_by',
field=models.ForeignKey(blank=True, editable=False, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='last_changed_phonenumber', to='annuaire.Person', verbose_name='Auteur de la derni\xe8re modification'),
),
migrations.AddField(
model_name='phonenumber',
name='person',
field=models.ForeignKey(editable=False, on_delete=django.db.models.deletion.CASCADE, related_name='phone_numbers', to='annuaire.Person'),
),
migrations.AddField(
model_name='personprivate',
name='last_change_by',
field=models.ForeignKey(blank=True, editable=False, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='last_changed_personprivate', to='annuaire.Person', verbose_name='Auteur de la derni\xe8re modification'),
),
migrations.AddField(
model_name='personprivate',
name='member_type',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='annuaire.MemberType', verbose_name='membre'),
),
migrations.AddField(
model_name='personprivate',
name='person',
field=models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, to='annuaire.Person', verbose_name='personne'),
),
migrations.AddField(
model_name='personprivate',
name='person_type',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='annuaire.PersonType', verbose_name='type'),
),
migrations.AddField(
model_name='person',
name='country',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='annuaire.Country', verbose_name='nationalit\xe9'),
),
migrations.AddField(
model_name='person',
name='last_change_by',
field=models.ForeignKey(blank=True, editable=False, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='last_changed_person', to='annuaire.Person', verbose_name='Auteur de la derni\xe8re modification'),
),
migrations.AddField(
model_name='person',
name='user',
field=models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL, verbose_name='utilisateur'),
),
migrations.AddField(
model_name='leisureitem',
name='ain7member',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='leisure', to='annuaire.AIn7Member'),
),
migrations.AddField(
model_name='leisureitem',
name='last_change_by',
field=models.ForeignKey(blank=True, editable=False, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='last_changed_leisureitem', to='annuaire.Person', verbose_name='Auteur de la derni\xe8re modification'),
),
migrations.AddField(
model_name='instantmessaging',
name='person',
field=models.ForeignKey(editable=False, on_delete=django.db.models.deletion.CASCADE, related_name='instant_messagings', to='annuaire.Person'),
),
migrations.AddField(
model_name='email',
name='person',
field=models.ForeignKey(editable=False, on_delete=django.db.models.deletion.CASCADE, related_name='emails', to='annuaire.Person'),
),
migrations.AddField(
model_name='email',
name='position',
field=models.ForeignKey(blank=True, editable=False, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='mail', to='annuaire.Position'),
),
migrations.AddField(
model_name='educationitem',
name='ain7member',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='education', to='annuaire.AIn7Member'),
),
migrations.AddField(
model_name='educationitem',
name='last_change_by',
field=models.ForeignKey(blank=True, editable=False, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='last_changed_educationitem', to='annuaire.Person', verbose_name='Auteur de la derni\xe8re modification'),
),
migrations.AddField(
model_name='clubmembership',
name='club',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='memberships', to='annuaire.Club', verbose_name='club'),
),
migrations.AddField(
model_name='clubmembership',
name='member',
field=models.ForeignKey(editable=False, on_delete=django.db.models.deletion.CASCADE, related_name='club_memberships', to='annuaire.AIn7Member', verbose_name='membre'),
),
migrations.AddField(
model_name='club',
name='last_change_by',
field=models.ForeignKey(blank=True, editable=False, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='last_changed_club', to='annuaire.Person', verbose_name='Auteur de la derni\xe8re modification'),
),
migrations.AddField(
model_name='club',
name='school',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='clubs', to='annuaire.School', verbose_name='\xe9cole'),
),
migrations.AddField(
model_name='ain7member',
name='ceremonial_duties',
field=models.ManyToManyField(blank=True, to='annuaire.CeremonialDuty', verbose_name='fonctions honorifiques'),
),
migrations.AddField(
model_name='ain7member',
name='decorations',
field=models.ManyToManyField(blank=True, to='annuaire.Decoration', verbose_name='d\xe9corations'),
),
migrations.AddField(
model_name='ain7member',
name='last_change_by',
field=models.ForeignKey(blank=True, editable=False, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='last_changed_ain7member', to='annuaire.Person', verbose_name='Auteur de la derni\xe8re modification'),
),
migrations.AddField(
model_name='ain7member',
name='marital_status',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='annuaire.MaritalStatus', verbose_name='statut marital'),
),
migrations.AddField(
model_name='ain7member',
name='person',
field=models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, to='annuaire.Person', verbose_name='personne'),
),
migrations.AddField(
model_name='ain7member',
name='promos',
field=models.ManyToManyField(blank=True, related_name='students', to='annuaire.Promo', verbose_name='Promotions'),
),
migrations.AddField(
model_name='address',
name='country',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='annuaire.Country', verbose_name='pays'),
),
migrations.AddField(
model_name='address',
name='last_change_by',
field=models.ForeignKey(blank=True, editable=False, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='last_changed_address', to='annuaire.Person', verbose_name='Auteur de la derni\xe8re modification'),
),
migrations.AddField(
model_name='address',
name='person',
field=models.ForeignKey(editable=False, on_delete=django.db.models.deletion.CASCADE, related_name='addresses', to='annuaire.Person'),
),
migrations.AddField(
model_name='address',
name='type',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='annuaire.AddressType', verbose_name='type'),
),
]
| lgpl-2.1 |
PSUdaemon/trafficserver | tests/tools/traffic-replay/h2Replay.py | 2 | 13166 | #!/bin/env python3
'''
'''
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
from threading import Thread
import sys
from multiprocessing import current_process
import sessionvalidation.sessionvalidation as sv
import lib.result as result
import extractHeader
import mainProcess
import json
from hyper import HTTP20Connection
from hyper.tls import wrap_socket, H2_NPN_PROTOCOLS, H2C_PROTOCOL
from hyper.common.bufsocket import BufferedSocket
import hyper
import socket
import logging
import h2
from h2.connection import H2Configuration
import threading
import Config
log = logging.getLogger(__name__)
bSTOP = False
hyper.tls._context = hyper.tls.init_context()
hyper.tls._context.check_hostname = False
hyper.tls._context.verify_mode = hyper.compat.ssl.CERT_NONE
class _LockedObject(object):
"""
A wrapper class that hides a specific object behind a lock.
The goal here is to provide a simple way to protect access to an object
that cannot safely be simultaneously accessed from multiple threads. The
intended use of this class is simple: take hold of it with a context
manager, which returns the protected object.
"""
def __init__(self, obj):
self.lock = threading.RLock()
self._obj = obj
def __enter__(self):
self.lock.acquire()
return self._obj
def __exit__(self, _exc_type, _exc_val, _exc_tb):
self.lock.release()
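# Illustrative usage sketch (not part of the original file): take the lock via
# the context manager and mutate the protected object only inside the block.
# The list and the name below are made up for demonstration.
def _demo_locked_object():
    shared = _LockedObject([])
    with shared as items:        # acquires the RLock, yields the wrapped object
        items.append('frame')    # safe against concurrent access
    return shared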
class h2ATS(HTTP20Connection):
def __init_state(self):
"""
Initializes the 'mutable state' portions of the HTTP/2 connection
object.
This method exists to enable HTTP20Connection objects to be reused if
they're closed, by resetting the connection object to its basic state
whenever it ends up closed. Any situation that needs to recreate the
connection can call this method and it will be done.
This is one of the only methods in hyper that is truly private, as
users should be strongly discouraged from messing about with connection
objects themselves.
"""
config1 = H2Configuration(
client_side=True,
header_encoding='utf-8',
validate_outbound_headers=False,
validate_inbound_headers=False,
)
self._conn = _LockedObject(h2.connection.H2Connection(config=config1))
# Streams are stored in a dictionary keyed off their stream IDs. We
# also save the most recent one for easy access without having to walk
# the dictionary.
#
# We add a set of all streams that we or the remote party forcefully
# closed with RST_STREAM, to avoid encountering issues where frames
# were already in flight before the RST was processed.
#
# Finally, we add a set of streams that recently received data. When
# using multiple threads, this avoids reading on threads that have just
# acquired the I/O lock whose streams have already had their data read
# for them by prior threads.
self.streams = {}
self.recent_stream = None
self.next_stream_id = 1
self.reset_streams = set()
self.recent_recv_streams = set()
# The socket used to send data.
self._sock = None
# Instantiate a window manager.
#self.window_manager = self.__wm_class(65535)
return
def __init__(self, host, **kwargs):
HTTP20Connection.__init__(self, host, **kwargs)
self.__init_state()
def connect(self):
"""
Connect to the server specified when the object was created. This is a
no-op if we're already connected.
Concurrency
-----------
This method is thread-safe. It may be called from multiple threads, and
is a noop for all threads apart from the first.
:returns: Nothing.
"""
#print("connecting to ATS")
with self._lock:
if self._sock is not None:
return
sni = self.host
if not self.proxy_host:
host = self.host
port = self.port
else:
host = self.proxy_host
port = self.proxy_port
sock = socket.create_connection((host, port))
if self.secure:
#assert not self.proxy_host, "Proxy with HTTPS not supported."
sock, proto = wrap_socket(sock, sni, self.ssl_context,
force_proto=self.force_proto)
else:
proto = H2C_PROTOCOL
log.debug("Selected NPN protocol: %s", proto)
assert proto in H2_NPN_PROTOCOLS or proto == H2C_PROTOCOL
self._sock = BufferedSocket(sock, self.network_buffer_size)
self._send_preamble()
def createDummyBodywithLength(numberOfbytes):
    # Build a dummy payload of exactly `numberOfbytes` bytes: an 'a' followed
    # by 'b's. A zero-length request gets no body at all.
    if numberOfbytes == 0:
        return None
    return 'a' + 'b' * (numberOfbytes - 1)
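# Illustrative sketch (not part of the original file): the dummy body is an 'a'
# followed by 'b' characters, sized to match a declared Content-Length.
def _demo_dummy_body():
    assert createDummyBodywithLength(4) == 'abbb'
    assert createDummyBodywithLength(0) is None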
def handleResponse(response, *args, **kwargs):
print(response.status_code)
# resp=args[0]
#expected_output_split = resp.getHeaders().split('\r\n')[ 0].split(' ', 2)
#expected_output = (int(expected_output_split[1]), str( expected_output_split[2]))
#r = result.Result(session_filename, expected_output[0], response.status_code)
# print(r.getResultString(colorize=True))
# make sure len of the message body is greater than length
def gen():
yield 'pforpersia,champaignurbana'.encode('utf-8')
yield 'there'.encode('utf-8')
def txn_replay(session_filename, txn, proxy, result_queue, h2conn, request_IDs):
""" Replays a single transaction
:param request_session: has to be a valid requests session"""
req = txn.getRequest()
resp = txn.getResponse()
# Construct HTTP request & fire it off
txn_req_headers = req.getHeaders()
txn_req_headers_dict = extractHeader.header_to_dict(txn_req_headers)
txn_req_headers_dict['Content-MD5'] = txn._uuid # used as unique identifier
if 'body' in txn_req_headers_dict:
del txn_req_headers_dict['body']
responseID = -1
#print("Replaying session")
try:
# response = request_session.request(extractHeader.extract_txn_req_method(txn_req_headers),
# 'http://' + extractHeader.extract_host(txn_req_headers) + extractHeader.extract_GET_path(txn_req_headers),
# headers=txn_req_headers_dict,stream=False) # making stream=False raises contentdecoding exception? kill me
method = extractHeader.extract_txn_req_method(txn_req_headers)
response = None
mbody = None
#txn_req_headers_dict['Host'] = "localhost"
if 'Transfer-Encoding' in txn_req_headers_dict:
            # Delete the Host key: the post/get helpers add a Host field anyway,
            # which would leave duplicate Host fields in the header. ATS then
            # rejects such requests with 400 "Invalid HTTP request". Oddly, this
            # is only a problem when the data is chunk-encoded.
#del txn_req_headers_dict['Host']
if 'Content-Length' in txn_req_headers_dict:
#print("ewww !")
del txn_req_headers_dict['Content-Length']
mbody = gen()
if 'Content-Length' in txn_req_headers_dict:
nBytes = int(txn_req_headers_dict['Content-Length'])
mbody = createDummyBodywithLength(nBytes)
if 'Connection' in txn_req_headers_dict:
del txn_req_headers_dict['Connection']
#str2 = extractHeader.extract_host(txn_req_headers)+ extractHeader.extract_GET_path(txn_req_headers)
# print(str2)
if method == 'GET':
responseID = h2conn.request('GET', url=extractHeader.extract_GET_path(
txn_req_headers), headers=txn_req_headers_dict, body=mbody)
# print("get response", responseID)
return responseID
# request_IDs.append(responseID)
#response = h2conn.get_response(id)
# print(response.headers)
# if 'Content-Length' in response.headers:
# content = response.read()
#print("len: {0} received {1}".format(response.headers['Content-Length'],content))
elif method == 'POST':
responseID = h2conn.request('POST', url=extractHeader.extract_GET_path(
txn_req_headers), headers=txn_req_headers_dict, body=mbody)
print("get response", responseID)
return responseID
elif method == 'HEAD':
responseID = h2conn.request('HEAD', url=extractHeader.extract_GET_path(txn_req_headers), headers=txn_req_headers_dict)
print("get response", responseID)
return responseID
    except UnicodeEncodeError:
        # These unicode errors are due to the interaction between Requests and
        # our wiretrace data.
        # TODO fix
        print("UnicodeEncodeError exception")
    except Exception:
        e = sys.exc_info()
        print("ERROR in requests: ", e, response, session_filename)
def session_replay(input, proxy, result_queue):
    ''' Replay all transactions in session
    This entire session will be replayed in one requests.Session (so one socket / TCP connection)'''
    global bSTOP
    # if timing_control:
    #     time.sleep(float(session._timestamp)) # allow other threads to run
    while not bSTOP:
        for session in iter(input.get, 'STOP'):
            # iter() already stops on the 'STOP' sentinel; the check below is a
            # defensive guard in case a sentinel is ever delivered as an item.
            if session == 'STOP':
                print("Queue is empty")
                bSTOP = True
                break
txn = session.returnFirstTransaction()
req = txn.getRequest()
# Construct HTTP request & fire it off
txn_req_headers = req.getHeaders()
txn_req_headers_dict = extractHeader.header_to_dict(txn_req_headers)
with h2ATS(txn_req_headers_dict['Host'], secure=True, proxy_host=Config.proxy_host, proxy_port=Config.proxy_ssl_port) as h2conn:
request_IDs = []
respList = []
for txn in session.getTransactionIter():
try:
ret = txn_replay(session._filename, txn, proxy, result_queue, h2conn, request_IDs)
respList.append(txn.getResponse())
request_IDs.append(ret)
#print("txn return value is ",ret)
                except Exception:
                    e = sys.exc_info()
                    print("ERROR in replaying: ", e, txn.getRequest().getHeaders())
for id in request_IDs:
expectedH = respList.pop(0)
# print("extracting",id)
response = h2conn.get_response(id)
#print("code {0}:{1}".format(response.status,response.headers))
response_dict = {}
if mainProcess.verbose:
for field, value in response.headers.items():
response_dict[field.decode('utf-8')] = value.decode('utf-8')
expected_output_split = expectedH.getHeaders().split('\r\n')[0].split(' ', 2)
expected_output = (int(expected_output_split[1]), str(expected_output_split[2]))
r = result.Result("", expected_output[0], response.status, response.read())
expected_Dict = extractHeader.responseHeader_to_dict(expectedH.getHeaders())
b_res, res = r.getResult(response_dict, expected_Dict, colorize=Config.colorize)
print(res)
if not b_res:
print("Received response")
print(response_dict)
print("Expected response")
print(expected_Dict)
bSTOP = True
#print("Queue is empty")
input.put('STOP')
break
def client_replay(input, proxy, result_queue, nThread):
Threads = []
for i in range(nThread):
t = Thread(target=session_replay, args=[input, proxy, result_queue])
t.start()
Threads.append(t)
for t1 in Threads:
t1.join()
| apache-2.0 |
pvagner/orca | src/orca/scripts/apps/gnome-mud/script.py | 3 | 6038 | # Orca
#
# Copyright 2005-2008 Sun Microsystems Inc.
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the
# Free Software Foundation, Inc., Franklin Street, Fifth Floor,
# Boston MA 02110-1301 USA.
"""Custom script for gnome-mud."""
__id__ = "$Id$"
__version__ = "$Revision$"
__date__ = "$Date$"
__copyright__ = "Copyright (c) 2005-2008 Sun Microsystems Inc."
__license__ = "LGPL"
import pyatspi
import orca.debug as debug
import orca.scripts.default as default
import orca.input_event as input_event
import orca.keybindings as keybindings
import orca.orca_state as orca_state
import orca.speech as speech
from orca.orca_i18n import _ # for gettext support
########################################################################
# #
# Ring List. A fixed size circular list by Flavio Catalani #
# http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/435902 #
# #
########################################################################
class RingList:
def __init__(self, length):
self.__data__ = []
self.__full__ = 0
self.__max__ = length
self.__cur__ = 0
def append(self, x):
if self.__full__ == 1:
for i in range (0, self.__cur__ - 1):
self.__data__[i] = self.__data__[i + 1]
self.__data__[self.__cur__ - 1] = x
else:
self.__data__.append(x)
self.__cur__ += 1
if self.__cur__ == self.__max__:
self.__full__ = 1
def get(self):
return self.__data__
def remove(self):
if (self.__cur__ > 0):
del self.__data__[self.__cur__ - 1]
self.__cur__ -= 1
def size(self):
return self.__cur__
def maxsize(self):
return self.__max__
def __str__(self):
return ''.join(self.__data__)
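# Illustrative sketch (not part of the original script): the ring keeps only
# the most recent `length` items, oldest first; the values below are made up.
def _demo_ring_list():
    ring = RingList(3)
    for letter in "abcd":
        ring.append(letter)
    assert ring.get() == ['b', 'c', 'd']
    assert ring.size() == 3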
class Script(default.Script):
MESSAGE_LIST_LENGTH = 10
def __init__(self, app):
"""Creates a new script for the given application.
This script tries to fix some accessibility problems found in
the gnome-mud application, and also improves the user experience.
For more details see bug #
Arguments:
- app: the application to create a script for.
"""
# Set the debug level for all the methods in this script.
#
self.debugLevel = debug.LEVEL_FINEST
self.previousMessages = RingList(Script.MESSAGE_LIST_LENGTH)
# Initially populate the cyclic list with empty strings
        for _ in range(self.previousMessages.maxsize()):
            self.previousMessages.append("")
default.Script.__init__(self, app)
def setupInputEventHandlers(self):
debug.println(self.debugLevel, "gnome-mud.setupInputEventHandlers.")
default.Script.setupInputEventHandlers(self)
self.inputEventHandlers["readPreviousMessageHandler"] = \
input_event.InputEventHandler(
Script.readPreviousMessage,
_('Read the latest n messages in the incoming messages text '
'area.'))
def getAppKeyBindings(self):
"""Returns the application-specific keybindings for this script."""
keyBindings = keybindings.KeyBindings()
messageKeys = [ "F1", "F2", "F3", "F4", "F5", "F6", "F7", "F8", "F9"]
for messagekey in messageKeys:
keyBindings.add(
keybindings.KeyBinding(
messagekey,
keybindings.defaultModifierMask,
keybindings.ORCA_MODIFIER_MASK,
self.inputEventHandlers["readPreviousMessageHandler"]))
return keyBindings
def readPreviousMessage(self, inputEvent):
        # This function speaks the latest n messages: Orca+F1 the latest one,
        # Orca+F2 the latest two, and so on.
debug.println(self.debugLevel, "gnome-mud.readPreviousMessage.")
i = int(inputEvent.event_string[1:])
messageNo = Script.MESSAGE_LIST_LENGTH - i
text = ""
messages = self.previousMessages.get()
for i in range (messageNo, Script.MESSAGE_LIST_LENGTH):
message = messages[i]
text += message
speech.speak(text)
def onTextInserted(self, event):
        # Whenever new text is inserted in the incoming message text area, we
        # want to speak it and add it to the ringList structure, but only for
        # lines that contain some text and only if the application is the
        # current locusOfFocus.
rolesList = [pyatspi.ROLE_TERMINAL,
pyatspi.ROLE_FILLER]
if self.utilities.hasMatchingHierarchy(event.source, rolesList):
if self.flatReviewContext:
self.toggleFlatReviewMode()
message = event.any_data
if message and (not message.isspace()) and message != "\n":
debug.println(debug.LEVEL_FINEST, \
message + " inserted in ringlist:")
self.previousMessages.append(message)
if event.source.getApplication() == \
orca_state.locusOfFocus.getApplication():
speech.speak(message)
else:
default.Script.onTextInserted(self, event)
| lgpl-2.1 |
jfterpstra/bluebottle | bluebottle/utils/staticfiles_finders.py | 2 | 1192 | from django.utils._os import safe_join
import os
from django.conf import settings
from django.contrib.staticfiles.finders import FileSystemFinder
from bluebottle.clients.models import Client
class TenantStaticFilesFinder(FileSystemFinder):
def find(self, path, all=False):
"""
Looks for files in the client static directories.
static/assets/greatbarier/images/logo.jpg
will translate to
MULTI_TENANT_DIR/greatbarier/static/images/logo.jpg
"""
tenants = Client.objects.all()
tenant_dir = getattr(settings, 'MULTI_TENANT_DIR', None)
if not tenant_dir:
return []
for tenant in tenants:
if "{0}/".format(tenant.client_name) in path:
tenant_path = path.replace('{0}/'.format(tenant.client_name),
'{0}/static/'.format(
tenant.client_name))
local_path = safe_join(tenant_dir, tenant_path)
if os.path.exists(local_path):
if all:
return [local_path]
return local_path
return []
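# Illustrative sketch (not part of the original file): the path rewrite that
# find() performs, shown without Django. The tenant name and path are made up.
def _demo_tenant_path_rewrite():
    tenant, path = 'greatbarier', 'greatbarier/images/logo.jpg'
    tenant_path = path.replace('{0}/'.format(tenant),
                               '{0}/static/'.format(tenant))
    assert tenant_path == 'greatbarier/static/images/logo.jpg'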
| bsd-3-clause |
themarkypantz/kafka | tests/kafkatest/services/security/security_config.py | 8 | 14918 | # Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import subprocess
from tempfile import mkdtemp
from shutil import rmtree
from ducktape.template import TemplateRenderer
from kafkatest.services.security.minikdc import MiniKdc
import itertools
class SslStores(object):
def __init__(self, local_scratch_dir):
self.ca_crt_path = os.path.join(local_scratch_dir, "test.ca.crt")
self.ca_jks_path = os.path.join(local_scratch_dir, "test.ca.jks")
self.ca_passwd = "test-ca-passwd"
self.truststore_path = os.path.join(local_scratch_dir, "test.truststore.jks")
self.truststore_passwd = "test-ts-passwd"
self.keystore_passwd = "test-ks-passwd"
self.key_passwd = "test-key-passwd"
        # Allow up to one hour of clock skew between the host and VMs
self.startdate = "-1H"
for file in [self.ca_crt_path, self.ca_jks_path, self.truststore_path]:
if os.path.exists(file):
os.remove(file)
def generate_ca(self):
"""
Generate CA private key and certificate.
"""
self.runcmd("keytool -genkeypair -alias ca -keyalg RSA -keysize 2048 -keystore %s -storetype JKS -storepass %s -keypass %s -dname CN=SystemTestCA -startdate %s" % (self.ca_jks_path, self.ca_passwd, self.ca_passwd, self.startdate))
self.runcmd("keytool -export -alias ca -keystore %s -storepass %s -storetype JKS -rfc -file %s" % (self.ca_jks_path, self.ca_passwd, self.ca_crt_path))
def generate_truststore(self):
"""
Generate JKS truststore containing CA certificate.
"""
self.runcmd("keytool -importcert -alias ca -file %s -keystore %s -storepass %s -storetype JKS -noprompt" % (self.ca_crt_path, self.truststore_path, self.truststore_passwd))
def generate_and_copy_keystore(self, node):
"""
Generate JKS keystore with certificate signed by the test CA.
The generated certificate has the node's hostname as a DNS SubjectAlternativeName.
"""
ks_dir = mkdtemp(dir="/tmp")
ks_path = os.path.join(ks_dir, "test.keystore.jks")
csr_path = os.path.join(ks_dir, "test.kafka.csr")
crt_path = os.path.join(ks_dir, "test.kafka.crt")
self.runcmd("keytool -genkeypair -alias kafka -keyalg RSA -keysize 2048 -keystore %s -storepass %s -storetype JKS -keypass %s -dname CN=systemtest -ext SAN=DNS:%s -startdate %s" % (ks_path, self.keystore_passwd, self.key_passwd, self.hostname(node), self.startdate))
self.runcmd("keytool -certreq -keystore %s -storepass %s -storetype JKS -keypass %s -alias kafka -file %s" % (ks_path, self.keystore_passwd, self.key_passwd, csr_path))
self.runcmd("keytool -gencert -keystore %s -storepass %s -storetype JKS -alias ca -infile %s -outfile %s -dname CN=systemtest -ext SAN=DNS:%s -startdate %s" % (self.ca_jks_path, self.ca_passwd, csr_path, crt_path, self.hostname(node), self.startdate))
self.runcmd("keytool -importcert -keystore %s -storepass %s -storetype JKS -alias ca -file %s -noprompt" % (ks_path, self.keystore_passwd, self.ca_crt_path))
self.runcmd("keytool -importcert -keystore %s -storepass %s -storetype JKS -keypass %s -alias kafka -file %s -noprompt" % (ks_path, self.keystore_passwd, self.key_passwd, crt_path))
node.account.copy_to(ks_path, SecurityConfig.KEYSTORE_PATH)
rmtree(ks_dir)
def hostname(self, node):
""" Hostname which may be overridden for testing validation failures
"""
return node.account.hostname
def runcmd(self, cmd):
proc = subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
stdout, stderr = proc.communicate()
if proc.returncode != 0:
raise RuntimeError("Command '%s' returned non-zero exit status %d: %s" % (cmd, proc.returncode, stdout))
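# Illustrative sketch (not part of the original file): the intended call order
# for SslStores. Requires `keytool` on the PATH; the scratch directory name is
# made up.
def _demo_ssl_stores(scratch_dir='/tmp/ssl-stores-demo'):
    if not os.path.isdir(scratch_dir):
        os.makedirs(scratch_dir)
    stores = SslStores(scratch_dir)
    stores.generate_ca()          # CA key pair plus exported certificate
    stores.generate_truststore()  # truststore holding the CA certificate
    return stores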
class SecurityConfig(TemplateRenderer):
PLAINTEXT = 'PLAINTEXT'
SSL = 'SSL'
SASL_PLAINTEXT = 'SASL_PLAINTEXT'
SASL_SSL = 'SASL_SSL'
SASL_MECHANISM_GSSAPI = 'GSSAPI'
SASL_MECHANISM_PLAIN = 'PLAIN'
SASL_MECHANISM_SCRAM_SHA_256 = 'SCRAM-SHA-256'
SASL_MECHANISM_SCRAM_SHA_512 = 'SCRAM-SHA-512'
SCRAM_CLIENT_USER = "kafka-client"
SCRAM_CLIENT_PASSWORD = "client-secret"
SCRAM_BROKER_USER = "kafka-broker"
SCRAM_BROKER_PASSWORD = "broker-secret"
CONFIG_DIR = "/mnt/security"
KEYSTORE_PATH = "/mnt/security/test.keystore.jks"
TRUSTSTORE_PATH = "/mnt/security/test.truststore.jks"
JAAS_CONF_PATH = "/mnt/security/jaas.conf"
KRB5CONF_PATH = "/mnt/security/krb5.conf"
KEYTAB_PATH = "/mnt/security/keytab"
# This is initialized only when the first instance of SecurityConfig is created
ssl_stores = None
def __init__(self, context, security_protocol=None, interbroker_security_protocol=None,
client_sasl_mechanism=SASL_MECHANISM_GSSAPI, interbroker_sasl_mechanism=SASL_MECHANISM_GSSAPI,
zk_sasl=False, template_props="", static_jaas_conf=True):
"""
Initialize the security properties for the node and copy
keystore and truststore to the remote node if the transport protocol
is SSL. If security_protocol is None, the protocol specified in the
template properties file is used. If no protocol is specified in the
template properties either, PLAINTEXT is used as default.
"""
self.context = context
if not SecurityConfig.ssl_stores:
            # This generates keystore/truststore files in a local scratch directory which gets
            # automatically destroyed after the test is run.
            # Creating them within the scratch directory allows us to run tests in parallel without fear of collision.
SecurityConfig.ssl_stores = SslStores(context.local_scratch_dir)
SecurityConfig.ssl_stores.generate_ca()
SecurityConfig.ssl_stores.generate_truststore()
if security_protocol is None:
security_protocol = self.get_property('security.protocol', template_props)
if security_protocol is None:
security_protocol = SecurityConfig.PLAINTEXT
elif security_protocol not in [SecurityConfig.PLAINTEXT, SecurityConfig.SSL, SecurityConfig.SASL_PLAINTEXT, SecurityConfig.SASL_SSL]:
raise Exception("Invalid security.protocol in template properties: " + security_protocol)
if interbroker_security_protocol is None:
interbroker_security_protocol = security_protocol
self.interbroker_security_protocol = interbroker_security_protocol
self.has_sasl = self.is_sasl(security_protocol) or self.is_sasl(interbroker_security_protocol) or zk_sasl
self.has_ssl = self.is_ssl(security_protocol) or self.is_ssl(interbroker_security_protocol)
self.zk_sasl = zk_sasl
self.static_jaas_conf = static_jaas_conf
self.properties = {
'security.protocol' : security_protocol,
'ssl.keystore.location' : SecurityConfig.KEYSTORE_PATH,
'ssl.keystore.password' : SecurityConfig.ssl_stores.keystore_passwd,
'ssl.key.password' : SecurityConfig.ssl_stores.key_passwd,
'ssl.truststore.location' : SecurityConfig.TRUSTSTORE_PATH,
'ssl.truststore.password' : SecurityConfig.ssl_stores.truststore_passwd,
'ssl.endpoint.identification.algorithm' : 'HTTPS',
'sasl.mechanism' : client_sasl_mechanism,
'sasl.mechanism.inter.broker.protocol' : interbroker_sasl_mechanism,
'sasl.kerberos.service.name' : 'kafka'
}
def client_config(self, template_props="", node=None):
        # If node is not specified, use a static JAAS config, which will be created later.
        # Otherwise use static JAAS configuration files with SASL_SSL and the sasl.jaas.config
        # property with SASL_PLAINTEXT so that both code paths are tested by existing tests.
        # Note that this is an arbitrary choice and it is possible to run all tests with
        # either static or dynamic JAAS config files if required.
static_jaas_conf = node is None or (self.has_sasl and self.has_ssl)
return SecurityConfig(self.context, self.security_protocol, client_sasl_mechanism=self.client_sasl_mechanism, template_props=template_props, static_jaas_conf=static_jaas_conf)
def enable_security_protocol(self, security_protocol):
self.has_sasl = self.has_sasl or self.is_sasl(security_protocol)
self.has_ssl = self.has_ssl or self.is_ssl(security_protocol)
def setup_ssl(self, node):
node.account.ssh("mkdir -p %s" % SecurityConfig.CONFIG_DIR, allow_fail=False)
node.account.copy_to(SecurityConfig.ssl_stores.truststore_path, SecurityConfig.TRUSTSTORE_PATH)
SecurityConfig.ssl_stores.generate_and_copy_keystore(node)
def setup_sasl(self, node):
node.account.ssh("mkdir -p %s" % SecurityConfig.CONFIG_DIR, allow_fail=False)
jaas_conf_file = "jaas.conf"
java_version = node.account.ssh_capture("java -version")
        is_ibm_jdk = any('IBM' in line for line in java_version)
jaas_conf = self.render(jaas_conf_file, node=node, is_ibm_jdk=is_ibm_jdk,
SecurityConfig=SecurityConfig,
client_sasl_mechanism=self.client_sasl_mechanism,
enabled_sasl_mechanisms=self.enabled_sasl_mechanisms,
static_jaas_conf=self.static_jaas_conf)
if self.static_jaas_conf:
node.account.create_file(SecurityConfig.JAAS_CONF_PATH, jaas_conf)
else:
self.properties['sasl.jaas.config'] = jaas_conf.replace("\n", " \\\n")
if self.has_sasl_kerberos:
node.account.copy_to(MiniKdc.LOCAL_KEYTAB_FILE, SecurityConfig.KEYTAB_PATH)
node.account.copy_to(MiniKdc.LOCAL_KRB5CONF_FILE, SecurityConfig.KRB5CONF_PATH)
def setup_node(self, node):
if self.has_ssl:
self.setup_ssl(node)
if self.has_sasl:
self.setup_sasl(node)
def setup_credentials(self, node, path, zk_connect, broker):
if broker:
self.maybe_create_scram_credentials(node, zk_connect, path, self.interbroker_sasl_mechanism,
SecurityConfig.SCRAM_BROKER_USER, SecurityConfig.SCRAM_BROKER_PASSWORD)
else:
self.maybe_create_scram_credentials(node, zk_connect, path, self.client_sasl_mechanism,
SecurityConfig.SCRAM_CLIENT_USER, SecurityConfig.SCRAM_CLIENT_PASSWORD)
def maybe_create_scram_credentials(self, node, zk_connect, path, mechanism, user_name, password):
if self.has_sasl and self.is_sasl_scram(mechanism):
cmd = "%s --zookeeper %s --entity-name %s --entity-type users --alter --add-config %s=[password=%s]" % \
(path.script("kafka-configs.sh", node), zk_connect,
user_name, mechanism, password)
node.account.ssh(cmd)
def clean_node(self, node):
if self.security_protocol != SecurityConfig.PLAINTEXT:
node.account.ssh("rm -rf %s" % SecurityConfig.CONFIG_DIR, allow_fail=False)
def get_property(self, prop_name, template_props=""):
"""
Get property value from the string representation of
a properties file.
"""
value = None
for line in template_props.split("\n"):
items = line.split("=")
if len(items) == 2 and items[0].strip() == prop_name:
value = str(items[1].strip())
return value
def is_ssl(self, security_protocol):
return security_protocol == SecurityConfig.SSL or security_protocol == SecurityConfig.SASL_SSL
def is_sasl(self, security_protocol):
return security_protocol == SecurityConfig.SASL_PLAINTEXT or security_protocol == SecurityConfig.SASL_SSL
def is_sasl_scram(self, sasl_mechanism):
return sasl_mechanism == SecurityConfig.SASL_MECHANISM_SCRAM_SHA_256 or sasl_mechanism == SecurityConfig.SASL_MECHANISM_SCRAM_SHA_512
@property
def security_protocol(self):
return self.properties['security.protocol']
@property
def client_sasl_mechanism(self):
return self.properties['sasl.mechanism']
@property
def interbroker_sasl_mechanism(self):
return self.properties['sasl.mechanism.inter.broker.protocol']
@property
def enabled_sasl_mechanisms(self):
return set([self.client_sasl_mechanism, self.interbroker_sasl_mechanism])
@property
def has_sasl_kerberos(self):
return self.has_sasl and (SecurityConfig.SASL_MECHANISM_GSSAPI in self.enabled_sasl_mechanisms)
@property
def kafka_opts(self):
if self.has_sasl:
if self.static_jaas_conf:
return "\"-Djava.security.auth.login.config=%s -Djava.security.krb5.conf=%s\"" % (SecurityConfig.JAAS_CONF_PATH, SecurityConfig.KRB5CONF_PATH)
else:
return "\"-Djava.security.krb5.conf=%s\"" % SecurityConfig.KRB5CONF_PATH
else:
return ""
def props(self, prefix=''):
"""
Return properties as string with line separators, optionally with a prefix.
This is used to append security config properties to
a properties file.
:param prefix: prefix to add to each property
:return: a string containing line-separated properties
"""
if self.security_protocol == SecurityConfig.PLAINTEXT:
return ""
if self.has_sasl and not self.static_jaas_conf and 'sasl.jaas.config' not in self.properties:
raise Exception("JAAS configuration property has not yet been initialized")
config_lines = (prefix + key + "=" + value for key, value in self.properties.iteritems())
# Extra blank lines ensure this can be appended/prepended safely
return "\n".join(itertools.chain([""], config_lines, [""]))
def __str__(self):
"""
Return properties as a string with line separators.
"""
return self.props()
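# Illustrative sketch (not part of the original file): the line-joining that
# props() performs, shown with a made-up properties dict and prefix. items()
# is used here instead of the module's py2-only iteritems().
def _demo_props_formatting(prefix='listener.name.sasl_ssl.'):
    properties = {'security.protocol': 'SASL_SSL'}
    config_lines = (prefix + key + "=" + value
                    for key, value in properties.items())
    # Extra blank lines ensure the result can be appended/prepended safely.
    return "\n".join(itertools.chain([""], config_lines, [""]))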
| apache-2.0 |
andyliuliming/WALinuxAgent | azurelinuxagent/common/protocol/imds.py | 2 | 10005 | # Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the Apache License, Version 2.0 (the "License");
import json
import re
import azurelinuxagent.common.utils.restutil as restutil
from azurelinuxagent.common.exception import HttpError
from azurelinuxagent.common.future import ustr
import azurelinuxagent.common.logger as logger
from azurelinuxagent.common.protocol.restapi import DataContract, set_properties
from azurelinuxagent.common.utils.flexible_version import FlexibleVersion
IMDS_ENDPOINT = '169.254.169.254'
APIVERSION = '2018-02-01'
BASE_URI = "http://{0}/metadata/instance/{1}?api-version={2}"
IMDS_IMAGE_ORIGIN_UNKNOWN = 0
IMDS_IMAGE_ORIGIN_CUSTOM = 1
IMDS_IMAGE_ORIGIN_ENDORSED = 2
IMDS_IMAGE_ORIGIN_PLATFORM = 3
def get_imds_client():
return ImdsClient()
# A *slightly* future-proof list of endorsed distros.
# -> e.g. I have predicted the future and said that 20.04-LTS will exist
#    and is endorsed.
#
# See https://docs.microsoft.com/en-us/azure/virtual-machines/linux/endorsed-distros for
# more details.
#
# This is not an exhaustive list. This is a best attempt to mark images as
# endorsed or not. Image publishers do not encode all of the requisite information
# in their publisher, offer, sku, and version to definitively mark something as
# endorsed or not. This is not perfect, but it is approximately 98% perfect.
ENDORSED_IMAGE_INFO_MATCHER_JSON = """{
"CANONICAL": {
"UBUNTUSERVER": {
"List": [
"14.04.0-LTS",
"14.04.1-LTS",
"14.04.2-LTS",
"14.04.3-LTS",
"14.04.4-LTS",
"14.04.5-LTS",
"14.04.6-LTS",
"14.04.7-LTS",
"14.04.8-LTS",
"16.04-LTS",
"16.04.0-LTS",
"18.04-LTS",
"20.04-LTS",
"22.04-LTS"
]
}
},
"COREOS": {
"COREOS": {
"STABLE": { "Minimum": "494.4.0" }
}
},
"CREDATIV": {
"DEBIAN": { "Minimum": "7" }
},
"OPENLOGIC": {
"CENTOS": {
"Minimum": "6.3",
"List": [
"7-LVM",
"7-RAW"
]
},
"CENTOS-HPC": { "Minimum": "6.3" }
},
"REDHAT": {
"RHEL": {
"Minimum": "6.7",
"List": [
"7-LVM",
"7-RAW"
]
},
"RHEL-HANA": { "Minimum": "6.7" },
"RHEL-SAP": { "Minimum": "6.7" },
"RHEL-SAP-APPS": { "Minimum": "6.7" },
"RHEL-SAP-HANA": { "Minimum": "6.7" }
},
"SUSE": {
"SLES": {
"List": [
"11-SP4",
"11-SP5",
"11-SP6",
"12-SP1",
"12-SP2",
"12-SP3",
"12-SP4",
"12-SP5",
"12-SP6"
]
},
"SLES-BYOS": {
"List": [
"11-SP4",
"11-SP5",
"11-SP6",
"12-SP1",
"12-SP2",
"12-SP3",
"12-SP4",
"12-SP5",
"12-SP6"
]
},
"SLES-SAP": {
"List": [
"11-SP4",
"11-SP5",
"11-SP6",
"12-SP1",
"12-SP2",
"12-SP3",
"12-SP4",
"12-SP5",
"12-SP6"
]
}
}
}"""
class ImageInfoMatcher(object):
def __init__(self, doc):
self.doc = json.loads(doc)
def is_match(self, publisher, offer, sku, version):
def _is_match_walk(doci, keys):
            key = keys.pop(0)
            if key is None:
                return False
            key = key.upper()
if key not in doci:
return False
if 'List' in doci[key] and keys[0] in doci[key]['List']:
return True
if 'Match' in doci[key] and re.match(doci[key]['Match'], keys[0]):
return True
if 'Minimum' in doci[key]:
try:
return FlexibleVersion(keys[0]) >= FlexibleVersion(doci[key]['Minimum'])
except ValueError:
pass
return _is_match_walk(doci[key], keys)
return _is_match_walk(self.doc, [ publisher, offer, sku, version ])
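# Illustrative sketch (not part of the original module): exercise the matcher
# against the endorsed-image document above. The version string is made up.
def _demo_image_info_matcher():
    matcher = ImageInfoMatcher(ENDORSED_IMAGE_INFO_MATCHER_JSON)
    assert matcher.is_match("Canonical", "UbuntuServer", "16.04-LTS", "16.04.201703300")
    assert not matcher.is_match("SomePublisher", "SomeOffer", "1.0", "1.0.0")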
class ComputeInfo(DataContract):
__matcher = ImageInfoMatcher(ENDORSED_IMAGE_INFO_MATCHER_JSON)
def __init__(self,
location=None,
name=None,
offer=None,
osType=None,
placementGroupId=None,
platformFaultDomain=None,
placementUpdateDomain=None,
publisher=None,
resourceGroupName=None,
sku=None,
subscriptionId=None,
tags=None,
version=None,
vmId=None,
vmSize=None,
vmScaleSetName=None,
zone=None):
self.location = location
self.name = name
self.offer = offer
self.osType = osType
self.placementGroupId = placementGroupId
self.platformFaultDomain = platformFaultDomain
self.platformUpdateDomain = placementUpdateDomain
self.publisher = publisher
self.resourceGroupName = resourceGroupName
self.sku = sku
self.subscriptionId = subscriptionId
self.tags = tags
self.version = version
self.vmId = vmId
self.vmSize = vmSize
self.vmScaleSetName = vmScaleSetName
self.zone = zone
@property
def image_info(self):
return "{0}:{1}:{2}:{3}".format(self.publisher, self.offer, self.sku, self.version)
@property
def image_origin(self):
"""
An integer value describing the origin of the image.
0 -> unknown
1 -> custom - user created image
2 -> endorsed - See https://docs.microsoft.com/en-us/azure/virtual-machines/linux/endorsed-distros
3 -> platform - non-endorsed image that is available in the Azure Marketplace.
"""
try:
if self.publisher == "":
return IMDS_IMAGE_ORIGIN_CUSTOM
if ComputeInfo.__matcher.is_match(self.publisher, self.offer, self.sku, self.version):
return IMDS_IMAGE_ORIGIN_ENDORSED
else:
return IMDS_IMAGE_ORIGIN_PLATFORM
except Exception as e:
logger.warn("Could not determine the image origin from IMDS: {0}", str(e))
return IMDS_IMAGE_ORIGIN_UNKNOWN
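# Illustrative sketch (not part of the original module): populate a ComputeInfo
# from a made-up IMDS compute document and read the derived properties,
# assuming set_properties copies the dict's fields onto the contract, as
# get_compute does below.
def _demo_compute_info():
    data = {'publisher': 'Canonical', 'offer': 'UbuntuServer',
            'sku': '16.04-LTS', 'version': '16.04.201703300'}
    info = ComputeInfo()
    set_properties('compute', info, data)
    assert info.image_info == 'Canonical:UbuntuServer:16.04-LTS:16.04.201703300'
    assert info.image_origin == IMDS_IMAGE_ORIGIN_ENDORSED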
class ImdsClient(object):
def __init__(self, version=APIVERSION):
self._api_version = version
self._headers = {
'User-Agent': restutil.HTTP_USER_AGENT,
'Metadata': True,
}
self._health_headers = {
'User-Agent': restutil.HTTP_USER_AGENT_HEALTH,
'Metadata': True,
}
@property
def compute_url(self):
return BASE_URI.format(IMDS_ENDPOINT, 'compute', self._api_version)
@property
def instance_url(self):
return BASE_URI.format(IMDS_ENDPOINT, '', self._api_version)
def get_compute(self):
"""
Fetch compute information.
:return: instance of a ComputeInfo
:rtype: ComputeInfo
"""
resp = restutil.http_get(self.compute_url, headers=self._headers)
if restutil.request_failed(resp):
raise HttpError("{0} - GET: {1}".format(resp.status, self.compute_url))
data = resp.read()
data = json.loads(ustr(data, encoding="utf-8"))
compute_info = ComputeInfo()
set_properties('compute', compute_info, data)
return compute_info
def validate(self):
"""
Determines whether the metadata instance api returns 200, and the response
is valid: compute should contain location, name, subscription id, and vm size
and network should contain mac address and private ip address.
:return: Tuple<is_healthy:bool, error_response:str>
is_healthy: True when validation succeeds, False otherwise
error_response: validation failure details to assist with debugging
"""
# ensure we get a 200
resp = restutil.http_get(self.instance_url, headers=self._health_headers)
if restutil.request_failed(resp):
return False, "{0}".format(restutil.read_response_error(resp))
# ensure the response is valid json
data = resp.read()
try:
json_data = json.loads(ustr(data, encoding="utf-8"))
except Exception as e:
return False, "JSON parsing failed: {0}".format(ustr(e))
# ensure all expected fields are present and have a value
try:
# TODO: compute fields cannot be verified yet since we need to exclude rdfe vms (#1249)
self.check_field(json_data, 'network')
self.check_field(json_data['network'], 'interface')
self.check_field(json_data['network']['interface'][0], 'macAddress')
self.check_field(json_data['network']['interface'][0], 'ipv4')
self.check_field(json_data['network']['interface'][0]['ipv4'], 'ipAddress')
self.check_field(json_data['network']['interface'][0]['ipv4']['ipAddress'][0], 'privateIpAddress')
except ValueError as v:
return False, ustr(v)
return True, ''
@staticmethod
def check_field(dict_obj, field):
if field not in dict_obj or dict_obj[field] is None:
raise ValueError('Missing field: [{0}]'.format(field))
if len(dict_obj[field]) == 0:
raise ValueError('Empty field: [{0}]'.format(field))
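# Illustrative sketch (not part of the original module): check_field raises
# ValueError for missing or empty fields. The payload below is made up.
def _demo_check_field():
    doc = {'network': {'interface': []}}
    ImdsClient.check_field(doc, 'network')  # present and non-empty: no error
    try:
        ImdsClient.check_field(doc['network'], 'interface')  # empty list
    except ValueError as error:
        return str(error)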
| apache-2.0 |
noamelf/Open-Knesset | persons/migrations/0005_auto__add_field_person_img_url__add_field_person_phone__add_field_pers.py | 14 | 19706 | # -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding field 'Person.img_url'
db.add_column('persons_person', 'img_url',
self.gf('django.db.models.fields.URLField')(default='', max_length=200, blank=True),
keep_default=False)
# Adding field 'Person.phone'
db.add_column('persons_person', 'phone',
self.gf('django.db.models.fields.CharField')(max_length=20, null=True, blank=True),
keep_default=False)
# Adding field 'Person.fax'
db.add_column('persons_person', 'fax',
self.gf('django.db.models.fields.CharField')(max_length=20, null=True, blank=True),
keep_default=False)
# Adding field 'Person.email'
db.add_column('persons_person', 'email',
self.gf('django.db.models.fields.EmailField')(max_length=75, null=True, blank=True),
keep_default=False)
# Adding field 'Person.family_status'
db.add_column('persons_person', 'family_status',
self.gf('django.db.models.fields.CharField')(max_length=10, null=True, blank=True),
keep_default=False)
# Adding field 'Person.number_of_children'
db.add_column('persons_person', 'number_of_children',
self.gf('django.db.models.fields.IntegerField')(null=True, blank=True),
keep_default=False)
# Adding field 'Person.date_of_birth'
db.add_column('persons_person', 'date_of_birth',
self.gf('django.db.models.fields.DateField')(null=True, blank=True),
keep_default=False)
# Adding field 'Person.place_of_birth'
db.add_column('persons_person', 'place_of_birth',
self.gf('django.db.models.fields.CharField')(max_length=100, null=True, blank=True),
keep_default=False)
# Adding field 'Person.date_of_death'
db.add_column('persons_person', 'date_of_death',
self.gf('django.db.models.fields.DateField')(null=True, blank=True),
keep_default=False)
# Adding field 'Person.year_of_aliyah'
db.add_column('persons_person', 'year_of_aliyah',
self.gf('django.db.models.fields.IntegerField')(null=True, blank=True),
keep_default=False)
# Adding field 'Person.place_of_residence'
db.add_column('persons_person', 'place_of_residence',
self.gf('django.db.models.fields.CharField')(max_length=100, null=True, blank=True),
keep_default=False)
# Adding field 'Person.area_of_residence'
db.add_column('persons_person', 'area_of_residence',
self.gf('django.db.models.fields.CharField')(max_length=100, null=True, blank=True),
keep_default=False)
# Adding field 'Person.place_of_residence_lat'
db.add_column('persons_person', 'place_of_residence_lat',
self.gf('django.db.models.fields.CharField')(max_length=16, null=True, blank=True),
keep_default=False)
# Adding field 'Person.place_of_residence_lon'
db.add_column('persons_person', 'place_of_residence_lon',
self.gf('django.db.models.fields.CharField')(max_length=16, null=True, blank=True),
keep_default=False)
# Adding field 'Person.residence_centrality'
db.add_column('persons_person', 'residence_centrality',
self.gf('django.db.models.fields.IntegerField')(null=True, blank=True),
keep_default=False)
# Adding field 'Person.residence_economy'
db.add_column('persons_person', 'residence_economy',
self.gf('django.db.models.fields.IntegerField')(null=True, blank=True),
keep_default=False)
# Adding field 'Person.gender'
db.add_column('persons_person', 'gender',
self.gf('django.db.models.fields.CharField')(max_length=1, null=True, blank=True),
keep_default=False)
def backwards(self, orm):
# Deleting field 'Person.img_url'
db.delete_column('persons_person', 'img_url')
# Deleting field 'Person.phone'
db.delete_column('persons_person', 'phone')
# Deleting field 'Person.fax'
db.delete_column('persons_person', 'fax')
# Deleting field 'Person.email'
db.delete_column('persons_person', 'email')
# Deleting field 'Person.family_status'
db.delete_column('persons_person', 'family_status')
# Deleting field 'Person.number_of_children'
db.delete_column('persons_person', 'number_of_children')
# Deleting field 'Person.date_of_birth'
db.delete_column('persons_person', 'date_of_birth')
# Deleting field 'Person.place_of_birth'
db.delete_column('persons_person', 'place_of_birth')
# Deleting field 'Person.date_of_death'
db.delete_column('persons_person', 'date_of_death')
# Deleting field 'Person.year_of_aliyah'
db.delete_column('persons_person', 'year_of_aliyah')
# Deleting field 'Person.place_of_residence'
db.delete_column('persons_person', 'place_of_residence')
# Deleting field 'Person.area_of_residence'
db.delete_column('persons_person', 'area_of_residence')
# Deleting field 'Person.place_of_residence_lat'
db.delete_column('persons_person', 'place_of_residence_lat')
# Deleting field 'Person.place_of_residence_lon'
db.delete_column('persons_person', 'place_of_residence_lon')
# Deleting field 'Person.residence_centrality'
db.delete_column('persons_person', 'residence_centrality')
# Deleting field 'Person.residence_economy'
db.delete_column('persons_person', 'residence_economy')
# Deleting field 'Person.gender'
db.delete_column('persons_person', 'gender')
models = {
'auth.group': {
'Meta': {'object_name': 'Group'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
'auth.permission': {
'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'mks.member': {
'Meta': {'ordering': "['name']", 'object_name': 'Member'},
'area_of_residence': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'average_monthly_committee_presence': ('django.db.models.fields.FloatField', [], {'null': 'True'}),
'average_weekly_presence_hours': ('django.db.models.fields.FloatField', [], {'null': 'True'}),
'backlinks_enabled': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'bills_stats_approved': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'bills_stats_first': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'bills_stats_pre': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'bills_stats_proposed': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'blog': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['planet.Blog']", 'unique': 'True', 'null': 'True', 'blank': 'True'}),
'current_party': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'members'", 'null': 'True', 'to': "orm['mks.Party']"}),
'current_role_descriptions': ('django.db.models.fields.CharField', [], {'max_length': '1024', 'null': 'True', 'blank': 'True'}),
'date_of_birth': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'date_of_death': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'null': 'True', 'blank': 'True'}),
'end_date': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'family_status': ('django.db.models.fields.CharField', [], {'max_length': '10', 'null': 'True', 'blank': 'True'}),
'fax': ('django.db.models.fields.CharField', [], {'max_length': '20', 'null': 'True', 'blank': 'True'}),
'gender': ('django.db.models.fields.CharField', [], {'max_length': '1', 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'img_url': ('django.db.models.fields.URLField', [], {'max_length': '200', 'blank': 'True'}),
'is_current': ('django.db.models.fields.BooleanField', [], {'default': 'True', 'db_index': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
'number_of_children': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'parties': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'all_members'", 'symmetrical': 'False', 'through': "orm['mks.Membership']", 'to': "orm['mks.Party']"}),
'phone': ('django.db.models.fields.CharField', [], {'max_length': '20', 'null': 'True', 'blank': 'True'}),
'place_of_birth': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'place_of_residence': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'place_of_residence_lat': ('django.db.models.fields.CharField', [], {'max_length': '16', 'null': 'True', 'blank': 'True'}),
'place_of_residence_lon': ('django.db.models.fields.CharField', [], {'max_length': '16', 'null': 'True', 'blank': 'True'}),
'residence_centrality': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'residence_economy': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'start_date': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']", 'null': 'True', 'blank': 'True'}),
'website': ('django.db.models.fields.URLField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'year_of_aliyah': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'})
},
'mks.membership': {
'Meta': {'object_name': 'Membership'},
'end_date': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'member': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['mks.Member']"}),
'party': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['mks.Party']"}),
'start_date': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'})
},
'mks.party': {
'Meta': {'ordering': "('-number_of_seats',)", 'object_name': 'Party'},
'end_date': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_coalition': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
'number_of_members': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'number_of_seats': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'start_date': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'})
},
'persons.person': {
'Meta': {'ordering': "('name',)", 'object_name': 'Person'},
'area_of_residence': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'date_of_birth': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'date_of_death': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'null': 'True', 'blank': 'True'}),
'family_status': ('django.db.models.fields.CharField', [], {'max_length': '10', 'null': 'True', 'blank': 'True'}),
'fax': ('django.db.models.fields.CharField', [], {'max_length': '20', 'null': 'True', 'blank': 'True'}),
'gender': ('django.db.models.fields.CharField', [], {'max_length': '1', 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'img_url': ('django.db.models.fields.URLField', [], {'max_length': '200', 'blank': 'True'}),
'mk': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'person'", 'null': 'True', 'to': "orm['mks.Member']"}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
'number_of_children': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'phone': ('django.db.models.fields.CharField', [], {'max_length': '20', 'null': 'True', 'blank': 'True'}),
'place_of_birth': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'place_of_residence': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'place_of_residence_lat': ('django.db.models.fields.CharField', [], {'max_length': '16', 'null': 'True', 'blank': 'True'}),
'place_of_residence_lon': ('django.db.models.fields.CharField', [], {'max_length': '16', 'null': 'True', 'blank': 'True'}),
'residence_centrality': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'residence_economy': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'titles': ('django.db.models.fields.related.ManyToManyField', [], {'blank': 'True', 'related_name': "'persons'", 'null': 'True', 'symmetrical': 'False', 'to': "orm['persons.Title']"}),
'year_of_aliyah': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'})
},
'persons.personalias': {
'Meta': {'object_name': 'PersonAlias'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
'person': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['persons.Person']"})
},
'persons.processedprotocolpart': {
'Meta': {'object_name': 'ProcessedProtocolPart'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'protocol_part_id': ('django.db.models.fields.IntegerField', [], {})
},
'persons.role': {
'Meta': {'object_name': 'Role'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'person': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'roles'", 'to': "orm['persons.Person']"}),
'text': ('django.db.models.fields.CharField', [], {'max_length': '1024', 'null': 'True', 'blank': 'True'})
},
'persons.title': {
'Meta': {'object_name': 'Title'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '64'})
},
'planet.blog': {
'Meta': {'ordering': "('title', 'url')", 'object_name': 'Blog'},
'date_created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '255', 'blank': 'True'}),
'url': ('django.db.models.fields.URLField', [], {'unique': 'True', 'max_length': '1024', 'db_index': 'True'})
}
}
    complete_apps = ['persons']
| bsd-3-clause |
JPFrancoia/scikit-learn | sklearn/preprocessing/data.py | 13 | 70436 | # Authors: Alexandre Gramfort <[email protected]>
# Mathieu Blondel <[email protected]>
# Olivier Grisel <[email protected]>
# Andreas Mueller <[email protected]>
# Eric Martin <[email protected]>
# Giorgio Patrini <[email protected]>
# License: BSD 3 clause
from itertools import chain, combinations
import numbers
import warnings
import numpy as np
from scipy import sparse
from ..base import BaseEstimator, TransformerMixin
from ..externals import six
from ..utils import check_array
from ..utils import deprecated
from ..utils.extmath import row_norms
from ..utils.extmath import _incremental_mean_and_var
from ..utils.fixes import combinations_with_replacement as combinations_w_r
from ..utils.fixes import bincount
from ..utils.sparsefuncs_fast import (inplace_csr_row_normalize_l1,
inplace_csr_row_normalize_l2)
from ..utils.sparsefuncs import (inplace_column_scale,
mean_variance_axis, incr_mean_variance_axis,
min_max_axis)
from ..utils.validation import check_is_fitted, FLOAT_DTYPES
zip = six.moves.zip
map = six.moves.map
range = six.moves.range
__all__ = [
'Binarizer',
'KernelCenterer',
'MinMaxScaler',
'MaxAbsScaler',
'Normalizer',
'OneHotEncoder',
'RobustScaler',
'StandardScaler',
'add_dummy_feature',
'binarize',
'normalize',
'scale',
'robust_scale',
'maxabs_scale',
'minmax_scale',
]
DEPRECATION_MSG_1D = (
"Passing 1d arrays as data is deprecated in 0.17 and will "
"raise ValueError in 0.19. Reshape your data either using "
"X.reshape(-1, 1) if your data has a single feature or "
"X.reshape(1, -1) if it contains a single sample."
)
def _handle_zeros_in_scale(scale, copy=True):
''' Makes sure that whenever scale is zero, we handle it correctly.
This happens in most scalers when we have constant features.'''
# if we are fitting on 1D arrays, scale might be a scalar
if np.isscalar(scale):
if scale == .0:
scale = 1.
return scale
elif isinstance(scale, np.ndarray):
if copy:
# New array to avoid side-effects
scale = scale.copy()
scale[scale == 0.0] = 1.0
return scale
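# Illustrative sketch (not part of the original module): zero entries in a
# scale vector are mapped to 1 so constant features pass through unchanged.
def _demo_handle_zeros_in_scale():
    scale_ = _handle_zeros_in_scale(np.array([2., 0.]))
    assert np.array_equal(scale_, np.array([2., 1.]))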
def scale(X, axis=0, with_mean=True, with_std=True, copy=True):
"""Standardize a dataset along any axis
Center to the mean and component wise scale to unit variance.
Read more in the :ref:`User Guide <preprocessing_scaler>`.
Parameters
----------
X : {array-like, sparse matrix}
The data to center and scale.
axis : int (0 by default)
axis used to compute the means and standard deviations along. If 0,
independently standardize each feature, otherwise (if 1) standardize
each sample.
with_mean : boolean, True by default
If True, center the data before scaling.
with_std : boolean, True by default
If True, scale the data to unit variance (or equivalently,
unit standard deviation).
copy : boolean, optional, default True
set to False to perform inplace row normalization and avoid a
copy (if the input is already a numpy array or a scipy.sparse
CSC matrix and if axis is 1).
Notes
-----
This implementation will refuse to center scipy.sparse matrices
since it would make them non-sparse and would potentially crash the
program with memory exhaustion problems.
Instead the caller is expected to either set explicitly
`with_mean=False` (in that case, only variance scaling will be
performed on the features of the CSC matrix) or to call `X.toarray()`
if he/she expects the materialized dense array to fit in memory.
To avoid memory copy the caller should pass a CSC matrix.
See also
--------
    StandardScaler: Performs scaling to unit variance using the ``Transformer`` API
(e.g. as part of a preprocessing :class:`sklearn.pipeline.Pipeline`).
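    Examples
    --------
    A small illustrative call (both columns have mean 2 and unit variance):

    >>> from sklearn.preprocessing import scale
    >>> scale([[1., 3.], [3., 1.]])
    array([[-1.,  1.],
           [ 1., -1.]])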
""" # noqa
X = check_array(X, accept_sparse='csc', copy=copy, ensure_2d=False,
warn_on_dtype=True, estimator='the scale function',
dtype=FLOAT_DTYPES)
if sparse.issparse(X):
if with_mean:
raise ValueError(
"Cannot center sparse matrices: pass `with_mean=False` instead"
" See docstring for motivation and alternatives.")
if axis != 0:
raise ValueError("Can only scale sparse matrix on axis=0, "
" got axis=%d" % axis)
if with_std:
_, var = mean_variance_axis(X, axis=0)
var = _handle_zeros_in_scale(var, copy=False)
inplace_column_scale(X, 1 / np.sqrt(var))
else:
X = np.asarray(X)
if with_mean:
mean_ = np.mean(X, axis)
if with_std:
scale_ = np.std(X, axis)
# Xr is a view on the original array that enables easy use of
# broadcasting on the axis in which we are interested in
Xr = np.rollaxis(X, axis)
if with_mean:
Xr -= mean_
mean_1 = Xr.mean(axis=0)
# Verify that mean_1 is 'close to zero'. If X contains very
# large values, mean_1 can also be very large, due to a lack of
# precision of mean_. In this case, a pre-scaling of the
# concerned feature is efficient, for instance by its mean or
# maximum.
if not np.allclose(mean_1, 0):
warnings.warn("Numerical issues were encountered "
"when centering the data "
"and might not be solved. Dataset may "
"contain too large values. You may need "
"to prescale your features.")
Xr -= mean_1
if with_std:
scale_ = _handle_zeros_in_scale(scale_, copy=False)
Xr /= scale_
if with_mean:
mean_2 = Xr.mean(axis=0)
# If mean_2 is not 'close to zero', it comes from the fact that
# scale_ is very small so that mean_2 = mean_1/scale_ > 0, even
# if mean_1 was close to zero. The problem is thus essentially
# due to the lack of precision of mean_. A solution is then to
# subtract the mean again:
if not np.allclose(mean_2, 0):
warnings.warn("Numerical issues were encountered "
"when scaling the data "
"and might not be solved. The standard "
"deviation of the data is probably "
"very close to 0. ")
Xr -= mean_2
return X
class MinMaxScaler(BaseEstimator, TransformerMixin):
"""Transforms features by scaling each feature to a given range.
This estimator scales and translates each feature individually such
that it is in the given range on the training set, i.e. between
zero and one.
The transformation is given by::
X_std = (X - X.min(axis=0)) / (X.max(axis=0) - X.min(axis=0))
X_scaled = X_std * (max - min) + min
where min, max = feature_range.
This transformation is often used as an alternative to zero mean,
unit variance scaling.
Read more in the :ref:`User Guide <preprocessing_scaler>`.
Parameters
----------
feature_range : tuple (min, max), default=(0, 1)
Desired range of transformed data.
copy : boolean, optional, default True
Set to False to perform inplace row normalization and avoid a
copy (if the input is already a numpy array).
Attributes
----------
min_ : ndarray, shape (n_features,)
Per feature adjustment for minimum.
scale_ : ndarray, shape (n_features,)
Per feature relative scaling of the data.
.. versionadded:: 0.17
*scale_* attribute.
data_min_ : ndarray, shape (n_features,)
Per feature minimum seen in the data
.. versionadded:: 0.17
*data_min_* instead of deprecated *data_min*.
data_max_ : ndarray, shape (n_features,)
Per feature maximum seen in the data
.. versionadded:: 0.17
*data_max_* instead of deprecated *data_max*.
data_range_ : ndarray, shape (n_features,)
Per feature range ``(data_max_ - data_min_)`` seen in the data
.. versionadded:: 0.17
*data_range_* instead of deprecated *data_range*.
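    Examples
    --------
    A minimal usage sketch with illustrative data:

    >>> import numpy as np
    >>> from sklearn.preprocessing import MinMaxScaler
    >>> data = np.array([[1., 2.], [3., 6.]])
    >>> MinMaxScaler().fit_transform(data)
    array([[ 0.,  0.],
           [ 1.,  1.]])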
See also
--------
minmax_scale: Equivalent function without the object oriented API.
"""
def __init__(self, feature_range=(0, 1), copy=True):
self.feature_range = feature_range
self.copy = copy
@property
@deprecated("Attribute data_range will be removed in "
"0.19. Use ``data_range_`` instead")
def data_range(self):
return self.data_range_
@property
@deprecated("Attribute data_min will be removed in "
"0.19. Use ``data_min_`` instead")
def data_min(self):
return self.data_min_
def _reset(self):
"""Reset internal data-dependent state of the scaler, if necessary.
__init__ parameters are not touched.
"""
        # Checking one attribute is enough, because they are all set together
# in partial_fit
if hasattr(self, 'scale_'):
del self.scale_
del self.min_
del self.n_samples_seen_
del self.data_min_
del self.data_max_
del self.data_range_
def fit(self, X, y=None):
"""Compute the minimum and maximum to be used for later scaling.
Parameters
----------
X : array-like, shape [n_samples, n_features]
The data used to compute the per-feature minimum and maximum
used for later scaling along the features axis.
"""
# Reset internal state before fitting
self._reset()
return self.partial_fit(X, y)
def partial_fit(self, X, y=None):
"""Online computation of min and max on X for later scaling.
All of X is processed as a single batch. This is intended for cases
        when `fit` is not feasible due to a very large number of `n_samples`
or because X is read from a continuous stream.
Parameters
----------
X : array-like, shape [n_samples, n_features]
The data used to compute the mean and standard deviation
used for later scaling along the features axis.
y : Passthrough for ``Pipeline`` compatibility.
"""
feature_range = self.feature_range
if feature_range[0] >= feature_range[1]:
raise ValueError("Minimum of desired feature range must be smaller"
" than maximum. Got %s." % str(feature_range))
if sparse.issparse(X):
raise TypeError("MinMaxScaler does no support sparse input. "
"You may consider to use MaxAbsScaler instead.")
X = check_array(X, copy=self.copy, ensure_2d=False, warn_on_dtype=True,
estimator=self, dtype=FLOAT_DTYPES)
if X.ndim == 1:
warnings.warn(DEPRECATION_MSG_1D, DeprecationWarning)
data_min = np.min(X, axis=0)
data_max = np.max(X, axis=0)
# First pass
if not hasattr(self, 'n_samples_seen_'):
self.n_samples_seen_ = X.shape[0]
# Next steps
else:
data_min = np.minimum(self.data_min_, data_min)
data_max = np.maximum(self.data_max_, data_max)
self.n_samples_seen_ += X.shape[0]
data_range = data_max - data_min
self.scale_ = ((feature_range[1] - feature_range[0]) /
_handle_zeros_in_scale(data_range))
self.min_ = feature_range[0] - data_min * self.scale_
self.data_min_ = data_min
self.data_max_ = data_max
self.data_range_ = data_range
return self
def transform(self, X):
"""Scaling features of X according to feature_range.
Parameters
----------
X : array-like, shape [n_samples, n_features]
Input data that will be transformed.
"""
check_is_fitted(self, 'scale_')
X = check_array(X, copy=self.copy, ensure_2d=False, dtype=FLOAT_DTYPES)
if X.ndim == 1:
warnings.warn(DEPRECATION_MSG_1D, DeprecationWarning)
X *= self.scale_
X += self.min_
return X
def inverse_transform(self, X):
"""Undo the scaling of X according to feature_range.
Parameters
----------
X : array-like, shape [n_samples, n_features]
Input data that will be transformed. It cannot be sparse.
"""
check_is_fitted(self, 'scale_')
X = check_array(X, copy=self.copy, ensure_2d=False, dtype=FLOAT_DTYPES)
if X.ndim == 1:
warnings.warn(DEPRECATION_MSG_1D, DeprecationWarning)
X -= self.min_
X /= self.scale_
return X
def minmax_scale(X, feature_range=(0, 1), axis=0, copy=True):
"""Transforms features by scaling each feature to a given range.
This estimator scales and translates each feature individually such
that it is in the given range on the training set, i.e. between
zero and one.
The transformation is given by::
X_std = (X - X.min(axis=0)) / (X.max(axis=0) - X.min(axis=0))
X_scaled = X_std * (max - min) + min
where min, max = feature_range.
This transformation is often used as an alternative to zero mean,
unit variance scaling.
Read more in the :ref:`User Guide <preprocessing_scaler>`.
.. versionadded:: 0.17
*minmax_scale* function interface
to :class:`sklearn.preprocessing.MinMaxScaler`.
Parameters
----------
feature_range : tuple (min, max), default=(0, 1)
Desired range of transformed data.
axis : int (0 by default)
axis used to scale along. If 0, independently scale each feature,
otherwise (if 1) scale each sample.
copy : boolean, optional, default is True
Set to False to perform inplace scaling and avoid a copy (if the input
is already a numpy array).
See also
--------
    MinMaxScaler: Performs scaling to a given range using the ``Transformer`` API
(e.g. as part of a preprocessing :class:`sklearn.pipeline.Pipeline`).
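    Examples
    --------
    A small illustrative call:

    >>> from sklearn.preprocessing import minmax_scale
    >>> minmax_scale([[1., 2.], [3., 6.]])
    array([[ 0.,  0.],
           [ 1.,  1.]])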
""" # noqa
    # For backward compatibility, we handle the case of 1D input here.
    # Since 0.17, 1D input is deprecated in scaler objects, but we still
    # want to allow users to keep calling this function with 1D input.
# Cast input to array, as we need to check ndim. Prior to 0.17, that was
# done inside the scaler object fit_transform.
# If copy is required, it will be done inside the scaler object.
X = check_array(X, copy=False, ensure_2d=False, warn_on_dtype=True,
dtype=FLOAT_DTYPES)
original_ndim = X.ndim
if original_ndim == 1:
X = X.reshape(X.shape[0], 1)
s = MinMaxScaler(feature_range=feature_range, copy=copy)
if axis == 0:
X = s.fit_transform(X)
else:
X = s.fit_transform(X.T).T
if original_ndim == 1:
X = X.ravel()
return X
class StandardScaler(BaseEstimator, TransformerMixin):
"""Standardize features by removing the mean and scaling to unit variance
Centering and scaling happen independently on each feature by computing
the relevant statistics on the samples in the training set. Mean and
standard deviation are then stored to be used on later data using the
`transform` method.
Standardization of a dataset is a common requirement for many
machine learning estimators: they might behave badly if the
    individual features do not more or less look like standard normally
distributed data (e.g. Gaussian with 0 mean and unit variance).
For instance many elements used in the objective function of
a learning algorithm (such as the RBF kernel of Support Vector
Machines or the L1 and L2 regularizers of linear models) assume that
all features are centered around 0 and have variance in the same
order. If a feature has a variance that is orders of magnitude larger
    than others, it might dominate the objective function and make the
estimator unable to learn from other features correctly as expected.
This scaler can also be applied to sparse CSR or CSC matrices by passing
`with_mean=False` to avoid breaking the sparsity structure of the data.
Read more in the :ref:`User Guide <preprocessing_scaler>`.
Parameters
----------
with_mean : boolean, True by default
If True, center the data before scaling.
This does not work (and will raise an exception) when attempted on
sparse matrices, because centering them entails building a dense
matrix which in common use cases is likely to be too large to fit in
memory.
with_std : boolean, True by default
If True, scale the data to unit variance (or equivalently,
unit standard deviation).
copy : boolean, optional, default True
If False, try to avoid a copy and do inplace scaling instead.
This is not guaranteed to always work inplace; e.g. if the data is
not a NumPy array or scipy.sparse CSR matrix, a copy may still be
returned.
Attributes
----------
scale_ : ndarray, shape (n_features,)
Per feature relative scaling of the data.
.. versionadded:: 0.17
*scale_* is recommended instead of deprecated *std_*.
mean_ : array of floats with shape [n_features]
The mean value for each feature in the training set.
var_ : array of floats with shape [n_features]
The variance for each feature in the training set. Used to compute
`scale_`
n_samples_seen_ : int
The number of samples processed by the estimator. Will be reset on
new calls to fit, but increments across ``partial_fit`` calls.
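    Examples
    --------
    A minimal usage sketch with illustrative data:

    >>> from sklearn.preprocessing import StandardScaler
    >>> X = [[0., 0.], [2., 2.]]
    >>> scaler = StandardScaler().fit(X)
    >>> scaler.mean_
    array([ 1.,  1.])
    >>> scaler.transform(X)
    array([[-1., -1.],
           [ 1.,  1.]])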
See also
--------
scale: Equivalent function without the object oriented API.
:class:`sklearn.decomposition.PCA`
Further removes the linear correlation across features with 'whiten=True'.
""" # noqa
def __init__(self, copy=True, with_mean=True, with_std=True):
self.with_mean = with_mean
self.with_std = with_std
self.copy = copy
@property
@deprecated("Attribute ``std_`` will be removed in 0.19. "
"Use ``scale_`` instead")
def std_(self):
return self.scale_
def _reset(self):
"""Reset internal data-dependent state of the scaler, if necessary.
__init__ parameters are not touched.
"""
        # Checking one attribute is enough, because they are all set together
# in partial_fit
if hasattr(self, 'scale_'):
del self.scale_
del self.n_samples_seen_
del self.mean_
del self.var_
def fit(self, X, y=None):
"""Compute the mean and std to be used for later scaling.
Parameters
----------
X : {array-like, sparse matrix}, shape [n_samples, n_features]
The data used to compute the mean and standard deviation
used for later scaling along the features axis.
        y : Passthrough for ``Pipeline`` compatibility.
"""
# Reset internal state before fitting
self._reset()
return self.partial_fit(X, y)
def partial_fit(self, X, y=None):
"""Online computation of mean and std on X for later scaling.
All of X is processed as a single batch. This is intended for cases
        when `fit` is not feasible due to a very large number of `n_samples`
or because X is read from a continuous stream.
The algorithm for incremental mean and std is given in Equation 1.5a,b
in Chan, Tony F., Gene H. Golub, and Randall J. LeVeque. "Algorithms
for computing the sample variance: Analysis and recommendations."
The American Statistician 37.3 (1983): 242-247:
Parameters
----------
X : {array-like, sparse matrix}, shape [n_samples, n_features]
The data used to compute the mean and standard deviation
used for later scaling along the features axis.
        y : Passthrough for ``Pipeline`` compatibility.
"""
X = check_array(X, accept_sparse=('csr', 'csc'), copy=self.copy,
ensure_2d=False, warn_on_dtype=True,
estimator=self, dtype=FLOAT_DTYPES)
if X.ndim == 1:
warnings.warn(DEPRECATION_MSG_1D, DeprecationWarning)
# Even in the case of `with_mean=False`, we update the mean anyway
# This is needed for the incremental computation of the var
# See incr_mean_variance_axis and _incremental_mean_variance_axis
if sparse.issparse(X):
if self.with_mean:
raise ValueError(
"Cannot center sparse matrices: pass `with_mean=False` "
"instead. See docstring for motivation and alternatives.")
if self.with_std:
# First pass
if not hasattr(self, 'n_samples_seen_'):
self.mean_, self.var_ = mean_variance_axis(X, axis=0)
self.n_samples_seen_ = X.shape[0]
# Next passes
else:
self.mean_, self.var_, self.n_samples_seen_ = \
incr_mean_variance_axis(X, axis=0,
last_mean=self.mean_,
last_var=self.var_,
last_n=self.n_samples_seen_)
else:
self.mean_ = None
self.var_ = None
else:
# First pass
if not hasattr(self, 'n_samples_seen_'):
self.mean_ = .0
self.n_samples_seen_ = 0
if self.with_std:
self.var_ = .0
else:
self.var_ = None
self.mean_, self.var_, self.n_samples_seen_ = \
_incremental_mean_and_var(X, self.mean_, self.var_,
self.n_samples_seen_)
if self.with_std:
self.scale_ = _handle_zeros_in_scale(np.sqrt(self.var_))
else:
self.scale_ = None
return self
def transform(self, X, y=None, copy=None):
"""Perform standardization by centering and scaling
Parameters
----------
X : array-like, shape [n_samples, n_features]
The data used to scale along the features axis.
"""
check_is_fitted(self, 'scale_')
copy = copy if copy is not None else self.copy
X = check_array(X, accept_sparse='csr', copy=copy,
ensure_2d=False, warn_on_dtype=True,
estimator=self, dtype=FLOAT_DTYPES)
if X.ndim == 1:
warnings.warn(DEPRECATION_MSG_1D, DeprecationWarning)
if sparse.issparse(X):
if self.with_mean:
raise ValueError(
"Cannot center sparse matrices: pass `with_mean=False` "
"instead. See docstring for motivation and alternatives.")
if self.scale_ is not None:
inplace_column_scale(X, 1 / self.scale_)
else:
if self.with_mean:
X -= self.mean_
if self.with_std:
X /= self.scale_
return X
def inverse_transform(self, X, copy=None):
"""Scale back the data to the original representation
Parameters
----------
X : array-like, shape [n_samples, n_features]
The data used to scale along the features axis.
"""
check_is_fitted(self, 'scale_')
copy = copy if copy is not None else self.copy
if sparse.issparse(X):
if self.with_mean:
raise ValueError(
"Cannot uncenter sparse matrices: pass `with_mean=False` "
"instead See docstring for motivation and alternatives.")
if not sparse.isspmatrix_csr(X):
X = X.tocsr()
copy = False
if copy:
X = X.copy()
if self.scale_ is not None:
inplace_column_scale(X, self.scale_)
else:
X = np.asarray(X)
if copy:
X = X.copy()
if self.with_std:
X *= self.scale_
if self.with_mean:
X += self.mean_
return X
class MaxAbsScaler(BaseEstimator, TransformerMixin):
"""Scale each feature by its maximum absolute value.
This estimator scales and translates each feature individually such
that the maximal absolute value of each feature in the
training set will be 1.0. It does not shift/center the data, and
thus does not destroy any sparsity.
This scaler can also be applied to sparse CSR or CSC matrices.
.. versionadded:: 0.17
Parameters
----------
copy : boolean, optional, default is True
Set to False to perform inplace scaling and avoid a copy (if the input
is already a numpy array).
Attributes
----------
scale_ : ndarray, shape (n_features,)
Per feature relative scaling of the data.
.. versionadded:: 0.17
*scale_* attribute.
max_abs_ : ndarray, shape (n_features,)
Per feature maximum absolute value.
n_samples_seen_ : int
The number of samples processed by the estimator. Will be reset on
new calls to fit, but increments across ``partial_fit`` calls.
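    Examples
    --------
    A minimal usage sketch with illustrative data:

    >>> from sklearn.preprocessing import MaxAbsScaler
    >>> X = [[-1., 2.], [2., -4.]]
    >>> MaxAbsScaler().fit_transform(X)
    array([[-0.5,  0.5],
           [ 1. , -1. ]])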
See also
--------
maxabs_scale: Equivalent function without the object oriented API.
"""
def __init__(self, copy=True):
self.copy = copy
def _reset(self):
"""Reset internal data-dependent state of the scaler, if necessary.
__init__ parameters are not touched.
"""
        # Checking one attribute is enough, because they are all set together
# in partial_fit
if hasattr(self, 'scale_'):
del self.scale_
del self.n_samples_seen_
del self.max_abs_
def fit(self, X, y=None):
"""Compute the maximum absolute value to be used for later scaling.
Parameters
----------
X : {array-like, sparse matrix}, shape [n_samples, n_features]
The data used to compute the per-feature minimum and maximum
used for later scaling along the features axis.
"""
# Reset internal state before fitting
self._reset()
return self.partial_fit(X, y)
def partial_fit(self, X, y=None):
"""Online computation of max absolute value of X for later scaling.
All of X is processed as a single batch. This is intended for cases
        when `fit` is not feasible due to a very large number of `n_samples`
or because X is read from a continuous stream.
Parameters
----------
X : {array-like, sparse matrix}, shape [n_samples, n_features]
The data used to compute the mean and standard deviation
used for later scaling along the features axis.
        y : Passthrough for ``Pipeline`` compatibility.
"""
X = check_array(X, accept_sparse=('csr', 'csc'), copy=self.copy,
ensure_2d=False, estimator=self, dtype=FLOAT_DTYPES)
if X.ndim == 1:
warnings.warn(DEPRECATION_MSG_1D, DeprecationWarning)
if sparse.issparse(X):
mins, maxs = min_max_axis(X, axis=0)
max_abs = np.maximum(np.abs(mins), np.abs(maxs))
else:
max_abs = np.abs(X).max(axis=0)
# First pass
if not hasattr(self, 'n_samples_seen_'):
self.n_samples_seen_ = X.shape[0]
# Next passes
else:
max_abs = np.maximum(self.max_abs_, max_abs)
self.n_samples_seen_ += X.shape[0]
self.max_abs_ = max_abs
self.scale_ = _handle_zeros_in_scale(max_abs)
return self
def transform(self, X, y=None):
"""Scale the data
Parameters
----------
X : {array-like, sparse matrix}
The data that should be scaled.
"""
check_is_fitted(self, 'scale_')
X = check_array(X, accept_sparse=('csr', 'csc'), copy=self.copy,
ensure_2d=False, estimator=self, dtype=FLOAT_DTYPES)
if X.ndim == 1:
warnings.warn(DEPRECATION_MSG_1D, DeprecationWarning)
if sparse.issparse(X):
inplace_column_scale(X, 1.0 / self.scale_)
else:
X /= self.scale_
return X
def inverse_transform(self, X):
"""Scale back the data to the original representation
Parameters
----------
X : {array-like, sparse matrix}
The data that should be transformed back.
"""
check_is_fitted(self, 'scale_')
X = check_array(X, accept_sparse=('csr', 'csc'), copy=self.copy,
ensure_2d=False, estimator=self, dtype=FLOAT_DTYPES)
if X.ndim == 1:
warnings.warn(DEPRECATION_MSG_1D, DeprecationWarning)
if sparse.issparse(X):
inplace_column_scale(X, self.scale_)
else:
X *= self.scale_
return X
def maxabs_scale(X, axis=0, copy=True):
"""Scale each feature to the [-1, 1] range without breaking the sparsity.
This estimator scales each feature individually such
that the maximal absolute value of each feature in the
training set will be 1.0.
This scaler can also be applied to sparse CSR or CSC matrices.
Parameters
----------
axis : int (0 by default)
axis used to scale along. If 0, independently scale each feature,
otherwise (if 1) scale each sample.
copy : boolean, optional, default is True
Set to False to perform inplace scaling and avoid a copy (if the input
is already a numpy array).
See also
--------
    MaxAbsScaler: Performs scaling to the [-1, 1] range using the ``Transformer`` API
(e.g. as part of a preprocessing :class:`sklearn.pipeline.Pipeline`).
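    Examples
    --------
    A small illustrative call:

    >>> from sklearn.preprocessing import maxabs_scale
    >>> maxabs_scale([[1., -2.], [2., 4.]])
    array([[ 0.5, -0.5],
           [ 1. ,  1. ]])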
""" # noqa
    # For backward compatibility, we handle the case of 1D input here.
    # Since 0.17, 1D input is deprecated in scaler objects, but we still
    # want to allow users to keep calling this function with 1D input.
# Cast input to array, as we need to check ndim. Prior to 0.17, that was
# done inside the scaler object fit_transform.
# If copy is required, it will be done inside the scaler object.
X = check_array(X, accept_sparse=('csr', 'csc'), copy=False,
ensure_2d=False, dtype=FLOAT_DTYPES)
original_ndim = X.ndim
if original_ndim == 1:
X = X.reshape(X.shape[0], 1)
s = MaxAbsScaler(copy=copy)
if axis == 0:
X = s.fit_transform(X)
else:
X = s.fit_transform(X.T).T
if original_ndim == 1:
X = X.ravel()
return X
class RobustScaler(BaseEstimator, TransformerMixin):
"""Scale features using statistics that are robust to outliers.
This Scaler removes the median and scales the data according to
the quantile range (defaults to IQR: Interquartile Range).
The IQR is the range between the 1st quartile (25th quantile)
and the 3rd quartile (75th quantile).
Centering and scaling happen independently on each feature (or each
sample, depending on the `axis` argument) by computing the relevant
statistics on the samples in the training set. Median and interquartile
range are then stored to be used on later data using the `transform`
method.
Standardization of a dataset is a common requirement for many
machine learning estimators. Typically this is done by removing the mean
and scaling to unit variance. However, outliers can often influence the
sample mean / variance in a negative way. In such cases, the median and
the interquartile range often give better results.
.. versionadded:: 0.17
Read more in the :ref:`User Guide <preprocessing_scaler>`.
Parameters
----------
with_centering : boolean, True by default
If True, center the data before scaling.
This does not work (and will raise an exception) when attempted on
sparse matrices, because centering them entails building a dense
matrix which in common use cases is likely to be too large to fit in
memory.
with_scaling : boolean, True by default
If True, scale the data to interquartile range.
quantile_range : tuple (q_min, q_max), 0.0 < q_min < q_max < 100.0
Default: (25.0, 75.0) = (1st quantile, 3rd quantile) = IQR
Quantile range used to calculate ``scale_``.
.. versionadded:: 0.18
copy : boolean, optional, default is True
If False, try to avoid a copy and do inplace scaling instead.
This is not guaranteed to always work inplace; e.g. if the data is
not a NumPy array or scipy.sparse CSR matrix, a copy may still be
returned.
Attributes
----------
center_ : array of floats
The median value for each feature in the training set.
scale_ : array of floats
The (scaled) interquartile range for each feature in the training set.
.. versionadded:: 0.17
*scale_* attribute.
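    Examples
    --------
    A minimal usage sketch with illustrative data (median 3, IQR 2):

    >>> import numpy as np
    >>> from sklearn.preprocessing import RobustScaler
    >>> X = np.array([[1.], [2.], [3.], [4.], [5.]])
    >>> scaler = RobustScaler().fit(X)
    >>> scaler.center_
    array([ 3.])
    >>> scaler.scale_
    array([ 2.])
    >>> scaler.transform(np.array([[5.]]))
    array([[ 1.]])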
See also
--------
robust_scale: Equivalent function without the object oriented API.
:class:`sklearn.decomposition.PCA`
Further removes the linear correlation across features with
'whiten=True'.
Notes
-----
See examples/preprocessing/plot_robust_scaling.py for an example.
https://en.wikipedia.org/wiki/Median_(statistics)
https://en.wikipedia.org/wiki/Interquartile_range
"""
def __init__(self, with_centering=True, with_scaling=True,
quantile_range=(25.0, 75.0), copy=True):
self.with_centering = with_centering
self.with_scaling = with_scaling
self.quantile_range = quantile_range
self.copy = copy
def _check_array(self, X, copy):
"""Makes sure centering is not enabled for sparse matrices."""
X = check_array(X, accept_sparse=('csr', 'csc'), copy=self.copy,
ensure_2d=False, estimator=self, dtype=FLOAT_DTYPES)
if X.ndim == 1:
warnings.warn(DEPRECATION_MSG_1D, DeprecationWarning)
if sparse.issparse(X):
if self.with_centering:
raise ValueError(
"Cannot center sparse matrices: use `with_centering=False`"
" instead. See docstring for motivation and alternatives.")
return X
def fit(self, X, y=None):
"""Compute the median and quantiles to be used for scaling.
Parameters
----------
X : array-like, shape [n_samples, n_features]
The data used to compute the median and quantiles
used for later scaling along the features axis.
"""
if sparse.issparse(X):
raise TypeError("RobustScaler cannot be fitted on sparse inputs")
X = self._check_array(X, self.copy)
if X.ndim == 1:
warnings.warn(DEPRECATION_MSG_1D, DeprecationWarning)
if self.with_centering:
self.center_ = np.median(X, axis=0)
if self.with_scaling:
q_min, q_max = self.quantile_range
if not 0 <= q_min <= q_max <= 100:
raise ValueError("Invalid quantile range: %s" %
str(self.quantile_range))
q = np.percentile(X, self.quantile_range, axis=0)
self.scale_ = (q[1] - q[0])
self.scale_ = _handle_zeros_in_scale(self.scale_, copy=False)
return self
def transform(self, X, y=None):
"""Center and scale the data
Parameters
----------
X : array-like
The data used to scale along the specified axis.
"""
if self.with_centering:
check_is_fitted(self, 'center_')
if self.with_scaling:
check_is_fitted(self, 'scale_')
X = self._check_array(X, self.copy)
if X.ndim == 1:
warnings.warn(DEPRECATION_MSG_1D, DeprecationWarning)
if sparse.issparse(X):
if self.with_scaling:
inplace_column_scale(X, 1.0 / self.scale_)
else:
if self.with_centering:
X -= self.center_
if self.with_scaling:
X /= self.scale_
return X
def inverse_transform(self, X):
"""Scale back the data to the original representation
Parameters
----------
X : array-like
The data used to scale along the specified axis.
"""
if self.with_centering:
check_is_fitted(self, 'center_')
if self.with_scaling:
check_is_fitted(self, 'scale_')
X = self._check_array(X, self.copy)
if X.ndim == 1:
warnings.warn(DEPRECATION_MSG_1D, DeprecationWarning)
if sparse.issparse(X):
if self.with_scaling:
inplace_column_scale(X, self.scale_)
else:
if self.with_scaling:
X *= self.scale_
if self.with_centering:
X += self.center_
return X
def robust_scale(X, axis=0, with_centering=True, with_scaling=True,
quantile_range=(25.0, 75.0), copy=True):
"""Standardize a dataset along any axis
Center to the median and component wise scale
according to the interquartile range.
Read more in the :ref:`User Guide <preprocessing_scaler>`.
Parameters
----------
X : array-like
The data to center and scale.
axis : int (0 by default)
axis used to compute the medians and IQR along. If 0,
independently scale each feature, otherwise (if 1) scale
each sample.
with_centering : boolean, True by default
If True, center the data before scaling.
with_scaling : boolean, True by default
If True, scale the data to unit variance (or equivalently,
unit standard deviation).
quantile_range : tuple (q_min, q_max), 0.0 < q_min < q_max < 100.0
Default: (25.0, 75.0) = (1st quantile, 3rd quantile) = IQR
Quantile range used to calculate ``scale_``.
.. versionadded:: 0.18
copy : boolean, optional, default is True
set to False to perform inplace row normalization and avoid a
copy (if the input is already a numpy array or a scipy.sparse
CSR matrix and if axis is 1).
Notes
-----
This implementation will refuse to center scipy.sparse matrices
since it would make them non-sparse and would potentially crash the
program with memory exhaustion problems.
Instead the caller is expected to either set explicitly
`with_centering=False` (in that case, only variance scaling will be
performed on the features of the CSR matrix) or to call `X.toarray()`
if he/she expects the materialized dense array to fit in memory.
To avoid memory copy the caller should pass a CSR matrix.
See also
--------
RobustScaler: Performs centering and scaling using the ``Transformer`` API
(e.g. as part of a preprocessing :class:`sklearn.pipeline.Pipeline`).
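    Examples
    --------
    A small illustrative call (median 3, interquartile range 2):

    >>> import numpy as np
    >>> from sklearn.preprocessing import robust_scale
    >>> robust_scale(np.array([[1.], [2.], [3.], [4.], [5.]]))
    array([[-1. ],
           [-0.5],
           [ 0. ],
           [ 0.5],
           [ 1. ]])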
"""
s = RobustScaler(with_centering=with_centering, with_scaling=with_scaling,
quantile_range=quantile_range, copy=copy)
if axis == 0:
return s.fit_transform(X)
else:
return s.fit_transform(X.T).T
class PolynomialFeatures(BaseEstimator, TransformerMixin):
"""Generate polynomial and interaction features.
Generate a new feature matrix consisting of all polynomial combinations
of the features with degree less than or equal to the specified degree.
For example, if an input sample is two dimensional and of the form
[a, b], the degree-2 polynomial features are [1, a, b, a^2, ab, b^2].
Parameters
----------
degree : integer
The degree of the polynomial features. Default = 2.
interaction_only : boolean, default = False
If true, only interaction features are produced: features that are
products of at most ``degree`` *distinct* input features (so not
``x[1] ** 2``, ``x[0] * x[2] ** 3``, etc.).
include_bias : boolean
If True (default), then include a bias column, the feature in which
all polynomial powers are zero (i.e. a column of ones - acts as an
intercept term in a linear model).
Examples
--------
>>> X = np.arange(6).reshape(3, 2)
>>> X
array([[0, 1],
[2, 3],
[4, 5]])
>>> poly = PolynomialFeatures(2)
>>> poly.fit_transform(X)
array([[ 1., 0., 1., 0., 0., 1.],
[ 1., 2., 3., 4., 6., 9.],
[ 1., 4., 5., 16., 20., 25.]])
>>> poly = PolynomialFeatures(interaction_only=True)
>>> poly.fit_transform(X)
array([[ 1., 0., 1., 0.],
[ 1., 2., 3., 6.],
[ 1., 4., 5., 20.]])
Attributes
----------
powers_ : array, shape (n_output_features, n_input_features)
powers_[i, j] is the exponent of the jth input in the ith output.
n_input_features_ : int
The total number of input features.
n_output_features_ : int
The total number of polynomial output features. The number of output
features is computed by iterating over all suitably sized combinations
of input features.
Notes
-----
Be aware that the number of features in the output array scales
polynomially in the number of features of the input array, and
exponentially in the degree. High degrees can cause overfitting.
See :ref:`examples/linear_model/plot_polynomial_interpolation.py
<sphx_glr_auto_examples_linear_model_plot_polynomial_interpolation.py>`
"""
def __init__(self, degree=2, interaction_only=False, include_bias=True):
self.degree = degree
self.interaction_only = interaction_only
self.include_bias = include_bias
@staticmethod
def _combinations(n_features, degree, interaction_only, include_bias):
comb = (combinations if interaction_only else combinations_w_r)
start = int(not include_bias)
return chain.from_iterable(comb(range(n_features), i)
for i in range(start, degree + 1))
@property
def powers_(self):
check_is_fitted(self, 'n_input_features_')
combinations = self._combinations(self.n_input_features_, self.degree,
self.interaction_only,
self.include_bias)
return np.vstack(bincount(c, minlength=self.n_input_features_)
for c in combinations)
def get_feature_names(self, input_features=None):
"""
Return feature names for output features
Parameters
----------
input_features : list of string, length n_features, optional
String names for input features if available. By default,
"x0", "x1", ... "xn_features" is used.
Returns
-------
output_feature_names : list of string, length n_output_features
"""
powers = self.powers_
if input_features is None:
input_features = ['x%d' % i for i in range(powers.shape[1])]
feature_names = []
for row in powers:
inds = np.where(row)[0]
if len(inds):
name = " ".join("%s^%d" % (input_features[ind], exp)
if exp != 1 else input_features[ind]
for ind, exp in zip(inds, row[inds]))
else:
name = "1"
feature_names.append(name)
return feature_names
def fit(self, X, y=None):
"""
Compute number of output features.
"""
n_samples, n_features = check_array(X).shape
combinations = self._combinations(n_features, self.degree,
self.interaction_only,
self.include_bias)
self.n_input_features_ = n_features
self.n_output_features_ = sum(1 for _ in combinations)
return self
def transform(self, X, y=None):
"""Transform data to polynomial features
Parameters
----------
X : array-like, shape [n_samples, n_features]
The data to transform, row by row.
Returns
-------
XP : np.ndarray shape [n_samples, NP]
The matrix of features, where NP is the number of polynomial
features generated from the combination of inputs.
"""
check_is_fitted(self, ['n_input_features_', 'n_output_features_'])
X = check_array(X, dtype=FLOAT_DTYPES)
n_samples, n_features = X.shape
if n_features != self.n_input_features_:
raise ValueError("X shape does not match training shape")
# allocate output data
XP = np.empty((n_samples, self.n_output_features_), dtype=X.dtype)
combinations = self._combinations(n_features, self.degree,
self.interaction_only,
self.include_bias)
for i, c in enumerate(combinations):
XP[:, i] = X[:, c].prod(1)
return XP
def normalize(X, norm='l2', axis=1, copy=True, return_norm=False):
"""Scale input vectors individually to unit norm (vector length).
Read more in the :ref:`User Guide <preprocessing_normalization>`.
Parameters
----------
X : {array-like, sparse matrix}, shape [n_samples, n_features]
The data to normalize, element by element.
scipy.sparse matrices should be in CSR format to avoid an
un-necessary copy.
norm : 'l1', 'l2', or 'max', optional ('l2' by default)
The norm to use to normalize each non zero sample (or each non-zero
feature if axis is 0).
axis : 0 or 1, optional (1 by default)
axis used to normalize the data along. If 1, independently normalize
each sample, otherwise (if 0) normalize each feature.
copy : boolean, optional, default True
set to False to perform inplace row normalization and avoid a
copy (if the input is already a numpy array or a scipy.sparse
CSR matrix and if axis is 1).
return_norm : boolean, default False
whether to return the computed norms
See also
--------
Normalizer: Performs normalization using the ``Transformer`` API
(e.g. as part of a preprocessing :class:`sklearn.pipeline.Pipeline`).
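    Examples
    --------
    A small illustrative call (the row [3, 4] has l2 norm 5):

    >>> from sklearn.preprocessing import normalize
    >>> normalize([[3., 4.]])
    array([[ 0.6,  0.8]])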
"""
if norm not in ('l1', 'l2', 'max'):
raise ValueError("'%s' is not a supported norm" % norm)
if axis == 0:
sparse_format = 'csc'
elif axis == 1:
sparse_format = 'csr'
else:
raise ValueError("'%d' is not a supported axis" % axis)
X = check_array(X, sparse_format, copy=copy, warn_on_dtype=True,
estimator='the normalize function', dtype=FLOAT_DTYPES)
if axis == 0:
X = X.T
if sparse.issparse(X):
if norm == 'l1':
inplace_csr_row_normalize_l1(X)
elif norm == 'l2':
inplace_csr_row_normalize_l2(X)
elif norm == 'max':
_, norms = min_max_axis(X, 1)
norms = norms.repeat(np.diff(X.indptr))
mask = norms != 0
X.data[mask] /= norms[mask]
else:
if norm == 'l1':
norms = np.abs(X).sum(axis=1)
elif norm == 'l2':
norms = row_norms(X)
elif norm == 'max':
norms = np.max(X, axis=1)
norms = _handle_zeros_in_scale(norms, copy=False)
X /= norms[:, np.newaxis]
if axis == 0:
X = X.T
if return_norm:
return X, norms
else:
return X
class Normalizer(BaseEstimator, TransformerMixin):
"""Normalize samples individually to unit norm.
Each sample (i.e. each row of the data matrix) with at least one
non zero component is rescaled independently of other samples so
that its norm (l1 or l2) equals one.
This transformer is able to work both with dense numpy arrays and
scipy.sparse matrix (use CSR format if you want to avoid the burden of
a copy / conversion).
Scaling inputs to unit norms is a common operation for text
classification or clustering for instance. For instance the dot
product of two l2-normalized TF-IDF vectors is the cosine similarity
of the vectors and is the base similarity metric for the Vector
Space Model commonly used by the Information Retrieval community.
Read more in the :ref:`User Guide <preprocessing_normalization>`.
Parameters
----------
norm : 'l1', 'l2', or 'max', optional ('l2' by default)
The norm to use to normalize each non zero sample.
copy : boolean, optional, default True
set to False to perform inplace row normalization and avoid a
copy (if the input is already a numpy array or a scipy.sparse
CSR matrix).
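    Examples
    --------
    A minimal usage sketch with illustrative data:

    >>> import numpy as np
    >>> from sklearn.preprocessing import Normalizer
    >>> X = np.array([[3., 4.], [1., 0.]])
    >>> Normalizer(norm='l2').fit_transform(X)
    array([[ 0.6,  0.8],
           [ 1. ,  0. ]])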
Notes
-----
This estimator is stateless (besides constructor parameters), the
fit method does nothing but is useful when used in a pipeline.
See also
--------
normalize: Equivalent function without the object oriented API.
"""
def __init__(self, norm='l2', copy=True):
self.norm = norm
self.copy = copy
def fit(self, X, y=None):
"""Do nothing and return the estimator unchanged
This method is just there to implement the usual API and hence
work in pipelines.
"""
X = check_array(X, accept_sparse='csr')
return self
def transform(self, X, y=None, copy=None):
"""Scale each non zero row of X to unit norm
Parameters
----------
X : {array-like, sparse matrix}, shape [n_samples, n_features]
The data to normalize, row by row. scipy.sparse matrices should be
in CSR format to avoid an un-necessary copy.
"""
copy = copy if copy is not None else self.copy
X = check_array(X, accept_sparse='csr')
return normalize(X, norm=self.norm, axis=1, copy=copy)
def binarize(X, threshold=0.0, copy=True):
"""Boolean thresholding of array-like or scipy.sparse matrix
Read more in the :ref:`User Guide <preprocessing_binarization>`.
Parameters
----------
X : {array-like, sparse matrix}, shape [n_samples, n_features]
The data to binarize, element by element.
scipy.sparse matrices should be in CSR or CSC format to avoid an
un-necessary copy.
threshold : float, optional (0.0 by default)
Feature values below or equal to this are replaced by 0, above it by 1.
Threshold may not be less than 0 for operations on sparse matrices.
copy : boolean, optional, default True
set to False to perform inplace binarization and avoid a copy
(if the input is already a numpy array or a scipy.sparse CSR / CSC
matrix and if axis is 1).
See also
--------
Binarizer: Performs binarization using the ``Transformer`` API
(e.g. as part of a preprocessing :class:`sklearn.pipeline.Pipeline`).
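    Examples
    --------
    A small illustrative call with the default threshold of 0:

    >>> from sklearn.preprocessing import binarize
    >>> binarize([[1., -1., 2.]])
    array([[ 1.,  0.,  1.]])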
"""
X = check_array(X, accept_sparse=['csr', 'csc'], copy=copy)
if sparse.issparse(X):
if threshold < 0:
raise ValueError('Cannot binarize a sparse matrix with threshold '
'< 0')
cond = X.data > threshold
not_cond = np.logical_not(cond)
X.data[cond] = 1
X.data[not_cond] = 0
X.eliminate_zeros()
else:
cond = X > threshold
not_cond = np.logical_not(cond)
X[cond] = 1
X[not_cond] = 0
return X
class Binarizer(BaseEstimator, TransformerMixin):
"""Binarize data (set feature values to 0 or 1) according to a threshold
Values greater than the threshold map to 1, while values less than
or equal to the threshold map to 0. With the default threshold of 0,
only positive values map to 1.
Binarization is a common operation on text count data where the
analyst can decide to only consider the presence or absence of a
feature rather than a quantified number of occurrences for instance.
It can also be used as a pre-processing step for estimators that
consider boolean random variables (e.g. modelled using the Bernoulli
distribution in a Bayesian setting).
Read more in the :ref:`User Guide <preprocessing_binarization>`.
Parameters
----------
threshold : float, optional (0.0 by default)
Feature values below or equal to this are replaced by 0, above it by 1.
Threshold may not be less than 0 for operations on sparse matrices.
copy : boolean, optional, default True
set to False to perform inplace binarization and avoid a copy (if
the input is already a numpy array or a scipy.sparse CSR matrix).
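    Examples
    --------
    A minimal usage sketch with illustrative data:

    >>> from sklearn.preprocessing import Binarizer
    >>> X = [[1., -1., 2.], [2., 0., 0.], [0., 1., -1.]]
    >>> Binarizer(threshold=0.0).fit_transform(X)
    array([[ 1.,  0.,  1.],
           [ 1.,  0.,  0.],
           [ 0.,  1.,  0.]])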
Notes
-----
If the input is a sparse matrix, only the non-zero values are subject
to update by the Binarizer class.
This estimator is stateless (besides constructor parameters), the
fit method does nothing but is useful when used in a pipeline.
See also
--------
binarize: Equivalent function without the object oriented API.
"""
def __init__(self, threshold=0.0, copy=True):
self.threshold = threshold
self.copy = copy
def fit(self, X, y=None):
"""Do nothing and return the estimator unchanged
This method is just there to implement the usual API and hence
work in pipelines.
"""
check_array(X, accept_sparse='csr')
return self
def transform(self, X, y=None, copy=None):
"""Binarize each element of X
Parameters
----------
X : {array-like, sparse matrix}, shape [n_samples, n_features]
The data to binarize, element by element.
scipy.sparse matrices should be in CSR format to avoid an
un-necessary copy.
"""
copy = copy if copy is not None else self.copy
return binarize(X, threshold=self.threshold, copy=copy)
class KernelCenterer(BaseEstimator, TransformerMixin):
"""Center a kernel matrix
Let K(x, z) be a kernel defined by phi(x)^T phi(z), where phi is a
function mapping x to a Hilbert space. KernelCenterer centers (i.e.,
normalize to have zero mean) the data without explicitly computing phi(x).
It is equivalent to centering phi(x) with
sklearn.preprocessing.StandardScaler(with_std=False).
Read more in the :ref:`User Guide <kernel_centering>`.
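    Examples
    --------
    A minimal sketch on an illustrative 2x2 kernel matrix:

    >>> import numpy as np
    >>> from sklearn.preprocessing import KernelCenterer
    >>> K = np.array([[1., 2.], [2., 1.]])
    >>> KernelCenterer().fit(K).transform(K)
    array([[-0.5,  0.5],
           [ 0.5, -0.5]])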
"""
def fit(self, K, y=None):
"""Fit KernelCenterer
Parameters
----------
K : numpy array of shape [n_samples, n_samples]
Kernel matrix.
Returns
-------
self : returns an instance of self.
"""
K = check_array(K, dtype=FLOAT_DTYPES)
n_samples = K.shape[0]
self.K_fit_rows_ = np.sum(K, axis=0) / n_samples
self.K_fit_all_ = self.K_fit_rows_.sum() / n_samples
return self
def transform(self, K, y=None, copy=True):
"""Center kernel matrix.
Parameters
----------
K : numpy array of shape [n_samples1, n_samples2]
Kernel matrix.
copy : boolean, optional, default True
Set to False to perform inplace computation.
Returns
-------
K_new : numpy array of shape [n_samples1, n_samples2]
"""
check_is_fitted(self, 'K_fit_all_')
K = check_array(K, copy=copy, dtype=FLOAT_DTYPES)
K_pred_cols = (np.sum(K, axis=1) /
self.K_fit_rows_.shape[0])[:, np.newaxis]
K -= self.K_fit_rows_
K -= K_pred_cols
K += self.K_fit_all_
return K
@property
def _pairwise(self):
return True
def add_dummy_feature(X, value=1.0):
"""Augment dataset with an additional dummy feature.
This is useful for fitting an intercept term with implementations which
cannot otherwise fit it directly.
Parameters
----------
X : {array-like, sparse matrix}, shape [n_samples, n_features]
Data.
value : float
Value to use for the dummy feature.
Returns
-------
X : {array, sparse matrix}, shape [n_samples, n_features + 1]
Same data with dummy feature added as first column.
Examples
--------
>>> from sklearn.preprocessing import add_dummy_feature
>>> add_dummy_feature([[0, 1], [1, 0]])
array([[ 1., 0., 1.],
[ 1., 1., 0.]])
"""
X = check_array(X, accept_sparse=['csc', 'csr', 'coo'], dtype=FLOAT_DTYPES)
n_samples, n_features = X.shape
shape = (n_samples, n_features + 1)
if sparse.issparse(X):
if sparse.isspmatrix_coo(X):
# Shift columns to the right.
col = X.col + 1
# Column indices of dummy feature are 0 everywhere.
col = np.concatenate((np.zeros(n_samples), col))
# Row indices of dummy feature are 0, ..., n_samples-1.
row = np.concatenate((np.arange(n_samples), X.row))
# Prepend the dummy feature n_samples times.
data = np.concatenate((np.ones(n_samples) * value, X.data))
return sparse.coo_matrix((data, (row, col)), shape)
elif sparse.isspmatrix_csc(X):
# Shift index pointers since we need to add n_samples elements.
indptr = X.indptr + n_samples
# indptr[0] must be 0.
indptr = np.concatenate((np.array([0]), indptr))
# Row indices of dummy feature are 0, ..., n_samples-1.
indices = np.concatenate((np.arange(n_samples), X.indices))
# Prepend the dummy feature n_samples times.
data = np.concatenate((np.ones(n_samples) * value, X.data))
return sparse.csc_matrix((data, indices, indptr), shape)
else:
klass = X.__class__
return klass(add_dummy_feature(X.tocoo(), value))
else:
return np.hstack((np.ones((n_samples, 1)) * value, X))
def _transform_selected(X, transform, selected="all", copy=True):
"""Apply a transform function to portion of selected features
Parameters
----------
X : {array-like, sparse matrix}, shape [n_samples, n_features]
Dense array or sparse matrix.
transform : callable
A callable transform(X) -> X_transformed
copy : boolean, optional
Copy X even if it could be avoided.
selected: "all" or array of indices or mask
Specify which features to apply the transform to.
Returns
-------
X : array or sparse matrix, shape=(n_samples, n_features_new)
"""
X = check_array(X, accept_sparse='csc', copy=copy, dtype=FLOAT_DTYPES)
if isinstance(selected, six.string_types) and selected == "all":
return transform(X)
if len(selected) == 0:
return X
n_features = X.shape[1]
ind = np.arange(n_features)
sel = np.zeros(n_features, dtype=bool)
sel[np.asarray(selected)] = True
not_sel = np.logical_not(sel)
n_selected = np.sum(sel)
if n_selected == 0:
# No features selected.
return X
elif n_selected == n_features:
# All features selected.
return transform(X)
else:
X_sel = transform(X[:, ind[sel]])
X_not_sel = X[:, ind[not_sel]]
if sparse.issparse(X_sel) or sparse.issparse(X_not_sel):
return sparse.hstack((X_sel, X_not_sel))
else:
return np.hstack((X_sel, X_not_sel))
class OneHotEncoder(BaseEstimator, TransformerMixin):
"""Encode categorical integer features using a one-hot aka one-of-K scheme.
The input to this transformer should be a matrix of integers, denoting
the values taken on by categorical (discrete) features. The output will be
a sparse matrix where each column corresponds to one possible value of one
feature. It is assumed that input features take on values in the range
[0, n_values).
This encoding is needed for feeding categorical data to many scikit-learn
estimators, notably linear models and SVMs with the standard kernels.
Note: a one-hot encoding of y labels should use a LabelBinarizer
instead.
Read more in the :ref:`User Guide <preprocessing_categorical_features>`.
Parameters
----------
n_values : 'auto', int or array of ints
Number of values per feature.
- 'auto' : determine value range from training data.
- int : number of categorical values per feature.
Each feature value should be in ``range(n_values)``
- array : ``n_values[i]`` is the number of categorical values in
``X[:, i]``. Each feature value should be
in ``range(n_values[i])``
categorical_features : "all" or array of indices or mask
Specify what features are treated as categorical.
- 'all' (default): All features are treated as categorical.
- array of indices: Array of categorical feature indices.
- mask: Array of length n_features and with dtype=bool.
Non-categorical features are always stacked to the right of the matrix.
dtype : number type, default=np.float
Desired dtype of output.
sparse : boolean, default=True
Will return sparse matrix if set True else will return an array.
handle_unknown : str, 'error' or 'ignore'
        Whether to raise an error or ignore if an unknown categorical feature is
present during transform.
Attributes
----------
active_features_ : array
Indices for active features, meaning values that actually occur
in the training set. Only available when n_values is ``'auto'``.
feature_indices_ : array of shape (n_features,)
Indices to feature ranges.
Feature ``i`` in the original data is mapped to features
from ``feature_indices_[i]`` to ``feature_indices_[i+1]``
(and then potentially masked by `active_features_` afterwards)
n_values_ : array of shape (n_features,)
Maximum number of values per feature.
Examples
--------
Given a dataset with three features and two samples, we let the encoder
find the maximum value per feature and transform the data to a binary
one-hot encoding.
>>> from sklearn.preprocessing import OneHotEncoder
>>> enc = OneHotEncoder()
>>> enc.fit([[0, 0, 3], [1, 1, 0], [0, 2, 1], \
[1, 0, 2]]) # doctest: +ELLIPSIS
OneHotEncoder(categorical_features='all', dtype=<... 'numpy.float64'>,
handle_unknown='error', n_values='auto', sparse=True)
>>> enc.n_values_
array([2, 3, 4])
>>> enc.feature_indices_
array([0, 2, 5, 9])
>>> enc.transform([[0, 1, 1]]).toarray()
array([[ 1., 0., 0., 1., 0., 0., 1., 0., 0.]])
See also
--------
sklearn.feature_extraction.DictVectorizer : performs a one-hot encoding of
dictionary items (also handles string-valued features).
sklearn.feature_extraction.FeatureHasher : performs an approximate one-hot
encoding of dictionary items or strings.
sklearn.preprocessing.LabelBinarizer : binarizes labels in a one-vs-all
fashion.
sklearn.preprocessing.MultiLabelBinarizer : transforms between iterable of
iterables and a multilabel format, e.g. a (samples x classes) binary
matrix indicating the presence of a class label.
sklearn.preprocessing.LabelEncoder : encodes labels with values between 0
and n_classes-1.
"""
def __init__(self, n_values="auto", categorical_features="all",
dtype=np.float64, sparse=True, handle_unknown='error'):
self.n_values = n_values
self.categorical_features = categorical_features
self.dtype = dtype
self.sparse = sparse
self.handle_unknown = handle_unknown
def fit(self, X, y=None):
"""Fit OneHotEncoder to X.
Parameters
----------
X : array-like, shape [n_samples, n_feature]
Input array of type int.
Returns
-------
self
"""
self.fit_transform(X)
return self
def _fit_transform(self, X):
"""Assumes X contains only categorical features."""
X = check_array(X, dtype=np.int)
if np.any(X < 0):
raise ValueError("X needs to contain only non-negative integers.")
n_samples, n_features = X.shape
if (isinstance(self.n_values, six.string_types) and
self.n_values == 'auto'):
n_values = np.max(X, axis=0) + 1
elif isinstance(self.n_values, numbers.Integral):
if (np.max(X, axis=0) >= self.n_values).any():
raise ValueError("Feature out of bounds for n_values=%d"
% self.n_values)
n_values = np.empty(n_features, dtype=np.int)
n_values.fill(self.n_values)
else:
try:
n_values = np.asarray(self.n_values, dtype=int)
except (ValueError, TypeError):
raise TypeError("Wrong type for parameter `n_values`. Expected"
" 'auto', int or array of ints, got %r"
                                % type(self.n_values))
if n_values.ndim < 1 or n_values.shape[0] != X.shape[1]:
raise ValueError("Shape mismatch: if n_values is an array,"
" it has to be of shape (n_features,).")
self.n_values_ = n_values
n_values = np.hstack([[0], n_values])
indices = np.cumsum(n_values)
self.feature_indices_ = indices
column_indices = (X + indices[:-1]).ravel()
row_indices = np.repeat(np.arange(n_samples, dtype=np.int32),
n_features)
data = np.ones(n_samples * n_features)
out = sparse.coo_matrix((data, (row_indices, column_indices)),
shape=(n_samples, indices[-1]),
dtype=self.dtype).tocsr()
if (isinstance(self.n_values, six.string_types) and
self.n_values == 'auto'):
mask = np.array(out.sum(axis=0)).ravel() != 0
active_features = np.where(mask)[0]
out = out[:, active_features]
self.active_features_ = active_features
return out if self.sparse else out.toarray()
def fit_transform(self, X, y=None):
"""Fit OneHotEncoder to X, then transform X.
Equivalent to self.fit(X).transform(X), but more convenient and more
efficient. See fit for the parameters, transform for the return value.
"""
return _transform_selected(X, self._fit_transform,
self.categorical_features, copy=True)
def _transform(self, X):
"""Assumes X contains only categorical features."""
X = check_array(X, dtype=np.int)
if np.any(X < 0):
raise ValueError("X needs to contain only non-negative integers.")
n_samples, n_features = X.shape
indices = self.feature_indices_
if n_features != indices.shape[0] - 1:
raise ValueError("X has different shape than during fitting."
" Expected %d, got %d."
% (indices.shape[0] - 1, n_features))
        # We use only those categorical features of X that were seen during
        # fit, i.e. those less than n_values_, using a mask.
# This means, if self.handle_unknown is "ignore", the row_indices and
# col_indices corresponding to the unknown categorical feature are
# ignored.
mask = (X < self.n_values_).ravel()
if np.any(~mask):
if self.handle_unknown not in ['error', 'ignore']:
raise ValueError("handle_unknown should be either error or "
"unknown got %s" % self.handle_unknown)
if self.handle_unknown == 'error':
raise ValueError("unknown categorical feature present %s "
"during transform." % X.ravel()[~mask])
column_indices = (X + indices[:-1]).ravel()[mask]
row_indices = np.repeat(np.arange(n_samples, dtype=np.int32),
n_features)[mask]
data = np.ones(np.sum(mask))
out = sparse.coo_matrix((data, (row_indices, column_indices)),
shape=(n_samples, indices[-1]),
dtype=self.dtype).tocsr()
if (isinstance(self.n_values, six.string_types) and
self.n_values == 'auto'):
out = out[:, self.active_features_]
return out if self.sparse else out.toarray()
def transform(self, X):
"""Transform X using one-hot encoding.
Parameters
----------
X : array-like, shape [n_samples, n_features]
Input array of type int.
Returns
-------
X_out : sparse matrix if sparse=True else a 2-d array, dtype=int
Transformed input.
"""
return _transform_selected(X, self._transform,
self.categorical_features, copy=True)
| bsd-3-clause |
bcui6611/healthchecker | Cheetah/Utils/Indenter.py | 17 | 4145 | """
Indentation maker.
@@TR: this code is unsupported and largely undocumented ...
This version is based directly on code by Robert Kuzelj
<[email protected]> and uses his directive syntax. Some classes and
attributes have been renamed. Indentation is output via
$self._CHEETAH__indenter.indent() to prevent '_indenter' being looked up on the
searchList and another one being found. The directive syntax will
soon be changed somewhat.
"""
import re
import sys
def indentize(source):
return IndentProcessor().process(source)
class IndentProcessor(object):
"""Preprocess #indent tags."""
LINE_SEP = '\n'
ARGS = "args"
INDENT_DIR = re.compile(r'[ \t]*#indent[ \t]*(?P<args>.*)')
DIRECTIVE = re.compile(r"[ \t]*#")
WS = "ws"
WHITESPACES = re.compile(r"(?P<ws>[ \t]*)")
INC = "++"
DEC = "--"
SET = "="
CHAR = "char"
ON = "on"
OFF = "off"
PUSH = "push"
POP = "pop"
def process(self, _txt):
result = []
for line in _txt.splitlines():
match = self.INDENT_DIR.match(line)
if match:
#is indention directive
args = match.group(self.ARGS).strip()
if args == self.ON:
line = "#silent $self._CHEETAH__indenter.on()"
elif args == self.OFF:
line = "#silent $self._CHEETAH__indenter.off()"
elif args == self.INC:
line = "#silent $self._CHEETAH__indenter.inc()"
elif args == self.DEC:
line = "#silent $self._CHEETAH__indenter.dec()"
elif args.startswith(self.SET):
level = int(args[1:])
line = "#silent $self._CHEETAH__indenter.setLevel(%(level)d)" % {"level":level}
elif args.startswith('chars'):
self.indentChars = eval(args.split('=')[1])
line = "#silent $self._CHEETAH__indenter.setChars(%(level)d)" % {"level":level}
elif args.startswith(self.PUSH):
line = "#silent $self._CHEETAH__indenter.push()"
elif args.startswith(self.POP):
line = "#silent $self._CHEETAH__indenter.pop()"
else:
match = self.DIRECTIVE.match(line)
if not match:
#is not another directive
match = self.WHITESPACES.match(line)
if match:
size = len(match.group("ws").expandtabs(4))
line = ("${self._CHEETAH__indenter.indent(%(size)d)}" % {"size":size}) + line.lstrip()
else:
line = "${self._CHEETAH__indenter.indent(0)}" + line
result.append(line)
return self.LINE_SEP.join(result)
class Indenter(object):
"""
A class that keeps track of the current indentation level.
.indent() returns the appropriate amount of indentation.
"""
On = 1
Level = 0
Chars = ' '
LevelStack = []
def on(self):
self.On = 1
def off(self):
self.On = 0
def inc(self):
self.Level += 1
def dec(self):
"""decrement can only be applied to values greater zero
values below zero don't make any sense at all!"""
if self.Level > 0:
self.Level -= 1
def push(self):
self.LevelStack.append(self.Level)
def pop(self):
"""the levestack can not become -1. any attempt to do so
sets the level to 0!"""
if len(self.LevelStack) > 0:
self.Level = self.LevelStack.pop()
else:
self.Level = 0
def setLevel(self, _level):
"""the leve can't be less than zero. any attempt to do so
sets the level automatically to zero!"""
if _level < 0:
self.Level = 0
else:
self.Level = _level
    def setChars(self, _chars):  # renamed from setChar to match the generated #silent directive
self.Chars = _chars
def indent(self, _default=0):
if self.On:
return self.Chars * self.Level
return " " * _default
| apache-2.0 |
xkmato/rapidpro-tools | fix-contact-names.py | 2 | 1954 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# vim: ai ts=4 sts=4 et sw=4 nu
from __future__ import (unicode_literals, absolute_import,
division, print_function)
import os
import sys
from docopt import docopt
from rapidpro_tools import logger, change_logging_level
from rapidpro_tools.contacts import (export_contact_names_to,
fix_contact_names_from)
help = ("""Usage: update-contacts.py [-v] [-h] -f FILE (export|import)
-h --help Display this help message
-v --verbose Display DEBUG messages
-f --file=<file> File Path to export/import CSV data to/from
This script exports all contact names to a CSV file or imports and updates"""
""" contact names based on a CSV file""")
def main(arguments):
debug = arguments.get('--verbose') or False
change_logging_level(debug)
logger.info("Starting fix-contacts-names script...{}"
.format(" [DEBUG mode]" if debug else ""))
options = {
'export': arguments.get('export') or False,
'import': arguments.get('import') or False,
'file': arguments.get('--file') or None,
}
if options['export'] + options['import'] != 1:
logger.error("You must specify whether to export or import data")
return 1
if not options['file']:
logger.error("You must specify a file path")
return 1
if options['import'] and not os.path.exists(options['file']):
logger.error("The filepath `{}` does not exist."
.format(options['file']))
return 1
if options['export']:
with open(options['file'], 'w') as fio:
export_contact_names_to(fio)
if options['import']:
with open(options['file'], 'r') as fio:
fix_contact_names_from(fio)
logger.info("-- All done. :)")
if __name__ == '__main__':
    sys.exit(main(docopt(help, version=0.1)))
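# Example invocations (illustrative; the CSV file name is hypothetical):
#   python fix-contact-names.py -f contacts.csv export
#   python fix-contact-names.py -v -f contacts.csv import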
| cc0-1.0 |
Russell-IO/ansible | lib/ansible/modules/remote_management/oneview/oneview_san_manager.py | 146 | 7717 | #!/usr/bin/python
# Copyright (c) 2016-2017 Hewlett Packard Enterprise Development LP
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: oneview_san_manager
short_description: Manage OneView SAN Manager resources
description:
- Provides an interface to manage SAN Manager resources. Can create, update, or delete.
version_added: "2.4"
requirements:
- hpOneView >= 3.1.1
author:
- Felipe Bulsoni (@fgbulsoni)
- Thiago Miotto (@tmiotto)
- Adriane Cardozo (@adriane-cardozo)
options:
state:
description:
      - Indicates the desired state for the SAN Manager resource.
- C(present) ensures data properties are compliant with OneView.
- C(absent) removes the resource from OneView, if it exists.
- C(connection_information_set) updates the connection information for the SAN Manager. This operation is non-idempotent.
default: present
choices: [present, absent, connection_information_set]
data:
description:
- List with SAN Manager properties.
required: true
extends_documentation_fragment:
- oneview
- oneview.validateetag
'''
EXAMPLES = '''
- name: Creates a Device Manager for the Brocade SAN provider with the given hostname and credentials
oneview_san_manager:
config: /etc/oneview/oneview_config.json
state: present
data:
providerDisplayName: Brocade Network Advisor
connectionInfo:
- name: Host
value: 172.18.15.1
- name: Port
value: 5989
- name: Username
value: username
- name: Password
value: password
- name: UseSsl
value: true
delegate_to: localhost
- name: Ensure a Device Manager for the Cisco SAN Provider is present
oneview_san_manager:
config: /etc/oneview/oneview_config.json
state: present
data:
name: 172.18.20.1
providerDisplayName: Cisco
connectionInfo:
- name: Host
value: 172.18.20.1
- name: SnmpPort
value: 161
- name: SnmpUserName
value: admin
- name: SnmpAuthLevel
value: authnopriv
- name: SnmpAuthProtocol
value: sha
- name: SnmpAuthString
value: password
delegate_to: localhost
- name: Sets the SAN Manager connection information
oneview_san_manager:
config: /etc/oneview/oneview_config.json
state: connection_information_set
data:
connectionInfo:
- name: Host
value: '172.18.15.1'
- name: Port
value: '5989'
- name: Username
value: 'username'
- name: Password
value: 'password'
- name: UseSsl
value: true
delegate_to: localhost
- name: Refreshes the SAN Manager
oneview_san_manager:
config: /etc/oneview/oneview_config.json
state: present
data:
name: 172.18.15.1
refreshState: RefreshPending
delegate_to: localhost
- name: Delete the SAN Manager recently created
oneview_san_manager:
config: /etc/oneview/oneview_config.json
state: absent
data:
name: '172.18.15.1'
delegate_to: localhost
'''
RETURN = '''
san_manager:
description: Has the OneView facts about the SAN Manager.
returned: On state 'present'. Can be null.
type: dict
'''
from ansible.module_utils.oneview import OneViewModuleBase, OneViewModuleValueError
class SanManagerModule(OneViewModuleBase):
MSG_CREATED = 'SAN Manager created successfully.'
MSG_UPDATED = 'SAN Manager updated successfully.'
MSG_DELETED = 'SAN Manager deleted successfully.'
MSG_ALREADY_PRESENT = 'SAN Manager is already present.'
MSG_ALREADY_ABSENT = 'SAN Manager is already absent.'
MSG_SAN_MANAGER_PROVIDER_DISPLAY_NAME_NOT_FOUND = "The provider '{0}' was not found."
argument_spec = dict(
state=dict(type='str', default='present', choices=['absent', 'present', 'connection_information_set']),
data=dict(type='dict', required=True)
)
def __init__(self):
super(SanManagerModule, self).__init__(additional_arg_spec=self.argument_spec, validate_etag_support=True)
self.resource_client = self.oneview_client.san_managers
def execute_module(self):
if self.data.get('connectionInfo'):
for connection_hash in self.data.get('connectionInfo'):
if connection_hash.get('name') == 'Host':
resource_name = connection_hash.get('value')
elif self.data.get('name'):
resource_name = self.data.get('name')
else:
msg = 'A "name" or "connectionInfo" must be provided inside the "data" field for this operation. '
msg += 'If a "connectionInfo" is provided, the "Host" name is considered as the "name" for the resource.'
            raise OneViewModuleValueError(msg)
resource = self.resource_client.get_by_name(resource_name)
if self.state == 'present':
changed, msg, san_manager = self._present(resource)
return dict(changed=changed, msg=msg, ansible_facts=dict(san_manager=san_manager))
elif self.state == 'absent':
return self.resource_absent(resource, method='remove')
elif self.state == 'connection_information_set':
changed, msg, san_manager = self._connection_information_set(resource)
return dict(changed=changed, msg=msg, ansible_facts=dict(san_manager=san_manager))
def _present(self, resource):
if not resource:
provider_uri = self.data.get('providerUri', self._get_provider_uri_by_display_name(self.data))
return True, self.MSG_CREATED, self.resource_client.add(self.data, provider_uri)
else:
merged_data = resource.copy()
merged_data.update(self.data)
# Remove 'connectionInfo' from comparison, since it is not possible to validate it.
resource.pop('connectionInfo', None)
merged_data.pop('connectionInfo', None)
if self.compare(resource, merged_data):
return False, self.MSG_ALREADY_PRESENT, resource
else:
updated_san_manager = self.resource_client.update(resource=merged_data, id_or_uri=resource['uri'])
return True, self.MSG_UPDATED, updated_san_manager
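    # Sketch of the merge-and-compare idempotency check above (illustrative
    # values; the `compare` semantics come from OneViewModuleBase):
    #   resource  = {'name': 'sm1', 'uri': '/rest/1', 'refreshState': 'Stable'}
    #   self.data = {'name': 'sm1'}
    #   merged = dict(resource, **self.data)  # desired keys override current
    #   compare(resource, merged) -> True, so no update request is issued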
def _connection_information_set(self, resource):
if not resource:
return self._present(resource)
else:
merged_data = resource.copy()
merged_data.update(self.data)
merged_data.pop('refreshState', None)
if not self.data.get('connectionInfo', None):
raise OneViewModuleValueError('A connectionInfo field is required for this operation.')
updated_san_manager = self.resource_client.update(resource=merged_data, id_or_uri=resource['uri'])
return True, self.MSG_UPDATED, updated_san_manager
def _get_provider_uri_by_display_name(self, data):
display_name = data.get('providerDisplayName')
provider_uri = self.resource_client.get_provider_uri(display_name)
if not provider_uri:
raise OneViewModuleValueError(self.MSG_SAN_MANAGER_PROVIDER_DISPLAY_NAME_NOT_FOUND.format(display_name))
return provider_uri
def main():
SanManagerModule().run()
if __name__ == '__main__':
main()
| gpl-3.0 |
amitgroup/parts-net | scripts/scripy1.py | 1 | 5953 | from __future__ import division, print_function, absolute_import
#from pnet.vzlog import default as vz
import numpy as np
import amitgroup as ag
import itertools as itr
import sys
import os
import pnet
import time
def test(ims, labels, net):
yhat = net.classify(ims)
return yhat == labels
if pnet.parallel.main(__name__):
ag.set_verbose(True)
print("1")
import argparse
parser = argparse.ArgumentParser()
#parser.add_argument('seed', metavar='<seed>', type=int, help='Random seed')
#parser.add_argument('param', metavar='<param>', type=string)
parser.add_argument('model',metavar='<model file>',type=argparse.FileType('rb'), help='Filename of model file')
print("ohhh")
parser.add_argument('data',metavar='<mnist data file>',type=argparse.FileType('rb'),help='Filename of data file')
    parser.add_argument('label',metavar='<mnist label file>',type=argparse.FileType('rb'),help='Filename of label file')
parser.add_argument('numOfClassModel',metavar='<numOfClassModel>', type=int, help='num Of Class Model')
args = parser.parse_args()
    numOfClassModel = args.numOfClassModel
    data = np.load(args.data)
label = np.load(args.label)
net = pnet.PartsNet.load(args.model)
unsup_training_times = []
sup_training_times = []
testing_times = []
error_rates = []
all_num_parts = []
ims10k = data[:10000]
label10k = np.array(label[:10000])
np.save('a.npy',label10k)
ims2k = data[10000:12000]
label2k = np.array(label[10000:12000])
np.save('b.npy',label2k)
print(ims2k.shape)
digits = range(10)
sup_ims = []
sup_labels = []
# Load supervised training data
for d in digits:
ims0 = ims10k[label10k == d]
sup_ims.append(ims0)
sup_labels.append(d * np.ones(len(ims0), dtype=np.int64))
sup_ims = np.concatenate(sup_ims, axis=0)
sup_labels = np.concatenate(sup_labels, axis=0)
print("=================")
print(sup_ims.shape)
print(sup_labels)
for classifier in 'mixture', 'svm':
for rotspread in [0, 1]:
net.layers[0]._settings['rotation_spreading_radius'] = rotspread
print('Classifier:', classifier, 'Rotational spreading:', rotspread)
if classifier == 'mixture':
cl = pnet.MixtureClassificationLayer(n_components=numOfClassModel, min_prob=1e-5)
elif classifier == 'svm':
cl = pnet.SVMClassificationLayer(C=None)
clnet = pnet.PartsNet([net, cl])
start1 = time.time()
print('Training supervised...')
print(sup_ims.shape)
clnet.train(sup_ims, sup_labels)
print('Done.')
end1 = time.time()
#print("Now testing...")
### Test ######################################################################
corrects = 0
total = 0
if 0:
test_ims, test_labels = mnist_data['test_image'], mnist_data['test_label']
else:
test_ims = ims2k
test_labels = label2k
with gv.Timer("Split to batches"):
ims_batches = np.array_split(test_ims, 10)
labels_batches = np.array_split(test_labels, 10)
def format_error_rate(pr):
return "{:.2f}%".format(100*(1-pr))
#with gv.Timer('Testing'):
start2 = time.time()
args = (tup+(clnet,) for tup in itr.izip(ims_batches, labels_batches))
for i, res in enumerate(pnet.parallel.starmap(test, args)):
corrects += res.sum()
total += res.size
pr = corrects / total
end2 = time.time()
error_rate = 1.0 - pr
num_parts = 0#net.layers[1].num_parts
error_rates.append(error_rate)
            print('error rate', error_rate * 100, 'num parts', num_parts)  # , 'num parts 2', net.layers[3].num_parts
            unsup_training_times.append(0.0)  # no unsupervised training stage is timed in this script
sup_training_times.append(end1 - start1)
testing_times.append(end2 - start2)
#print('times', end0-start0, end1-start1, end2-start2)
all_num_parts.append(num_parts)
#vz.section('MNIST')
#gv.img.save_image(vz.generate_filename(), test_ims[0])
#gv.img.save_image(vz.generate_filename(), test_ims[1])
#gv.img.save_image(vz.generate_filename(), test_ims[2])
# Vz
#net.infoplot(vz)
if 0:
print(r"{ppl} & {depth} & {num_parts} & {unsup_time:.1f} & {test_time:.1f} & ${rate:.2f} \pm {std:.2f}$ \\".format(
ppl=2,
depth=maxdepth,
num_parts=r'${:.0f} \pm {:.0f}$'.format(np.mean(all_num_parts), np.std(all_num_parts)),
unsup_time=np.median(unsup_training_times) / 60,
#sup_time=np.median(sup_training_times),
test_time=np.median(testing_times) / 60,
rate=100*np.mean(error_rates),
std=100*np.std(error_rates)))
print(r"{ppl} {depth} {num_parts} {unsup_time} {test_time} {rate} {std}".format(
ppl=2,
depth=maxdepth,
num_parts=r'${:.0f} \pm {:.0f}$'.format(np.mean(all_num_parts), np.std(all_num_parts)),
unsup_time=np.median(unsup_training_times) / 60,
#sup_time=np.median(sup_training_times),
test_time=np.median(testing_times) / 60,
rate=100*np.mean(error_rates),
std=100*np.std(error_rates)))
#np.savez('gdata2-{}-{}-{}.npz'.format(maxdepth, split_criterion, split_entropy), all_num_parts=all_num_parts, unsup_time=unsup_training_times, test_time=testing_times, rates=error_rates)
print('mean error rate', np.mean(error_rates) * 100)
#net.save(args.model)
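# Example invocation (illustrative; the .npy file names are hypothetical):
#   python scripy1.py parts_net_model.npy mnist_images.npy mnist_labels.npy 5
# i.e. <model file> <data file> <label file> <numOfClassModel>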
| bsd-3-clause |
scripni/rethinkdb | external/v8_3.30.33.16/build/gyp/pylib/gyp/MSVSVersion.py | 486 | 15539 | # Copyright (c) 2013 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Handle version information related to Visual Stuio."""
import errno
import os
import re
import subprocess
import sys
import gyp
import glob
class VisualStudioVersion(object):
"""Information regarding a version of Visual Studio."""
def __init__(self, short_name, description,
solution_version, project_version, flat_sln, uses_vcxproj,
path, sdk_based, default_toolset=None):
self.short_name = short_name
self.description = description
self.solution_version = solution_version
self.project_version = project_version
self.flat_sln = flat_sln
self.uses_vcxproj = uses_vcxproj
self.path = path
self.sdk_based = sdk_based
self.default_toolset = default_toolset
def ShortName(self):
return self.short_name
def Description(self):
"""Get the full description of the version."""
return self.description
def SolutionVersion(self):
"""Get the version number of the sln files."""
return self.solution_version
def ProjectVersion(self):
"""Get the version number of the vcproj or vcxproj files."""
return self.project_version
def FlatSolution(self):
return self.flat_sln
def UsesVcxproj(self):
"""Returns true if this version uses a vcxproj file."""
return self.uses_vcxproj
def ProjectExtension(self):
"""Returns the file extension for the project."""
return self.uses_vcxproj and '.vcxproj' or '.vcproj'
def Path(self):
"""Returns the path to Visual Studio installation."""
return self.path
def ToolPath(self, tool):
"""Returns the path to a given compiler tool. """
return os.path.normpath(os.path.join(self.path, "VC/bin", tool))
def DefaultToolset(self):
"""Returns the msbuild toolset version that will be used in the absence
of a user override."""
return self.default_toolset
def SetupScript(self, target_arch):
"""Returns a command (with arguments) to be used to set up the
environment."""
# Check if we are running in the SDK command line environment and use
# the setup script from the SDK if so. |target_arch| should be either
# 'x86' or 'x64'.
assert target_arch in ('x86', 'x64')
sdk_dir = os.environ.get('WindowsSDKDir')
if self.sdk_based and sdk_dir:
return [os.path.normpath(os.path.join(sdk_dir, 'Bin/SetEnv.Cmd')),
'/' + target_arch]
else:
# We don't use VC/vcvarsall.bat for x86 because vcvarsall calls
# vcvars32, which it can only find if VS??COMNTOOLS is set, which it
# isn't always.
if target_arch == 'x86':
if self.short_name == '2013' and (
os.environ.get('PROCESSOR_ARCHITECTURE') == 'AMD64' or
os.environ.get('PROCESSOR_ARCHITEW6432') == 'AMD64'):
# VS2013 non-Express has a x64-x86 cross that we want to prefer.
return [os.path.normpath(
os.path.join(self.path, 'VC/vcvarsall.bat')), 'amd64_x86']
# Otherwise, the standard x86 compiler.
return [os.path.normpath(
os.path.join(self.path, 'Common7/Tools/vsvars32.bat'))]
else:
assert target_arch == 'x64'
arg = 'x86_amd64'
# Use the 64-on-64 compiler if we're not using an express
# edition and we're running on a 64bit OS.
if self.short_name[-1] != 'e' and (
os.environ.get('PROCESSOR_ARCHITECTURE') == 'AMD64' or
os.environ.get('PROCESSOR_ARCHITEW6432') == 'AMD64'):
arg = 'amd64'
return [os.path.normpath(
os.path.join(self.path, 'VC/vcvarsall.bat')), arg]
def _RegistryQueryBase(sysdir, key, value):
"""Use reg.exe to read a particular key.
While ideally we might use the win32 module, we would like gyp to be
python neutral, so for instance cygwin python lacks this module.
Arguments:
sysdir: The system subdirectory to attempt to launch reg.exe from.
key: The registry key to read from.
value: The particular value to read.
Return:
stdout from reg.exe, or None for failure.
"""
# Skip if not on Windows or Python Win32 setup issue
if sys.platform not in ('win32', 'cygwin'):
return None
# Setup params to pass to and attempt to launch reg.exe
cmd = [os.path.join(os.environ.get('WINDIR', ''), sysdir, 'reg.exe'),
'query', key]
if value:
cmd.extend(['/v', value])
p = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
# Obtain the stdout from reg.exe, reading to the end so p.returncode is valid
# Note that the error text may be in [1] in some cases
text = p.communicate()[0]
# Check return code from reg.exe; officially 0==success and 1==error
if p.returncode:
return None
return text
def _RegistryQuery(key, value=None):
"""Use reg.exe to read a particular key through _RegistryQueryBase.
First tries to launch from %WinDir%\Sysnative to avoid WoW64 redirection. If
that fails, it falls back to System32. Sysnative is available on Vista and
up and available on Windows Server 2003 and XP through KB patch 942589. Note
that Sysnative will always fail if using 64-bit python due to it being a
virtual directory and System32 will work correctly in the first place.
KB 942589 - http://support.microsoft.com/kb/942589/en-us.
Arguments:
key: The registry key.
value: The particular registry value to read (optional).
Return:
stdout from reg.exe, or None for failure.
"""
text = None
try:
text = _RegistryQueryBase('Sysnative', key, value)
except OSError, e:
if e.errno == errno.ENOENT:
text = _RegistryQueryBase('System32', key, value)
else:
raise
return text
def _RegistryGetValue(key, value):
"""Use reg.exe to obtain the value of a registry key.
Args:
key: The registry key.
value: The particular registry value to read.
Return:
contents of the registry key's value, or None on failure.
"""
text = _RegistryQuery(key, value)
if not text:
return None
# Extract value.
match = re.search(r'REG_\w+\s+([^\r]+)\r\n', text)
if not match:
return None
return match.group(1)
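# Note (illustrative): a typical `reg.exe query` output line that the regex
# above pulls the value out of looks like:
#   InstallDir    REG_SZ    C:\Program Files\Microsoft Visual Studio 10.0\Common7\IDE\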
def _RegistryKeyExists(key):
"""Use reg.exe to see if a key exists.
Args:
key: The registry key to check.
Return:
True if the key exists
"""
if not _RegistryQuery(key):
return False
return True
def _CreateVersion(name, path, sdk_based=False):
"""Sets up MSVS project generation.
Setup is based off the GYP_MSVS_VERSION environment variable or whatever is
autodetected if GYP_MSVS_VERSION is not explicitly specified. If a version is
passed in that doesn't match a value in versions python will throw a error.
"""
if path:
path = os.path.normpath(path)
versions = {
'2013': VisualStudioVersion('2013',
'Visual Studio 2013',
solution_version='13.00',
project_version='12.0',
flat_sln=False,
uses_vcxproj=True,
path=path,
sdk_based=sdk_based,
default_toolset='v120'),
'2013e': VisualStudioVersion('2013e',
'Visual Studio 2013',
solution_version='13.00',
project_version='12.0',
flat_sln=True,
uses_vcxproj=True,
path=path,
sdk_based=sdk_based,
default_toolset='v120'),
'2012': VisualStudioVersion('2012',
'Visual Studio 2012',
solution_version='12.00',
project_version='4.0',
flat_sln=False,
uses_vcxproj=True,
path=path,
sdk_based=sdk_based,
default_toolset='v110'),
'2012e': VisualStudioVersion('2012e',
'Visual Studio 2012',
solution_version='12.00',
project_version='4.0',
flat_sln=True,
uses_vcxproj=True,
path=path,
sdk_based=sdk_based,
default_toolset='v110'),
'2010': VisualStudioVersion('2010',
'Visual Studio 2010',
solution_version='11.00',
project_version='4.0',
flat_sln=False,
uses_vcxproj=True,
path=path,
sdk_based=sdk_based),
'2010e': VisualStudioVersion('2010e',
'Visual C++ Express 2010',
solution_version='11.00',
project_version='4.0',
flat_sln=True,
uses_vcxproj=True,
path=path,
sdk_based=sdk_based),
'2008': VisualStudioVersion('2008',
'Visual Studio 2008',
solution_version='10.00',
project_version='9.00',
flat_sln=False,
uses_vcxproj=False,
path=path,
sdk_based=sdk_based),
'2008e': VisualStudioVersion('2008e',
'Visual Studio 2008',
solution_version='10.00',
project_version='9.00',
flat_sln=True,
uses_vcxproj=False,
path=path,
sdk_based=sdk_based),
'2005': VisualStudioVersion('2005',
'Visual Studio 2005',
solution_version='9.00',
project_version='8.00',
flat_sln=False,
uses_vcxproj=False,
path=path,
sdk_based=sdk_based),
'2005e': VisualStudioVersion('2005e',
'Visual Studio 2005',
solution_version='9.00',
project_version='8.00',
flat_sln=True,
uses_vcxproj=False,
path=path,
sdk_based=sdk_based),
}
return versions[str(name)]
def _ConvertToCygpath(path):
"""Convert to cygwin path if we are using cygwin."""
if sys.platform == 'cygwin':
p = subprocess.Popen(['cygpath', path], stdout=subprocess.PIPE)
path = p.communicate()[0].strip()
return path
def _DetectVisualStudioVersions(versions_to_check, force_express):
"""Collect the list of installed visual studio versions.
Returns:
A list of visual studio versions installed in descending order of
usage preference.
Base this on the registry and a quick check if devenv.exe exists.
  Only versions 8-12 are considered.
Possibilities are:
2005(e) - Visual Studio 2005 (8)
2008(e) - Visual Studio 2008 (9)
2010(e) - Visual Studio 2010 (10)
2012(e) - Visual Studio 2012 (11)
    2013(e) - Visual Studio 2013 (12)
Where (e) is e for express editions of MSVS and blank otherwise.
"""
version_to_year = {
'8.0': '2005',
'9.0': '2008',
'10.0': '2010',
'11.0': '2012',
'12.0': '2013',
}
versions = []
for version in versions_to_check:
# Old method of searching for which VS version is installed
# We don't use the 2010-encouraged-way because we also want to get the
# path to the binaries, which it doesn't offer.
keys = [r'HKLM\Software\Microsoft\VisualStudio\%s' % version,
r'HKLM\Software\Wow6432Node\Microsoft\VisualStudio\%s' % version,
r'HKLM\Software\Microsoft\VCExpress\%s' % version,
r'HKLM\Software\Wow6432Node\Microsoft\VCExpress\%s' % version]
for index in range(len(keys)):
path = _RegistryGetValue(keys[index], 'InstallDir')
if not path:
continue
path = _ConvertToCygpath(path)
# Check for full.
full_path = os.path.join(path, 'devenv.exe')
express_path = os.path.join(path, '*express.exe')
if not force_express and os.path.exists(full_path):
# Add this one.
versions.append(_CreateVersion(version_to_year[version],
os.path.join(path, '..', '..')))
# Check for express.
elif glob.glob(express_path):
# Add this one.
versions.append(_CreateVersion(version_to_year[version] + 'e',
os.path.join(path, '..', '..')))
# The old method above does not work when only SDK is installed.
keys = [r'HKLM\Software\Microsoft\VisualStudio\SxS\VC7',
r'HKLM\Software\Wow6432Node\Microsoft\VisualStudio\SxS\VC7']
for index in range(len(keys)):
path = _RegistryGetValue(keys[index], version)
if not path:
continue
path = _ConvertToCygpath(path)
versions.append(_CreateVersion(version_to_year[version] + 'e',
os.path.join(path, '..'), sdk_based=True))
return versions
def SelectVisualStudioVersion(version='auto'):
"""Select which version of Visual Studio projects to generate.
Arguments:
version: Hook to allow caller to force a particular version (vs auto).
Returns:
An object representing a visual studio project format version.
"""
# In auto mode, check environment variable for override.
if version == 'auto':
version = os.environ.get('GYP_MSVS_VERSION', 'auto')
version_map = {
'auto': ('10.0', '12.0', '9.0', '8.0', '11.0'),
'2005': ('8.0',),
'2005e': ('8.0',),
'2008': ('9.0',),
'2008e': ('9.0',),
'2010': ('10.0',),
'2010e': ('10.0',),
'2012': ('11.0',),
'2012e': ('11.0',),
'2013': ('12.0',),
'2013e': ('12.0',),
}
override_path = os.environ.get('GYP_MSVS_OVERRIDE_PATH')
if override_path:
msvs_version = os.environ.get('GYP_MSVS_VERSION')
if not msvs_version:
raise ValueError('GYP_MSVS_OVERRIDE_PATH requires GYP_MSVS_VERSION to be '
'set to a particular version (e.g. 2010e).')
return _CreateVersion(msvs_version, override_path, sdk_based=True)
version = str(version)
versions = _DetectVisualStudioVersions(version_map[version], 'e' in version)
if not versions:
if version == 'auto':
# Default to 2005 if we couldn't find anything
return _CreateVersion('2005', None)
else:
return _CreateVersion(version, None)
return versions[0]
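# Minimal sketch of the detection logic above (only meaningful on a Windows
# host with Visual Studio installed; elsewhere it prints the 2005 fallback).
if __name__ == '__main__':
  _version = SelectVisualStudioVersion()
  print('Using %s (default toolset %s) at %s'
        % (_version.Description(), _version.DefaultToolset(), _version.Path()))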
| agpl-3.0 |
l1ll1/cvl-fabric-launcher | pyinstaller-2.1/PyInstaller/lib/unittest2/case.py | 11 | 43241 | """Test case implementation"""
import sys
import difflib
import pprint
import re
import unittest
import warnings
if sys.version_info[:2] == (2,3):
from sets import Set as set
from sets import ImmutableSet as frozenset
from unittest2 import result
from unittest2.util import\
safe_repr, safe_str, strclass,\
unorderable_list_difference
from unittest2.compatibility import wraps
__unittest = True
DIFF_OMITTED = ('\nDiff is %s characters long. '
'Set self.maxDiff to None to see it.')
class SkipTest(Exception):
"""
Raise this exception in a test to skip it.
Usually you can use TestResult.skip() or one of the skipping decorators
instead of raising this directly.
"""
class _ExpectedFailure(Exception):
"""
Raise this when a test is expected to fail.
This is an implementation detail.
"""
def __init__(self, exc_info):
# can't use super because Python 2.4 exceptions are old style
Exception.__init__(self)
self.exc_info = exc_info
class _UnexpectedSuccess(Exception):
"""
The test was supposed to fail, but it didn't!
"""
def _id(obj):
return obj
def skip(reason):
"""
Unconditionally skip a test.
"""
def decorator(test_item):
if not (isinstance(test_item, type) and issubclass(test_item, TestCase)):
def skip_wrapper(*args, **kwargs):
raise SkipTest(reason)
skip_wrapper = wraps(test_item)(skip_wrapper)
test_item = skip_wrapper
test_item.__unittest_skip__ = True
test_item.__unittest_skip_why__ = reason
return test_item
return decorator
def skipIf(condition, reason):
"""
Skip a test if the condition is true.
"""
if condition:
return skip(reason)
return _id
def skipUnless(condition, reason):
"""
Skip a test unless the condition is true.
"""
if not condition:
return skip(reason)
return _id
def expectedFailure(func):
def wrapper(*args, **kwargs):
try:
func(*args, **kwargs)
except Exception:
raise _ExpectedFailure(sys.exc_info())
raise _UnexpectedSuccess
wrapper = wraps(func)(wrapper)
return wrapper
class _AssertRaisesContext(object):
"""A context manager used to implement TestCase.assertRaises* methods."""
def __init__(self, expected, test_case, expected_regexp=None):
self.expected = expected
self.failureException = test_case.failureException
self.expected_regexp = expected_regexp
def __enter__(self):
return self
def __exit__(self, exc_type, exc_value, tb):
if exc_type is None:
try:
exc_name = self.expected.__name__
except AttributeError:
exc_name = str(self.expected)
raise self.failureException(
"%s not raised" % (exc_name,))
if not issubclass(exc_type, self.expected):
# let unexpected exceptions pass through
return False
self.exception = exc_value # store for later retrieval
if self.expected_regexp is None:
return True
expected_regexp = self.expected_regexp
if isinstance(expected_regexp, basestring):
expected_regexp = re.compile(expected_regexp)
if not expected_regexp.search(str(exc_value)):
raise self.failureException('"%s" does not match "%s"' %
(expected_regexp.pattern, str(exc_value)))
return True
class _TypeEqualityDict(object):
def __init__(self, testcase):
self.testcase = testcase
self._store = {}
def __setitem__(self, key, value):
self._store[key] = value
def __getitem__(self, key):
value = self._store[key]
if isinstance(value, basestring):
return getattr(self.testcase, value)
return value
def get(self, key, default=None):
if key in self._store:
return self[key]
return default
class TestCase(unittest.TestCase):
"""A class whose instances are single test cases.
By default, the test code itself should be placed in a method named
'runTest'.
If the fixture may be used for many test cases, create as
many test methods as are needed. When instantiating such a TestCase
subclass, specify in the constructor arguments the name of the test method
that the instance is to execute.
Test authors should subclass TestCase for their own tests. Construction
and deconstruction of the test's environment ('fixture') can be
implemented by overriding the 'setUp' and 'tearDown' methods respectively.
If it is necessary to override the __init__ method, the base class
__init__ method must always be called. It is important that subclasses
should not change the signature of their __init__ method, since instances
of the classes are instantiated automatically by parts of the framework
in order to be run.
"""
# This attribute determines which exception will be raised when
# the instance's assertion methods fail; test methods raising this
# exception will be deemed to have 'failed' rather than 'errored'
failureException = AssertionError
# This attribute sets the maximum length of a diff in failure messages
# by assert methods using difflib. It is looked up as an instance attribute
# so can be configured by individual tests if required.
maxDiff = 80*8
# This attribute determines whether long messages (including repr of
# objects used in assert methods) will be printed on failure in *addition*
# to any explicit message passed.
longMessage = True
# Attribute used by TestSuite for classSetUp
_classSetupFailed = False
def __init__(self, methodName='runTest'):
"""Create an instance of the class that will use the named test
method when executed. Raises a ValueError if the instance does
not have a method with the specified name.
"""
self._testMethodName = methodName
self._resultForDoCleanups = None
try:
testMethod = getattr(self, methodName)
except AttributeError:
raise ValueError("no such test method in %s: %s" % \
(self.__class__, methodName))
self._testMethodDoc = testMethod.__doc__
self._cleanups = []
# Map types to custom assertEqual functions that will compare
# instances of said type in more detail to generate a more useful
# error message.
self._type_equality_funcs = _TypeEqualityDict(self)
self.addTypeEqualityFunc(dict, 'assertDictEqual')
self.addTypeEqualityFunc(list, 'assertListEqual')
self.addTypeEqualityFunc(tuple, 'assertTupleEqual')
self.addTypeEqualityFunc(set, 'assertSetEqual')
self.addTypeEqualityFunc(frozenset, 'assertSetEqual')
self.addTypeEqualityFunc(unicode, 'assertMultiLineEqual')
def addTypeEqualityFunc(self, typeobj, function):
"""Add a type specific assertEqual style function to compare a type.
This method is for use by TestCase subclasses that need to register
their own type equality functions to provide nicer error messages.
Args:
typeobj: The data type to call this function on when both values
are of the same type in assertEqual().
function: The callable taking two arguments and an optional
msg= argument that raises self.failureException with a
useful error message when the two arguments are not equal.
"""
self._type_equality_funcs[typeobj] = function
def addCleanup(self, function, *args, **kwargs):
"""Add a function, with arguments, to be called when the test is
completed. Functions added are called on a LIFO basis and are
called after tearDown on test failure or success.
Cleanup items are called even if setUp fails (unlike tearDown)."""
self._cleanups.append((function, args, kwargs))
def setUp(self):
"Hook method for setting up the test fixture before exercising it."
def setUpClass(cls):
"Hook method for setting up class fixture before running tests in the class."
setUpClass = classmethod(setUpClass)
def tearDownClass(cls):
"Hook method for deconstructing the class fixture after running all tests in the class."
tearDownClass = classmethod(tearDownClass)
def tearDown(self):
"Hook method for deconstructing the test fixture after testing it."
def countTestCases(self):
return 1
def defaultTestResult(self):
return result.TestResult()
def shortDescription(self):
"""Returns a one-line description of the test, or None if no
description has been provided.
The default implementation of this method returns the first line of
the specified test method's docstring.
"""
doc = self._testMethodDoc
return doc and doc.split("\n")[0].strip() or None
def id(self):
return "%s.%s" % (strclass(self.__class__), self._testMethodName)
def __eq__(self, other):
if type(self) is not type(other):
return NotImplemented
return self._testMethodName == other._testMethodName
def __ne__(self, other):
return not self == other
def __hash__(self):
return hash((type(self), self._testMethodName))
def __str__(self):
return "%s (%s)" % (self._testMethodName, strclass(self.__class__))
def __repr__(self):
return "<%s testMethod=%s>" % \
(strclass(self.__class__), self._testMethodName)
def _addSkip(self, result, reason):
addSkip = getattr(result, 'addSkip', None)
if addSkip is not None:
addSkip(self, reason)
else:
warnings.warn("Use of a TestResult without an addSkip method is deprecated",
DeprecationWarning, 2)
result.addSuccess(self)
def run(self, result=None):
orig_result = result
if result is None:
result = self.defaultTestResult()
startTestRun = getattr(result, 'startTestRun', None)
if startTestRun is not None:
startTestRun()
self._resultForDoCleanups = result
result.startTest(self)
testMethod = getattr(self, self._testMethodName)
if (getattr(self.__class__, "__unittest_skip__", False) or
getattr(testMethod, "__unittest_skip__", False)):
# If the class or method was skipped.
try:
skip_why = (getattr(self.__class__, '__unittest_skip_why__', '')
or getattr(testMethod, '__unittest_skip_why__', ''))
self._addSkip(result, skip_why)
finally:
result.stopTest(self)
return
try:
success = False
try:
self.setUp()
except SkipTest, e:
self._addSkip(result, str(e))
except Exception:
result.addError(self, sys.exc_info())
else:
try:
testMethod()
except self.failureException:
result.addFailure(self, sys.exc_info())
except _ExpectedFailure, e:
addExpectedFailure = getattr(result, 'addExpectedFailure', None)
if addExpectedFailure is not None:
addExpectedFailure(self, e.exc_info)
else:
warnings.warn("Use of a TestResult without an addExpectedFailure method is deprecated",
DeprecationWarning)
result.addSuccess(self)
except _UnexpectedSuccess:
addUnexpectedSuccess = getattr(result, 'addUnexpectedSuccess', None)
if addUnexpectedSuccess is not None:
addUnexpectedSuccess(self)
else:
warnings.warn("Use of a TestResult without an addUnexpectedSuccess method is deprecated",
DeprecationWarning)
result.addFailure(self, sys.exc_info())
except SkipTest, e:
self._addSkip(result, str(e))
except Exception:
result.addError(self, sys.exc_info())
else:
success = True
try:
self.tearDown()
except Exception:
result.addError(self, sys.exc_info())
success = False
cleanUpSuccess = self.doCleanups()
success = success and cleanUpSuccess
if success:
result.addSuccess(self)
finally:
result.stopTest(self)
if orig_result is None:
stopTestRun = getattr(result, 'stopTestRun', None)
if stopTestRun is not None:
stopTestRun()
def doCleanups(self):
"""Execute all cleanup functions. Normally called for you after
tearDown."""
result = self._resultForDoCleanups
ok = True
while self._cleanups:
function, args, kwargs = self._cleanups.pop(-1)
try:
function(*args, **kwargs)
except Exception:
ok = False
result.addError(self, sys.exc_info())
return ok
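    # Sketch of the LIFO cleanup contract (hypothetical test body):
    #   self.addCleanup(log.append, 'added first')
    #   self.addCleanup(log.append, 'added second')
    #   # after the test ran: log == ['added second', 'added first']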
def __call__(self, *args, **kwds):
return self.run(*args, **kwds)
def debug(self):
"""Run the test without collecting errors in a TestResult"""
self.setUp()
getattr(self, self._testMethodName)()
self.tearDown()
while self._cleanups:
function, args, kwargs = self._cleanups.pop(-1)
function(*args, **kwargs)
def skipTest(self, reason):
"""Skip this test."""
raise SkipTest(reason)
def fail(self, msg=None):
"""Fail immediately, with the given message."""
raise self.failureException(msg)
def assertFalse(self, expr, msg=None):
"Fail the test if the expression is true."
if expr:
msg = self._formatMessage(msg, "%s is not False" % safe_repr(expr))
raise self.failureException(msg)
def assertTrue(self, expr, msg=None):
"""Fail the test unless the expression is true."""
if not expr:
msg = self._formatMessage(msg, "%s is not True" % safe_repr(expr))
raise self.failureException(msg)
def _formatMessage(self, msg, standardMsg):
"""Honour the longMessage attribute when generating failure messages.
If longMessage is False this means:
* Use only an explicit message if it is provided
* Otherwise use the standard message for the assert
If longMessage is True:
* Use the standard message
* If an explicit message is provided, plus ' : ' and the explicit message
"""
if not self.longMessage:
return msg or standardMsg
if msg is None:
return standardMsg
try:
return '%s : %s' % (standardMsg, msg)
except UnicodeDecodeError:
return '%s : %s' % (safe_str(standardMsg), safe_str(msg))
def assertRaises(self, excClass, callableObj=None, *args, **kwargs):
"""Fail unless an exception of class excClass is thrown
by callableObj when invoked with arguments args and keyword
arguments kwargs. If a different type of exception is
thrown, it will not be caught, and the test case will be
deemed to have suffered an error, exactly as for an
unexpected exception.
If called with callableObj omitted or None, will return a
context object used like this::
with self.assertRaises(SomeException):
do_something()
The context manager keeps a reference to the exception as
the 'exception' attribute. This allows you to inspect the
exception after the assertion::
with self.assertRaises(SomeException) as cm:
do_something()
the_exception = cm.exception
self.assertEqual(the_exception.error_code, 3)
"""
if callableObj is None:
return _AssertRaisesContext(excClass, self)
try:
callableObj(*args, **kwargs)
except excClass:
return
if hasattr(excClass,'__name__'):
excName = excClass.__name__
else:
excName = str(excClass)
raise self.failureException, "%s not raised" % excName
def _getAssertEqualityFunc(self, first, second):
"""Get a detailed comparison function for the types of the two args.
Returns: A callable accepting (first, second, msg=None) that will
raise a failure exception if first != second with a useful human
readable error message for those types.
"""
#
# NOTE(gregory.p.smith): I considered isinstance(first, type(second))
# and vice versa. I opted for the conservative approach in case
# subclasses are not intended to be compared in detail to their super
# class instances using a type equality func. This means testing
# subtypes won't automagically use the detailed comparison. Callers
# should use their type specific assertSpamEqual method to compare
# subclasses if the detailed comparison is desired and appropriate.
# See the discussion in http://bugs.python.org/issue2578.
#
if type(first) is type(second):
asserter = self._type_equality_funcs.get(type(first))
if asserter is not None:
return asserter
return self._baseAssertEqual
def _baseAssertEqual(self, first, second, msg=None):
"""The default assertEqual implementation, not type specific."""
if not first == second:
standardMsg = '%s != %s' % (safe_repr(first), safe_repr(second))
msg = self._formatMessage(msg, standardMsg)
raise self.failureException(msg)
def assertEqual(self, first, second, msg=None):
"""Fail if the two objects are unequal as determined by the '=='
operator.
"""
assertion_func = self._getAssertEqualityFunc(first, second)
assertion_func(first, second, msg=msg)
def assertNotEqual(self, first, second, msg=None):
"""Fail if the two objects are equal as determined by the '=='
operator.
"""
if not first != second:
msg = self._formatMessage(msg, '%s == %s' % (safe_repr(first),
safe_repr(second)))
raise self.failureException(msg)
def assertAlmostEqual(self, first, second, places=None, msg=None, delta=None):
"""Fail if the two objects are unequal as determined by their
difference rounded to the given number of decimal places
           (default 7) and comparing to zero, or by comparing that the
           difference between the two objects is more than the given delta.
           Note that decimal places (from zero) are usually not the same
           as significant digits (measured from the most significant digit).
If the two objects compare equal then they will automatically
compare almost equal.
"""
if first == second:
# shortcut
return
if delta is not None and places is not None:
raise TypeError("specify delta or places not both")
if delta is not None:
if abs(first - second) <= delta:
return
standardMsg = '%s != %s within %s delta' % (safe_repr(first),
safe_repr(second),
safe_repr(delta))
else:
if places is None:
places = 7
if round(abs(second-first), places) == 0:
return
standardMsg = '%s != %s within %r places' % (safe_repr(first),
safe_repr(second),
places)
msg = self._formatMessage(msg, standardMsg)
raise self.failureException(msg)
def assertNotAlmostEqual(self, first, second, places=None, msg=None, delta=None):
"""Fail if the two objects are equal as determined by their
difference rounded to the given number of decimal places
           (default 7) and comparing to zero, or by comparing that the
           difference between the two objects is less than the given delta.
           Note that decimal places (from zero) are usually not the same
           as significant digits (measured from the most significant digit).
Objects that are equal automatically fail.
"""
if delta is not None and places is not None:
raise TypeError("specify delta or places not both")
if delta is not None:
if not (first == second) and abs(first - second) > delta:
return
standardMsg = '%s == %s within %s delta' % (safe_repr(first),
safe_repr(second),
safe_repr(delta))
else:
if places is None:
places = 7
if not (first == second) and round(abs(second-first), places) != 0:
return
standardMsg = '%s == %s within %r places' % (safe_repr(first),
safe_repr(second),
places)
msg = self._formatMessage(msg, standardMsg)
raise self.failureException(msg)
# Synonyms for assertion methods
# The plurals are undocumented. Keep them that way to discourage use.
# Do not add more. Do not remove.
# Going through a deprecation cycle on these would annoy many people.
assertEquals = assertEqual
assertNotEquals = assertNotEqual
assertAlmostEquals = assertAlmostEqual
assertNotAlmostEquals = assertNotAlmostEqual
assert_ = assertTrue
# These fail* assertion method names are pending deprecation and will
# be a DeprecationWarning in 3.2; http://bugs.python.org/issue2578
def _deprecate(original_func):
def deprecated_func(*args, **kwargs):
warnings.warn(
('Please use %s instead.' % original_func.__name__),
PendingDeprecationWarning, 2)
return original_func(*args, **kwargs)
return deprecated_func
failUnlessEqual = _deprecate(assertEqual)
failIfEqual = _deprecate(assertNotEqual)
failUnlessAlmostEqual = _deprecate(assertAlmostEqual)
failIfAlmostEqual = _deprecate(assertNotAlmostEqual)
failUnless = _deprecate(assertTrue)
failUnlessRaises = _deprecate(assertRaises)
failIf = _deprecate(assertFalse)
def assertSequenceEqual(self, seq1, seq2,
msg=None, seq_type=None, max_diff=80*8):
"""An equality assertion for ordered sequences (like lists and tuples).
For the purposes of this function, a valid ordered sequence type is one
which can be indexed, has a length, and has an equality operator.
Args:
seq1: The first sequence to compare.
seq2: The second sequence to compare.
seq_type: The expected datatype of the sequences, or None if no
datatype should be enforced.
msg: Optional message to use on failure instead of a list of
differences.
            max_diff: Maximum size of the diff; larger diffs are not shown.
"""
if seq_type is not None:
seq_type_name = seq_type.__name__
if not isinstance(seq1, seq_type):
raise self.failureException('First sequence is not a %s: %s'
% (seq_type_name, safe_repr(seq1)))
if not isinstance(seq2, seq_type):
raise self.failureException('Second sequence is not a %s: %s'
% (seq_type_name, safe_repr(seq2)))
else:
seq_type_name = "sequence"
differing = None
try:
len1 = len(seq1)
except (TypeError, NotImplementedError):
differing = 'First %s has no length. Non-sequence?' % (
seq_type_name)
if differing is None:
try:
len2 = len(seq2)
except (TypeError, NotImplementedError):
differing = 'Second %s has no length. Non-sequence?' % (
seq_type_name)
if differing is None:
if seq1 == seq2:
return
seq1_repr = repr(seq1)
seq2_repr = repr(seq2)
if len(seq1_repr) > 30:
seq1_repr = seq1_repr[:30] + '...'
if len(seq2_repr) > 30:
seq2_repr = seq2_repr[:30] + '...'
elements = (seq_type_name.capitalize(), seq1_repr, seq2_repr)
differing = '%ss differ: %s != %s\n' % elements
for i in xrange(min(len1, len2)):
try:
item1 = seq1[i]
except (TypeError, IndexError, NotImplementedError):
differing += ('\nUnable to index element %d of first %s\n' %
(i, seq_type_name))
break
try:
item2 = seq2[i]
except (TypeError, IndexError, NotImplementedError):
differing += ('\nUnable to index element %d of second %s\n' %
(i, seq_type_name))
break
if item1 != item2:
differing += ('\nFirst differing element %d:\n%s\n%s\n' %
(i, item1, item2))
break
else:
if (len1 == len2 and seq_type is None and
type(seq1) != type(seq2)):
# The sequences are the same, but have differing types.
return
if len1 > len2:
differing += ('\nFirst %s contains %d additional '
'elements.\n' % (seq_type_name, len1 - len2))
try:
differing += ('First extra element %d:\n%s\n' %
(len2, seq1[len2]))
except (TypeError, IndexError, NotImplementedError):
differing += ('Unable to index element %d '
'of first %s\n' % (len2, seq_type_name))
elif len1 < len2:
differing += ('\nSecond %s contains %d additional '
'elements.\n' % (seq_type_name, len2 - len1))
try:
                    differing += ('Second extra element %d:\n%s\n' %
                                  (len1, seq2[len1]))
except (TypeError, IndexError, NotImplementedError):
differing += ('Unable to index element %d '
'of second %s\n' % (len1, seq_type_name))
standardMsg = differing
diffMsg = '\n' + '\n'.join(
difflib.ndiff(pprint.pformat(seq1).splitlines(),
pprint.pformat(seq2).splitlines()))
standardMsg = self._truncateMessage(standardMsg, diffMsg)
msg = self._formatMessage(msg, standardMsg)
self.fail(msg)
def _truncateMessage(self, message, diff):
max_diff = self.maxDiff
if max_diff is None or len(diff) <= max_diff:
return message + diff
return message + (DIFF_OMITTED % len(diff))
def assertListEqual(self, list1, list2, msg=None):
"""A list-specific equality assertion.
Args:
list1: The first list to compare.
list2: The second list to compare.
msg: Optional message to use on failure instead of a list of
differences.
"""
self.assertSequenceEqual(list1, list2, msg, seq_type=list)
def assertTupleEqual(self, tuple1, tuple2, msg=None):
"""A tuple-specific equality assertion.
Args:
tuple1: The first tuple to compare.
tuple2: The second tuple to compare.
msg: Optional message to use on failure instead of a list of
differences.
"""
self.assertSequenceEqual(tuple1, tuple2, msg, seq_type=tuple)
def assertSetEqual(self, set1, set2, msg=None):
"""A set-specific equality assertion.
Args:
set1: The first set to compare.
set2: The second set to compare.
msg: Optional message to use on failure instead of a list of
differences.
        assertSetEqual uses duck typing to support
different types of sets, and is optimized for sets specifically
(parameters must support a difference method).
"""
try:
difference1 = set1.difference(set2)
except TypeError, e:
self.fail('invalid type when attempting set difference: %s' % e)
except AttributeError, e:
self.fail('first argument does not support set difference: %s' % e)
try:
difference2 = set2.difference(set1)
except TypeError, e:
self.fail('invalid type when attempting set difference: %s' % e)
except AttributeError, e:
self.fail('second argument does not support set difference: %s' % e)
if not (difference1 or difference2):
return
lines = []
if difference1:
lines.append('Items in the first set but not the second:')
for item in difference1:
lines.append(repr(item))
if difference2:
lines.append('Items in the second set but not the first:')
for item in difference2:
lines.append(repr(item))
standardMsg = '\n'.join(lines)
self.fail(self._formatMessage(msg, standardMsg))
def assertIn(self, member, container, msg=None):
"""Just like self.assertTrue(a in b), but with a nicer default message."""
if member not in container:
standardMsg = '%s not found in %s' % (safe_repr(member),
safe_repr(container))
self.fail(self._formatMessage(msg, standardMsg))
def assertNotIn(self, member, container, msg=None):
"""Just like self.assertTrue(a not in b), but with a nicer default message."""
if member in container:
standardMsg = '%s unexpectedly found in %s' % (safe_repr(member),
safe_repr(container))
self.fail(self._formatMessage(msg, standardMsg))
def assertIs(self, expr1, expr2, msg=None):
"""Just like self.assertTrue(a is b), but with a nicer default message."""
if expr1 is not expr2:
standardMsg = '%s is not %s' % (safe_repr(expr1), safe_repr(expr2))
self.fail(self._formatMessage(msg, standardMsg))
def assertIsNot(self, expr1, expr2, msg=None):
"""Just like self.assertTrue(a is not b), but with a nicer default message."""
if expr1 is expr2:
standardMsg = 'unexpectedly identical: %s' % (safe_repr(expr1),)
self.fail(self._formatMessage(msg, standardMsg))
def assertDictEqual(self, d1, d2, msg=None):
self.assert_(isinstance(d1, dict), 'First argument is not a dictionary')
self.assert_(isinstance(d2, dict), 'Second argument is not a dictionary')
if d1 != d2:
standardMsg = '%s != %s' % (safe_repr(d1, True), safe_repr(d2, True))
diff = ('\n' + '\n'.join(difflib.ndiff(
pprint.pformat(d1).splitlines(),
pprint.pformat(d2).splitlines())))
standardMsg = self._truncateMessage(standardMsg, diff)
self.fail(self._formatMessage(msg, standardMsg))
def assertDictContainsSubset(self, expected, actual, msg=None):
"""Checks whether actual is a superset of expected."""
missing = []
mismatched = []
for key, value in expected.iteritems():
if key not in actual:
missing.append(key)
else:
try:
are_equal = (value == actual[key])
except UnicodeDecodeError:
are_equal = False
if not are_equal:
mismatched.append('%s, expected: %s, actual: %s' %
(safe_repr(key), safe_repr(value),
safe_repr(actual[key])))
if not (missing or mismatched):
return
standardMsg = ''
if missing:
standardMsg = 'Missing: %s' % ','.join([safe_repr(m) for m in
missing])
if mismatched:
if standardMsg:
standardMsg += '; '
standardMsg += 'Mismatched values: %s' % ','.join(mismatched)
self.fail(self._formatMessage(msg, standardMsg))
def assertItemsEqual(self, expected_seq, actual_seq, msg=None):
"""An unordered sequence specific comparison. It asserts that
expected_seq and actual_seq contain the same elements. It is
the equivalent of::
self.assertEqual(sorted(expected_seq), sorted(actual_seq))
Raises with an error message listing which elements of expected_seq
are missing from actual_seq and vice versa if any.
Asserts that each element has the same count in both sequences.
Example:
- [0, 1, 1] and [1, 0, 1] compare equal.
- [0, 0, 1] and [0, 1] compare unequal.
"""
try:
expected = expected_seq[:]
expected.sort()
actual = actual_seq[:]
actual.sort()
except TypeError:
# Unsortable items (example: set(), complex(), ...)
expected = list(expected_seq)
actual = list(actual_seq)
missing, unexpected = unorderable_list_difference(
expected, actual, ignore_duplicate=False
)
else:
return self.assertSequenceEqual(expected, actual, msg=msg)
errors = []
if missing:
errors.append('Expected, but missing:\n %s' %
safe_repr(missing))
if unexpected:
errors.append('Unexpected, but present:\n %s' %
safe_repr(unexpected))
if errors:
standardMsg = '\n'.join(errors)
self.fail(self._formatMessage(msg, standardMsg))
def assertMultiLineEqual(self, first, second, msg=None):
"""Assert that two multi-line strings are equal."""
self.assert_(isinstance(first, basestring), (
'First argument is not a string'))
self.assert_(isinstance(second, basestring), (
'Second argument is not a string'))
if first != second:
standardMsg = '%s != %s' % (safe_repr(first, True), safe_repr(second, True))
diff = '\n' + ''.join(difflib.ndiff(first.splitlines(True),
second.splitlines(True)))
standardMsg = self._truncateMessage(standardMsg, diff)
self.fail(self._formatMessage(msg, standardMsg))
def assertLess(self, a, b, msg=None):
"""Just like self.assertTrue(a < b), but with a nicer default message."""
if not a < b:
standardMsg = '%s not less than %s' % (safe_repr(a), safe_repr(b))
self.fail(self._formatMessage(msg, standardMsg))
def assertLessEqual(self, a, b, msg=None):
"""Just like self.assertTrue(a <= b), but with a nicer default message."""
if not a <= b:
standardMsg = '%s not less than or equal to %s' % (safe_repr(a), safe_repr(b))
self.fail(self._formatMessage(msg, standardMsg))
def assertGreater(self, a, b, msg=None):
"""Just like self.assertTrue(a > b), but with a nicer default message."""
if not a > b:
standardMsg = '%s not greater than %s' % (safe_repr(a), safe_repr(b))
self.fail(self._formatMessage(msg, standardMsg))
def assertGreaterEqual(self, a, b, msg=None):
"""Just like self.assertTrue(a >= b), but with a nicer default message."""
if not a >= b:
standardMsg = '%s not greater than or equal to %s' % (safe_repr(a), safe_repr(b))
self.fail(self._formatMessage(msg, standardMsg))
def assertIsNone(self, obj, msg=None):
"""Same as self.assertTrue(obj is None), with a nicer default message."""
if obj is not None:
standardMsg = '%s is not None' % (safe_repr(obj),)
self.fail(self._formatMessage(msg, standardMsg))
def assertIsNotNone(self, obj, msg=None):
"""Included for symmetry with assertIsNone."""
if obj is None:
standardMsg = 'unexpectedly None'
self.fail(self._formatMessage(msg, standardMsg))
def assertIsInstance(self, obj, cls, msg=None):
"""Same as self.assertTrue(isinstance(obj, cls)), with a nicer
default message."""
if not isinstance(obj, cls):
standardMsg = '%s is not an instance of %r' % (safe_repr(obj), cls)
self.fail(self._formatMessage(msg, standardMsg))
def assertNotIsInstance(self, obj, cls, msg=None):
"""Included for symmetry with assertIsInstance."""
if isinstance(obj, cls):
standardMsg = '%s is an instance of %r' % (safe_repr(obj), cls)
self.fail(self._formatMessage(msg, standardMsg))
def assertRaisesRegexp(self, expected_exception, expected_regexp,
callable_obj=None, *args, **kwargs):
"""Asserts that the message in a raised exception matches a regexp.
Args:
expected_exception: Exception class expected to be raised.
expected_regexp: Regexp (re pattern object or string) expected
to be found in error message.
callable_obj: Function to be called.
args: Extra args.
kwargs: Extra kwargs.
"""
if callable_obj is None:
return _AssertRaisesContext(expected_exception, self, expected_regexp)
try:
callable_obj(*args, **kwargs)
except expected_exception, exc_value:
if isinstance(expected_regexp, basestring):
expected_regexp = re.compile(expected_regexp)
if not expected_regexp.search(str(exc_value)):
raise self.failureException('"%s" does not match "%s"' %
(expected_regexp.pattern, str(exc_value)))
else:
if hasattr(expected_exception, '__name__'):
excName = expected_exception.__name__
else:
excName = str(expected_exception)
raise self.failureException, "%s not raised" % excName
def assertRegexpMatches(self, text, expected_regexp, msg=None):
"""Fail the test unless the text matches the regular expression."""
if isinstance(expected_regexp, basestring):
expected_regexp = re.compile(expected_regexp)
if not expected_regexp.search(text):
msg = msg or "Regexp didn't match"
msg = '%s: %r not found in %r' % (msg, expected_regexp.pattern, text)
raise self.failureException(msg)
def assertNotRegexpMatches(self, text, unexpected_regexp, msg=None):
"""Fail the test if the text matches the regular expression."""
if isinstance(unexpected_regexp, basestring):
unexpected_regexp = re.compile(unexpected_regexp)
match = unexpected_regexp.search(text)
if match:
msg = msg or "Regexp matched"
msg = '%s: %r matches %r in %r' % (msg,
text[match.start():match.end()],
unexpected_regexp.pattern,
text)
raise self.failureException(msg)
class FunctionTestCase(TestCase):
"""A test case that wraps a test function.
This is useful for slipping pre-existing test functions into the
unittest framework. Optionally, set-up and tidy-up functions can be
supplied. As with TestCase, the tidy-up ('tearDown') function will
always be called if the set-up ('setUp') function ran successfully.
"""
def __init__(self, testFunc, setUp=None, tearDown=None, description=None):
super(FunctionTestCase, self).__init__()
self._setUpFunc = setUp
self._tearDownFunc = tearDown
self._testFunc = testFunc
self._description = description
def setUp(self):
if self._setUpFunc is not None:
self._setUpFunc()
def tearDown(self):
if self._tearDownFunc is not None:
self._tearDownFunc()
def runTest(self):
self._testFunc()
def id(self):
return self._testFunc.__name__
def __eq__(self, other):
if not isinstance(other, self.__class__):
return NotImplemented
return self._setUpFunc == other._setUpFunc and \
self._tearDownFunc == other._tearDownFunc and \
self._testFunc == other._testFunc and \
self._description == other._description
def __ne__(self, other):
return not self == other
def __hash__(self):
return hash((type(self), self._setUpFunc, self._tearDownFunc,
self._testFunc, self._description))
def __str__(self):
return "%s (%s)" % (strclass(self.__class__),
self._testFunc.__name__)
def __repr__(self):
return "<%s testFunc=%s>" % (strclass(self.__class__),
self._testFunc)
def shortDescription(self):
if self._description is not None:
return self._description
doc = self._testFunc.__doc__
return doc and doc.split("\n")[0].strip() or None
| gpl-3.0 |
mgenereu/linux | tools/perf/scripts/python/sctop.py | 1996 | 2102 | # system call top
# (c) 2010, Tom Zanussi <[email protected]>
# Licensed under the terms of the GNU GPL License version 2
#
# Periodically displays system-wide system call totals, broken down by
# syscall. If a [comm] arg is specified, only syscalls called by
# [comm] are displayed. If an [interval] arg is specified, the display
# will be refreshed every [interval] seconds. The default interval is
# 3 seconds.
import os, sys, thread, time
sys.path.append(os.environ['PERF_EXEC_PATH'] + \
'/scripts/python/Perf-Trace-Util/lib/Perf/Trace')
from perf_trace_context import *
from Core import *
from Util import *
usage = "perf script -s sctop.py [comm] [interval]\n";
for_comm = None
default_interval = 3
interval = default_interval
if len(sys.argv) > 3:
sys.exit(usage)
if len(sys.argv) > 2:
for_comm = sys.argv[1]
interval = int(sys.argv[2])
elif len(sys.argv) > 1:
try:
interval = int(sys.argv[1])
except ValueError:
for_comm = sys.argv[1]
interval = default_interval
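# Example invocations (illustrative, matching the argument parsing above):
#   perf script -s sctop.py              # all comms, default 3 second refresh
#   perf script -s sctop.py 5            # all comms, 5 second refresh
#   perf script -s sctop.py firefox 5    # only "firefox", 5 second refresh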
syscalls = autodict()
def trace_begin():
thread.start_new_thread(print_syscall_totals, (interval,))
pass
def raw_syscalls__sys_enter(event_name, context, common_cpu,
common_secs, common_nsecs, common_pid, common_comm,
common_callchain, id, args):
if for_comm is not None:
if common_comm != for_comm:
return
try:
syscalls[id] += 1
except TypeError:
syscalls[id] = 1
def syscalls__sys_enter(event_name, context, common_cpu,
common_secs, common_nsecs, common_pid, common_comm,
id, args):
raw_syscalls__sys_enter(**locals())
def print_syscall_totals(interval):
while 1:
clear_term()
if for_comm is not None:
print "\nsyscall events for %s:\n\n" % (for_comm),
else:
print "\nsyscall events:\n\n",
print "%-40s %10s\n" % ("event", "count"),
print "%-40s %10s\n" % ("----------------------------------------", \
"----------"),
for id, val in sorted(syscalls.iteritems(), key = lambda(k, v): (v, k), \
reverse = True):
try:
print "%-40s %10d\n" % (syscall_name(id), val),
except TypeError:
pass
syscalls.clear()
time.sleep(interval)
| gpl-2.0 |
achang97/YouTunes | lib/python2.7/site-packages/werkzeug/contrib/fixers.py | 104 | 10179 | # -*- coding: utf-8 -*-
"""
werkzeug.contrib.fixers
~~~~~~~~~~~~~~~~~~~~~~~
.. versionadded:: 0.5
This module includes various helpers that fix bugs in web servers. They may
be necessary for some versions of a buggy web server but not others. We try
    to stay updated with the status of the bugs as well as possible, but you have
to make sure whether they fix the problem you encounter.
If you notice bugs in webservers not fixed in this module consider
contributing a patch.
:copyright: Copyright 2009 by the Werkzeug Team, see AUTHORS for more details.
:license: BSD, see LICENSE for more details.
"""
try:
from urllib import unquote
except ImportError:
from urllib.parse import unquote
from werkzeug.http import parse_options_header, parse_cache_control_header, \
parse_set_header
from werkzeug.useragents import UserAgent
from werkzeug.datastructures import Headers, ResponseCacheControl
class CGIRootFix(object):
"""Wrap the application in this middleware if you are using FastCGI or CGI
and you have problems with your app root being set to the cgi script's path
instead of the path users are going to visit
.. versionchanged:: 0.9
Added `app_root` parameter and renamed from `LighttpdCGIRootFix`.
:param app: the WSGI application
:param app_root: Defaulting to ``'/'``, you can set this to something else
if your app is mounted somewhere else.
"""
def __init__(self, app, app_root='/'):
self.app = app
self.app_root = app_root
def __call__(self, environ, start_response):
# only set PATH_INFO for older versions of Lighty or if no
# server software is provided. That's because the test was
# added in newer Werkzeug versions and we don't want to break
# people's code if they are using this fixer in a test that
# does not set the SERVER_SOFTWARE key.
if 'SERVER_SOFTWARE' not in environ or \
environ['SERVER_SOFTWARE'] < 'lighttpd/1.4.28':
environ['PATH_INFO'] = environ.get('SCRIPT_NAME', '') + \
environ.get('PATH_INFO', '')
environ['SCRIPT_NAME'] = self.app_root.strip('/')
return self.app(environ, start_response)
# backwards compatibility
LighttpdCGIRootFix = CGIRootFix
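# Usage sketch (`application` is an illustrative name for your WSGI callable):
#
#     application = CGIRootFix(application, app_root='/myapp')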
class PathInfoFromRequestUriFix(object):
"""On windows environment variables are limited to the system charset
which makes it impossible to store the `PATH_INFO` variable in the
environment without loss of information on some systems.
This is for example a problem for CGI scripts on a Windows Apache.
This fixer works by recreating the `PATH_INFO` from `REQUEST_URI`,
`REQUEST_URL`, or `UNENCODED_URL` (whatever is available). Thus the
fix can only be applied if the webserver supports either of these
variables.
:param app: the WSGI application
"""
def __init__(self, app):
self.app = app
def __call__(self, environ, start_response):
for key in 'REQUEST_URL', 'REQUEST_URI', 'UNENCODED_URL':
if key not in environ:
continue
request_uri = unquote(environ[key])
script_name = unquote(environ.get('SCRIPT_NAME', ''))
if request_uri.startswith(script_name):
environ['PATH_INFO'] = request_uri[len(script_name):] \
.split('?', 1)[0]
break
return self.app(environ, start_response)
class ProxyFix(object):
"""This middleware can be applied to add HTTP proxy support to an
application that was not designed with HTTP proxies in mind. It
sets `REMOTE_ADDR`, `HTTP_HOST` from `X-Forwarded` headers. While
Werkzeug-based applications already can use
:py:func:`werkzeug.wsgi.get_host` to retrieve the current host even if
behind proxy setups, this middleware can be used for applications which
access the WSGI environment directly.
If you have more than one proxy server in front of your app, set
`num_proxies` accordingly.
Do not use this middleware in non-proxy setups for security reasons.
The original values of `REMOTE_ADDR` and `HTTP_HOST` are stored in
the WSGI environment as `werkzeug.proxy_fix.orig_remote_addr` and
`werkzeug.proxy_fix.orig_http_host`.
:param app: the WSGI application
:param num_proxies: the number of proxy servers in front of the app.
"""
def __init__(self, app, num_proxies=1):
self.app = app
self.num_proxies = num_proxies
def get_remote_addr(self, forwarded_for):
"""Selects the new remote addr from the given list of ips in
X-Forwarded-For. By default it picks the one that the `num_proxies`
proxy server provides. Before 0.9 it would always pick the first.
.. versionadded:: 0.8
"""
if len(forwarded_for) >= self.num_proxies:
return forwarded_for[-self.num_proxies]
def __call__(self, environ, start_response):
getter = environ.get
forwarded_proto = getter('HTTP_X_FORWARDED_PROTO', '')
forwarded_for = getter('HTTP_X_FORWARDED_FOR', '').split(',')
forwarded_host = getter('HTTP_X_FORWARDED_HOST', '')
environ.update({
'werkzeug.proxy_fix.orig_wsgi_url_scheme': getter('wsgi.url_scheme'),
'werkzeug.proxy_fix.orig_remote_addr': getter('REMOTE_ADDR'),
'werkzeug.proxy_fix.orig_http_host': getter('HTTP_HOST')
})
forwarded_for = [x for x in [x.strip() for x in forwarded_for] if x]
remote_addr = self.get_remote_addr(forwarded_for)
if remote_addr is not None:
environ['REMOTE_ADDR'] = remote_addr
if forwarded_host:
environ['HTTP_HOST'] = forwarded_host
if forwarded_proto:
environ['wsgi.url_scheme'] = forwarded_proto
return self.app(environ, start_response)
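# Usage sketch (`application` is an illustrative name for your WSGI callable):
#
#     application = ProxyFix(application, num_proxies=2)  # app behind two proxies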
class HeaderRewriterFix(object):
"""This middleware can remove response headers and add others. This
is for example useful to remove the `Date` header from responses if you
are using a server that adds that header, no matter if it's present or
not or to add `X-Powered-By` headers::
app = HeaderRewriterFix(app, remove_headers=['Date'],
add_headers=[('X-Powered-By', 'WSGI')])
:param app: the WSGI application
:param remove_headers: a sequence of header keys that should be
removed.
:param add_headers: a sequence of ``(key, value)`` tuples that should
be added.
"""
def __init__(self, app, remove_headers=None, add_headers=None):
self.app = app
self.remove_headers = set(x.lower() for x in (remove_headers or ()))
self.add_headers = list(add_headers or ())
def __call__(self, environ, start_response):
def rewriting_start_response(status, headers, exc_info=None):
new_headers = []
for key, value in headers:
if key.lower() not in self.remove_headers:
new_headers.append((key, value))
new_headers += self.add_headers
return start_response(status, new_headers, exc_info)
return self.app(environ, rewriting_start_response)
class InternetExplorerFix(object):
"""This middleware fixes a couple of bugs with Microsoft Internet
Explorer. Currently the following fixes are applied:
- removing of `Vary` headers for unsupported mimetypes which
causes troubles with caching. Can be disabled by passing
``fix_vary=False`` to the constructor.
see: http://support.microsoft.com/kb/824847/en-us
- removes offending headers to work around caching bugs in
Internet Explorer if `Content-Disposition` is set. Can be
disabled by passing ``fix_attach=False`` to the constructor.
If it does not detect affected Internet Explorer versions it won't touch
the request / response.
"""
# This code was inspired by Django fixers for the same bugs. The
# fix_vary and fix_attach fixers were originally implemented in Django
# by Michael Axiak and is available as part of the Django project:
# http://code.djangoproject.com/ticket/4148
def __init__(self, app, fix_vary=True, fix_attach=True):
self.app = app
self.fix_vary = fix_vary
self.fix_attach = fix_attach
def fix_headers(self, environ, headers, status=None):
if self.fix_vary:
header = headers.get('content-type', '')
mimetype, options = parse_options_header(header)
if mimetype not in ('text/html', 'text/plain', 'text/sgml'):
headers.pop('vary', None)
if self.fix_attach and 'content-disposition' in headers:
pragma = parse_set_header(headers.get('pragma', ''))
pragma.discard('no-cache')
header = pragma.to_header()
if not header:
headers.pop('pragma', '')
else:
headers['Pragma'] = header
header = headers.get('cache-control', '')
if header:
cc = parse_cache_control_header(header,
cls=ResponseCacheControl)
cc.no_cache = None
cc.no_store = False
header = cc.to_header()
if not header:
headers.pop('cache-control', '')
else:
headers['Cache-Control'] = header
def run_fixed(self, environ, start_response):
def fixing_start_response(status, headers, exc_info=None):
headers = Headers(headers)
self.fix_headers(environ, headers, status)
return start_response(status, headers.to_wsgi_list(), exc_info)
return self.app(environ, fixing_start_response)
def __call__(self, environ, start_response):
ua = UserAgent(environ)
if ua.browser != 'msie':
return self.app(environ, start_response)
return self.run_fixed(environ, start_response)
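# Usage sketch (illustrative; both fixes are enabled by default):
#
#     application = InternetExplorerFix(application, fix_vary=True, fix_attach=True)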
| mit |
OptiPop/external_chromium_org | tools/telemetry/telemetry/core/browser_credentials_unittest.py | 47 | 2272 | # Copyright 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import os
import tempfile
import unittest
from telemetry.core import browser_credentials
SIMPLE_CREDENTIALS_STRING = """
{
"google": {
"username": "example",
"password": "asdf"
}
}
"""
class BackendStub(object):
def __init__(self, credentials_type):
self.login_needed_called = None
self.login_no_longer_needed_called = None
self.credentials_type = credentials_type
def LoginNeeded(self, config, _, tab):
self.login_needed_called = (config, tab)
return True
def LoginNoLongerNeeded(self, tab):
self.login_no_longer_needed_called = (tab, )
class TestBrowserCredentials(unittest.TestCase):
def testCredentialsInfrastructure(self):
google_backend = BackendStub("google")
othersite_backend = BackendStub("othersite")
browser_cred = browser_credentials.BrowserCredentials(
[google_backend,
othersite_backend])
try:
with tempfile.NamedTemporaryFile(delete=False) as f:
f.write(SIMPLE_CREDENTIALS_STRING)
browser_cred.credentials_path = f.name
      # Should be true because it has a password and a backend.
self.assertTrue(browser_cred.CanLogin('google'))
      # Should be false because it has no password.
self.assertFalse(browser_cred.CanLogin('othersite'))
# Should fail because it has no backend.
self.assertRaises(
Exception,
lambda: browser_cred.CanLogin('foobar'))
tab = {}
ret = browser_cred.LoginNeeded(tab, 'google')
self.assertTrue(ret)
self.assertTrue(google_backend.login_needed_called is not None)
self.assertEqual(tab, google_backend.login_needed_called[0])
self.assertEqual("example",
google_backend.login_needed_called[1]["username"])
self.assertEqual("asdf",
google_backend.login_needed_called[1]["password"])
browser_cred.LoginNoLongerNeeded(tab, 'google')
self.assertTrue(google_backend.login_no_longer_needed_called is not None)
self.assertEqual(tab, google_backend.login_no_longer_needed_called[0])
finally:
os.remove(f.name)
| bsd-3-clause |
sbidoul/buildbot | worker/buildbot_worker/compat.py | 9 | 2329 | # coding=utf-8
# This file is part of Buildbot. Buildbot is free software: you can
# redistribute it and/or modify it under the terms of the GNU General Public
# License as published by the Free Software Foundation, version 2.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc., 51
# Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# Copyright Buildbot Team Members
"""
Helpers for handling compatibility differences
between Python 2 and Python 3.
"""
from __future__ import absolute_import
from __future__ import print_function
from future.utils import text_type
if str != bytes:
# On Python 3 and higher, str and bytes
# are not equivalent. We must use StringIO for
# doing io on native strings.
from io import StringIO as NativeStringIO
else:
# On Python 2 and older, str and bytes
# are equivalent. We must use BytesIO for
# doing io on native strings.
from io import BytesIO as NativeStringIO
def bytes2NativeString(x, encoding='utf-8'):
"""
Convert C{bytes} to a native C{str}.
On Python 3 and higher, str and bytes
are not equivalent. In this case, decode
the bytes, and return a native string.
On Python 2 and lower, str and bytes
    are equivalent. In this case, just
    return the native string.
@param x: a string of type C{bytes}
@param encoding: an optional codec, default: 'utf-8'
@return: a string of type C{str}
"""
if isinstance(x, bytes) and str != bytes:
return x.decode(encoding)
return x
def unicode2bytes(x, encoding='utf-8'):
"""
Convert a unicode string to C{bytes}.
@param x: a unicode string, of type C{unicode} on Python 2,
or C{str} on Python 3.
@param encoding: an optional codec, default: 'utf-8'
@return: a string of type C{bytes}
"""
if isinstance(x, text_type):
x = x.encode(encoding)
return x
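# Usage sketch:
#
#     bytes2NativeString(b'abc')  # -> 'abc' on Python 3, b'abc' (native str) on Python 2
#     unicode2bytes(u'abc')       # -> b'abc' on both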
__all__ = [
"NativeStringIO",
"bytes2NativeString",
"unicode2bytes"
]
| gpl-2.0 |
incaser/odoo-odoo | addons/procurement_jit/procurement_jit.py | 244 | 1543 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2013 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp.osv import osv
class procurement_order(osv.osv):
_inherit = "procurement.order"
def create(self, cr, uid, vals, context=None):
context = context or {}
procurement_id = super(procurement_order, self).create(cr, uid, vals, context=context)
if not context.get('procurement_autorun_defer'):
self.run(cr, uid, [procurement_id], context=context)
self.check(cr, uid, [procurement_id], context=context)
return procurement_id
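# Callers that create procurements in batch can skip the immediate run/check
# by passing a deferral flag in the context; a sketch using the old-style pool
# (the surrounding call site is illustrative):
#   self.pool.get('procurement.order').create(cr, uid, vals,
#       context=dict(context or {}, procurement_autorun_defer=True))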
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
ProjectSWGCore/NGECore2 | scripts/mobiles/generic/faction/imperial/hardened_swamp_trooper_25.py | 2 | 1496 | import sys
from services.spawn import MobileTemplate
from services.spawn import WeaponTemplate
from resources.datatables import WeaponType
from resources.datatables import Difficulty
from resources.datatables import Options
from resources.datatables import FactionStatus
from java.util import Vector
def addTemplate(core):
mobileTemplate = MobileTemplate()
mobileTemplate.setCreatureName('crackdown_swamp_trooper_hard')
mobileTemplate.setLevel(25)
mobileTemplate.setDifficulty(Difficulty.ELITE)
mobileTemplate.setMinSpawnDistance(4)
mobileTemplate.setMaxSpawnDistance(8)
mobileTemplate.setDeathblow(False)
mobileTemplate.setScale(1)
mobileTemplate.setSocialGroup("imperial")
mobileTemplate.setAssistRange(6)
mobileTemplate.setStalker(False)
mobileTemplate.setFaction("imperial")
mobileTemplate.setFactionStatus(FactionStatus.Combatant)
templates = Vector()
templates.add('object/mobile/shared_dressed_swamp_trooper.iff')
templates.add('object/mobile/shared_dressed_swamp_trooper_m.iff')
mobileTemplate.setTemplates(templates)
weaponTemplates = Vector()
weapontemplate = WeaponTemplate('object/weapon/ranged/carbine/shared_carbine_e11.iff', WeaponType.CARBINE, 1.0, 15, 'energy')
weaponTemplates.add(weapontemplate)
mobileTemplate.setWeaponTemplateVector(weaponTemplates)
attacks = Vector()
mobileTemplate.setDefaultAttack('rangedShot')
mobileTemplate.setAttacks(attacks)
core.spawnService.addMobileTemplate('hardened_swamp_troop_25', mobileTemplate)
	return
| lgpl-3.0 |
ActiveState/code | recipes/Python/576531_Circle/recipe-576531.py | 1 | 15215 | #On the name of ALLAH and may the blessing and peace of Allah
#be upon the Messenger of Allah Mohamed Salla Allahu Aliahi Wassalam.
#Author :Fouad Teniou
#Date : 08/10/08
#Version : 2.4
""" Class of an equation of a circle of the form Ax^2 + Ay^2 + Dx + Ey + F = 0 (A !=0)
it represents a circle or a point or has no graph, depending on the radius value. And a class
of an equation for the circle of radius r and centred at point (x0,y0). """
import math
class Circle(object):
""" Class that represent an equation of a circle
with A,D,E,F constants properties """
def __init__(self, Avalue,Dvalue,Evalue,Fvalue):
""" Circle construction takes A,D,E,F Constants """
self.__A = float(Avalue)
self.__D = float(Dvalue)
self.__E = float(Evalue)
self.__F = float(Fvalue)
self._b = chr(253)
self._a = self._checkSign(self.__A)
self._d= self._checkSign(self.__D)
self._e = self._checkSign(self.__E)
self._f = self._checkSign(self.__F)
self._g = ((self.__D/self.__A)/2)
self._g1= self.__D/2
self._h =((self.__E/self.__A)/2)
self._h1 = self.__E/2
self._i = self._checkSign(self._g)
self._j = self._checkSign(self._h)
self._k = (-self.__F/self.__A + self._g**2 +self._h**2)
self._k1= (-self.__F + self._g1**2 +self._h1**2)
self._l = "%2.2f" % math.sqrt(abs(self._k))
self._l1 = "%2.2f" % math.sqrt(abs(self._k1))
self._m = "(x%s%s)%s+(y%s%s)%s = %s" % \
(self._i,self._g,self._b,self._j,self._h,self._b,self._k)
self._m1 = "(x%s%s)%s+(y%s%s)%s = %s" % \
(self._i,self._g1,self._b,self._j,self._h1,self._b,self._k1)
self._n = "(%s,%s)" % (-self._g,-self._h)
self._n1 = "(%s,%s)" % (-self._g1,-self._h1)
def __str__(self):
""" String representation of the circle equation,
standard form , centre and radius """
try:
math.sqrt(self._k)
#Circle raises zero degenerate case
assert math.sqrt(self._k) != 0,"The graph is the single point %s" % \
Circle.centre(self)
            if self.__A == 1:
return "\n<Equation of a circle : x%s + y%s %s %sx %s %sy %s %s = 0 \
\n\n%s %35s %25s \n\n%s %22s %24s\n" %\
(self._b,self._b,self._d,int(self.D),self._e, \
int(self.E),self._f,int(self.F),
            'Standard form','Centre(x0,y0)','Radius r', \
self._m1,Circle.centre(self),Circle.radius(self))
else:
return "\n<Equation of a circle : %sx%s + %sy%s %s %sx %s %sy %s %s = 0 \
\n\n%s %35s %25s \n\n%s %22s %24s\n" %\
            (int(self.A),self._b,int(self.A),self._b,self._d,int(self.D),self._e, \
int(self.E),self._f,int(self.F),
            'Standard form', 'Centre(x0,y0)','Radius r', \
self._m,Circle.centre(self),Circle.radius(self))
#Circle raises Negative number degenerate case
except ValueError:
raise ValueError,\
" r%s < 0 : Degenerate case has no graph" % self._b
def getA(self):
""" Get method for A attribute """
if self.__A != 0:
return self.__A
else:
raise ValueError,\
" A value should be different than zero "
def setA(self,value):
""" Set method for A attribute """
self.__A = value
def delA(self):
""" Delete method for A attribute """
del self.__A
#Create A property
A = property(getA,setA,delA,"A constant")
def getD(self):
""" Get method for D attribute """
return self.__D
def setD(self,value):
""" Set method for D attribute """
self.__D = value
def delD(self):
""" Delete method for D attribute """
del self.__D
#Create D property
D = property(getD,setD,delD,"D constant")
def getE(self):
""" Get method for E attribute """
return self.__E
def setE(self,value):
""" Set method for E attribute """
self.__E = value
def delE(self):
""" Delete method for E attribute """
del self.__E
#Create E property
E = property(getE,setE,delE,"E constant")
def getF(self):
""" Get method for F attribute """
return self.__F
def setF(self,value):
""" Set method for F attribute """
self.__F = value
def delF(self):
""" Delete method for F attribute """
del self.__F
#Create F property
F = property(getF,setF,delF,"F constant")
def _checkSign(self,value):
""" Utility method to check the values’ signs and return a sign string """
if value >= 0:
return "+"
else:
return ""
def radius(self):
""" Compute radius of a circle """
if self.__A == 1:
return self._l1
else:
return self._l
def centre(self):
""" Compute centre(x0,y0) of a circle """
if self.__A == 1:
return self._n1
else:
return self._n
class Equation(Circle):
"""Class that represent a radius and the centre of a circle """
def __init__(self,x,y,radius):
"""Equation construction takes centre(xValue,yValue
and radius"""
self.__x = float(x)
self.__y = float(y)
self.__radius = float(radius)
self._o = chr(253)
self._p = self.__radius**2
self._q = self._checkSign(-self.__x)
self._r = self._checkSign(-self.__y)
self._s = "(x%s%s)%s + (y%s%s)%s = %s " % \
(self._q,-self.__x,self._o,self._r,-self.__y,self._o,self._p)
self._t = self.__x**2 + self.__y**2 -self._p
self._u = self._checkSign(self._t)
self._v = "x%s + y%s %s %sx %s %sy %s %s = 0 " % \
(self._o,self._o,self._q,-self.__x*2,self._r,-self.__y*2,self._u,self._t)
def __str__(self):
""" String representation of the circle equation, standard form ,centre and radius """
#Equation raises radius value < 0
assert self.__radius > 0, "<Radius value should be greater than zero"
return ( "\n<Equation for the circle of radius (%s)\
centred at (%s,%s) is : \n\n%s < -- > %s" ) % \
(self.__radius,self.__x,self.__y,self._s,self._v)
if __name__ == "__main__":
circle1 = Circle(16,40,16,-7)
print circle1
    #Though students might use only the values of radius and centre
    print circle1.radius()
    print circle1.centre()
circle2 = Circle(2,24,0,-81)
print circle2
del circle2.A
circle2.A = 1
print circle2
equation = Equation(2,5,3)
print equation
for doc in (Circle.A,Circle.D,Circle.E,Circle.F):
print doc.__doc__,doc.fget.func_name,doc.fset.func_name,doc.fdel.func_name
########################################################################################
#Version : Python 3.2
#import math
#class Circle(object):
# """ Class that represent an equation of a circle
# with A,D,E,F constants properties"""
#
# def __init__(self,Avalue,Dvalue,Evalue,Fvalue):
# """ Circle constructor takes A,D,F,E constants """
#
# self.__A = float(Avalue)
# self.__D = float(Dvalue)
# self.__E = float(Evalue)
# self.__F = float(Fvalue)
#
# self._b = chr(178)
# self._a = self._checkSign(self.__A)
# self._d = self._checkSign(self.__D)
# self._e = self._checkSign(self.__E)
# self._f = self._checkSign(self.__F)
# self._g = ((self.__D/self.__A)/2)
# self._g1 = self.D/2
# self._h = ((self.__E/self.__A)/2)
# self._h1 = self.E/2
# self._i = self._checkSign(self._g)
# self._j = self._checkSign(self._h)
# self._k = (-self.__F/self.__A +self._g**2 + self._h**2)
# self._k1= (-self.__F +self._g1**2 + self._h1**2)
# self._l = "%2.2f" % math.sqrt(abs(self._k))
# self._l1= "%2.2f" % math.sqrt(abs(self._k1))
# self._m = "(x%s%s)%s+(y%s%s)%s = %s" % \
# (self._i,self._g,self._b,self._j,self._h,self._b,self._k)
# self._m1 ="(x%s%s)%s+(y%s%s)%s = %s" % \
# (self._i,self._g1,self._b,self._j,self._h1,self._b,self._k1)
# self._n = "(%s,%s)" % (-self._g,-self._h)
# self._n1= "(%s,%s)" % (-self._g1,-self._h1)
#
#
# def squared(self):
# self._w =(-self.__F/self.__A +((self.__D/self.__A)/2)**2 + ((self.__E/self.__A)/2)**2)
# return self._w
# def standardForm(self):
# return "(x%s%s)%s+(y%s%s)%s = %s" % \
# (self._checkSign(((self.__D/self.__A)/2)),((self.__D/self.__A)/2),chr(178),self._checkSign(((self.__E/self.__A)/2)),((self.__E/self.__A)/2),chr(178),(-self.__F/self.__A +((self.__D/self.__A)/2)**2 + ((self.__E/self.__A)/2)**2))
#
# def __str__(self):
# """ String representation of the circle equation,
# standard form, centre and radius"""
#
# try:
# math.sqrt(Circle.squared(self))
#
# #Circle raises zero degenerate case
# assert math.sqrt(Circle.squared(self)) != 0,"The graph is the single point %s" % \
# Circle.centre(self)
# if self.__A == 1:
#
# return "\n<Equation of a circle : x%s + y%s %s %sx %s %sy %s %s = 0 \
# \n\n%s %35s %25s \n\n%s %22s %24s\n" %\
# (self._b,self._b,self._d,int(self.D),self._e,\
# int(self.E),self._f,int(self.F),
# "Standard form","Center(x0,y0)","Radius r",\
# self._m1,Circle.centre(self),Circle.radius(self))
# else:
# return "\n<Equation of a circle : %sx%s + %sy%s %s %sx %s %sy %s %s = 0 \
# \n\n%s %35s %25s \n\n%s %22s %24s\n" %\
# (int(self.A),self._b,int(self.A),self._b,self._d,int(self.D),self._e,\
# int(self.E),self._f,int(self.F),
# "Standard form","Center(x0,y0)","Radius r",\
# Circle.standardForm(self),Circle.centre(self),Circle.radius(self))
#
# #Circle raises Negative number degenerate case
# except ValueError:
# raise ValueError("r%s < 0 : Degenerate case has no graph" % self._b)
#
# def getA(self):
# """ Get method for A attribute """
# if self.__A !=0:
# return self.__A
# else:
#            raise ValueError("A value should be different than zero")
#
# def setA(self,value):
# """ Set method for A attribute """
#
# self.__A = value
#
# def delA(self):
# """Delete method for A attrobute"""
#
# del self.__A
#
# #Create a property
# A = property(getA,setA,delA,"A constant")
#
# def getD(self):
# """ Get method for D attribute """
#
# return self.__D
#
# def setD(self,value):
# """ Set method for D attribute """
#
# self.__D = value
#
# def delD(self):
# """Delete method for D attrobute"""
# del self.__D
#
# #Create a property
# D = property(getD,setD,delD,"D constant")
# def getE(self):
# """ Get method for E attribute """
# return self.__E
#
# def setE(self,value):
# """ Set method for E attribute """
#
# self.__E = value
#
# def delE(self):
# """Delete method for E attrobute"""
#
# del self.__E
#
# #Create a property
# E = property(getE,setE,delE,"E constant")
#
# def getF(self):
# """ Get method for F attribute """
#
# return self.__F
#
# def setF(self,value):
# """ Set method for F attribute """
#
# self.__F = value
#
# def delF(self):
# """Delete method for F attrobute"""
#
# del self.__F
#
# #Create a property
# F = property(getF,setF,delF,"F constant")
#
# def _checkSign(self,value):
# """ Utility method to check the values's sign
# and return a sign string"""
#
# if value >= 0:
# return "+"
# else :
# return ""
#
# def radius(self):
# """ Computes radius of a circle """
# if self.__A ==1:
# return self._l1
# else:
# return "%2.2f" % math.sqrt(abs(Circle.squared(self)))
#
# def centre(self):
# """ Computes centre(x0,y0) of a circle """
# if self.__A == 1:
# return self._n1
# else:
# return "(%s,%s)" % (-((self.__D/self.__A)/2),-((self.__E/self.__A)/2))
#
#
#
#class Equation(Circle):
# """ class that represent a radius and the centre of a circle """
#
# def __init__(self,x,y,radius):
# """ Equation construction takes centre(xValue,yValue)
# and radius """
#
# self.__x = float(x)
# self.__y = float(y)
# self.__radius = float(radius)
#
# self._o = chr(178)
# self._p = self.__radius**2
# self._q = self._checkSign(-self.__x)
# self._r = self._checkSign(-self.__y)
# self._s = "(x%s%s)%s+(y%s%s)%s = %s" % \
# (self._q,-self.__x,self._o,self._r,-self.__y,self._o,self._p)
# self._t = self.__x**2 + self.__y**2 - self._p
# self._u = self._checkSign(self._t)
# self._v = "x%s + y%s %s%sx %s%sy %s%s = 0" % \
# (self._o,self._o,self._q,-self.__x*2,self._r,-self.__y*2,self._u,self._t)
#
# def __str__(self):
# """ String representation of the circle equation, standard form,
# centre and radius"""
#
# #Equation raises radius value < 0
# assert self.__radius > 0, "<radius value should be greater than zero"
#
# return ("\n<Equation for the circle of radius (%s)\
# centred at(%s,%s) is :\n\n%s <--> %s") %\
# (self.__radius,self.__x,self.__y,self._s,self._v )
#
#
#if __name__ == "__main__":
# circle1 = Circle(10,40,16,-7)
# print(circle1)
#
# print(circle1.radius())
# print(circle1.centre())
# circle1.delA
# circle1.A=1
# print(circle1)
# circle3 = Circle(5,24,0,-81)
# print(circle3)
#
# circle3.E =80
# print(circle3)
#
# equation = Equation(2,5,3)
# print(equation)
#
#
# for doc in (Circle.A,Circle.D,Circle.E,Circle.F):
# print(doc.__doc__,"=",doc.fget.__name__,doc.fset.__name__,doc.fdel.__name__)
#######################################################################################
#<Equation of a circle : 10x² + 10y² + 40x + 16y -7 = 0
#Standard form Center(x0,y0) Radius r
#(x+2.0)²+(y+0.8)² = 5.34 (-2.0,-0.8) 2.31
#2.31
#(-2.0,-0.8)
#<Equation of a circle : x² + y² + 40x + 16y -7 = 0
#Standard form Center(x0,y0) Radius r
#(x+20.0)²+(y+8.0)² = 471.0 (-20.0,-8.0) 21.70
#<Equation of a circle : 5x² + 5y² + 24x + 0y -81 = 0
#Standard form Center(x0,y0) Radius r
#(x+2.4)²+(y+0.0)² = 21.96 (-2.4,-0.0) 4.69
#<Equation of a circle : 5x² + 5y² + 24x + 80y -81 = 0
#Standard form Center(x0,y0) Radius r
#(x+2.4)²+(y+8.0)² = 85.96 (-2.4,-8.0) 9.27
#<Equation for the circle of radius (3.0) centred at(2.0,5.0) is :
#(x-2.0)²+(y-5.0)² = 9.0 <--> x² + y² -4.0x -10.0y +20.0 = 0
#A constant = getA setA delA
#D constant = getD setD delD
#E constant = getE setE delE
#F constant = getF setF delF
| mit |
jdramani/servo | python/mach/mach/registrar.py | 46 | 3774 | # This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
from __future__ import absolute_import, unicode_literals
from .base import MachError
INVALID_COMMAND_CONTEXT = r'''
It looks like you tried to run a mach command from an invalid context. The %s
command failed to meet the following conditions: %s
Run |mach help| to show a list of all commands available to the current context.
'''.lstrip()
class MachRegistrar(object):
"""Container for mach command and config providers."""
def __init__(self):
self.command_handlers = {}
self.commands_by_category = {}
self.settings_providers = set()
self.categories = {}
self.require_conditions = False
def register_command_handler(self, handler):
name = handler.name
if not handler.category:
raise MachError('Cannot register a mach command without a '
'category: %s' % name)
if handler.category not in self.categories:
raise MachError('Cannot register a command to an undefined '
'category: %s -> %s' % (name, handler.category))
self.command_handlers[name] = handler
self.commands_by_category[handler.category].add(name)
def register_settings_provider(self, cls):
self.settings_providers.add(cls)
def register_category(self, name, title, description, priority=50):
self.categories[name] = (title, description, priority)
self.commands_by_category[name] = set()
@classmethod
def _condition_failed_message(cls, name, conditions):
msg = ['\n']
for c in conditions:
part = [' %s' % c.__name__]
if c.__doc__ is not None:
part.append(c.__doc__)
msg.append(' - '.join(part))
return INVALID_COMMAND_CONTEXT % (name, '\n'.join(msg))
def _run_command_handler(self, handler, context=None, debug_command=False, **kwargs):
cls = handler.cls
if handler.pass_context and not context:
raise Exception('mach command class requires context.')
if handler.pass_context:
instance = cls(context)
else:
instance = cls()
if handler.conditions:
fail_conditions = []
for c in handler.conditions:
if not c(instance):
fail_conditions.append(c)
if fail_conditions:
print(self._condition_failed_message(handler.name, fail_conditions))
return 1
fn = getattr(instance, handler.method)
if debug_command:
import pdb
result = pdb.runcall(fn, **kwargs)
else:
result = fn(**kwargs)
result = result or 0
assert isinstance(result, (int, long))
return result
def dispatch(self, name, context=None, argv=None, **kwargs):
"""Dispatch/run a command.
Commands can use this to call other commands.
"""
# TODO handler.subcommand_handlers are ignored
handler = self.command_handlers[name]
if handler.parser:
parser = handler.parser
# save and restore existing defaults so **kwargs don't persist across
# subsequent invocations of Registrar.dispatch()
old_defaults = parser._defaults.copy()
parser.set_defaults(**kwargs)
kwargs, _ = parser.parse_known_args(argv or [])
kwargs = vars(kwargs)
parser._defaults = old_defaults
return self._run_command_handler(handler, context=context, **kwargs)
Registrar = MachRegistrar()
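# Dispatch usage sketch (the command name and keyword arguments are illustrative):
#
#     Registrar.dispatch('build', context=some_context, jobs=4)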
| mpl-2.0 |
jm-begon/scikit-learn | examples/cluster/plot_kmeans_silhouette_analysis.py | 242 | 5885 | """
===============================================================================
Selecting the number of clusters with silhouette analysis on KMeans clustering
===============================================================================
Silhouette analysis can be used to study the separation distance between the
resulting clusters. The silhouette plot displays a measure of how close each
point in one cluster is to points in the neighboring clusters and thus provides
a way to assess parameters like number of clusters visually. This measure has a
range of [-1, 1].
Silhouette coefficients (as these values are referred to) near +1 indicate
that the sample is far away from the neighboring clusters. A value of 0
indicates that the sample is on or very close to the decision boundary between
two neighboring clusters and negative values indicate that those samples might
have been assigned to the wrong cluster.
In this example the silhouette analysis is used to choose an optimal value for
``n_clusters``. The silhouette plot shows that the ``n_clusters`` value of 3, 5
and 6 are a bad pick for the given data due to the presence of clusters with
below average silhouette scores and also due to wide fluctuations in the size
of the silhouette plots. Silhouette analysis is more ambivalent in deciding
between 2 and 4.
Also from the thickness of the silhouette plot the cluster size can be
visualized. The silhouette plot for cluster 0 when ``n_clusters`` is equal to
2, is bigger in size owing to the grouping of the 3 sub clusters into one big
cluster. However when the ``n_clusters`` is equal to 4, all the plots are more
or less of similar thickness and hence are of similar sizes as can be also
verified from the labelled scatter plot on the right.
"""
from __future__ import print_function
from sklearn.datasets import make_blobs
from sklearn.cluster import KMeans
from sklearn.metrics import silhouette_samples, silhouette_score
import matplotlib.pyplot as plt
import matplotlib.cm as cm
import numpy as np
print(__doc__)
# Generating the sample data from make_blobs
# This particular setting has one distinct cluster and 3 clusters placed close
# together.
X, y = make_blobs(n_samples=500,
n_features=2,
centers=4,
cluster_std=1,
center_box=(-10.0, 10.0),
shuffle=True,
random_state=1) # For reproducibility
range_n_clusters = [2, 3, 4, 5, 6]
for n_clusters in range_n_clusters:
# Create a subplot with 1 row and 2 columns
fig, (ax1, ax2) = plt.subplots(1, 2)
fig.set_size_inches(18, 7)
# The 1st subplot is the silhouette plot
# The silhouette coefficient can range from -1, 1 but in this example all
# lie within [-0.1, 1]
ax1.set_xlim([-0.1, 1])
# The (n_clusters+1)*10 is for inserting blank space between silhouette
# plots of individual clusters, to demarcate them clearly.
ax1.set_ylim([0, len(X) + (n_clusters + 1) * 10])
# Initialize the clusterer with n_clusters value and a random generator
# seed of 10 for reproducibility.
clusterer = KMeans(n_clusters=n_clusters, random_state=10)
cluster_labels = clusterer.fit_predict(X)
# The silhouette_score gives the average value for all the samples.
# This gives a perspective into the density and separation of the formed
# clusters
silhouette_avg = silhouette_score(X, cluster_labels)
print("For n_clusters =", n_clusters,
"The average silhouette_score is :", silhouette_avg)
# Compute the silhouette scores for each sample
sample_silhouette_values = silhouette_samples(X, cluster_labels)
y_lower = 10
for i in range(n_clusters):
# Aggregate the silhouette scores for samples belonging to
# cluster i, and sort them
ith_cluster_silhouette_values = \
sample_silhouette_values[cluster_labels == i]
ith_cluster_silhouette_values.sort()
size_cluster_i = ith_cluster_silhouette_values.shape[0]
y_upper = y_lower + size_cluster_i
color = cm.spectral(float(i) / n_clusters)
ax1.fill_betweenx(np.arange(y_lower, y_upper),
0, ith_cluster_silhouette_values,
facecolor=color, edgecolor=color, alpha=0.7)
# Label the silhouette plots with their cluster numbers at the middle
ax1.text(-0.05, y_lower + 0.5 * size_cluster_i, str(i))
# Compute the new y_lower for next plot
y_lower = y_upper + 10 # 10 for the 0 samples
ax1.set_title("The silhouette plot for the various clusters.")
ax1.set_xlabel("The silhouette coefficient values")
ax1.set_ylabel("Cluster label")
    # The vertical line for average silhouette score of all the values
ax1.axvline(x=silhouette_avg, color="red", linestyle="--")
ax1.set_yticks([]) # Clear the yaxis labels / ticks
ax1.set_xticks([-0.1, 0, 0.2, 0.4, 0.6, 0.8, 1])
# 2nd Plot showing the actual clusters formed
colors = cm.spectral(cluster_labels.astype(float) / n_clusters)
ax2.scatter(X[:, 0], X[:, 1], marker='.', s=30, lw=0, alpha=0.7,
c=colors)
# Labeling the clusters
centers = clusterer.cluster_centers_
# Draw white circles at cluster centers
ax2.scatter(centers[:, 0], centers[:, 1],
marker='o', c="white", alpha=1, s=200)
for i, c in enumerate(centers):
ax2.scatter(c[0], c[1], marker='$%d$' % i, alpha=1, s=50)
ax2.set_title("The visualization of the clustered data.")
ax2.set_xlabel("Feature space for the 1st feature")
ax2.set_ylabel("Feature space for the 2nd feature")
plt.suptitle(("Silhouette analysis for KMeans clustering on sample data "
"with n_clusters = %d" % n_clusters),
fontsize=14, fontweight='bold')
plt.show()
| bsd-3-clause |
ruchikd/Algorithms | Python/CreateLLFromBST/CreateLLFromBST.py | 1 | 1529 | #**************************************************************
# 9
# / \
# 5 11
# / \ / \
# 2 7 10 15
# / / \
# 1 4 6
# /
# 3
#**************************************************************
class Node:
val, left, right, parent = None, None, None, None
def __init__ (self, val):
self.val = val
self.left = None
self.right = None
def createTreeAsAbove():
root = Node(9)
root.left = Node(5)
root.left.left = Node(2)
root.left.left.left = Node(1)
root.left.right = Node(7)
root.left.right.left = Node(4)
root.left.right.left.left = Node(3)
root.left.right.right = Node(6)
root.right = Node(11)
root.right.left = Node(10)
root.right.right = Node (15)
return root
def getHeight(node):
if node is None:
return 0
lHeight = getHeight(node.left)
rHeight = getHeight(node.right)
if(lHeight > rHeight):
return lHeight + 1
else:
return rHeight + 1
def getList(node, height, list):
if node is None:
return
if height == 0:
list.append(node.val)
else:
getList(node.left, height-1, list)
getList(node.right, height-1, list)
return list
def levelOrderTraversal(node):
height = getHeight(node)
list = []
for x in range(height):
getList(node, x, list)
print list
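# For the tree drawn at the top of this file, the traversal prints:
# [9, 5, 11, 2, 7, 10, 15, 1, 4, 6, 3]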
def main():
root = createTreeAsAbove()
levelOrderTraversal(root)
if __name__ == '__main__':
	main()
| gpl-3.0 |
sccblom/vercors | deps/dafny/1.9.6/windows/z3/bin/z3util.py | 4 | 11933 | ############################################
# Copyright (c) 2012 Microsoft Corporation
#
# Z3 Python interface
#
# Authors: Leonardo de Moura (leonardo)
# ThanhVu (Vu) Nguyen <[email protected]>
############################################
"""
Usage:
import common_z3 as CM_Z3
"""
from z3 import *
def vset(seq, idfun=None, as_list=True):
# This functions preserves the order of arguments while removing duplicates.
# This function is from https://code.google.com/p/common-python-vu/source/browse/vu_common.py
# (Thanhu's personal code). It has been copied here to avoid a dependency on vu_common.py.
"""
order preserving
>>> vset([[11,2],1, [10,['9',1]],2, 1, [11,2],[3,3],[10,99],1,[10,['9',1]]],idfun=repr)
[[11, 2], 1, [10, ['9', 1]], 2, [3, 3], [10, 99]]
"""
def _uniq_normal(seq):
d_ = {}
for s in seq:
if s not in d_:
d_[s] = None
yield s
def _uniq_idfun(seq,idfun):
d_ = {}
for s in seq:
h_ = idfun(s)
if h_ not in d_:
d_[h_] = None
yield s
if idfun is None:
res = _uniq_normal(seq)
else:
res = _uniq_idfun(seq,idfun)
return list(res) if as_list else res
def get_z3_version(as_str=False):
major = ctypes.c_uint(0)
minor = ctypes.c_uint(0)
build = ctypes.c_uint(0)
rev = ctypes.c_uint(0)
Z3_get_version(major,minor,build,rev)
rs = map(int,(major.value,minor.value,build.value,rev.value))
if as_str:
return "{}.{}.{}.{}".format(*rs)
else:
return rs
def ehash(v):
"""
Returns a 'stronger' hash value than the default hash() method.
The result from hash() is not enough to distinguish between 2
z3 expressions in some cases.
Note: the following doctests will fail with Python 2.x as the
default formatting doesn't match that of 3.x.
>>> x1 = Bool('x'); x2 = Bool('x'); x3 = Int('x')
>>> print(x1.hash(),x2.hash(),x3.hash()) #BAD: all same hash values
783810685 783810685 783810685
>>> print(ehash(x1), ehash(x2), ehash(x3))
x_783810685_1 x_783810685_1 x_783810685_2
"""
if __debug__:
assert is_expr(v)
return "{}_{}_{}".format(str(v),v.hash(),v.sort_kind())
"""
In Z3, variables are called *uninterpreted* consts and
variables are *interpreted* consts.
"""
def is_expr_var(v):
"""
EXAMPLES:
>>> is_expr_var(Int('7'))
True
>>> is_expr_var(IntVal('7'))
False
>>> is_expr_var(Bool('y'))
True
>>> is_expr_var(Int('x') + 7 == Int('y'))
False
>>> LOnOff, (On,Off) = EnumSort("LOnOff",['On','Off'])
>>> Block,Reset,SafetyInjection=Consts("Block Reset SafetyInjection",LOnOff)
>>> is_expr_var(LOnOff)
False
>>> is_expr_var(On)
False
>>> is_expr_var(Block)
True
>>> is_expr_var(SafetyInjection)
True
"""
return is_const(v) and v.decl().kind()==Z3_OP_UNINTERPRETED
def is_expr_val(v):
"""
EXAMPLES:
>>> is_expr_val(Int('7'))
False
>>> is_expr_val(IntVal('7'))
True
>>> is_expr_val(Bool('y'))
False
>>> is_expr_val(Int('x') + 7 == Int('y'))
False
>>> LOnOff, (On,Off) = EnumSort("LOnOff",['On','Off'])
>>> Block,Reset,SafetyInjection=Consts("Block Reset SafetyInjection",LOnOff)
>>> is_expr_val(LOnOff)
False
>>> is_expr_val(On)
True
>>> is_expr_val(Block)
False
>>> is_expr_val(SafetyInjection)
False
"""
return is_const(v) and v.decl().kind()!=Z3_OP_UNINTERPRETED
def get_vars(f,rs=[]):
"""
>>> x,y = Ints('x y')
>>> a,b = Bools('a b')
>>> get_vars(Implies(And(x+y==0,x*2==10),Or(a,Implies(a,b==False))))
[x, y, a, b]
"""
if __debug__:
assert is_expr(f)
if is_const(f):
if is_expr_val(f):
return rs
else: #variable
return vset(rs + [f],str)
else:
for f_ in f.children():
rs = get_vars(f_,rs)
return vset(rs,str)
def mk_var(name,vsort):
if vsort.kind() == Z3_INT_SORT:
v = Int(name)
elif vsort.kind() == Z3_REAL_SORT:
v = Real(name)
elif vsort.kind() == Z3_BOOL_SORT:
v = Bool(name)
elif vsort.kind() == Z3_DATATYPE_SORT:
v = Const(name,vsort)
else:
assert False, 'Cannot handle this sort (s: %sid: %d)'\
%(vsort,vsort.kind())
return v
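# Examples (sketch):
#   mk_var('n', IntSort())    # -> the Int constant n
#   mk_var('b', BoolSort())   # -> the Bool constant b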
def prove(claim,assume=None,verbose=0):
"""
>>> r,m = prove(BoolVal(True),verbose=0); r,model_str(m,as_str=False)
(True, None)
#infinite counter example when proving contradiction
>>> r,m = prove(BoolVal(False)); r,model_str(m,as_str=False)
(False, [])
>>> x,y,z=Bools('x y z')
>>> r,m = prove(And(x,Not(x))); r,model_str(m,as_str=True)
(False, '[]')
>>> r,m = prove(True,assume=And(x,Not(x)),verbose=0)
Traceback (most recent call last):
...
AssertionError: Assumption is alway False!
>>> r,m = prove(Implies(x,x),assume=y,verbose=2); r,model_str(m,as_str=False)
assume:
y
claim:
Implies(x, x)
to_prove:
Implies(y, Implies(x, x))
(True, None)
>>> r,m = prove(And(x,True),assume=y,verbose=0); r,model_str(m,as_str=False)
(False, [(x, False), (y, True)])
>>> r,m = prove(And(x,y),assume=y,verbose=0)
>>> print(r)
False
>>> print(model_str(m,as_str=True))
x = False
y = True
>>> a,b = Ints('a b')
>>> r,m = prove(a**b == b**a,assume=None,verbose=0)
E: cannot solve !
>>> r is None and m is None
True
"""
if __debug__:
assert not assume or is_expr(assume)
to_prove = claim
if assume:
if __debug__:
is_proved,_ = prove(Not(assume))
def _f():
emsg = "Assumption is alway False!"
if verbose >= 2:
emsg = "{}\n{}".format(assume,emsg)
return emsg
assert is_proved==False, _f()
to_prove = Implies(assume,to_prove)
if verbose >= 2:
print('assume: ')
print(assume)
print('claim: ')
print(claim)
print('to_prove: ')
print(to_prove)
f = Not(to_prove)
models = get_models(f,k=1)
if models is None: #unknown
print('E: cannot solve !')
return None, None
elif models == False: #unsat
return True,None
else: #sat
if __debug__:
assert isinstance(models,list)
if models:
return False, models[0] #the first counterexample
else:
return False, [] #infinite counterexample,models
def get_models(f,k):
"""
Returns the first k models satisfiying f.
If f is not satisfiable, returns False.
If f cannot be solved, returns None
If f is satisfiable, returns the first k models
    Note that if f is a tautology, e.g. True, then the result is []
Based on http://stackoverflow.com/questions/11867611/z3py-checking-all-solutions-for-equation
EXAMPLES:
>>> x, y = Ints('x y')
>>> len(get_models(And(0<=x,x <= 4),k=11))
5
>>> get_models(And(0<=x**y,x <= 1),k=2) is None
True
>>> get_models(And(0<=x,x <= -1),k=2)
False
>>> len(get_models(x+y==7,5))
5
>>> len(get_models(And(x<=5,x>=1),7))
5
>>> get_models(And(x<=0,x>=5),7)
False
>>> x = Bool('x')
>>> get_models(And(x,Not(x)),k=1)
False
>>> get_models(Implies(x,x),k=1)
[]
>>> get_models(BoolVal(True),k=1)
[]
"""
if __debug__:
assert is_expr(f)
assert k>=1
s = Solver()
s.add(f)
models = []
i = 0
while s.check() == sat and i < k:
i = i + 1
m = s.model()
if not m: #if m == []
break
models.append(m)
#create new constraint to block the current model
block = Not(And([v() == m[v] for v in m]))
s.add(block)
if s.check() == unknown:
return None
elif s.check() == unsat and i==0:
return False
else:
return models
def is_tautology(claim,verbose=0):
"""
>>> is_tautology(Implies(Bool('x'),Bool('x')))
True
>>> is_tautology(Implies(Bool('x'),Bool('y')))
False
>>> is_tautology(BoolVal(True))
True
>>> is_tautology(BoolVal(False))
False
"""
return prove(claim=claim,assume=None,verbose=verbose)[0]
def is_contradiction(claim,verbose=0):
"""
>>> x,y=Bools('x y')
>>> is_contradiction(BoolVal(False))
True
>>> is_contradiction(BoolVal(True))
False
>>> is_contradiction(x)
False
>>> is_contradiction(Implies(x,y))
False
>>> is_contradiction(Implies(x,x))
False
>>> is_contradiction(And(x,Not(x)))
True
"""
return prove(claim=Not(claim),assume=None,verbose=verbose)[0]
def exact_one_model(f):
"""
return True if f has exactly 1 model, False otherwise.
EXAMPLES:
>>> x, y = Ints('x y')
>>> exact_one_model(And(0<=x**y,x <= 0))
False
>>> exact_one_model(And(0<=x,x <= 0))
True
>>> exact_one_model(And(0<=x,x <= 1))
False
>>> exact_one_model(And(0<=x,x <= -1))
False
"""
models = get_models(f,k=2)
if isinstance(models,list):
return len(models)==1
else:
return False
def myBinOp(op,*L):
"""
>>> myAnd(*[Bool('x'),Bool('y')])
And(x, y)
>>> myAnd(*[Bool('x'),None])
x
>>> myAnd(*[Bool('x')])
x
>>> myAnd(*[])
>>> myAnd(Bool('x'),Bool('y'))
And(x, y)
>>> myAnd(*[Bool('x'),Bool('y')])
And(x, y)
>>> myAnd([Bool('x'),Bool('y')])
And(x, y)
>>> myAnd((Bool('x'),Bool('y')))
And(x, y)
>>> myAnd(*[Bool('x'),Bool('y'),True])
Traceback (most recent call last):
...
AssertionError
"""
if __debug__:
assert op == Z3_OP_OR or op == Z3_OP_AND or op == Z3_OP_IMPLIES
if len(L)==1 and (isinstance(L[0],list) or isinstance(L[0],tuple)):
L = L[0]
if __debug__:
assert all(not isinstance(l,bool) for l in L)
L = [l for l in L if is_expr(l)]
if L:
if len(L)==1:
return L[0]
else:
if op == Z3_OP_OR:
return Or(L)
elif op == Z3_OP_AND:
return And(L)
else: #IMPLIES
return Implies(L[0],L[1])
else:
return None
def myAnd(*L): return myBinOp(Z3_OP_AND,*L)
def myOr(*L): return myBinOp(Z3_OP_OR,*L)
def myImplies(a,b):return myBinOp(Z3_OP_IMPLIES,[a,b])
Iff = lambda f: And(Implies(f[0],f[1]),Implies(f[1],f[0]))
def model_str(m,as_str=True):
"""
Returned a 'sorted' model (so that it's easier to see)
The model is sorted by its key,
e.g. if the model is y = 3 , x = 10, then the result is
x = 10, y = 3
EXAMPLES:
see doctest exampels from function prove()
"""
if __debug__:
assert m is None or m == [] or isinstance(m,ModelRef)
if m :
vs = [(v,m[v]) for v in m]
        vs = sorted(vs, key=lambda av: str(av[0]))
if as_str:
return '\n'.join(['{} = {}'.format(k,v) for (k,v) in vs])
else:
return vs
else:
return str(m) if as_str else m
| mpl-2.0 |
jvkops/django | django/db/models/signals.py | 399 | 2734 | from django.apps import apps
from django.dispatch import Signal
from django.utils import six
class_prepared = Signal(providing_args=["class"])
class ModelSignal(Signal):
"""
Signal subclass that allows the sender to be lazily specified as a string
of the `app_label.ModelName` form.
"""
def __init__(self, *args, **kwargs):
super(ModelSignal, self).__init__(*args, **kwargs)
self.unresolved_references = {}
class_prepared.connect(self._resolve_references)
def _resolve_references(self, sender, **kwargs):
opts = sender._meta
reference = (opts.app_label, opts.object_name)
try:
receivers = self.unresolved_references.pop(reference)
except KeyError:
pass
else:
for receiver, weak, dispatch_uid in receivers:
super(ModelSignal, self).connect(
receiver, sender=sender, weak=weak, dispatch_uid=dispatch_uid
)
def connect(self, receiver, sender=None, weak=True, dispatch_uid=None):
if isinstance(sender, six.string_types):
try:
app_label, model_name = sender.split('.')
except ValueError:
raise ValueError(
"Specified sender must either be a model or a "
"model name of the 'app_label.ModelName' form."
)
try:
sender = apps.get_registered_model(app_label, model_name)
except LookupError:
ref = (app_label, model_name)
refs = self.unresolved_references.setdefault(ref, [])
refs.append((receiver, weak, dispatch_uid))
return
super(ModelSignal, self).connect(
receiver, sender=sender, weak=weak, dispatch_uid=dispatch_uid
)
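# Usage sketch for the lazy string sender (the handler name is illustrative):
#
#     def my_handler(sender, instance, **kwargs):
#         ...
#     pre_save.connect(my_handler, sender='app_label.ModelName')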
pre_init = ModelSignal(providing_args=["instance", "args", "kwargs"], use_caching=True)
post_init = ModelSignal(providing_args=["instance"], use_caching=True)
pre_save = ModelSignal(providing_args=["instance", "raw", "using", "update_fields"],
use_caching=True)
post_save = ModelSignal(providing_args=["instance", "raw", "created", "using", "update_fields"], use_caching=True)
pre_delete = ModelSignal(providing_args=["instance", "using"], use_caching=True)
post_delete = ModelSignal(providing_args=["instance", "using"], use_caching=True)
m2m_changed = ModelSignal(
providing_args=["action", "instance", "reverse", "model", "pk_set", "using"],
use_caching=True,
)
pre_migrate = Signal(providing_args=["app_config", "verbosity", "interactive", "using"])
post_migrate = Signal(providing_args=["app_config", "verbosity", "interactive", "using"])
| bsd-3-clause |
kevin8909/xjerp | openerp/addons/account_anglo_saxon/invoice.py | 12 | 12123 | ##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C)
# 2004-2010 Tiny SPRL (<http://tiny.be>).
# 2009-2010 Veritos (http://veritos.nl).
# All Rights Reserved
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp.osv import osv
class account_invoice_line(osv.osv):
_inherit = "account.invoice.line"
def move_line_get(self, cr, uid, invoice_id, context=None):
res = super(account_invoice_line,self).move_line_get(cr, uid, invoice_id, context=context)
inv = self.pool.get('account.invoice').browse(cr, uid, invoice_id, context=context)
company_currency = inv.company_id.currency_id.id
def get_price(cr, uid, inv, company_currency,i_line):
cur_obj = self.pool.get('res.currency')
if inv.currency_id.id != company_currency:
price = cur_obj.compute(cr, uid, company_currency, inv.currency_id.id, i_line.product_id.standard_price * i_line.quantity, context={'date': inv.date_invoice})
else:
price = i_line.product_id.standard_price * i_line.quantity
return price
if inv.type in ('out_invoice','out_refund'):
for i_line in inv.invoice_line:
if i_line.product_id and i_line.product_id.valuation == 'real_time':
if inv.type == 'out_invoice':
# debit account dacc will be the output account
# first check the product, if empty check the category
dacc = i_line.product_id.property_stock_account_output and i_line.product_id.property_stock_account_output.id
if not dacc:
dacc = i_line.product_id.categ_id.property_stock_account_output_categ and i_line.product_id.categ_id.property_stock_account_output_categ.id
else:
# = out_refund
# debit account dacc will be the input account
# first check the product, if empty check the category
dacc = i_line.product_id.property_stock_account_input and i_line.product_id.property_stock_account_input.id
if not dacc:
dacc = i_line.product_id.categ_id.property_stock_account_input_categ and i_line.product_id.categ_id.property_stock_account_input_categ.id
# in both cases the credit account cacc will be the expense account
# first check the product, if empty check the category
cacc = i_line.product_id.property_account_expense and i_line.product_id.property_account_expense.id
if not cacc:
cacc = i_line.product_id.categ_id.property_account_expense_categ and i_line.product_id.categ_id.property_account_expense_categ.id
if dacc and cacc:
res.append({
'type':'src',
'name': i_line.name[:64],
'price_unit':i_line.product_id.standard_price,
'quantity':i_line.quantity,
'price':get_price(cr, uid, inv, company_currency, i_line),
'account_id':dacc,
'product_id':i_line.product_id.id,
'uos_id':i_line.uos_id.id,
'account_analytic_id': False,
'taxes':i_line.invoice_line_tax_id,
})
res.append({
'type':'src',
'name': i_line.name[:64],
'price_unit':i_line.product_id.standard_price,
'quantity':i_line.quantity,
'price': -1 * get_price(cr, uid, inv, company_currency, i_line),
'account_id':cacc,
'product_id':i_line.product_id.id,
'uos_id':i_line.uos_id.id,
'account_analytic_id': False,
'taxes':i_line.invoice_line_tax_id,
})
elif inv.type in ('in_invoice','in_refund'):
for i_line in inv.invoice_line:
if i_line.product_id and i_line.product_id.valuation == 'real_time':
if i_line.product_id.type != 'service':
# get the price difference account at the product
acc = i_line.product_id.property_account_creditor_price_difference and i_line.product_id.property_account_creditor_price_difference.id
if not acc:
# if not found on the product get the price difference account at the category
acc = i_line.product_id.categ_id.property_account_creditor_price_difference_categ and i_line.product_id.categ_id.property_account_creditor_price_difference_categ.id
a = None
if inv.type == 'in_invoice':
# oa will be the stock input account
# first check the product, if empty check the category
oa = i_line.product_id.property_stock_account_input and i_line.product_id.property_stock_account_input.id
if not oa:
oa = i_line.product_id.categ_id.property_stock_account_input_categ and i_line.product_id.categ_id.property_stock_account_input_categ.id
else:
# = in_refund
# oa will be the stock output account
# first check the product, if empty check the category
oa = i_line.product_id.property_stock_account_output and i_line.product_id.property_stock_account_output.id
if not oa:
oa = i_line.product_id.categ_id.property_stock_account_output_categ and i_line.product_id.categ_id.property_stock_account_output_categ.id
if oa:
# get the fiscal position
fpos = i_line.invoice_id.fiscal_position or False
a = self.pool.get('account.fiscal.position').map_account(cr, uid, fpos, oa)
diff_res = []
# calculate and write down the possible price difference between invoice price and product price
for line in res:
if a == line['account_id'] and i_line.product_id.id == line['product_id']:
uom = i_line.product_id.uos_id or i_line.product_id.uom_id
standard_price = self.pool.get('product.uom')._compute_price(cr, uid, uom.id, i_line.product_id.standard_price, i_line.uos_id.id)
if standard_price != i_line.price_unit and line['price_unit'] == i_line.price_unit and acc:
price_diff = i_line.price_unit - standard_price
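                                        # Hypothetical worked example: invoiced price_unit 12.0 with
                                        # standard_price 10.0 gives price_diff 2.0; the stock line is
                                        # rewritten at 10.0 * quantity and 2.0 * quantity is booked
                                        # below on the price-difference account 'acc'.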
line.update({'price':standard_price * line['quantity']})
diff_res.append({
'type':'src',
'name': i_line.name[:64],
'price_unit':price_diff,
'quantity':line['quantity'],
'price': price_diff * line['quantity'],
'account_id':acc,
'product_id':line['product_id'],
'uos_id':line['uos_id'],
'account_analytic_id':line['account_analytic_id'],
'taxes':line.get('taxes',[]),
})
res += diff_res
return res
def product_id_change(self, cr, uid, ids, product, uom_id, qty=0, name='', type='out_invoice', partner_id=False, fposition_id=False, price_unit=False, currency_id=False, context=None, company_id=None):
fiscal_pool = self.pool.get('account.fiscal.position')
res = super(account_invoice_line, self).product_id_change(cr, uid, ids, product, uom_id, qty, name, type, partner_id, fposition_id, price_unit, currency_id, context, company_id)
if not product:
return res
if type in ('in_invoice','in_refund'):
product_obj = self.pool.get('product.product').browse(cr, uid, product, context=context)
if type == 'in_invoice':
oa = product_obj.property_stock_account_input and product_obj.property_stock_account_input.id
if not oa:
oa = product_obj.categ_id.property_stock_account_input_categ and product_obj.categ_id.property_stock_account_input_categ.id
else:
oa = product_obj.property_stock_account_output and product_obj.property_stock_account_output.id
if not oa:
oa = product_obj.categ_id.property_stock_account_output_categ and product_obj.categ_id.property_stock_account_output_categ.id
if oa:
fpos = fposition_id and fiscal_pool.browse(cr, uid, fposition_id, context=context) or False
a = fiscal_pool.map_account(cr, uid, fpos, oa)
res['value'].update({'account_id':a})
return res
class account_invoice(osv.osv):
_inherit = "account.invoice"
def _prepare_refund(self, cr, uid, invoice, date=None, period_id=None, description=None, journal_id=None, context=None):
invoice_data = super(account_invoice, self)._prepare_refund(cr, uid, invoice, date, period_id,
description, journal_id, context=context)
if invoice.type == 'in_invoice':
fiscal_position = self.pool.get('account.fiscal.position')
for _, _, line_dict in invoice_data['invoice_line']:
if line_dict.get('product_id'):
product = self.pool.get('product.product').browse(cr, uid, line_dict['product_id'], context=context)
counterpart_acct_id = product.property_stock_account_output and \
product.property_stock_account_output.id
if not counterpart_acct_id:
counterpart_acct_id = product.categ_id.property_stock_account_output_categ and \
product.categ_id.property_stock_account_output_categ.id
if counterpart_acct_id:
fpos = invoice.fiscal_position or False
line_dict['account_id'] = fiscal_position.map_account(cr, uid,
fpos,
counterpart_acct_id)
return invoice_data
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
HiSPARC/station-software | user/python/Lib/lib-tk/test/test_tkinter/test_images.py | 40 | 13352 | import unittest
import Tkinter as tkinter
import ttk
import test.test_support as support
from test_ttk.support import AbstractTkTest, requires_tcl
support.requires('gui')
class MiscTest(AbstractTkTest, unittest.TestCase):
def test_image_types(self):
image_types = self.root.image_types()
self.assertIsInstance(image_types, tuple)
self.assertIn('photo', image_types)
self.assertIn('bitmap', image_types)
def test_image_names(self):
image_names = self.root.image_names()
self.assertIsInstance(image_names, tuple)
class BitmapImageTest(AbstractTkTest, unittest.TestCase):
@classmethod
def setUpClass(cls):
AbstractTkTest.setUpClass.__func__(cls)
cls.testfile = support.findfile('python.xbm', subdir='imghdrdata')
def test_create_from_file(self):
image = tkinter.BitmapImage('::img::test', master=self.root,
foreground='yellow', background='blue',
file=self.testfile)
self.assertEqual(str(image), '::img::test')
self.assertEqual(image.type(), 'bitmap')
self.assertEqual(image.width(), 16)
self.assertEqual(image.height(), 16)
self.assertIn('::img::test', self.root.image_names())
del image
self.assertNotIn('::img::test', self.root.image_names())
def test_create_from_data(self):
with open(self.testfile, 'rb') as f:
data = f.read()
image = tkinter.BitmapImage('::img::test', master=self.root,
foreground='yellow', background='blue',
data=data)
self.assertEqual(str(image), '::img::test')
self.assertEqual(image.type(), 'bitmap')
self.assertEqual(image.width(), 16)
self.assertEqual(image.height(), 16)
self.assertIn('::img::test', self.root.image_names())
del image
self.assertNotIn('::img::test', self.root.image_names())
def assertEqualStrList(self, actual, expected):
self.assertIsInstance(actual, str)
self.assertEqual(self.root.splitlist(actual), expected)
def test_configure_data(self):
image = tkinter.BitmapImage('::img::test', master=self.root)
self.assertEqual(image['data'], '-data {} {} {} {}')
with open(self.testfile, 'rb') as f:
data = f.read()
image.configure(data=data)
self.assertEqualStrList(image['data'],
('-data', '', '', '', data))
self.assertEqual(image.width(), 16)
self.assertEqual(image.height(), 16)
self.assertEqual(image['maskdata'], '-maskdata {} {} {} {}')
image.configure(maskdata=data)
self.assertEqualStrList(image['maskdata'],
('-maskdata', '', '', '', data))
def test_configure_file(self):
image = tkinter.BitmapImage('::img::test', master=self.root)
self.assertEqual(image['file'], '-file {} {} {} {}')
image.configure(file=self.testfile)
self.assertEqualStrList(image['file'],
('-file', '', '', '',self.testfile))
self.assertEqual(image.width(), 16)
self.assertEqual(image.height(), 16)
self.assertEqual(image['maskfile'], '-maskfile {} {} {} {}')
image.configure(maskfile=self.testfile)
self.assertEqualStrList(image['maskfile'],
('-maskfile', '', '', '', self.testfile))
def test_configure_background(self):
image = tkinter.BitmapImage('::img::test', master=self.root)
self.assertEqual(image['background'], '-background {} {} {} {}')
image.configure(background='blue')
self.assertEqual(image['background'], '-background {} {} {} blue')
def test_configure_foreground(self):
image = tkinter.BitmapImage('::img::test', master=self.root)
self.assertEqual(image['foreground'],
'-foreground {} {} #000000 #000000')
image.configure(foreground='yellow')
self.assertEqual(image['foreground'],
'-foreground {} {} #000000 yellow')
class PhotoImageTest(AbstractTkTest, unittest.TestCase):
@classmethod
def setUpClass(cls):
AbstractTkTest.setUpClass.__func__(cls)
cls.testfile = support.findfile('python.gif', subdir='imghdrdata')
def create(self):
return tkinter.PhotoImage('::img::test', master=self.root,
file=self.testfile)
def colorlist(self, *args):
if tkinter.TkVersion >= 8.6 and self.wantobjects:
return args
else:
return tkinter._join(args)
def check_create_from_file(self, ext):
testfile = support.findfile('python.' + ext, subdir='imghdrdata')
image = tkinter.PhotoImage('::img::test', master=self.root,
file=testfile)
self.assertEqual(str(image), '::img::test')
self.assertEqual(image.type(), 'photo')
self.assertEqual(image.width(), 16)
self.assertEqual(image.height(), 16)
self.assertEqual(image['data'], '')
self.assertEqual(image['file'], testfile)
self.assertIn('::img::test', self.root.image_names())
del image
self.assertNotIn('::img::test', self.root.image_names())
def check_create_from_data(self, ext):
testfile = support.findfile('python.' + ext, subdir='imghdrdata')
with open(testfile, 'rb') as f:
data = f.read()
image = tkinter.PhotoImage('::img::test', master=self.root,
data=data)
self.assertEqual(str(image), '::img::test')
self.assertEqual(image.type(), 'photo')
self.assertEqual(image.width(), 16)
self.assertEqual(image.height(), 16)
self.assertEqual(image['data'], data if self.wantobjects
else data.decode('latin1'))
self.assertEqual(image['file'], '')
self.assertIn('::img::test', self.root.image_names())
del image
self.assertNotIn('::img::test', self.root.image_names())
def test_create_from_ppm_file(self):
self.check_create_from_file('ppm')
def test_create_from_ppm_data(self):
self.check_create_from_data('ppm')
def test_create_from_pgm_file(self):
self.check_create_from_file('pgm')
def test_create_from_pgm_data(self):
self.check_create_from_data('pgm')
def test_create_from_gif_file(self):
self.check_create_from_file('gif')
def test_create_from_gif_data(self):
self.check_create_from_data('gif')
@requires_tcl(8, 6)
def test_create_from_png_file(self):
self.check_create_from_file('png')
@requires_tcl(8, 6)
def test_create_from_png_data(self):
self.check_create_from_data('png')
def test_configure_data(self):
image = tkinter.PhotoImage('::img::test', master=self.root)
self.assertEqual(image['data'], '')
with open(self.testfile, 'rb') as f:
data = f.read()
image.configure(data=data)
self.assertEqual(image['data'], data if self.wantobjects
else data.decode('latin1'))
self.assertEqual(image.width(), 16)
self.assertEqual(image.height(), 16)
def test_configure_format(self):
image = tkinter.PhotoImage('::img::test', master=self.root)
self.assertEqual(image['format'], '')
image.configure(file=self.testfile, format='gif')
self.assertEqual(image['format'], ('gif',) if self.wantobjects
else 'gif')
self.assertEqual(image.width(), 16)
self.assertEqual(image.height(), 16)
def test_configure_file(self):
image = tkinter.PhotoImage('::img::test', master=self.root)
self.assertEqual(image['file'], '')
image.configure(file=self.testfile)
self.assertEqual(image['file'], self.testfile)
self.assertEqual(image.width(), 16)
self.assertEqual(image.height(), 16)
def test_configure_gamma(self):
image = tkinter.PhotoImage('::img::test', master=self.root)
self.assertEqual(image['gamma'], '1.0')
image.configure(gamma=2.0)
self.assertEqual(image['gamma'], '2.0')
def test_configure_width_height(self):
image = tkinter.PhotoImage('::img::test', master=self.root)
self.assertEqual(image['width'], '0')
self.assertEqual(image['height'], '0')
image.configure(width=20)
image.configure(height=10)
self.assertEqual(image['width'], '20')
self.assertEqual(image['height'], '10')
self.assertEqual(image.width(), 20)
self.assertEqual(image.height(), 10)
def test_configure_palette(self):
image = tkinter.PhotoImage('::img::test', master=self.root)
self.assertEqual(image['palette'], '')
image.configure(palette=256)
self.assertEqual(image['palette'], '256')
image.configure(palette='3/4/2')
self.assertEqual(image['palette'], '3/4/2')
def test_blank(self):
image = self.create()
image.blank()
self.assertEqual(image.width(), 16)
self.assertEqual(image.height(), 16)
self.assertEqual(image.get(4, 6), self.colorlist(0, 0, 0))
def test_copy(self):
image = self.create()
image2 = image.copy()
self.assertEqual(image2.width(), 16)
self.assertEqual(image2.height(), 16)
        self.assertEqual(image2.get(4, 6), image.get(4, 6))
def test_subsample(self):
image = self.create()
image2 = image.subsample(2, 3)
self.assertEqual(image2.width(), 8)
self.assertEqual(image2.height(), 6)
self.assertEqual(image2.get(2, 2), image.get(4, 6))
image2 = image.subsample(2)
self.assertEqual(image2.width(), 8)
self.assertEqual(image2.height(), 8)
self.assertEqual(image2.get(2, 3), image.get(4, 6))
def test_zoom(self):
image = self.create()
image2 = image.zoom(2, 3)
self.assertEqual(image2.width(), 32)
self.assertEqual(image2.height(), 48)
self.assertEqual(image2.get(8, 18), image.get(4, 6))
self.assertEqual(image2.get(9, 20), image.get(4, 6))
image2 = image.zoom(2)
self.assertEqual(image2.width(), 32)
self.assertEqual(image2.height(), 32)
self.assertEqual(image2.get(8, 12), image.get(4, 6))
self.assertEqual(image2.get(9, 13), image.get(4, 6))
def test_put(self):
image = self.create()
image.put('{red green} {blue yellow}', to=(4, 6))
self.assertEqual(image.get(4, 6), self.colorlist(255, 0, 0))
self.assertEqual(image.get(5, 6),
self.colorlist(0, 128 if tkinter.TkVersion >= 8.6
else 255, 0))
self.assertEqual(image.get(4, 7), self.colorlist(0, 0, 255))
self.assertEqual(image.get(5, 7), self.colorlist(255, 255, 0))
image.put((('#f00', '#00ff00'), ('#000000fff', '#ffffffff0000')))
self.assertEqual(image.get(0, 0), self.colorlist(255, 0, 0))
self.assertEqual(image.get(1, 0), self.colorlist(0, 255, 0))
self.assertEqual(image.get(0, 1), self.colorlist(0, 0, 255))
self.assertEqual(image.get(1, 1), self.colorlist(255, 255, 0))
def test_get(self):
image = self.create()
self.assertEqual(image.get(4, 6), self.colorlist(62, 116, 162))
self.assertEqual(image.get(0, 0), self.colorlist(0, 0, 0))
self.assertEqual(image.get(15, 15), self.colorlist(0, 0, 0))
self.assertRaises(tkinter.TclError, image.get, -1, 0)
self.assertRaises(tkinter.TclError, image.get, 0, -1)
self.assertRaises(tkinter.TclError, image.get, 16, 15)
self.assertRaises(tkinter.TclError, image.get, 15, 16)
def test_write(self):
image = self.create()
self.addCleanup(support.unlink, support.TESTFN)
image.write(support.TESTFN)
image2 = tkinter.PhotoImage('::img::test2', master=self.root,
format='ppm',
file=support.TESTFN)
self.assertEqual(str(image2), '::img::test2')
self.assertEqual(image2.type(), 'photo')
self.assertEqual(image2.width(), 16)
self.assertEqual(image2.height(), 16)
self.assertEqual(image2.get(0, 0), image.get(0, 0))
self.assertEqual(image2.get(15, 8), image.get(15, 8))
image.write(support.TESTFN, format='gif', from_coords=(4, 6, 6, 9))
image3 = tkinter.PhotoImage('::img::test3', master=self.root,
format='gif',
file=support.TESTFN)
self.assertEqual(str(image3), '::img::test3')
self.assertEqual(image3.type(), 'photo')
self.assertEqual(image3.width(), 2)
self.assertEqual(image3.height(), 3)
self.assertEqual(image3.get(0, 0), image.get(4, 6))
self.assertEqual(image3.get(1, 2), image.get(5, 8))
tests_gui = (MiscTest, BitmapImageTest, PhotoImageTest,)
if __name__ == "__main__":
support.run_unittest(*tests_gui)
| gpl-3.0 |
atul-bhouraskar/django | django/contrib/admin/__init__.py | 562 | 1243 | # ACTION_CHECKBOX_NAME is unused, but should stay since its import from here
# has been referenced in documentation.
from django.contrib.admin.decorators import register
from django.contrib.admin.filters import (
AllValuesFieldListFilter, BooleanFieldListFilter, ChoicesFieldListFilter,
DateFieldListFilter, FieldListFilter, ListFilter, RelatedFieldListFilter,
RelatedOnlyFieldListFilter, SimpleListFilter,
)
from django.contrib.admin.helpers import ACTION_CHECKBOX_NAME
from django.contrib.admin.options import (
HORIZONTAL, VERTICAL, ModelAdmin, StackedInline, TabularInline,
)
from django.contrib.admin.sites import AdminSite, site
from django.utils.module_loading import autodiscover_modules
__all__ = [
"register", "ACTION_CHECKBOX_NAME", "ModelAdmin", "HORIZONTAL", "VERTICAL",
"StackedInline", "TabularInline", "AdminSite", "site", "ListFilter",
"SimpleListFilter", "FieldListFilter", "BooleanFieldListFilter",
"RelatedFieldListFilter", "ChoicesFieldListFilter", "DateFieldListFilter",
"AllValuesFieldListFilter", "RelatedOnlyFieldListFilter", "autodiscover",
]
def autodiscover():
autodiscover_modules('admin', register_to=site)
default_app_config = 'django.contrib.admin.apps.AdminConfig'
| bsd-3-clause |
TheR3ason/map-your-location-history | LatitudePlot.py | 1 | 4022 | #!/usr/bin/env python
# LatitudePlot.py
# Created 30 July 2013
# Created by [email protected]
import os, time, math
from datetime import datetime
from time import mktime
import xml.etree.ElementTree as ET
from PIL import Image, ImageDraw
def GetKmlFiles():
"""Locates and reads local .kml files, returns a list of kml dictionary data"""
KmlData = []
for dirname, dirnames, filenames in os.walk('.'):
for filename in filenames:
sp = filename.split('.')
if sp[len(sp)-1]== "kml": #locate kml files
print "Reading kml file " + filename
KmlData.append(ReadKmlFile(dirname, filename))
print KmlData
return KmlData
def ReadKmlFile(dirname, filename):
"""Parses a single kml file, returns a dict of format {time: [lat, long]}"""
KmlData = {}
kmltime = datetime.time
latlist = []
longlist = []
timelist = []
cnt =0
    f = open(os.path.join(dirname, filename))
line = f.readline()
while line:
if 'when' in line:
timelist.append(time.strptime(ET.fromstring(line)[0].text,"%Y-%m-%dT%H:%M:%SZ"))
if 'coordinates' in line:
latlist.append(float(ET.fromstring(line)[0].text.split(',')[0]))
longlist.append(float(ET.fromstring(line)[0].text.split(',')[1]))
cnt+=1
if cnt % 5000 ==0:
print "Parsing " + filename + ": points found: " + str(cnt)
line = f.readline()
f.close()
return [latlist, longlist, timelist]
def DrawMapData(KmlData,InputImage, OutputImage, itop, ibottom, ileft, iright,xnudge,ynudge):
"""Draws kml line data on top of the specified image"""
im = Image.open(InputImage)
draw = ImageDraw.Draw(im)
cnt =0
for KmlD in KmlData:
for d in range(len(KmlD[0])-1):
#Get points x and y coordinates and draw line
x1=(LongToX(KmlD[0][d],ileft,iright,im.size[0]))+xnudge
y1=(LatToY(KmlD[1][d],itop,ibottom,im.size[1]))+ynudge
x2=(LongToX(KmlD[0][d+1],ileft,iright,im.size[0]))+xnudge
y2=(LatToY(KmlD[1][d+1],itop,ibottom,im.size[1]))+ynudge
            if(EuclidDistance(x1, y1, x2, y2) < 10000):
                # skip implausibly long jumps between consecutive points to cut
                # noise; a threshold around 80 works well, 10000 barely filters
                draw.line((x1, y1, x2, y2), fill=80)
cnt+=1
if cnt % 10000 ==0:
print "Drawing point number " + str(cnt)
im.save(OutputImage)
def LongToX(InputLong, LeftLong, RightLong, ImWidth):
"""Converts a longitude value in to an x coordinate"""
return ScalingFunc(InputLong+360, LeftLong+360, RightLong+360, ImWidth);
def LatToY(InputLat, TopLat, BottomLat, ImHeight):
"""Converts a latitude value in to a y coordinate"""
return ScalingFunc(InputLat+360, TopLat+360, BottomLat+360, ImHeight);
def EuclidDistance(x1, y1, x2, y2):
"""Calculates the euclidean distance between two points"""
return math.sqrt((x1 - x2)**2+(y1 - y2)**2)
def ScalingFunc(inputv, minv, maxv, size):
"""Helps convert latitudes and longitudes to x and y"""
if((float(maxv) -float(minv)) ==0):
return 0
return ((((float(inputv) - float(minv)) / (float(maxv) -float(minv))) * float(size)));
def ParseImageFile():
"""Reads SatelliteImageData.csv containing:
<File name of image to draw data on>,
<image top latitude>,
<image bottom lattitude>,
<image left longitude>,
<image right longitude>,
(optional) <x value nudge>,
(optional) <y value nudge>"""
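    # Hypothetical example of ImageData.csv contents (one comma-separated line):
    #   satellite.png,51.7,51.3,-0.5,0.3,0,0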
with open('ImageData.csv', 'r') as f:
read_data = f.read().split(',')
while 5 <= len(read_data) < 7:
read_data.append(0)
ReturnData = [0]*7
ReturnData[0]=read_data[0]
for i in range(1,7):
ReturnData[i] = float(read_data[i])
return ReturnData
if __name__ == "__main__":
ImageData = ParseImageFile()
DrawMapData(GetKmlFiles(),ImageData[0], "LatitudeData.png", ImageData[1], ImageData[2], ImageData[3], ImageData[4],ImageData[5],ImageData[6])
| apache-2.0 |
erseco/ugr_desarrollo_aplicaciones_internet | Practica_01/Soluciones Practica 1/ej_01_01.py | 2 | 1972 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# Practical exercises for Development of Internet Applications (DAI)
# Copyright (C) 2013 - Zerjillo ([email protected])
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import random
# Generate a random number
numeroAdivinar = random.randint(1, 100)
# Initially the number is unknown
numero = -1
iteraciones = 0
maxIntentos = 10;
print "Bienvenido al wonderfuloso juego de adivinar un número"
while (numero != numeroAdivinar) and (iteraciones < maxIntentos):
leido = input("Adivina un número entre 1 y 100 (te quedan %i intentos)... " % (maxIntentos - iteraciones))
    # Cast to int so we can make comparisons, etc. Blows up if the user
    # doesn't enter a number, but that's not a concern here
numero = int(leido)
if (numero < 1) or (numero > 100):
print "Tu eres tonto, el número tiene que estar entre 1 y 100."
elif (numero < numeroAdivinar):
print "El número buscado es mayor que %i." % (numero)
elif (numero > numeroAdivinar):
print "El número buscado el menor que %i." % (numero)
else:
print "Felicidades, el número buscado era el %i." % (numeroAdivinar)
iteraciones += 1
if (iteraciones == maxIntentos):
print "Lo siento, no te quedan más intentos. El número buscado era el %i. Y tú eres un poco ceporro por no haberlo adivinado." % (numeroAdivinar)
| gpl-3.0 |
felipetomm/POX-Django | pox/web/jsonrpc.py | 45 | 8357 | # Copyright 2011,2012 James McCauley
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at:
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
A library for implementing JSON-RPC based web services
This is lightweight, low on features, and not a whole lot of effort
has been paid to really complying with the JSON-RPC spec. Feel
free to improve it. ;)
It'd be nice to factor the JSON-RPC stuff out so that it could
be used with something besides just HTTP.
Also, it has some capability for compatibility with Qooxdoo.
"""
import json
import sys
import base64  # used by the HTTP basic-auth handling in JSONRPCHandler
from pox.web.webcore import *
from pox.core import core
log = core.getLogger()
# A long polling handler can return this if it notices that the
# connection has closed.
ABORT = object()
class JSONRPCHandler (SplitRequestHandler):
"""
Meant for implementing JSON-RPC web services
Implement RPC methods by prefacing them with "_exec_".
config keys of note:
"auth" is a function which takes a username and password and returns
True if they are a valid user. If set, turns on authentication.
"auth_realm" is the optional authentication realm name.
"qx" turns on Qooxdoo mode by default (it's usually switched on by
seeing a "service" key in the request).
There are a couple of extensions to JSON-RPC:
If you want to use positional AND named parameters, in a request, use
"params" for the former and "kwparams" for the latter.
There's an optional "service" key in requests. This comes from qooxdoo.
If it is given, look for the _exec_ method on some otherobject instead
of self. Put the additional services in an arg named 'services'.
"""
protocol_version = 'HTTP/1.1'
QX_ERR_ILLEGAL_SERVICE = 1
QX_ERR_SERVICE_NOT_FOUND = 2
QX_ERR_CLASS_NOT_FOUND = 3
QX_ERR_METHOD_NOT_FOUND = 4
QX_ERR_PARAMETER_MISMATCH = 5
QX_ERR_PERMISSION_DENIED = 6
QX_ORIGIN_SERVER = 1
QX_ORIGIN_METHOD = 2
ERR_PARSE_ERROR = -32700 # WE USE THIS
ERR_INVALID_REQUEST = -32600
ERR_METHOD_NOT_FOUND = -32601 # WE USE THIS
ERR_INVALID_PARAMS = -32602
ERR_INTERNAL_ERROR = -32603 # WE USE THIS
ERR_SERVER_ERROR = -32000 # to -32099 WE USE THIS
ERR_METHOD_ERROR = 99 # We use this for errors in methods
ERROR_XLATE = {
ERR_PARSE_ERROR : (1, QX_ERR_ILLEGAL_SERVICE), # Nonsense
ERR_METHOD_NOT_FOUND : (1, QX_ERR_METHOD_NOT_FOUND),
ERR_INTERNAL_ERROR : (),
ERR_SERVER_ERROR : (),
}
_qx = False
def _init (self):
# Maybe the following arg-adding feature should just be part of
# SplitRequestHandler?
for k,v in self.args.iteritems():
setattr(self, "_arg_" + k, v)
self.auth_function = self.args.get('auth', None)
self.auth_realm = self.args.get('auth_realm', "JSONRPC")
self._qx = self.args.get('qx', self._qx)
def _send_auth_header (self):
if self.auth_function:
self.send_header('WWW-Authenticate',
'Basic realm="%s"' % (self.auth_realm,))
def _do_auth (self):
if not self.auth_function:
return True
auth = self.headers.get("Authorization", "").strip().lower()
success = False
if auth.startswith("basic "):
try:
auth = base64.decodestring(auth[6:].strip()).split(':', 1)
success = self.auth_function(auth[0], auth[1])
except:
pass
if not success:
self.send_response(401, "Authorization Required")
self._send_auth_header()
self.end_headers()
return success
  def _translate_error (self, e):
    if not 'error' in e: return
    err = e['error']
    if self._qx:
      if err['code'] < 0:
        # Missing or empty table entries fall back to a generic QX error
        xlate = self.ERROR_XLATE.get(err['code'])
        if not xlate:
          xlate = (1, self.QX_ERR_ILLEGAL_SERVICE)
        err['code'], err['origin'] = xlate
      else:
        err['origin'] = self.QX_ORIGIN_METHOD
def _handle (self, data):
try:
try:
service = self
if 'services' in self.args:
if 'service' in data:
service = self.args['services'].get(data['service'], self)
self._qx = True # This is a qooxdoo request
method = "_exec_" + data.get('method')
method = getattr(service, method)
except:
response = {}
response['error'] = {'code':self.ERR_METHOD_NOT_FOUND,
'message':'Method not found'}
return response
params = data.get('params', [])
if isinstance(params, dict):
kw = params
params = []
else:
kw = data.get('kwparams', {})
try:
r = method(*params,**kw)
#TODO: jsonrpc version?
return r
except:
response = {}
t,v,_ = sys.exc_info()
response['error'] = {'message': "%s: %s" % (t,v),
'code':self.ERR_METHOD_ERROR}
import traceback
response['error']['data'] = {'traceback':traceback.format_exc()}
log.exception("While handling %s...", data.get('method'))
return response
except:
response = {}
t,v,_ = sys.exc_info()
response['error'] = {'message': "%s: %s" % (t,v),
'code':self.ERR_INTERNAL_ERROR}
return response
def do_POST (self):
if not self._do_auth():
return
dumps_opts = {}
#FIXME: this is a hack
if 'pretty' in self.path:
dumps_opts = {'sort_keys':True, 'indent':2}
def reply (response):
orig = response
#if not isinstance(response, basestring):
if isinstance(response, list):
for r in response: self._translate_error(r)
else:
self._translate_error(response)
response = json.dumps(response, default=str, **dumps_opts)
response = response.strip()
if len(response) and not response.endswith("\n"): response += "\n"
try:
self.send_response(200, "OK")
self.send_header("Content-Type", "application/json")
self.send_header("Content-Length", len(response))
self.end_headers()
self.wfile.write(response)
except IOError as e:
if e.errno == 32:
if isinstance(orig, dict) and 'error' in orig:
log.info("Socket closed when writing error response")
else:
log.warning("Socket closed when writing response")
#log.debug(" response was: " + response)
else:
log.exception("Exception while trying to send JSON-RPC response")
try:
self.wfile.close()
except:
pass
return False
except:
log.exception("Exception while trying to send JSON-RPC response")
return False
return True
l = self.headers.get("Content-Length", "")
data = ''
if l == "":
data = self.rfile.read()
else:
data = self.rfile.read(int(l))
try:
data = json.loads(data)
except:
response = {}
response['error'] = {'code':self.ERR_PARSE_ERROR,
'message':'Parse error'}
return reply(response)
single = False
if not isinstance(data, list):
data = [data]
single = True
responses = []
for req in data:
response = self._handle(req) # Should never raise an exception
if response is ABORT:
return
if 'id' in req or 'error' in response:
response['id'] = req.get('id')
responses.append(response)
if len(responses) == 0:
responses = ''
else:
if single:
responses = responses[0]
reply(responses)
class QXJSONRPCHandler (JSONRPCHandler):
"""
A subclass of JSONRPCHandler which speaks something closer to
qooxdoo's version JSON-RPC.
"""
_qx = True
#TODO: Implement the <SCRIPT> based GET method for cross-domain
def make_error (msg = "Unknown Error",
code = JSONRPCHandler.ERR_SERVER_ERROR,
data = None):
e = {'code':code,'message':msg}
if data is not None:
e['data'] = data
r = {'error':e}
return r
| apache-2.0 |
tedder/ansible | lib/ansible/modules/network/cloudengine/ce_netconf.py | 15 | 5948 | #!/usr/bin/python
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: ce_netconf
version_added: "2.4"
short_description: Run an arbitrary netconf command on HUAWEI CloudEngine switches.
description:
- Sends an arbitrary netconf command on HUAWEI CloudEngine switches.
author:
- wangdezhuang (@QijunPan)
options:
rpc:
description:
- The type of rpc.
required: true
choices: ['get', 'edit-config', 'execute-action', 'execute-cli']
cfg_xml:
description:
- The config xml string.
required: true
'''
EXAMPLES = '''
- name: CloudEngine netconf test
hosts: cloudengine
connection: local
gather_facts: no
vars:
cli:
host: "{{ inventory_hostname }}"
port: "{{ ansible_ssh_port }}"
username: "{{ username }}"
password: "{{ password }}"
transport: cli
tasks:
- name: "Netconf get operation"
ce_netconf:
rpc: get
cfg_xml: '<filter type=\"subtree\">
<vlan xmlns=\"http://www.huawei.com/netconf/vrp\" content-version=\"1.0\" format-version=\"1.0\">
<vlans>
<vlan>
<vlanId>10</vlanId>
<vlanif>
<ifName></ifName>
<cfgBand></cfgBand>
<dampTime></dampTime>
</vlanif>
</vlan>
</vlans>
</vlan>
</filter>'
provider: "{{ cli }}"
- name: "Netconf edit-config operation"
ce_netconf:
rpc: edit-config
cfg_xml: '<config>
<aaa xmlns=\"http://www.huawei.com/netconf/vrp\" content-version=\"1.0\" format-version=\"1.0\">
<authenticationSchemes>
<authenticationScheme operation=\"create\">
<authenSchemeName>default_wdz</authenSchemeName>
<firstAuthenMode>local</firstAuthenMode>
<secondAuthenMode>invalid</secondAuthenMode>
</authenticationScheme>
</authenticationSchemes>
</aaa>
</config>'
provider: "{{ cli }}"
- name: "Netconf execute-action operation"
ce_netconf:
rpc: execute-action
cfg_xml: '<action>
<l2mc xmlns=\"http://www.huawei.com/netconf/vrp\" content-version=\"1.0\" format-version=\"1.0\">
<l2McResetAllVlanStatis>
<addrFamily>ipv4unicast</addrFamily>
</l2McResetAllVlanStatis>
</l2mc>
</action>'
provider: "{{ cli }}"
'''
RETURN = '''
changed:
description: check to see if a change was made on the device
returned: always
type: bool
sample: true
end_state:
description: k/v pairs of aaa params after module execution
returned: always
type: dict
sample: {"result": ["ok"]}
'''
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.network.cloudengine.ce import get_nc_config, set_nc_config
from ansible.module_utils.network.cloudengine.ce import execute_nc_action, ce_argument_spec, execute_nc_cli
def main():
""" main """
argument_spec = dict(
rpc=dict(choices=['get', 'edit-config',
'execute-action', 'execute-cli'], required=True),
cfg_xml=dict(required=True)
)
argument_spec.update(ce_argument_spec)
module = AnsibleModule(argument_spec=argument_spec, supports_check_mode=True)
rpc = module.params['rpc']
cfg_xml = module.params['cfg_xml']
changed = False
end_state = dict()
if rpc == "get":
response = get_nc_config(module, cfg_xml)
if "<data/>" in response:
end_state["result"] = "<data/>"
else:
tmp1 = response.split(r"<data>")
tmp2 = tmp1[1].split(r"</data>")
result = tmp2[0].split("\n")
end_state["result"] = result
elif rpc == "edit-config":
response = set_nc_config(module, cfg_xml)
if "<ok/>" not in response:
module.fail_json(msg='rpc edit-config failed.')
changed = True
end_state["result"] = "ok"
elif rpc == "execute-action":
response = execute_nc_action(module, cfg_xml)
if "<ok/>" not in response:
module.fail_json(msg='rpc execute-action failed.')
changed = True
end_state["result"] = "ok"
elif rpc == "execute-cli":
response = execute_nc_cli(module, cfg_xml)
if "<data/>" in response:
end_state["result"] = "<data/>"
else:
tmp1 = response.xml.split(r"<data>")
tmp2 = tmp1[1].split(r"</data>")
result = tmp2[0].split("\n")
end_state["result"] = result
else:
module.fail_json(msg='please input correct rpc.')
results = dict()
results['changed'] = changed
results['end_state'] = end_state
module.exit_json(**results)
if __name__ == '__main__':
main()
| gpl-3.0 |
jasonbot/django | tests/template_tests/filter_tests/test_dictsort.py | 342 | 1477 | from django.template.defaultfilters import dictsort
from django.test import SimpleTestCase
class FunctionTests(SimpleTestCase):
def test_sort(self):
sorted_dicts = dictsort(
[{'age': 23, 'name': 'Barbara-Ann'},
{'age': 63, 'name': 'Ra Ra Rasputin'},
{'name': 'Jonny B Goode', 'age': 18}],
'age',
)
self.assertEqual(
[sorted(dict.items()) for dict in sorted_dicts],
[[('age', 18), ('name', 'Jonny B Goode')],
[('age', 23), ('name', 'Barbara-Ann')],
[('age', 63), ('name', 'Ra Ra Rasputin')]],
)
def test_dictsort_complex_sorting_key(self):
"""
Since dictsort uses template.Variable under the hood, it can sort
on keys like 'foo.bar'.
"""
data = [
{'foo': {'bar': 1, 'baz': 'c'}},
{'foo': {'bar': 2, 'baz': 'b'}},
{'foo': {'bar': 3, 'baz': 'a'}},
]
sorted_data = dictsort(data, 'foo.baz')
self.assertEqual([d['foo']['bar'] for d in sorted_data], [3, 2, 1])
def test_invalid_values(self):
"""
If dictsort is passed something other than a list of dictionaries,
fail silently.
"""
self.assertEqual(dictsort([1, 2, 3], 'age'), '')
self.assertEqual(dictsort('Hello!', 'age'), '')
self.assertEqual(dictsort({'a': 1}, 'age'), '')
self.assertEqual(dictsort(1, 'age'), '')
| bsd-3-clause |
rahulsharma1991/scrapy | tests/test_utils_deprecate.py | 140 | 10526 | # -*- coding: utf-8 -*-
from __future__ import absolute_import
import inspect
import unittest
import warnings
from scrapy.exceptions import ScrapyDeprecationWarning
from scrapy.utils.deprecate import create_deprecated_class, update_classpath
from tests import mock
class MyWarning(UserWarning):
pass
class SomeBaseClass(object):
pass
class NewName(SomeBaseClass):
pass
class WarnWhenSubclassedTest(unittest.TestCase):
def _mywarnings(self, w, category=MyWarning):
return [x for x in w if x.category is MyWarning]
def test_no_warning_on_definition(self):
with warnings.catch_warnings(record=True) as w:
Deprecated = create_deprecated_class('Deprecated', NewName)
w = self._mywarnings(w)
self.assertEqual(w, [])
def test_subclassing_warning_message(self):
Deprecated = create_deprecated_class('Deprecated', NewName,
warn_category=MyWarning)
with warnings.catch_warnings(record=True) as w:
class UserClass(Deprecated):
pass
w = self._mywarnings(w)
self.assertEqual(len(w), 1)
self.assertEqual(
str(w[0].message),
"tests.test_utils_deprecate.UserClass inherits from "
"deprecated class tests.test_utils_deprecate.Deprecated, "
"please inherit from tests.test_utils_deprecate.NewName."
" (warning only on first subclass, there may be others)"
)
self.assertEqual(w[0].lineno, inspect.getsourcelines(UserClass)[1])
def test_custom_class_paths(self):
Deprecated = create_deprecated_class('Deprecated', NewName,
new_class_path='foo.NewClass',
old_class_path='bar.OldClass',
warn_category=MyWarning)
with warnings.catch_warnings(record=True) as w:
class UserClass(Deprecated):
pass
_ = Deprecated()
w = self._mywarnings(w)
self.assertEqual(len(w), 2)
self.assertIn('foo.NewClass', str(w[0].message))
self.assertIn('bar.OldClass', str(w[0].message))
self.assertIn('foo.NewClass', str(w[1].message))
self.assertIn('bar.OldClass', str(w[1].message))
def test_subclassing_warns_only_on_direct_childs(self):
Deprecated = create_deprecated_class('Deprecated', NewName,
warn_once=False,
warn_category=MyWarning)
with warnings.catch_warnings(record=True) as w:
class UserClass(Deprecated):
pass
class NoWarnOnMe(UserClass):
pass
w = self._mywarnings(w)
self.assertEqual(len(w), 1)
self.assertIn('UserClass', str(w[0].message))
def test_subclassing_warns_once_by_default(self):
Deprecated = create_deprecated_class('Deprecated', NewName,
warn_category=MyWarning)
with warnings.catch_warnings(record=True) as w:
class UserClass(Deprecated):
pass
class FooClass(Deprecated):
pass
class BarClass(Deprecated):
pass
w = self._mywarnings(w)
self.assertEqual(len(w), 1)
self.assertIn('UserClass', str(w[0].message))
def test_warning_on_instance(self):
Deprecated = create_deprecated_class('Deprecated', NewName,
warn_category=MyWarning)
# ignore subclassing warnings
with warnings.catch_warnings():
warnings.simplefilter('ignore', ScrapyDeprecationWarning)
class UserClass(Deprecated):
pass
with warnings.catch_warnings(record=True) as w:
_, lineno = Deprecated(), inspect.getlineno(inspect.currentframe())
_ = UserClass() # subclass instances don't warn
w = self._mywarnings(w)
self.assertEqual(len(w), 1)
self.assertEqual(
str(w[0].message),
"tests.test_utils_deprecate.Deprecated is deprecated, "
"instantiate tests.test_utils_deprecate.NewName instead."
)
self.assertEqual(w[0].lineno, lineno)
def test_warning_auto_message(self):
with warnings.catch_warnings(record=True) as w:
Deprecated = create_deprecated_class('Deprecated', NewName)
class UserClass2(Deprecated):
pass
msg = str(w[0].message)
self.assertIn("tests.test_utils_deprecate.NewName", msg)
self.assertIn("tests.test_utils_deprecate.Deprecated", msg)
def test_issubclass(self):
with warnings.catch_warnings():
warnings.simplefilter('ignore', ScrapyDeprecationWarning)
DeprecatedName = create_deprecated_class('DeprecatedName', NewName)
class UpdatedUserClass1(NewName):
pass
class UpdatedUserClass1a(NewName):
pass
class OutdatedUserClass1(DeprecatedName):
pass
class OutdatedUserClass1a(DeprecatedName):
pass
class UnrelatedClass(object):
pass
class OldStyleClass:
pass
assert issubclass(UpdatedUserClass1, NewName)
assert issubclass(UpdatedUserClass1a, NewName)
assert issubclass(UpdatedUserClass1, DeprecatedName)
assert issubclass(UpdatedUserClass1a, DeprecatedName)
assert issubclass(OutdatedUserClass1, DeprecatedName)
assert not issubclass(UnrelatedClass, DeprecatedName)
assert not issubclass(OldStyleClass, DeprecatedName)
assert not issubclass(OldStyleClass, DeprecatedName)
assert not issubclass(OutdatedUserClass1, OutdatedUserClass1a)
assert not issubclass(OutdatedUserClass1a, OutdatedUserClass1)
self.assertRaises(TypeError, issubclass, object(), DeprecatedName)
def test_isinstance(self):
with warnings.catch_warnings():
warnings.simplefilter('ignore', ScrapyDeprecationWarning)
DeprecatedName = create_deprecated_class('DeprecatedName', NewName)
class UpdatedUserClass2(NewName):
pass
class UpdatedUserClass2a(NewName):
pass
class OutdatedUserClass2(DeprecatedName):
pass
class OutdatedUserClass2a(DeprecatedName):
pass
class UnrelatedClass(object):
pass
class OldStyleClass:
pass
assert isinstance(UpdatedUserClass2(), NewName)
assert isinstance(UpdatedUserClass2a(), NewName)
assert isinstance(UpdatedUserClass2(), DeprecatedName)
assert isinstance(UpdatedUserClass2a(), DeprecatedName)
assert isinstance(OutdatedUserClass2(), DeprecatedName)
assert isinstance(OutdatedUserClass2a(), DeprecatedName)
assert not isinstance(OutdatedUserClass2a(), OutdatedUserClass2)
assert not isinstance(OutdatedUserClass2(), OutdatedUserClass2a)
assert not isinstance(UnrelatedClass(), DeprecatedName)
assert not isinstance(OldStyleClass(), DeprecatedName)
def test_clsdict(self):
with warnings.catch_warnings():
warnings.simplefilter('ignore', ScrapyDeprecationWarning)
Deprecated = create_deprecated_class('Deprecated', NewName, {'foo': 'bar'})
self.assertEqual(Deprecated.foo, 'bar')
def test_deprecate_a_class_with_custom_metaclass(self):
Meta1 = type('Meta1', (type,), {})
New = Meta1('New', (), {})
Deprecated = create_deprecated_class('Deprecated', New)
def test_deprecate_subclass_of_deprecated_class(self):
with warnings.catch_warnings(record=True) as w:
warnings.simplefilter('always')
Deprecated = create_deprecated_class('Deprecated', NewName,
warn_category=MyWarning)
AlsoDeprecated = create_deprecated_class('AlsoDeprecated', Deprecated,
new_class_path='foo.Bar',
warn_category=MyWarning)
w = self._mywarnings(w)
self.assertEqual(len(w), 0, str(map(str, w)))
with warnings.catch_warnings(record=True) as w:
AlsoDeprecated()
class UserClass(AlsoDeprecated):
pass
w = self._mywarnings(w)
self.assertEqual(len(w), 2)
self.assertIn('AlsoDeprecated', str(w[0].message))
self.assertIn('foo.Bar', str(w[0].message))
self.assertIn('AlsoDeprecated', str(w[1].message))
self.assertIn('foo.Bar', str(w[1].message))
def test_inspect_stack(self):
with mock.patch('inspect.stack', side_effect=IndexError):
with warnings.catch_warnings(record=True) as w:
DeprecatedName = create_deprecated_class('DeprecatedName', NewName)
class SubClass(DeprecatedName):
pass
self.assertIn("Error detecting parent module", str(w[0].message))
@mock.patch('scrapy.utils.deprecate.DEPRECATION_RULES',
[('scrapy.contrib.pipeline.', 'scrapy.pipelines.'),
('scrapy.contrib.', 'scrapy.extensions.')])
class UpdateClassPathTest(unittest.TestCase):
def test_old_path_gets_fixed(self):
with warnings.catch_warnings(record=True) as w:
output = update_classpath('scrapy.contrib.debug.Debug')
self.assertEqual(output, 'scrapy.extensions.debug.Debug')
self.assertEqual(len(w), 1)
self.assertIn("scrapy.contrib.debug.Debug", str(w[0].message))
self.assertIn("scrapy.extensions.debug.Debug", str(w[0].message))
def test_sorted_replacement(self):
with warnings.catch_warnings():
warnings.simplefilter('ignore', ScrapyDeprecationWarning)
output = update_classpath('scrapy.contrib.pipeline.Pipeline')
self.assertEqual(output, 'scrapy.pipelines.Pipeline')
def test_unmatched_path_stays_the_same(self):
with warnings.catch_warnings(record=True) as w:
output = update_classpath('scrapy.unmatched.Path')
self.assertEqual(output, 'scrapy.unmatched.Path')
self.assertEqual(len(w), 0)
| bsd-3-clause |
Outernet-Project/librarian | tests/utils/test_route_mixins.py | 1 | 4074 | import mock
from bottle_utils import csrf
import librarian.utils.route_mixins as mod
# Common test helper
class MockedRouteBase(object):
def __init__(self, *args, **kwargs):
# this way all tests will get a separate instance of the mock
# object when they instantiate their routes, because otherwise
# a class level mock would carry over state from previous tests
self.request = mock.Mock()
self.response = mock.Mock()
def get(self, *args, **kwargs):
return None
def post(self, *args, **kwargs):
return None
def get_default_context(self):
return {'default': 'default'}
# CSRFRouteMixin tests
@mock.patch.object(csrf, 'response')
@mock.patch.object(csrf, 'request')
def test_csrf_route_mixin_get(request, response):
request.get_cookie.return_value = ''
class TestRoute(mod.CSRFRouteMixin, MockedRouteBase):
pass
inst = TestRoute()
inst.get()
assert hasattr(inst.request, 'csrf_token')
@mock.patch.object(csrf, 'abort')
@mock.patch.object(csrf, 'response')
@mock.patch.object(csrf, 'request')
def test_csrf_route_mixin_post(request, response, abort):
request.get_cookie.return_value = ''
class TestRoute(mod.CSRFRouteMixin, MockedRouteBase):
pass
inst = TestRoute()
inst.post()
assert abort.called
# RedirectRouteMixin tests
def test_redirect_route_mixin_get_next_path_found():
class TestRoute(mod.RedirectRouteMixin, MockedRouteBase):
pass
inst = TestRoute()
assert inst.get_next_path() == inst.request.params.get.return_value
def test_redirect_route_mixin_get_next_path_default():
class TestRoute(mod.RedirectRouteMixin, MockedRouteBase):
pass
inst = TestRoute()
inst.request.params = {}
assert inst.get_next_path() == inst.default_next_path
@mock.patch.object(mod.RedirectRouteMixin, 'get_next_path')
def test_redirect_route_mixin_get_default_context(get_next_path):
class TestRoute(mod.RedirectRouteMixin, MockedRouteBase):
pass
inst = TestRoute()
exp = {'default': 'default',
inst.next_context_parameter_name: inst.get_next_path.return_value}
assert inst.get_default_context() == exp
assert inst.get_next_path.called
@mock.patch.object(mod, 'i18n_path')
@mock.patch.object(mod.RedirectRouteMixin, 'get_next_path')
def test_redirect_route_mixin_get_next_url(get_next_path, i18n_path):
i18n_path.return_value = '/en/some/path/'
class TestRoute(mod.RedirectRouteMixin, MockedRouteBase):
pass
inst = TestRoute()
inst.request.url = 'http://localhost/here/there/'
assert inst.get_next_url() == 'http://localhost/en/some/path/'
assert inst.get_next_path.called
i18n_path.assert_called_with(get_next_path.return_value)
@mock.patch.object(mod.RedirectRouteMixin, 'get_next_path')
def test_redirect_route_mixin_add_next_parameter(get_next_path):
get_next_path.return_value = '/next/path/'
class TestRoute(mod.RedirectRouteMixin, MockedRouteBase):
pass
inst = TestRoute()
exp = '/main/route/?next=/next/path/'
assert inst.add_next_parameter('/main/route/') == exp
@mock.patch.object(mod.RedirectRouteMixin, 'get_next_url')
def test_redirect_route_mixin_perform_redirect_default(get_next_url):
class TestRoute(mod.RedirectRouteMixin, MockedRouteBase):
pass
inst = TestRoute()
inst.perform_redirect()
inst.response.set_header.assert_called_with('Location',
get_next_url.return_value)
assert inst.response.status == 303
@mock.patch.object(mod.RedirectRouteMixin, 'get_next_url')
def test_redirect_route_mixin_perform_redirect_custom(get_next_url):
class TestRoute(mod.RedirectRouteMixin, MockedRouteBase):
pass
inst = TestRoute()
custom_url = 'outernet.is'
custom_status = 302
inst.perform_redirect(custom_url, custom_status)
inst.response.set_header.assert_called_with('Location', custom_url)
assert inst.response.status == custom_status
| gpl-3.0 |
MalloyPower/parsing-python | front-end/testsuite-python-lib/Python-2.1/Lib/encodings/iso8859_5.py | 4 | 5385 | """ Python Character Mapping Codec generated from '8859-5.TXT' with gencodec.py.
Written by Marc-Andre Lemburg ([email protected]).
(c) Copyright CNRI, All Rights Reserved. NO WARRANTY.
(c) Copyright 2000 Guido van Rossum.
"""#"
import codecs
### Codec APIs
class Codec(codecs.Codec):
def encode(self,input,errors='strict'):
return codecs.charmap_encode(input,errors,encoding_map)
def decode(self,input,errors='strict'):
return codecs.charmap_decode(input,errors,decoding_map)
class StreamWriter(Codec,codecs.StreamWriter):
pass
class StreamReader(Codec,codecs.StreamReader):
pass
### encodings module API
def getregentry():
return (Codec().encode,Codec().decode,StreamReader,StreamWriter)
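# Illustrative (hypothetical) round trip through this codec once registered:
#   u'\u0414'.encode('iso8859_5') -> '\xb4'  (CYRILLIC CAPITAL LETTER DE)
#   '\xb4'.decode('iso8859_5')   -> u'\u0414'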
### Decoding Map
decoding_map = codecs.make_identity_dict(range(256))
decoding_map.update({
0x00a1: 0x0401, # CYRILLIC CAPITAL LETTER IO
0x00a2: 0x0402, # CYRILLIC CAPITAL LETTER DJE
0x00a3: 0x0403, # CYRILLIC CAPITAL LETTER GJE
0x00a4: 0x0404, # CYRILLIC CAPITAL LETTER UKRAINIAN IE
0x00a5: 0x0405, # CYRILLIC CAPITAL LETTER DZE
0x00a6: 0x0406, # CYRILLIC CAPITAL LETTER BYELORUSSIAN-UKRAINIAN I
0x00a7: 0x0407, # CYRILLIC CAPITAL LETTER YI
0x00a8: 0x0408, # CYRILLIC CAPITAL LETTER JE
0x00a9: 0x0409, # CYRILLIC CAPITAL LETTER LJE
0x00aa: 0x040a, # CYRILLIC CAPITAL LETTER NJE
0x00ab: 0x040b, # CYRILLIC CAPITAL LETTER TSHE
0x00ac: 0x040c, # CYRILLIC CAPITAL LETTER KJE
0x00ae: 0x040e, # CYRILLIC CAPITAL LETTER SHORT U
0x00af: 0x040f, # CYRILLIC CAPITAL LETTER DZHE
0x00b0: 0x0410, # CYRILLIC CAPITAL LETTER A
0x00b1: 0x0411, # CYRILLIC CAPITAL LETTER BE
0x00b2: 0x0412, # CYRILLIC CAPITAL LETTER VE
0x00b3: 0x0413, # CYRILLIC CAPITAL LETTER GHE
0x00b4: 0x0414, # CYRILLIC CAPITAL LETTER DE
0x00b5: 0x0415, # CYRILLIC CAPITAL LETTER IE
0x00b6: 0x0416, # CYRILLIC CAPITAL LETTER ZHE
0x00b7: 0x0417, # CYRILLIC CAPITAL LETTER ZE
0x00b8: 0x0418, # CYRILLIC CAPITAL LETTER I
0x00b9: 0x0419, # CYRILLIC CAPITAL LETTER SHORT I
0x00ba: 0x041a, # CYRILLIC CAPITAL LETTER KA
0x00bb: 0x041b, # CYRILLIC CAPITAL LETTER EL
0x00bc: 0x041c, # CYRILLIC CAPITAL LETTER EM
0x00bd: 0x041d, # CYRILLIC CAPITAL LETTER EN
0x00be: 0x041e, # CYRILLIC CAPITAL LETTER O
0x00bf: 0x041f, # CYRILLIC CAPITAL LETTER PE
0x00c0: 0x0420, # CYRILLIC CAPITAL LETTER ER
0x00c1: 0x0421, # CYRILLIC CAPITAL LETTER ES
0x00c2: 0x0422, # CYRILLIC CAPITAL LETTER TE
0x00c3: 0x0423, # CYRILLIC CAPITAL LETTER U
0x00c4: 0x0424, # CYRILLIC CAPITAL LETTER EF
0x00c5: 0x0425, # CYRILLIC CAPITAL LETTER HA
0x00c6: 0x0426, # CYRILLIC CAPITAL LETTER TSE
0x00c7: 0x0427, # CYRILLIC CAPITAL LETTER CHE
0x00c8: 0x0428, # CYRILLIC CAPITAL LETTER SHA
0x00c9: 0x0429, # CYRILLIC CAPITAL LETTER SHCHA
0x00ca: 0x042a, # CYRILLIC CAPITAL LETTER HARD SIGN
0x00cb: 0x042b, # CYRILLIC CAPITAL LETTER YERU
0x00cc: 0x042c, # CYRILLIC CAPITAL LETTER SOFT SIGN
0x00cd: 0x042d, # CYRILLIC CAPITAL LETTER E
0x00ce: 0x042e, # CYRILLIC CAPITAL LETTER YU
0x00cf: 0x042f, # CYRILLIC CAPITAL LETTER YA
0x00d0: 0x0430, # CYRILLIC SMALL LETTER A
0x00d1: 0x0431, # CYRILLIC SMALL LETTER BE
0x00d2: 0x0432, # CYRILLIC SMALL LETTER VE
0x00d3: 0x0433, # CYRILLIC SMALL LETTER GHE
0x00d4: 0x0434, # CYRILLIC SMALL LETTER DE
0x00d5: 0x0435, # CYRILLIC SMALL LETTER IE
0x00d6: 0x0436, # CYRILLIC SMALL LETTER ZHE
0x00d7: 0x0437, # CYRILLIC SMALL LETTER ZE
0x00d8: 0x0438, # CYRILLIC SMALL LETTER I
0x00d9: 0x0439, # CYRILLIC SMALL LETTER SHORT I
0x00da: 0x043a, # CYRILLIC SMALL LETTER KA
0x00db: 0x043b, # CYRILLIC SMALL LETTER EL
0x00dc: 0x043c, # CYRILLIC SMALL LETTER EM
0x00dd: 0x043d, # CYRILLIC SMALL LETTER EN
0x00de: 0x043e, # CYRILLIC SMALL LETTER O
0x00df: 0x043f, # CYRILLIC SMALL LETTER PE
0x00e0: 0x0440, # CYRILLIC SMALL LETTER ER
0x00e1: 0x0441, # CYRILLIC SMALL LETTER ES
0x00e2: 0x0442, # CYRILLIC SMALL LETTER TE
0x00e3: 0x0443, # CYRILLIC SMALL LETTER U
0x00e4: 0x0444, # CYRILLIC SMALL LETTER EF
0x00e5: 0x0445, # CYRILLIC SMALL LETTER HA
0x00e6: 0x0446, # CYRILLIC SMALL LETTER TSE
0x00e7: 0x0447, # CYRILLIC SMALL LETTER CHE
0x00e8: 0x0448, # CYRILLIC SMALL LETTER SHA
0x00e9: 0x0449, # CYRILLIC SMALL LETTER SHCHA
0x00ea: 0x044a, # CYRILLIC SMALL LETTER HARD SIGN
0x00eb: 0x044b, # CYRILLIC SMALL LETTER YERU
0x00ec: 0x044c, # CYRILLIC SMALL LETTER SOFT SIGN
0x00ed: 0x044d, # CYRILLIC SMALL LETTER E
0x00ee: 0x044e, # CYRILLIC SMALL LETTER YU
0x00ef: 0x044f, # CYRILLIC SMALL LETTER YA
0x00f0: 0x2116, # NUMERO SIGN
0x00f1: 0x0451, # CYRILLIC SMALL LETTER IO
0x00f2: 0x0452, # CYRILLIC SMALL LETTER DJE
0x00f3: 0x0453, # CYRILLIC SMALL LETTER GJE
0x00f4: 0x0454, # CYRILLIC SMALL LETTER UKRAINIAN IE
0x00f5: 0x0455, # CYRILLIC SMALL LETTER DZE
0x00f6: 0x0456, # CYRILLIC SMALL LETTER BYELORUSSIAN-UKRAINIAN I
0x00f7: 0x0457, # CYRILLIC SMALL LETTER YI
0x00f8: 0x0458, # CYRILLIC SMALL LETTER JE
0x00f9: 0x0459, # CYRILLIC SMALL LETTER LJE
0x00fa: 0x045a, # CYRILLIC SMALL LETTER NJE
0x00fb: 0x045b, # CYRILLIC SMALL LETTER TSHE
0x00fc: 0x045c, # CYRILLIC SMALL LETTER KJE
0x00fd: 0x00a7, # SECTION SIGN
0x00fe: 0x045e, # CYRILLIC SMALL LETTER SHORT U
0x00ff: 0x045f, # CYRILLIC SMALL LETTER DZHE
})
### Encoding Map
encoding_map = {}
for k,v in decoding_map.items():
encoding_map[v] = k
| mit |
krishnazure/Flask | Work/TriviaMVA/TriviaMVA/env/Lib/site-packages/pip/_vendor/requests/packages/urllib3/exceptions.py | 330 | 3364 | # urllib3/exceptions.py
# Copyright 2008-2013 Andrey Petrov and contributors (see CONTRIBUTORS.txt)
#
# This module is part of urllib3 and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
## Base Exceptions
class HTTPError(Exception):
"Base exception used by this module."
pass
class PoolError(HTTPError):
"Base exception for errors caused within a pool."
def __init__(self, pool, message):
self.pool = pool
HTTPError.__init__(self, "%s: %s" % (pool, message))
def __reduce__(self):
# For pickling purposes.
return self.__class__, (None, None)
class RequestError(PoolError):
"Base exception for PoolErrors that have associated URLs."
def __init__(self, pool, url, message):
self.url = url
PoolError.__init__(self, pool, message)
def __reduce__(self):
# For pickling purposes.
return self.__class__, (None, self.url, None)
class SSLError(HTTPError):
"Raised when SSL certificate fails in an HTTPS connection."
pass
class ProxyError(HTTPError):
"Raised when the connection to a proxy fails."
pass
class ConnectionError(HTTPError):
"Raised when a normal connection fails."
pass
class DecodeError(HTTPError):
"Raised when automatic decoding based on Content-Type fails."
pass
## Leaf Exceptions
class MaxRetryError(RequestError):
"Raised when the maximum number of retries is exceeded."
def __init__(self, pool, url, reason=None):
self.reason = reason
message = "Max retries exceeded with url: %s" % url
if reason:
message += " (Caused by %s: %s)" % (type(reason), reason)
else:
message += " (Caused by redirect)"
RequestError.__init__(self, pool, url, message)
class HostChangedError(RequestError):
"Raised when an existing pool gets a request for a foreign host."
def __init__(self, pool, url, retries=3):
message = "Tried to open a foreign host with url: %s" % url
RequestError.__init__(self, pool, url, message)
self.retries = retries
class TimeoutStateError(HTTPError):
""" Raised when passing an invalid state to a timeout """
pass
class TimeoutError(HTTPError):
""" Raised when a socket timeout error occurs.
Catching this error will catch both :exc:`ReadTimeoutErrors
<ReadTimeoutError>` and :exc:`ConnectTimeoutErrors <ConnectTimeoutError>`.
"""
pass
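# Illustrative (hypothetical) use: catching the base class covers both kinds.
#
#   try:
#       pool.request('GET', '/slow-endpoint', timeout=1.0)
#   except TimeoutError:
#       pass  # matches ReadTimeoutError and ConnectTimeoutError alike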
class ReadTimeoutError(TimeoutError, RequestError):
"Raised when a socket timeout occurs while receiving data from a server"
pass
# This timeout error does not have a URL attached and needs to inherit from the
# base HTTPError
class ConnectTimeoutError(TimeoutError):
"Raised when a socket timeout occurs while connecting to a server"
pass
class EmptyPoolError(PoolError):
"Raised when a pool runs out of connections and no more are allowed."
pass
class ClosedPoolError(PoolError):
"Raised when a request enters a pool after the pool has been closed."
pass
class LocationParseError(ValueError, HTTPError):
"Raised when get_host or similar fails to parse the URL input."
def __init__(self, location):
message = "Failed to parse: %s" % location
HTTPError.__init__(self, message)
self.location = location
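# --- Hedged usage sketch (illustrative addition, not part of upstream urllib3) ---
# As the TimeoutError docstring above notes, catching TimeoutError covers both
# leaf timeout classes. The pool/url arguments below are hypothetical values.
if __name__ == "__main__":
    for _exc in (ReadTimeoutError(None, "http://example.org/", "read timed out"),
                 ConnectTimeoutError("connect timed out")):
        try:
            raise _exc
        except TimeoutError as err:
            print("caught timeout: %s" % err)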
| apache-2.0 |
nodejs/node-gyp | gyp/pylib/gyp/flock_tool.py | 3 | 1859 | #!/usr/bin/env python3
# Copyright (c) 2011 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""These functions are executed via gyp-flock-tool when using the Makefile
generator. Used on systems that don't have a built-in flock."""
import fcntl
import os
import struct
import subprocess
import sys
def main(args):
executor = FlockTool()
executor.Dispatch(args)
class FlockTool:
"""This class emulates the 'flock' command."""
def Dispatch(self, args):
"""Dispatches a string command to a method."""
if len(args) < 1:
raise Exception("Not enough arguments")
method = "Exec%s" % self._CommandifyName(args[0])
getattr(self, method)(*args[1:])
def _CommandifyName(self, name_string):
"""Transforms a tool name like copy-info-plist to CopyInfoPlist"""
return name_string.title().replace("-", "")
def ExecFlock(self, lockfile, *cmd_list):
"""Emulates the most basic behavior of Linux's flock(1)."""
# Rely on exception handling to report errors.
# Note that the stock python on SunOS has a bug
# where fcntl.flock(fd, LOCK_EX) always fails
# with EBADF, that's why we use this F_SETLK
# hack instead.
fd = os.open(lockfile, os.O_WRONLY | os.O_NOCTTY | os.O_CREAT, 0o666)
if sys.platform.startswith("aix"):
# Python on AIX is compiled with LARGEFILE support, which changes the
# struct size.
op = struct.pack("hhIllqq", fcntl.F_WRLCK, 0, 0, 0, 0, 0, 0)
else:
op = struct.pack("hhllhhl", fcntl.F_WRLCK, 0, 0, 0, 0, 0, 0)
fcntl.fcntl(fd, fcntl.F_SETLK, op)
return subprocess.call(cmd_list)
if __name__ == "__main__":
sys.exit(main(sys.argv[1:]))
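# Hedged usage sketch (illustrative addition): gyp invokes this tool roughly as
#   gyp-flock-tool flock /path/to/linker.lock <command> [args...]
# which Dispatch() turns into ExecFlock('/path/to/linker.lock', '<command>', ...)
# via the name mapping in _CommandifyName ("flock" -> "ExecFlock"). The lock
# file path and command shown are hypothetical.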
| mit |
arrabito/DIRAC | Core/DISET/private/GatewayService.py | 4 | 20150 | """ The gateway service is used for forwarding service calls to the appropriate services.
For this to be used, the following CS option is required:
DIRAC
{
Gateways
{
my.site.org = dips://thisIsAn.url.org:9159/Framework/Gateway
}
}
At the same time, this same gateway service should be run with option /LocalInstallation/Site
which is different from "my.site.org" or whatever is set in the option above, to avoid initialization loops.
"""
__RCSID__ = "$id:"
import sys
import cStringIO
import DIRAC
from DIRAC import gLogger, S_OK, S_ERROR
from DIRAC.Core.Utilities.LockRing import LockRing
from DIRAC.Core.Utilities.DictCache import DictCache
from DIRAC.ConfigurationSystem.Client.ConfigurationData import gConfigurationData
from DIRAC.Core.DISET.private.FileHelper import FileHelper
from DIRAC.Core.DISET.private.MessageBroker import MessageBroker, getGlobalMessageBroker
from DIRAC.Core.DISET.MessageClient import MessageClient
from DIRAC.Core.Security.X509Chain import X509Chain
from DIRAC.Core.Utilities.ThreadPool import ThreadPool
from DIRAC.Core.DISET.private.Service import Service
from DIRAC.Core.DISET.RPCClient import RPCClient
from DIRAC.Core.DISET.TransferClient import TransferClient
from DIRAC.Core.DISET.private.BaseClient import BaseClient
class GatewayService( Service ):
""" Inherits from Service so it can (and should) be run as a DIRAC service,
but replaces several of the internal methods
"""
GATEWAY_NAME = "Framework/Gateway"
def __init__( self ):
""" Initialize like a real service
"""
super(GatewayService, self).__init__(
{'modName':GatewayService.GATEWAY_NAME,
'loadName':GatewayService.GATEWAY_NAME,
'standalone': True,
'moduleObj': sys.modules[DIRAC.Core.DISET.private.GatewayService.GatewayService.__module__],
'classObj': self.__class__} )
self.__delegatedCredentials = DictCache()
self.__transferBytesLimit = 1024 * 1024 * 100
# to be resolved
self._url = None
self._handler = None
self._threadPool = None
self._msgBroker = None
self._msgForwarder = None
def initialize( self ):
""" This replaces the standard initialize from Service
"""
#Build the URLs
self._url = self._cfg.getURL()
if not self._url:
return S_ERROR( "Could not build service URL for %s" % GatewayService.GATEWAY_NAME )
gLogger.verbose( "Service URL is %s" % self._url )
#Load handler
result = self._loadHandlerInit()
if not result[ 'OK' ]:
return result
self._handler = result[ 'Value' ]
#Discover Handler
self._threadPool = ThreadPool( 1,
max( 0, self._cfg.getMaxThreads() ),
self._cfg.getMaxWaitingPetitions() )
self._threadPool.daemonize()
self._msgBroker = MessageBroker( "%sMSB" % GatewayService.GATEWAY_NAME, threadPool = self._threadPool )
self._msgBroker.useMessageObjects( False )
getGlobalMessageBroker().useMessageObjects( False )
self._msgForwarder = MessageForwarder( self._msgBroker )
return S_OK()
def _processInThread( self, clientTransport ):
""" Threaded process function
"""
#Handshake
try:
clientTransport.handshake()
except:
return
#Add to the transport pool
trid = self._transportPool.add( clientTransport )
if not trid:
return
#Receive and check proposal
result = self._receiveAndCheckProposal( trid )
if not result[ 'OK' ]:
self._transportPool.sendAndClose( trid, result )
return
proposalTuple = result[ 'Value' ]
#Instantiate handler
result = self.__getClientInitArgs( trid, proposalTuple )
if not result[ 'OK' ]:
self._transportPool.sendAndClose( trid, result )
return
clientInitArgs = result[ 'Value' ]
#Execute the action
result = self._processProposal( trid, proposalTuple, clientInitArgs )
#Close the connection if required
if result[ 'closeTransport' ]:
self._transportPool.close( trid )
return result
def _receiveAndCheckProposal( self, trid ):
clientTransport = self._transportPool.get( trid )
#Get the peer credentials
credDict = clientTransport.getConnectingCredentials()
#Receive the action proposal
retVal = clientTransport.receiveData( 1024 )
if not retVal[ 'OK' ]:
gLogger.error( "Invalid action proposal", "%s %s" % ( self._createIdentityString( credDict,
clientTransport ),
retVal[ 'Message' ] ) )
return S_ERROR( "Invalid action proposal" )
proposalTuple = retVal[ 'Value' ]
gLogger.debug( "Received action from client", "/".join( list( proposalTuple[1] ) ) )
#Check if there are extra credentials
if proposalTuple[2]:
clientTransport.setExtraCredentials( proposalTuple[2] )
return S_OK( proposalTuple )
def __getClientInitArgs( self, trid, proposalTuple ):
clientTransport = self._transportPool.get( trid )
#Get the peer credentials
credDict = clientTransport.getConnectingCredentials()
if 'x509Chain' not in credDict:
return S_OK()
cKey = ( credDict[ 'DN' ],
credDict.get( 'group', False ),
credDict.get( 'extraCredentials', False ),
credDict[ 'isLimitedProxy' ] )
dP = self.__delegatedCredentials.get( cKey, 3600 )
idString = self._createIdentityString( credDict, clientTransport )
if dP:
gLogger.verbose( "Proxy for %s is cached" % idString )
return S_OK( dP )
result = self.__requestDelegation( clientTransport, credDict )
if not result[ 'OK' ]:
gLogger.warn( "Could not get proxy for %s: %s" % ( idString, result[ 'Message' ] ) )
return result
delChain = result[ 'Value' ]
delegatedChain = delChain.dumpAllToString()[ 'Value' ]
secsLeft = delChain.getRemainingSecs()[ 'Value' ] - 1
clientInitArgs = { BaseClient.KW_SETUP : proposalTuple[0][1],
BaseClient.KW_TIMEOUT : 600,
BaseClient.KW_IGNORE_GATEWAYS : True,
BaseClient.KW_USE_CERTIFICATES : False,
BaseClient.KW_PROXY_STRING : delegatedChain
}
if BaseClient.KW_EXTRA_CREDENTIALS in credDict:
clientInitArgs[ BaseClient.KW_EXTRA_CREDENTIALS ] = credDict[ BaseClient.KW_EXTRA_CREDENTIALS ]
gLogger.warn( "Got delegated proxy for %s: %s secs left" % ( idString, secsLeft ) )
self.__delegatedCredentials.add( cKey, secsLeft, clientInitArgs )
return S_OK( clientInitArgs )
def __requestDelegation( self, clientTransport, credDict ):
peerChain = credDict[ 'x509Chain' ]
retVal = peerChain.getCertInChain()[ 'Value' ].generateProxyRequest()
if not retVal[ 'OK' ]:
return retVal
delegationRequest = retVal[ 'Value' ]
retVal = delegationRequest.dumpRequest()
if not retVal[ 'OK' ]:
retVal = S_ERROR( "Server Error: Can't generate delegation request" )
clientTransport.sendData( retVal )
return retVal
gLogger.info( "Sending delegation request for %s" % delegationRequest.getSubjectDN()[ 'Value' ] )
clientTransport.sendData( S_OK( { 'delegate' : retVal[ 'Value' ] } ) )
delegatedCertChain = clientTransport.receiveData()
delegatedChain = X509Chain( keyObj = delegationRequest.getPKey() )
retVal = delegatedChain.loadChainFromString( delegatedCertChain )
if not retVal[ 'OK' ]:
retVal = S_ERROR( "Error in receiving delegated proxy: %s" % retVal[ 'Message' ] )
clientTransport.sendData( retVal )
return retVal
return S_OK( delegatedChain )
#Msg
def _mbConnect( self, trid, handlerObj = None ):
return S_OK()
def _mbReceivedMsg( self, cliTrid, msgObj ):
return self._msgForwarder.msgFromClient( cliTrid, msgObj )
def _mbDisconnect( self, cliTrid ):
self._msgForwarder.cliDisconnect( cliTrid )
#Execute action
def _executeAction( self, trid, proposalTuple, clientInitArgs ):
clientTransport = self._transportPool.get( trid )
credDict = clientTransport.getConnectingCredentials()
targetService = proposalTuple[0][0]
actionType = proposalTuple[1][0]
actionMethod = proposalTuple[1][1]
idString = self._createIdentityString( credDict, clientTransport )
#Okay! Let's do the magic!
retVal = clientTransport.receiveData()
if not retVal[ 'OK' ]:
gLogger.error( "Error while receiving file description", retVal[ 'Message' ] )
clientTransport.sendData( S_ERROR( "Error while receiving file description: %s" % retVal[ 'Message' ] ) )
return
if actionType == "FileTransfer":
gLogger.warn( "Received a file transfer action from %s" % idString )
clientTransport.sendData( S_OK( "Accepted" ) )
retVal = self.__forwardFileTransferCall( targetService, clientInitArgs,
actionMethod, retVal[ 'Value' ], clientTransport )
elif actionType == "RPC":
gLogger.info( "Forwarding %s/%s action to %s for %s" % ( actionType, actionMethod, targetService, idString ) )
retVal = self.__forwardRPCCall( targetService, clientInitArgs, actionMethod, retVal[ 'Value' ] )
elif actionType == "Connection" and actionMethod == "new":
gLogger.info( "Initiating a messaging connection to %s for %s" % ( targetService, idString ) )
retVal = self._msgForwarder.addClient( trid, targetService, clientInitArgs, retVal[ 'Value' ] )
else:
gLogger.warn( "Received an invalid %s/%s action from %s" % ( actionType, actionMethod, idString ) )
retVal = S_ERROR( "Unknown type of action (%s)" % actionType )
#TODO: Send back the data?
if 'rpcStub' in retVal:
retVal.pop( 'rpcStub' )
clientTransport.sendData( retVal )
return retVal
def __forwardRPCCall( self, targetService, clientInitArgs, method, params ):
if targetService == "Configuration/Server":
if method == "getCompressedDataIfNewer":
#Relay CS data directly
serviceVersion = gConfigurationData.getVersion()
retDict = { 'newestVersion' : serviceVersion }
clientVersion = params[0]
if clientVersion < serviceVersion:
retDict[ 'data' ] = gConfigurationData.getCompressedData()
return S_OK( retDict )
#Default
rpcClient = RPCClient( targetService, **clientInitArgs )
methodObj = getattr( rpcClient, method )
return methodObj( *params )
def __forwardFileTransferCall( self, targetService, clientInitArgs, method,
params, clientTransport ):
transferRelay = TransferRelay( targetService, **clientInitArgs )
transferRelay.setTransferLimit( self.__transferBytesLimit )
cliFH = FileHelper( clientTransport )
#Check file size
if method.find( "ToClient" ) > -1:
cliFH.setDirection( "send" )
elif method.find( "FromClient" ) > -1:
cliFH.setDirection( "receive" )
if not self.__ftCheckMaxTransferSize( params[2] ):
cliFH.markAsTransferred()
return S_ERROR( "Transfer size is too big" )
#Forward queries
try:
relayMethodObject = getattr( transferRelay, 'forward%s' % method )
except AttributeError:
return S_ERROR( "Cannot forward unknown method %s" % method )
result = relayMethodObject( cliFH, params )
return result
def __ftCheckMaxTransferSize( self, requestedTransferSize ):
if not self.__transferBytesLimit:
return True
if not requestedTransferSize:
return True
if requestedTransferSize <= self.__transferBytesLimit:
return True
return False
class TransferRelay( TransferClient ):
def setTransferLimit( self, trLimit ):
self.__transferBytesLimit = trLimit
self.__currentMethod = ""
def infoMsg( self, msg, dynMsg = "" ):
gLogger.info( "[%s] %s" % ( self.__currentMethod, msg ), dynMsg )
def errMsg( self, msg, dynMsg = "" ):
gLogger.error( "[%s] %s" % ( self.__currentMethod, msg ), dynMsg )
def getDataFromClient( self, clientFileHelper ):
sIO = cStringIO.StringIO()
self.infoMsg( "About to get data from client" )
result = clientFileHelper.networkToDataSink( sIO, self.__transferBytesLimit )
if not result[ 'OK' ]:
sIO.close()
self.errMsg( "Could not get data from client", result[ 'Message' ] )
return result
data = sIO.getvalue()
sIO.close()
self.infoMsg( "Got %s bytes from client" % len( data ) )
return S_OK( data )
def sendDataToClient( self, clientFileHelper, dataToSend ):
self.infoMsg( "About to get send data to client" )
result = clientFileHelper.BufferToNetwork( dataToSend )
if not result[ 'OK' ]:
self.errMsg( "Could not send data to client", result[ 'Message' ] )
return result
self.infoMsg( "Sent %s bytes from client" % len( dataToSend ) )
return S_OK()
def sendDataToService( self, srvMethod, params, data ):
self.infoMsg( "Sending header request to %s" % self.getDestinationService(), str( params ) )
result = self._sendTransferHeader( srvMethod, params )
if not result[ 'OK' ]:
self.errMsg( "Could not send header", result[ 'Message' ] )
return result
self.infoMsg( "Starting to send data to service" )
trid, srvTransport = result[ 'Value' ]
srvFileHelper = FileHelper( srvTransport )
srvFileHelper.setDirection( "send" )
result = srvFileHelper.BufferToNetwork( data )
if not result[ 'OK' ]:
self.errMsg( "Could send data to server", result[ 'Message' ] )
srvTransport.close()
return result
self.infoMsg( "Data sent to service (%s bytes)" % len( data ) )
retVal = srvTransport.receiveData()
srvTransport.close()
return retVal
def getDataFromService( self, srvMethod, params ):
self.infoMsg( "Sending header request to %s" % self.getDestinationService(), str( params ) )
result = self._sendTransferHeader( srvMethod, params )
if not result[ 'OK' ]:
self.errMsg( "Could not send header", result[ 'Message' ] )
return result
self.infoMsg( "Starting to receive data from service" )
trid, srvTransport = result[ 'Value' ]
srvFileHelper = FileHelper( srvTransport )
srvFileHelper.setDirection( "receive" )
sIO = cStringIO.StringIO()
result = srvFileHelper.networkToDataSink( sIO, self.__transferBytesLimit )
if not result[ 'OK' ]:
self.errMsg( "Could not receive data from server", result[ 'Message' ] )
srvTransport.close()
sIO.close()
return result
dataReceived = sIO.getvalue()
sIO.close()
self.infoMsg( "Received %s bytes from service" % len( dataReceived ) )
retVal = srvTransport.receiveData()
srvTransport.close()
if not retVal[ 'OK' ]:
return retVal
return S_OK( { 'data' : dataReceived, 'srvResponse' : retVal } )
def forwardFromClient( self, clientFileHelper, params ):
fileId, token = params[:2]
self.__currentMethod = "FromClient"
result = self.getDataFromClient( clientFileHelper )
if not result[ 'OK' ]:
return result
dataReceived = result[ 'Value' ]
receivedBytes = clientFileHelper.getTransferedBytes()
return self.sendDataToService( "FromClient", ( fileId, token, receivedBytes ), dataReceived )
def forwardBulkFromClient( self, clientFileHelper, params ):
fileId, token = params[:2]
self.__currentMethod = "BulkFromClient"
result = self.getDataFromClient( clientFileHelper )
if not result[ 'OK' ]:
return result
dataReceived = result[ 'Value' ]
receivedBytes = clientFileHelper.getTransferedBytes()
return self.sendDataToService( "BulkFromClient", ( fileId, token, receivedBytes ), dataReceived )
def forwardToClient( self, clientFileHelper, params ):
fileId, token = params[:2]
self.__currentMethod = "ToClient"
result = self.getDataFromService( "ToClient", ( fileId, token ) )
if not result[ 'OK' ]:
return result
dataReceived = result[ 'Value' ][ 'data' ]
srvResponse = result[ 'Value' ][ 'srvResponse' ]
result = self.sendDataToClient( clientFileHelper, dataReceived )
if not result[ 'OK' ]:
return result
return srvResponse
def forwardBulkToClient( self, clientFileHelper, params ):
fileId, token = params[:2]
self.__currentMethod = "BulkToClient"
result = self.getDataFromService( "BulkToClient", ( fileId, token ) )
if not result[ 'OK' ]:
return result
dataReceived = result[ 'Value' ][ 'data' ]
srvResponse = result[ 'Value' ][ 'srvResponse' ]
result = self.sendDataToClient( clientFileHelper, dataReceived )
if not result[ 'OK' ]:
return result
return srvResponse
def forwardListBulk( self, clientFileHelper, params ):
self.__currentMethod = "ListBulk"
self.infoMsg( "Sending header request to %s" % self.getDestinationService(), str( params ) )
result = self._sendTransferHeader( "ListBulk", params )
if not result[ 'OK' ]:
self.errMsg( "Could not send header", result[ 'Message' ] )
return result
trid, srvTransport = result[ 'Value' ]
response = srvTransport.receiveData( 1048576 )
srvTransport.close()
self.infoMsg( "Sending data back to client" )
return response
class MessageForwarder(object):
def __init__( self, msgBroker ):
self.__inOutLock = LockRing().getLock()
self.__msgBroker = msgBroker
self.__byClient = {}
self.__srvToCliTrid = {}
def addClient( self, cliTrid, destination, clientInitParams, connectParams ):
if cliTrid in self.__byClient:
gLogger.fatal( "Trid is duplicated!! this shouldn't happen" )
return
msgClient = MessageClient( destination, **clientInitParams )
msgClient.subscribeToDisconnect( self.__srvDisconnect )
msgClient.subscribeToAllMessages( self.msgFromSrv )
msgClient.setUniqueName( connectParams[0] )
result = msgClient.connect( **connectParams[1] )
if not result[ 'OK' ]:
return result
self.__inOutLock.acquire()
try:
self.__byClient[ cliTrid ] = { 'srvEnd' : msgClient,
'srvTrid' : msgClient.getTrid(),
'srvName' : destination }
self.__srvToCliTrid[ msgClient.getTrid() ] = cliTrid
finally:
self.__inOutLock.release()
return result
def __srvDisconnect( self, srvEndCli ):
try:
cliTrid = self.__srvToCliTrid[ srvEndCli.getTrid() ]
except KeyError:
gLogger.exception( "This shouldn't happen!" )
return
gLogger.info( "Service %s disconnected messaging connection" % self.__byClient[ cliTrid ][ 'srvName' ] )
self.__msgBroker.removeTransport( cliTrid )
self.__removeClient( cliTrid )
def cliDisconnect( self, cliTrid ):
if cliTrid not in self.__byClient:
gLogger.fatal( "This shouldn't happen!" )
return
gLogger.info( "Client to %s disconnected messaging connection" % self.__byClient[ cliTrid ][ 'srvName' ] )
self.__byClient[ cliTrid ][ 'srvEnd' ].disconnect()
self.__removeClient( cliTrid )
def __removeClient( self, cliTrid ):
self.__inOutLock.acquire()
try:
try:
srvTrid = self.__byClient[ cliTrid ][ 'srvTrid' ]
self.__byClient.pop( cliTrid )
self.__srvToCliTrid.pop( srvTrid )
except Exception as e:
gLogger.exception( "This shouldn't happen!" )
finally:
self.__inOutLock.release()
def msgFromClient( self, cliTrid, msgObj ):
gLogger.info( "Message %s to %s service" % ( msgObj.getName(), self.__byClient[ cliTrid ][ 'srvName' ] ) )
result = self.__byClient[ cliTrid ][ 'srvEnd' ].sendMessage( msgObj )
return result
def msgFromSrv( self, srvEndCli, msgObj ):
try:
cliTrid = self.__srvToCliTrid[ srvEndCli.getTrid() ]
except KeyError:
gLogger.exception( "This shouldn't happen" )
return S_ERROR( "MsgFromSrv -> Mismatched srv2cli trid" )
gLogger.info( "Message %s from %s service" % ( msgObj.getName(), self.__byClient[ cliTrid ][ 'srvName' ] ) )
return self.__msgBroker.sendMessage( cliTrid, msgObj )
| gpl-3.0 |
sauloal/cufflinksviewer | venvwin/Lib/encodings/cp856.py | 93 | 12986 | """ Python Character Mapping Codec cp856 generated from 'MAPPINGS/VENDORS/MISC/CP856.TXT' with gencodec.py.
"""#"
import codecs
### Codec APIs
class Codec(codecs.Codec):
def encode(self,input,errors='strict'):
return codecs.charmap_encode(input,errors,encoding_table)
def decode(self,input,errors='strict'):
return codecs.charmap_decode(input,errors,decoding_table)
class IncrementalEncoder(codecs.IncrementalEncoder):
def encode(self, input, final=False):
return codecs.charmap_encode(input,self.errors,encoding_table)[0]
class IncrementalDecoder(codecs.IncrementalDecoder):
def decode(self, input, final=False):
return codecs.charmap_decode(input,self.errors,decoding_table)[0]
class StreamWriter(Codec,codecs.StreamWriter):
pass
class StreamReader(Codec,codecs.StreamReader):
pass
### encodings module API
def getregentry():
return codecs.CodecInfo(
name='cp856',
encode=Codec().encode,
decode=Codec().decode,
incrementalencoder=IncrementalEncoder,
incrementaldecoder=IncrementalDecoder,
streamreader=StreamReader,
streamwriter=StreamWriter,
)
### Decoding Table
decoding_table = (
u'\x00' # 0x00 -> NULL
u'\x01' # 0x01 -> START OF HEADING
u'\x02' # 0x02 -> START OF TEXT
u'\x03' # 0x03 -> END OF TEXT
u'\x04' # 0x04 -> END OF TRANSMISSION
u'\x05' # 0x05 -> ENQUIRY
u'\x06' # 0x06 -> ACKNOWLEDGE
u'\x07' # 0x07 -> BELL
u'\x08' # 0x08 -> BACKSPACE
u'\t' # 0x09 -> HORIZONTAL TABULATION
u'\n' # 0x0A -> LINE FEED
u'\x0b' # 0x0B -> VERTICAL TABULATION
u'\x0c' # 0x0C -> FORM FEED
u'\r' # 0x0D -> CARRIAGE RETURN
u'\x0e' # 0x0E -> SHIFT OUT
u'\x0f' # 0x0F -> SHIFT IN
u'\x10' # 0x10 -> DATA LINK ESCAPE
u'\x11' # 0x11 -> DEVICE CONTROL ONE
u'\x12' # 0x12 -> DEVICE CONTROL TWO
u'\x13' # 0x13 -> DEVICE CONTROL THREE
u'\x14' # 0x14 -> DEVICE CONTROL FOUR
u'\x15' # 0x15 -> NEGATIVE ACKNOWLEDGE
u'\x16' # 0x16 -> SYNCHRONOUS IDLE
u'\x17' # 0x17 -> END OF TRANSMISSION BLOCK
u'\x18' # 0x18 -> CANCEL
u'\x19' # 0x19 -> END OF MEDIUM
u'\x1a' # 0x1A -> SUBSTITUTE
u'\x1b' # 0x1B -> ESCAPE
u'\x1c' # 0x1C -> FILE SEPARATOR
u'\x1d' # 0x1D -> GROUP SEPARATOR
u'\x1e' # 0x1E -> RECORD SEPARATOR
u'\x1f' # 0x1F -> UNIT SEPARATOR
u' ' # 0x20 -> SPACE
u'!' # 0x21 -> EXCLAMATION MARK
u'"' # 0x22 -> QUOTATION MARK
u'#' # 0x23 -> NUMBER SIGN
u'$' # 0x24 -> DOLLAR SIGN
u'%' # 0x25 -> PERCENT SIGN
u'&' # 0x26 -> AMPERSAND
u"'" # 0x27 -> APOSTROPHE
u'(' # 0x28 -> LEFT PARENTHESIS
u')' # 0x29 -> RIGHT PARENTHESIS
u'*' # 0x2A -> ASTERISK
u'+' # 0x2B -> PLUS SIGN
u',' # 0x2C -> COMMA
u'-' # 0x2D -> HYPHEN-MINUS
u'.' # 0x2E -> FULL STOP
u'/' # 0x2F -> SOLIDUS
u'0' # 0x30 -> DIGIT ZERO
u'1' # 0x31 -> DIGIT ONE
u'2' # 0x32 -> DIGIT TWO
u'3' # 0x33 -> DIGIT THREE
u'4' # 0x34 -> DIGIT FOUR
u'5' # 0x35 -> DIGIT FIVE
u'6' # 0x36 -> DIGIT SIX
u'7' # 0x37 -> DIGIT SEVEN
u'8' # 0x38 -> DIGIT EIGHT
u'9' # 0x39 -> DIGIT NINE
u':' # 0x3A -> COLON
u';' # 0x3B -> SEMICOLON
u'<' # 0x3C -> LESS-THAN SIGN
u'=' # 0x3D -> EQUALS SIGN
u'>' # 0x3E -> GREATER-THAN SIGN
u'?' # 0x3F -> QUESTION MARK
u'@' # 0x40 -> COMMERCIAL AT
u'A' # 0x41 -> LATIN CAPITAL LETTER A
u'B' # 0x42 -> LATIN CAPITAL LETTER B
u'C' # 0x43 -> LATIN CAPITAL LETTER C
u'D' # 0x44 -> LATIN CAPITAL LETTER D
u'E' # 0x45 -> LATIN CAPITAL LETTER E
u'F' # 0x46 -> LATIN CAPITAL LETTER F
u'G' # 0x47 -> LATIN CAPITAL LETTER G
u'H' # 0x48 -> LATIN CAPITAL LETTER H
u'I' # 0x49 -> LATIN CAPITAL LETTER I
u'J' # 0x4A -> LATIN CAPITAL LETTER J
u'K' # 0x4B -> LATIN CAPITAL LETTER K
u'L' # 0x4C -> LATIN CAPITAL LETTER L
u'M' # 0x4D -> LATIN CAPITAL LETTER M
u'N' # 0x4E -> LATIN CAPITAL LETTER N
u'O' # 0x4F -> LATIN CAPITAL LETTER O
u'P' # 0x50 -> LATIN CAPITAL LETTER P
u'Q' # 0x51 -> LATIN CAPITAL LETTER Q
u'R' # 0x52 -> LATIN CAPITAL LETTER R
u'S' # 0x53 -> LATIN CAPITAL LETTER S
u'T' # 0x54 -> LATIN CAPITAL LETTER T
u'U' # 0x55 -> LATIN CAPITAL LETTER U
u'V' # 0x56 -> LATIN CAPITAL LETTER V
u'W' # 0x57 -> LATIN CAPITAL LETTER W
u'X' # 0x58 -> LATIN CAPITAL LETTER X
u'Y' # 0x59 -> LATIN CAPITAL LETTER Y
u'Z' # 0x5A -> LATIN CAPITAL LETTER Z
u'[' # 0x5B -> LEFT SQUARE BRACKET
u'\\' # 0x5C -> REVERSE SOLIDUS
u']' # 0x5D -> RIGHT SQUARE BRACKET
u'^' # 0x5E -> CIRCUMFLEX ACCENT
u'_' # 0x5F -> LOW LINE
u'`' # 0x60 -> GRAVE ACCENT
u'a' # 0x61 -> LATIN SMALL LETTER A
u'b' # 0x62 -> LATIN SMALL LETTER B
u'c' # 0x63 -> LATIN SMALL LETTER C
u'd' # 0x64 -> LATIN SMALL LETTER D
u'e' # 0x65 -> LATIN SMALL LETTER E
u'f' # 0x66 -> LATIN SMALL LETTER F
u'g' # 0x67 -> LATIN SMALL LETTER G
u'h' # 0x68 -> LATIN SMALL LETTER H
u'i' # 0x69 -> LATIN SMALL LETTER I
u'j' # 0x6A -> LATIN SMALL LETTER J
u'k' # 0x6B -> LATIN SMALL LETTER K
u'l' # 0x6C -> LATIN SMALL LETTER L
u'm' # 0x6D -> LATIN SMALL LETTER M
u'n' # 0x6E -> LATIN SMALL LETTER N
u'o' # 0x6F -> LATIN SMALL LETTER O
u'p' # 0x70 -> LATIN SMALL LETTER P
u'q' # 0x71 -> LATIN SMALL LETTER Q
u'r' # 0x72 -> LATIN SMALL LETTER R
u's' # 0x73 -> LATIN SMALL LETTER S
u't' # 0x74 -> LATIN SMALL LETTER T
u'u' # 0x75 -> LATIN SMALL LETTER U
u'v' # 0x76 -> LATIN SMALL LETTER V
u'w' # 0x77 -> LATIN SMALL LETTER W
u'x' # 0x78 -> LATIN SMALL LETTER X
u'y' # 0x79 -> LATIN SMALL LETTER Y
u'z' # 0x7A -> LATIN SMALL LETTER Z
u'{' # 0x7B -> LEFT CURLY BRACKET
u'|' # 0x7C -> VERTICAL LINE
u'}' # 0x7D -> RIGHT CURLY BRACKET
u'~' # 0x7E -> TILDE
u'\x7f' # 0x7F -> DELETE
u'\u05d0' # 0x80 -> HEBREW LETTER ALEF
u'\u05d1' # 0x81 -> HEBREW LETTER BET
u'\u05d2' # 0x82 -> HEBREW LETTER GIMEL
u'\u05d3' # 0x83 -> HEBREW LETTER DALET
u'\u05d4' # 0x84 -> HEBREW LETTER HE
u'\u05d5' # 0x85 -> HEBREW LETTER VAV
u'\u05d6' # 0x86 -> HEBREW LETTER ZAYIN
u'\u05d7' # 0x87 -> HEBREW LETTER HET
u'\u05d8' # 0x88 -> HEBREW LETTER TET
u'\u05d9' # 0x89 -> HEBREW LETTER YOD
u'\u05da' # 0x8A -> HEBREW LETTER FINAL KAF
u'\u05db' # 0x8B -> HEBREW LETTER KAF
u'\u05dc' # 0x8C -> HEBREW LETTER LAMED
u'\u05dd' # 0x8D -> HEBREW LETTER FINAL MEM
u'\u05de' # 0x8E -> HEBREW LETTER MEM
u'\u05df' # 0x8F -> HEBREW LETTER FINAL NUN
u'\u05e0' # 0x90 -> HEBREW LETTER NUN
u'\u05e1' # 0x91 -> HEBREW LETTER SAMEKH
u'\u05e2' # 0x92 -> HEBREW LETTER AYIN
u'\u05e3' # 0x93 -> HEBREW LETTER FINAL PE
u'\u05e4' # 0x94 -> HEBREW LETTER PE
u'\u05e5' # 0x95 -> HEBREW LETTER FINAL TSADI
u'\u05e6' # 0x96 -> HEBREW LETTER TSADI
u'\u05e7' # 0x97 -> HEBREW LETTER QOF
u'\u05e8' # 0x98 -> HEBREW LETTER RESH
u'\u05e9' # 0x99 -> HEBREW LETTER SHIN
u'\u05ea' # 0x9A -> HEBREW LETTER TAV
u'\ufffe' # 0x9B -> UNDEFINED
u'\xa3' # 0x9C -> POUND SIGN
u'\ufffe' # 0x9D -> UNDEFINED
u'\xd7' # 0x9E -> MULTIPLICATION SIGN
u'\ufffe' # 0x9F -> UNDEFINED
u'\ufffe' # 0xA0 -> UNDEFINED
u'\ufffe' # 0xA1 -> UNDEFINED
u'\ufffe' # 0xA2 -> UNDEFINED
u'\ufffe' # 0xA3 -> UNDEFINED
u'\ufffe' # 0xA4 -> UNDEFINED
u'\ufffe' # 0xA5 -> UNDEFINED
u'\ufffe' # 0xA6 -> UNDEFINED
u'\ufffe' # 0xA7 -> UNDEFINED
u'\ufffe' # 0xA8 -> UNDEFINED
u'\xae' # 0xA9 -> REGISTERED SIGN
u'\xac' # 0xAA -> NOT SIGN
u'\xbd' # 0xAB -> VULGAR FRACTION ONE HALF
u'\xbc' # 0xAC -> VULGAR FRACTION ONE QUARTER
u'\ufffe' # 0xAD -> UNDEFINED
u'\xab' # 0xAE -> LEFT-POINTING DOUBLE ANGLE QUOTATION MARK
u'\xbb' # 0xAF -> RIGHT-POINTING DOUBLE ANGLE QUOTATION MARK
u'\u2591' # 0xB0 -> LIGHT SHADE
u'\u2592' # 0xB1 -> MEDIUM SHADE
u'\u2593' # 0xB2 -> DARK SHADE
u'\u2502' # 0xB3 -> BOX DRAWINGS LIGHT VERTICAL
u'\u2524' # 0xB4 -> BOX DRAWINGS LIGHT VERTICAL AND LEFT
u'\ufffe' # 0xB5 -> UNDEFINED
u'\ufffe' # 0xB6 -> UNDEFINED
u'\ufffe' # 0xB7 -> UNDEFINED
u'\xa9' # 0xB8 -> COPYRIGHT SIGN
u'\u2563' # 0xB9 -> BOX DRAWINGS DOUBLE VERTICAL AND LEFT
u'\u2551' # 0xBA -> BOX DRAWINGS DOUBLE VERTICAL
u'\u2557' # 0xBB -> BOX DRAWINGS DOUBLE DOWN AND LEFT
u'\u255d' # 0xBC -> BOX DRAWINGS DOUBLE UP AND LEFT
u'\xa2' # 0xBD -> CENT SIGN
u'\xa5' # 0xBE -> YEN SIGN
u'\u2510' # 0xBF -> BOX DRAWINGS LIGHT DOWN AND LEFT
u'\u2514' # 0xC0 -> BOX DRAWINGS LIGHT UP AND RIGHT
u'\u2534' # 0xC1 -> BOX DRAWINGS LIGHT UP AND HORIZONTAL
u'\u252c' # 0xC2 -> BOX DRAWINGS LIGHT DOWN AND HORIZONTAL
u'\u251c' # 0xC3 -> BOX DRAWINGS LIGHT VERTICAL AND RIGHT
u'\u2500' # 0xC4 -> BOX DRAWINGS LIGHT HORIZONTAL
u'\u253c' # 0xC5 -> BOX DRAWINGS LIGHT VERTICAL AND HORIZONTAL
u'\ufffe' # 0xC6 -> UNDEFINED
u'\ufffe' # 0xC7 -> UNDEFINED
u'\u255a' # 0xC8 -> BOX DRAWINGS DOUBLE UP AND RIGHT
u'\u2554' # 0xC9 -> BOX DRAWINGS DOUBLE DOWN AND RIGHT
u'\u2569' # 0xCA -> BOX DRAWINGS DOUBLE UP AND HORIZONTAL
u'\u2566' # 0xCB -> BOX DRAWINGS DOUBLE DOWN AND HORIZONTAL
u'\u2560' # 0xCC -> BOX DRAWINGS DOUBLE VERTICAL AND RIGHT
u'\u2550' # 0xCD -> BOX DRAWINGS DOUBLE HORIZONTAL
u'\u256c' # 0xCE -> BOX DRAWINGS DOUBLE VERTICAL AND HORIZONTAL
u'\xa4' # 0xCF -> CURRENCY SIGN
u'\ufffe' # 0xD0 -> UNDEFINED
u'\ufffe' # 0xD1 -> UNDEFINED
u'\ufffe' # 0xD2 -> UNDEFINED
u'\ufffe' # 0xD3 -> UNDEFINED
u'\ufffe' # 0xD4 -> UNDEFINED
u'\ufffe' # 0xD5 -> UNDEFINED
u'\ufffe' # 0xD6 -> UNDEFINED
u'\ufffe' # 0xD7 -> UNDEFINED
u'\ufffe' # 0xD8 -> UNDEFINED
u'\u2518' # 0xD9 -> BOX DRAWINGS LIGHT UP AND LEFT
u'\u250c' # 0xDA -> BOX DRAWINGS LIGHT DOWN AND RIGHT
u'\u2588' # 0xDB -> FULL BLOCK
u'\u2584' # 0xDC -> LOWER HALF BLOCK
u'\xa6' # 0xDD -> BROKEN BAR
u'\ufffe' # 0xDE -> UNDEFINED
u'\u2580' # 0xDF -> UPPER HALF BLOCK
u'\ufffe' # 0xE0 -> UNDEFINED
u'\ufffe' # 0xE1 -> UNDEFINED
u'\ufffe' # 0xE2 -> UNDEFINED
u'\ufffe' # 0xE3 -> UNDEFINED
u'\ufffe' # 0xE4 -> UNDEFINED
u'\ufffe' # 0xE5 -> UNDEFINED
u'\xb5' # 0xE6 -> MICRO SIGN
u'\ufffe' # 0xE7 -> UNDEFINED
u'\ufffe' # 0xE8 -> UNDEFINED
u'\ufffe' # 0xE9 -> UNDEFINED
u'\ufffe' # 0xEA -> UNDEFINED
u'\ufffe' # 0xEB -> UNDEFINED
u'\ufffe' # 0xEC -> UNDEFINED
u'\ufffe' # 0xED -> UNDEFINED
u'\xaf' # 0xEE -> MACRON
u'\xb4' # 0xEF -> ACUTE ACCENT
u'\xad' # 0xF0 -> SOFT HYPHEN
u'\xb1' # 0xF1 -> PLUS-MINUS SIGN
u'\u2017' # 0xF2 -> DOUBLE LOW LINE
u'\xbe' # 0xF3 -> VULGAR FRACTION THREE QUARTERS
u'\xb6' # 0xF4 -> PILCROW SIGN
u'\xa7' # 0xF5 -> SECTION SIGN
u'\xf7' # 0xF6 -> DIVISION SIGN
u'\xb8' # 0xF7 -> CEDILLA
u'\xb0' # 0xF8 -> DEGREE SIGN
u'\xa8' # 0xF9 -> DIAERESIS
u'\xb7' # 0xFA -> MIDDLE DOT
u'\xb9' # 0xFB -> SUPERSCRIPT ONE
u'\xb3' # 0xFC -> SUPERSCRIPT THREE
u'\xb2' # 0xFD -> SUPERSCRIPT TWO
u'\u25a0' # 0xFE -> BLACK SQUARE
u'\xa0' # 0xFF -> NO-BREAK SPACE
)
### Encoding table
encoding_table=codecs.charmap_build(decoding_table)
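# Hedged usage sketch (illustrative addition): when installed outside the
# standard "encodings" package, the codec can be registered through a search
# function; the round trip below follows decoding_table (0x80 <-> U+05D0).
if __name__ == '__main__':
    codecs.register(lambda name: getregentry() if name == 'cp856' else None)
    assert u'\u05d0'.encode('cp856') == '\x80'   # HEBREW LETTER ALEF
    assert '\x80'.decode('cp856') == u'\u05d0'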
| mit |
oopy/micropython | tests/float/math_fun.py | 24 | 2538 | # Tests the functions imported from math
try:
from math import *
except ImportError:
print("SKIP")
raise SystemExit
test_values = [-100., -1.23456, -1, -0.5, 0.0, 0.5, 1.23456, 100.]
test_values_small = [-10., -1.23456, -1, -0.5, 0.0, 0.5, 1.23456, 10.] # so we don't overflow 32-bit precision
unit_range_test_values = [-1., -0.75, -0.5, -0.25, 0., 0.25, 0.5, 0.75, 1.]
functions = [('sqrt', sqrt, test_values),
('exp', exp, test_values_small),
('log', log, test_values),
('cos', cos, test_values),
('sin', sin, test_values),
('tan', tan, test_values),
('acos', acos, unit_range_test_values),
('asin', asin, unit_range_test_values),
('atan', atan, test_values),
('ceil', ceil, test_values),
('fabs', fabs, test_values),
('floor', floor, test_values),
('trunc', trunc, test_values),
('radians', radians, test_values),
('degrees', degrees, test_values),
]
for function_name, function, test_vals in functions:
print(function_name)
for value in test_vals:
try:
print("{:.5g}".format(function(value)))
except ValueError as e:
print(str(e))
tuple_functions = [('frexp', frexp, test_values),
('modf', modf, test_values),
]
for function_name, function, test_vals in tuple_functions:
print(function_name)
for value in test_vals:
x, y = function(value)
print("{:.5g} {:.5g}".format(x, y))
binary_functions = [('copysign', copysign, [(23., 42.), (-23., 42.), (23., -42.),
(-23., -42.), (1., 0.0), (1., -0.0)]),
('pow', pow, ((1., 0.), (0., 1.), (2., 0.5), (-3., 5.), (-3., -4.),)),
('atan2', atan2, ((1., 0.), (0., 1.), (2., 0.5), (-3., 5.), (-3., -4.),)),
('fmod', fmod, ((1., 1.), (0., 1.), (2., 0.5), (-3., 5.), (-3., -4.),)),
('ldexp', ldexp, ((1., 0), (0., 1), (2., 2), (3., -2), (-3., -4),)),
('log', log, ((2., 2.), (3., 2.), (4., 5.), (0., 1.), (1., 0.), (-1., 1.), (1., -1.), (2., 1.))),
]
for function_name, function, test_vals in binary_functions:
print(function_name)
for value1, value2 in test_vals:
try:
print("{:.5g}".format(function(value1, value2)))
except (ValueError, ZeroDivisionError) as e:
print(type(e))
| mit |
zrax/moul-scripts | Python/clftYeeshaPage08.py | 6 | 9539 | # -*- coding: utf-8 -*-
""" *==LICENSE==*
CyanWorlds.com Engine - MMOG client, server and tools
Copyright (C) 2011 Cyan Worlds, Inc.
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
Additional permissions under GNU GPL version 3 section 7
If you modify this Program, or any covered work, by linking or
combining it with any of RAD Game Tools Bink SDK, Autodesk 3ds Max SDK,
NVIDIA PhysX SDK, Microsoft DirectX SDK, OpenSSL library, Independent
JPEG Group JPEG library, Microsoft Windows Media SDK, or Apple QuickTime SDK
(or a modified version of those libraries),
containing parts covered by the terms of the Bink SDK EULA, 3ds Max EULA,
PhysX SDK EULA, DirectX SDK EULA, OpenSSL and SSLeay licenses, IJG
JPEG Library README, Windows Media SDK EULA, or QuickTime SDK EULA, the
licensors of this Program grant you additional
permission to convey the resulting work. Corresponding Source for a
non-source form of such a combination shall include the source code for
the parts of OpenSSL and IJG JPEG Library used as well as that of the covered
work.
You can contact Cyan Worlds, Inc. by email [email protected]
or by snail mail at:
Cyan Worlds, Inc.
14617 N Newport Hwy
Mead, WA 99021
*==LICENSE==* """
"""
Module: clftYeeshaPage08
Age: Cleft
Date: May 2003
Author: Adam Van Ornum
Manages and records the finding of Yeesha Pages
!!! NOTE: This file only applies to the cleft but is only used in the global xYeeshaPagesGUI.max file. !!!
"""
from Plasma import *
from PlasmaTypes import *
from PlasmaKITypes import *
from PlasmaVaultConstants import *
from PlasmaNetConstants import *
# define the attributes that will be entered in max
actClickableBook = ptAttribNamedActivator(1,"Act: Clickable Yeesha Page")
GUIDialogObject = ptAttribSceneobject(2, "GUIDialog scene object")
RespOpen = ptAttribResponder(3, "Open Responder")
RespLoop = ptAttribResponder(4, "Loop Responder")
RespClose = ptAttribResponder(5, "Close Responder")
#Linking Books GUI tags
DialogName="YeeshaPageGUI"
kPageButton = 100
kYeeshaPage01 = 201
kYeeshaPage02 = 202
kYeeshaPage03 = 203
kYeeshaPage04 = 204
kYeeshaPage05 = 205
kYeeshaPage06 = 206
kYeeshaPage07 = 207
kYeeshaPage08 = 208
kYeeshaPage09 = 209
kYeeshaPage10 = 210
kYeeshaPage12 = 212
kYeeshaPage13 = 213
kYeeshaPage14 = 214
kYeeshaPage15 = 215
kYeeshaPage16 = 216
kYeeshaPage17 = 217
kYeeshaPage18 = 218
kYeeshaPage19 = 219
kYeeshaPage20 = 220
kYeeshaPage21 = 221
kYeeshaPage22 = 222
kYeeshaPage23 = 223
kYeeshaPage24 = 224
kYeeshaPage25 = 225
kYeeshaPageCancel = 299
isOpen = 0
class clftYeeshaPage08(ptModifier):
"The Yeesha Page 08 cleft imager python code"
def __init__(self):
ptModifier.__init__(self)
self.id = 5312
self.version = 1
print "__init__clftYeeshaPage08 v.", self.version
def OnFirstUpdate(self):
PtLoadDialog(DialogName, self.key)
pass
def __del__(self):
"destructor - get rid of any dialogs that we might have loaded"
#~ PtUnloadDialog(DialogName)
def OnNotify(self,state,id,events):
global LocalAvatar
global isOpen
if id == actClickableBook.id and state and PtWasLocallyNotified(self.key):
#if not PtIsDialogLoaded(DialogName):
# PtLoadDialog(DialogName,self.key)
self.SetStdGUIVisibility(0)
PtShowDialog(DialogName)
RespOpen.run(self.key)
isOpen = 1
elif id == actClickableBook.id and not state and PtWasLocallyNotified(self.key):
if not isOpen:
self.SetStdGUIVisibility(0)
PtShowDialog(DialogName)
RespOpen.run(self.key)
isOpen = 1
elif id == RespOpen.id:
RespLoop.run(self.key)
def OnGUINotify(self,id,control,event):
global isOpen
btnID = 0
if isinstance(control,ptGUIControlButton):
btnID = control.getTagID()
if event == kShowHide:
if control.isEnabled():
#control.show()
if self.GotPage():
mydialog = PtGetDialogFromString(DialogName)
ptGUIControlButton(mydialog.getControlFromTag(kYeeshaPage08)).disable()
elif event == kAction and btnID == kYeeshaPage08:
PtDebugPrint("DEBUG: clftYeeshaPage08.OnGUINotify():\tPicked up page")
RespClose.run(self.key)
isOpen = 0
PtHideDialog(DialogName)
self.SetStdGUIVisibility(1)
if self.GotPage():
PtDebugPrint ("DEBUG: clftYeeshaPage08.py: You've already found Yeesha Page #8. Move along. Move along.")
return
else:
PtDebugPrint ("DEBUG: clftYeeshaPage08.py: Yeesha Page #8 is new to you.")
PtDebugPrint ("DEBUG: clftYeeshaPage08.py: Trying to update the value of the SDL variable %s to 1" % ("YeeshaPage8"))
vault = ptVault()
if type(vault) != type(None): #is the Vault online?
psnlSDL = vault.getPsnlAgeSDL()
if psnlSDL:
YeeshaPageVar = psnlSDL.findVar("YeeshaPage8")
YeeshaPageVar.setInt(1)
vault.updatePsnlAgeSDL (psnlSDL)
mydialog = PtGetDialogFromString(DialogName)
ptGUIControlButton(mydialog.getControlFromTag(kYeeshaPage08)).disable()
PtSendKIMessageInt(kStartBookAlert,0)
elif event == kAction and btnID == kYeeshaPageCancel:
RespClose.run(self.key)
isOpen = 0
PtHideDialog(DialogName)
self.SetStdGUIVisibility(1)
def GotPage(self):
vault = ptVault()
if type(vault) != type(None): #is the Vault online?
psnlSDL = vault.getPsnlAgeSDL()
if psnlSDL:
YeeshaPageVar = psnlSDL.findVar("YeeshaPage8")
PtDebugPrint ("DEBUG: clftYeeshaPage08.py: The previous value of the SDL variable %s is %s" % ("YeeshaPage8", YeeshaPageVar.getInt()))
if YeeshaPageVar.getInt() != 0:
PtDebugPrint ("DEBUG: clftYeeshaPage08.py: You've already found Yeesha Page #8. Move along. Move along.")
return 1
else:
return 0
else:
PtDebugPrint("ERROR: clftYeeshaPage08: Error trying to access the Chronicle psnlSDL. psnlSDL = %s" % ( psnlSDL))
return 0
else:
PtDebugPrint("ERROR: clftYeeshaPage08: Error trying to access the Vault. Can't access YeeshaPageChanges chronicle." )
return 0
def SetStdGUIVisibility(self, visible):
global DialogName
if visible:
GUIDialogObject.value.draw.enable()
else:
mydialog = PtGetDialogFromString(DialogName)
ptGUIControlButton(mydialog.getControlFromTag(kYeeshaPage01)).hide()
ptGUIControlButton(mydialog.getControlFromTag(kYeeshaPage02)).hide()
ptGUIControlButton(mydialog.getControlFromTag(kYeeshaPage03)).hide()
ptGUIControlButton(mydialog.getControlFromTag(kYeeshaPage04)).hide()
ptGUIControlButton(mydialog.getControlFromTag(kYeeshaPage05)).hide()
ptGUIControlButton(mydialog.getControlFromTag(kYeeshaPage06)).hide()
ptGUIControlButton(mydialog.getControlFromTag(kYeeshaPage07)).hide()
ptGUIControlButton(mydialog.getControlFromTag(kYeeshaPage09)).hide()
ptGUIControlButton(mydialog.getControlFromTag(kYeeshaPage10)).hide()
ptGUIControlButton(mydialog.getControlFromTag(kYeeshaPage12)).hide()
ptGUIControlButton(mydialog.getControlFromTag(kYeeshaPage13)).hide()
ptGUIControlButton(mydialog.getControlFromTag(kYeeshaPage14)).hide()
ptGUIControlButton(mydialog.getControlFromTag(kYeeshaPage15)).hide()
ptGUIControlButton(mydialog.getControlFromTag(kYeeshaPage16)).hide()
ptGUIControlButton(mydialog.getControlFromTag(kYeeshaPage17)).hide()
ptGUIControlButton(mydialog.getControlFromTag(kYeeshaPage18)).hide()
ptGUIControlButton(mydialog.getControlFromTag(kYeeshaPage19)).hide()
ptGUIControlButton(mydialog.getControlFromTag(kYeeshaPage20)).hide()
ptGUIControlButton(mydialog.getControlFromTag(kYeeshaPage21)).hide()
ptGUIControlButton(mydialog.getControlFromTag(kYeeshaPage22)).hide()
ptGUIControlButton(mydialog.getControlFromTag(kYeeshaPage23)).hide()
ptGUIControlButton(mydialog.getControlFromTag(kYeeshaPage24)).hide()
ptGUIControlButton(mydialog.getControlFromTag(kYeeshaPage25)).hide()
ptGUIControlButton(mydialog.getControlFromTag(kYeeshaPage08)).show()
GUIDialogObject.value.draw.disable() | gpl-3.0 |
leon-github/cloudlight | mars/common/log.py | 1 | 17223 | # vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2014 CloudLight, Inc
# All Rights Reserved.
"""Logging handler.
"""
import ConfigParser
import cStringIO
import inspect
import itertools
import logging
import logging.config
import logging.handlers
import os
import sys
import traceback
import json
from oslo.config import cfg

# '_' is used below as a translation marker; the upstream OpenStack code
# imports it from gettextutils. Define a passthrough here so this module is
# self-contained.
_ = lambda s: s
_DEFAULT_LOG_DATE_FORMAT = "%Y-%m-%d %H:%M:%S"
common_cli_opts = [
cfg.BoolOpt('debug',
short='d',
default=False,
help='Print debugging output (set logging level to '
'DEBUG instead of default WARNING level).'),
cfg.BoolOpt('verbose',
short='v',
default=False,
help='Print more verbose output (set logging level to '
'INFO instead of default WARNING level).'),
]
logging_cli_opts = [
cfg.StrOpt('log-config',
metavar='PATH',
help='If this option is specified, the logging configuration '
'file specified is used and overrides any other logging '
'options specified. Please see the Python logging module '
'documentation for details on logging configuration '
'files.'),
cfg.StrOpt('log-date-format',
default=_DEFAULT_LOG_DATE_FORMAT,
metavar='DATE_FORMAT',
help='Format string for %%(asctime)s in log records. '
'Default: %(default)s'),
cfg.StrOpt('log-file',
metavar='PATH',
help='(Optional) Name of log file to output to. '
'If no default is set, logging will go to stdout.'),
cfg.StrOpt('log-dir',
help='(Optional) The base directory used for relative '
'--log-file paths'),
cfg.BoolOpt('use-syslog',
default=False,
help='Use syslog for logging.'),
cfg.StrOpt('syslog-log-facility',
default='LOG_USER',
help='syslog facility to receive log lines')
]
generic_log_opts = [
cfg.BoolOpt('use_stderr',
default=True,
help='Log output to standard error')
]
log_opts = [
cfg.StrOpt('logging_context_format_string',
default='%(asctime)s.%(msecs)03d %(process)d %(levelname)s '
'%(name)s [%(request_id)s %(user)s %(tenant)s] '
'%(instance)s%(message)s',
help='format string to use for log messages with context'),
cfg.StrOpt('logging_default_format_string',
default='%(asctime)s.%(msecs)03d %(process)d %(levelname)s '
'%(name)s [-] %(instance)s%(message)s',
help='format string to use for log messages without context'),
cfg.StrOpt('logging_debug_format_suffix',
default='%(funcName)s %(pathname)s:%(lineno)d',
help='data to append to log format when level is DEBUG'),
cfg.StrOpt('logging_exception_prefix',
default='%(asctime)s.%(msecs)03d %(process)d TRACE %(name)s '
'%(instance)s',
help='prefix each line of exception output with this format'),
cfg.ListOpt('default_log_levels',
default=[
'amqplib=WARN',
'sqlalchemy=WARN',
'boto=WARN',
'suds=INFO',
'keystone=INFO',
'eventlet.wsgi.server=WARN'
],
help='list of logger=LEVEL pairs'),
cfg.BoolOpt('publish_errors',
default=False,
help='publish error events'),
cfg.BoolOpt('fatal_deprecations',
default=False,
help='make deprecations fatal'),
cfg.StrOpt('instance_format',
default='[instance: %(uuid)s] ',
help='If an instance is passed with the log message, format '
'it like this'),
cfg.StrOpt('instance_uuid_format',
default='[instance: %(uuid)s] ',
help='If an instance UUID is passed with the log message, '
'format it like this'),
]
CONF = cfg.CONF
CONF.register_cli_opts(common_cli_opts)
CONF.register_cli_opts(logging_cli_opts)
CONF.register_opts(generic_log_opts)
CONF.register_opts(log_opts)
logging.AUDIT = logging.INFO + 1
logging.addLevelName(logging.AUDIT, 'AUDIT')
try:
NullHandler = logging.NullHandler
except AttributeError: # NOTE NullHandler added in Python 2.7
class NullHandler(logging.Handler):
def handle(self, record):
pass
def emit(self, record):
pass
def createLock(self):
self.lock = None
def _dictify_context(context):
if context is None:
return None
if not isinstance(context, dict) and getattr(context, 'to_dict', None):
context = context.to_dict()
return context
def _get_binary_name():
return os.path.basename(inspect.stack()[-1][1])
def _get_log_file_path(binary=None):
logfile = CONF.log_file
logdir = CONF.log_dir
if logfile and not logdir:
return logfile
if logfile and logdir:
return os.path.join(logdir, logfile)
if logdir:
binary = binary or _get_binary_name()
return '%s.log' % (os.path.join(logdir, binary),)
class BaseLoggerAdapter(logging.LoggerAdapter):
def audit(self, msg, *args, **kwargs):
self.log(logging.AUDIT, msg, *args, **kwargs)
class LazyAdapter(BaseLoggerAdapter):
def __init__(self, name='unknown', version='unknown'):
self._logger = None
self.extra = {}
self.name = name
self.version = version
@property
def logger(self):
if not self._logger:
self._logger = getLogger(self.name, self.version)
return self._logger
class ContextAdapter(BaseLoggerAdapter):
warn = logging.LoggerAdapter.warning
def __init__(self, logger, project_name, version_string):
self.logger = logger
self.project = project_name
self.version = version_string
@property
def handlers(self):
return self.logger.handlers
def deprecated(self, msg, *args, **kwargs):
stdmsg = _("Deprecated: %s") % msg
if CONF.fatal_deprecations:
self.critical(stdmsg, *args, **kwargs)
raise DeprecatedConfig(msg=stdmsg)
else:
self.warn(stdmsg, *args, **kwargs)
def process(self, msg, kwargs):
if 'extra' not in kwargs:
kwargs['extra'] = {}
extra = kwargs['extra']
context = kwargs.pop('context', None)
if not context:
pass
# context = getattr(local.store, 'context', None)
if context:
extra.update(_dictify_context(context))
instance = kwargs.pop('instance', None)
instance_extra = ''
if instance:
instance_extra = CONF.instance_format % instance
else:
instance_uuid = kwargs.pop('instance_uuid', None)
if instance_uuid:
instance_extra = (CONF.instance_uuid_format
% {'uuid': instance_uuid})
extra.update({'instance': instance_extra})
extra.update({"project": self.project})
extra.update({"version": self.version})
extra['extra'] = extra.copy()
return msg, kwargs
class JSONFormatter(logging.Formatter):
def __init__(self, fmt=None, datefmt=None):
# NOTE(jkoelker) we ignore the fmt argument, but its still there
# since logging.config.fileConfig passes it.
self.datefmt = datefmt
def formatException(self, ei, strip_newlines=True):
lines = traceback.format_exception(*ei)
if strip_newlines:
lines = [itertools.ifilter(
lambda x: x,
line.rstrip().splitlines()) for line in lines]
lines = list(itertools.chain(*lines))
return lines
def format(self, record):
message = {'message': record.getMessage(),
'asctime': self.formatTime(record, self.datefmt),
'name': record.name,
'msg': record.msg,
'args': record.args,
'levelname': record.levelname,
'levelno': record.levelno,
'pathname': record.pathname,
'filename': record.filename,
'module': record.module,
'lineno': record.lineno,
'funcname': record.funcName,
'created': record.created,
'msecs': record.msecs,
'relative_created': record.relativeCreated,
'thread': record.thread,
'thread_name': record.threadName,
'process_name': record.processName,
'process': record.process,
'traceback': None}
if hasattr(record, 'extra'):
message['extra'] = record.extra
if record.exc_info:
message['traceback'] = self.formatException(record.exc_info)
return json.dumps(message)
def _create_logging_excepthook(product_name):
def logging_excepthook(type, value, tb):
extra = {}
if CONF.verbose:
extra['exc_info'] = (type, value, tb)
getLogger(product_name).critical(str(value), **extra)
return logging_excepthook
class LogConfigError(Exception):
message = _('Error loading logging config %(log_config)s: %(err_msg)s')
def __init__(self, log_config, err_msg):
self.log_config = log_config
self.err_msg = err_msg
def __str__(self):
return self.message % dict(log_config=self.log_config,
err_msg=self.err_msg)
def _load_log_config(log_config):
try:
logging.config.fileConfig(log_config)
except ConfigParser.Error as exc:
raise LogConfigError(log_config, str(exc))
def setup(product_name):
"""Setup logging."""
if CONF.log_config:
_load_log_config(CONF.log_config)
else:
_setup_logging_from_conf()
sys.excepthook = _create_logging_excepthook(product_name)
def set_defaults(logging_context_format_string):
cfg.set_defaults(log_opts,
logging_context_format_string=
logging_context_format_string)
def _find_facility_from_conf():
facility_names = logging.handlers.SysLogHandler.facility_names
facility = getattr(logging.handlers.SysLogHandler,
CONF.syslog_log_facility,
None)
if facility is None and CONF.syslog_log_facility in facility_names:
facility = facility_names.get(CONF.syslog_log_facility)
if facility is None:
valid_facilities = facility_names.keys()
consts = ['LOG_AUTH', 'LOG_AUTHPRIV', 'LOG_CRON', 'LOG_DAEMON',
'LOG_FTP', 'LOG_KERN', 'LOG_LPR', 'LOG_MAIL', 'LOG_NEWS',
'LOG_AUTH', 'LOG_SYSLOG', 'LOG_USER', 'LOG_UUCP',
'LOG_LOCAL0', 'LOG_LOCAL1', 'LOG_LOCAL2', 'LOG_LOCAL3',
'LOG_LOCAL4', 'LOG_LOCAL5', 'LOG_LOCAL6', 'LOG_LOCAL7']
valid_facilities.extend(consts)
raise TypeError(_('syslog facility must be one of: %s') %
', '.join("'%s'" % fac
for fac in valid_facilities))
return facility
def _setup_logging_from_conf():
log_root = getLogger(None).logger
for handler in log_root.handlers:
log_root.removeHandler(handler)
if CONF.use_syslog:
facility = _find_facility_from_conf()
syslog = logging.handlers.SysLogHandler(address='/dev/log',
facility=facility)
log_root.addHandler(syslog)
logpath = _get_log_file_path()
if logpath:
filelog = logging.handlers.WatchedFileHandler(logpath)
log_root.addHandler(filelog)
if CONF.use_stderr:
streamlog = ColorHandler()
log_root.addHandler(streamlog)
elif not CONF.log_file:
# pass sys.stdout as a positional argument
# python2.6 calls the argument strm, in 2.7 it's stream
streamlog = logging.StreamHandler(sys.stdout)
log_root.addHandler(streamlog)
"""
if CONF.publish_errors:
handler = importutils.import_object(
"neutron.openstack.common.log_handler.PublishErrorsHandler",
logging.ERROR)
log_root.addHandler(handler)
"""
datefmt = CONF.log_date_format
for handler in log_root.handlers:
# NOTE(alaski): CONF.log_format overrides everything currently. This
# should be deprecated in favor of context aware formatting.
handler.setFormatter(ContextFormatter(datefmt=datefmt))
if CONF.debug:
log_root.setLevel(logging.DEBUG)
elif CONF.verbose:
log_root.setLevel(logging.INFO)
else:
log_root.setLevel(logging.WARNING)
for pair in CONF.default_log_levels:
mod, _sep, level_name = pair.partition('=')
level = logging.getLevelName(level_name)
logger = logging.getLogger(mod)
logger.setLevel(level)
_loggers = {}
def getLogger(name='unknown', version='unknown'):
if name not in _loggers:
_loggers[name] = ContextAdapter(logging.getLogger(name),
name,
version)
return _loggers[name]
def getLazyLogger(name='unknown', version='unknown'):
"""Returns lazy logger.
Creates a pass-through logger that does not create the real logger
until it is really needed and delegates all calls to the real logger
once it is created.
"""
return LazyAdapter(name, version)
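# Hedged usage sketch (illustrative addition): a lazy logger can be declared at
# import time, before setup() has configured any handlers; the real logger is
# only built on the first call.
#
#   LOG = getLazyLogger(__name__)
#   ...
#   setup('mars')
#   LOG.info('real logger created on this first call')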
class WritableLogger(object):
"""A thin wrapper that responds to `write` and logs."""
def __init__(self, logger, level=logging.INFO):
self.logger = logger
self.level = level
def write(self, msg):
self.logger.log(self.level, msg)
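# Hedged usage sketch (illustrative addition): WritableLogger gives a file-like
# interface to code that expects a stream, e.g. (an assumption, not wired up by
# this module) handing eventlet's WSGI server a log target:
#
#   eventlet.wsgi.server(sock, app, log=WritableLogger(getLogger('wsgi')))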
class ContextFormatter(logging.Formatter):
"""A context.RequestContext aware formatter configured through flags.
The flags used to set format strings are: logging_context_format_string
and logging_default_format_string. You can also specify
logging_debug_format_suffix to append extra formatting if the log level is
debug.
For information about what variables are available for the formatter see:
http://docs.python.org/library/logging.html#formatter
"""
def format(self, record):
"""Uses contextstring if request_id is set, otherwise default."""
# NOTE(sdague): default the fancier formatting params
# to an empty string so we don't throw an exception if
# they get used
for key in ('instance', 'color'):
if key not in record.__dict__:
record.__dict__[key] = ''
if record.__dict__.get('request_id', None):
self._fmt = CONF.logging_context_format_string
else:
self._fmt = CONF.logging_default_format_string
if (record.levelno == logging.DEBUG and
CONF.logging_debug_format_suffix):
self._fmt += " " + CONF.logging_debug_format_suffix
# Cache this on the record; Logger will respect our formatted copy
if record.exc_info:
record.exc_text = self.formatException(record.exc_info, record)
return logging.Formatter.format(self, record)
def formatException(self, exc_info, record=None):
"""Format exception output with CONF.logging_exception_prefix."""
if not record:
return logging.Formatter.formatException(self, exc_info)
stringbuffer = cStringIO.StringIO()
traceback.print_exception(exc_info[0], exc_info[1], exc_info[2],
None, stringbuffer)
lines = stringbuffer.getvalue().split('\n')
stringbuffer.close()
if CONF.logging_exception_prefix.find('%(asctime)') != -1:
record.asctime = self.formatTime(record, self.datefmt)
formatted_lines = []
for line in lines:
pl = CONF.logging_exception_prefix % record.__dict__
fl = '%s%s' % (pl, line)
formatted_lines.append(fl)
return '\n'.join(formatted_lines)
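# Hedged usage sketch (illustrative addition): attaching the formatter to a
# handler mirrors what _setup_logging_from_conf() does above.
#
#   handler = logging.StreamHandler(sys.stdout)
#   handler.setFormatter(ContextFormatter(datefmt=CONF.log_date_format))
#   logging.getLogger().addHandler(handler)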
class ColorHandler(logging.StreamHandler):
LEVEL_COLORS = {
logging.DEBUG: '\033[00;32m', # GREEN
logging.INFO: '\033[00;36m', # CYAN
logging.AUDIT: '\033[01;36m', # BOLD CYAN
logging.WARN: '\033[01;33m', # BOLD YELLOW
logging.ERROR: '\033[01;31m', # BOLD RED
logging.CRITICAL: '\033[01;31m', # BOLD RED
}
def format(self, record):
record.color = self.LEVEL_COLORS[record.levelno]
return logging.StreamHandler.format(self, record)
class DeprecatedConfig(Exception):
message = _("Fatal call to deprecated config: %(msg)s")
def __init__(self, msg):
super(Exception, self).__init__(self.message % dict(msg=msg))
| apache-2.0 |
liorvh/golismero | tools/theHarvester/lib/graphs.py | 9 | 28558 | """
+-------------------------------------------------------------------+
| H T M L - G R A P H S (v4.8) |
| |
| Copyright Gerd Tentler www.gerd-tentler.de/tools |
| Created: Sep. 17, 2002 Last modified: Feb. 13, 2010 |
+-------------------------------------------------------------------+
| This program may be used and hosted free of charge by anyone for |
| personal purpose as long as this copyright notice remains intact. |
| |
| Obtain permission before selling the code for this program or |
| hosting this software on a commercial website or redistributing |
| this software over the Internet or in any other medium. In all |
| cases copyright must remain intact. |
+-------------------------------------------------------------------+
=====================================================================================================
Example:
import graphs
graph = graphs.BarGraph('hBar')
graph.values = [234, 125, 289, 147, 190]
print graph.create()
Returns HTML code
=====================================================================================================
"""
import re, math
class BarGraph:
"""creates horizontal and vertical bar graphs, progress bars and faders"""
def __init__(self, type = ''):
#----------------------------------------------------------------------------------------------------
# Configuration
#----------------------------------------------------------------------------------------------------
self.type = type and type or 'hBar' # graph type: "hBar", "vBar", "pBar", or "fader"
self.values = [] # graph data: list
self.graphBGColor = '' # graph background color: string
self.graphBorder = '' # graph border: string (CSS-spec: "size style color"; doesn't work with NN4)
self.graphPadding = 0 # graph padding: integer (pixels)
self.titles = [] # titles: array or string with comma-separated values
self.titleColor = 'black' # title font color: string
self.titleBGColor = '#C0E0FF' # title background color: string
self.titleBorder = '2px groove white' # title border: string (CSS specification)
self.titleFont = 'Arial, Helvetica' # title font family: string (CSS specification)
self.titleSize = 12 # title font size: integer (pixels)
self.titleAlign = 'center' # title text align: "left", "center", or "right"
self.titlePadding = 2 # title padding: integer (pixels)
self.labels = [] # label names: list or string with comma-separated values
self.labelColor = 'black' # label font color: string
self.labelBGColor = '#C0E0FF' # label background color: string
self.labelBorder = '2px groove white' # label border: string (CSS-spec: "size style color"; doesn't work with NN4)
self.labelFont = 'Arial, Helvetica' # label font family: string (CSS-spec)
self.labelSize = 12 # label font size: integer (pixels)
self.labelAlign = 'center' # label text align: "left", "center", or "right"
self.labelSpace = 0 # additional space between labels: integer (pixels)
self.barWidth = 20 # bar width: integer (pixels)
self.barLength = 1.0 # bar length ratio: float (from 0.1 to 2.9)
self.barColors = [] # bar colors OR bar images: list or string with comma-separated values
self.barBGColor = '' # bar background color: string
self.barBorder = '2px outset white' # bar border: string (CSS-spec: "size style color"; doesn't work with NN4)
self.barLevelColors = [] # bar level colors: ascending list (bLevel, bColor[,...]); draw bars >= bLevel with bColor
self.showValues = 0 # show values: 0 = % only, 1 = abs. and %, 2 = abs. only, 3 = none
        self.baseValue = 0 # base value: integer or float (only hBar and vBar)
self.absValuesColor = 'black' # abs. values font color: string
self.absValuesBGColor = '#C0E0FF' # abs. values background color: string
self.absValuesBorder = '2px groove white' # abs. values border: string (CSS-spec: "size style color"; doesn't work with NN4)
self.absValuesFont = 'Arial, Helvetica' # abs. values font family: string (CSS-spec)
self.absValuesSize = 12 # abs. values font size: integer (pixels)
self.absValuesPrefix = '' # abs. values prefix: string (e.g. "$")
self.absValuesSuffix = '' # abs. values suffix: string (e.g. " kg")
self.percValuesColor = 'black' # perc. values font color: string
self.percValuesFont = 'Arial, Helvetica' # perc. values font family: string (CSS-spec)
self.percValuesSize = 12 # perc. values font size: integer (pixels)
self.percValuesDecimals = 0 # perc. values number of decimals: integer
self.charts = 1 # number of charts: integer
# hBar/vBar only:
self.legend = [] # legend items: list or string with comma-separated values
self.legendColor = 'black' # legend font color: string
self.legendBGColor = '#F0F0F0' # legend background color: string
self.legendBorder = '2px groove white' # legend border: string (CSS-spec: "size style color"; doesn't work with NN4)
self.legendFont = 'Arial, Helvetica' # legend font family: string (CSS-spec)
self.legendSize = 12 # legend font size: integer (pixels)
self.legendAlign = 'top' # legend vertical align: "top", "center", "bottom"
# debug mode: 0 = off, 1 = on; just views some extra information
self.debug = 0
#----------------------------------------------------------------------------------------------------
# default bar colors; only used if barColors isn't set
__colors = ('#0000FF', '#FF0000', '#00E000', '#A0A0FF', '#FFA0A0', '#00A000')
# error messages
__err_type = 'ERROR: Type must be "hBar", "vBar", "pBar", or "fader"'
# CSS names (don't change)
__cssGRAPH = ''
__cssBAR = ''
__cssBARBG = ''
__cssTITLE = ''
__cssLABEL = ''
__cssLABELBG = ''
__cssLEGEND = ''
__cssLEGENDBG = ''
__cssABSVALUES = ''
__cssPERCVALUES = ''
# search pattern for images
__img_pattern = re.compile(r'\.(jpg|jpeg|jpe|gif|png)')
def set_styles(self):
"""set graph styles"""
if self.graphBGColor: self.__cssGRAPH += 'background-color:' + self.graphBGColor + ';'
if self.graphBorder: self.__cssGRAPH += 'border:' + self.graphBorder + ';'
if self.barBorder: self.__cssBAR += 'border:' + self.barBorder + ';'
if self.barBGColor: self.__cssBARBG += 'background-color:' + self.barBGColor + ';'
if self.titleColor: self.__cssTITLE += 'color:' + self.titleColor + ';'
if self.titleBGColor: self.__cssTITLE += 'background-color:' + self.titleBGColor + ';'
if self.titleBorder: self.__cssTITLE += 'border:' + self.titleBorder + ';'
if self.titleFont: self.__cssTITLE += 'font-family:' + self.titleFont + ';'
if self.titleAlign: self.__cssTITLE += 'text-align:' + self.titleAlign + ';'
if self.titleSize: self.__cssTITLE += 'font-size:' + str(self.titleSize) + 'px;'
if self.titleBGColor: self.__cssTITLE += 'background-color:' + self.titleBGColor + ';'
if self.titlePadding: self.__cssTITLE += 'padding:' + str(self.titlePadding) + 'px;'
if self.labelColor: self.__cssLABEL += 'color:' + self.labelColor + ';'
if self.labelBGColor: self.__cssLABEL += 'background-color:' + self.labelBGColor + ';'
if self.labelBorder: self.__cssLABEL += 'border:' + self.labelBorder + ';'
if self.labelFont: self.__cssLABEL += 'font-family:' + self.labelFont + ';'
if self.labelSize: self.__cssLABEL += 'font-size:' + str(self.labelSize) + 'px;'
if self.labelAlign: self.__cssLABEL += 'text-align:' + self.labelAlign + ';'
if self.labelBGColor: self.__cssLABELBG += 'background-color:' + self.labelBGColor + ';'
if self.legendColor: self.__cssLEGEND += 'color:' + self.legendColor + ';'
if self.legendFont: self.__cssLEGEND += 'font-family:' + self.legendFont + ';'
if self.legendSize: self.__cssLEGEND += 'font-size:' + str(self.legendSize) + 'px;'
if self.legendBGColor: self.__cssLEGENDBG += 'background-color:' + self.legendBGColor + ';'
if self.legendBorder: self.__cssLEGENDBG += 'border:' + self.legendBorder + ';'
if self.absValuesColor: self.__cssABSVALUES += 'color:' + self.absValuesColor + ';'
if self.absValuesBGColor: self.__cssABSVALUES += 'background-color:' + self.absValuesBGColor + ';'
if self.absValuesBorder: self.__cssABSVALUES += 'border:' + self.absValuesBorder + ';'
if self.absValuesFont: self.__cssABSVALUES += 'font-family:' + self.absValuesFont + ';'
if self.absValuesSize: self.__cssABSVALUES += 'font-size:' + str(self.absValuesSize) + 'px;'
if self.percValuesColor: self.__cssPERCVALUES += 'color:' + self.percValuesColor + ';'
if self.percValuesFont: self.__cssPERCVALUES += 'font-family:' + self.percValuesFont + ';'
if self.percValuesSize: self.__cssPERCVALUES += 'font-size:' + str(self.percValuesSize) + 'px;'
def level_color(self, value, color):
"""return bar color for each level"""
if self.barLevelColors:
for i in range(0, len(self.barLevelColors), 2):
try:
if (self.barLevelColors[i] > 0 and value >= self.barLevelColors[i]) or \
(self.barLevelColors[i] < 0 and value <= self.barLevelColors[i]):
color = self.barLevelColors[i+1]
except IndexError: pass
return color
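    # e.g. barLevelColors = [50, 'orange', 80, 'red'] paints bars with values
    # >= 50 orange and >= 80 red; a negative level matches values <= that level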
def build_bar(self, value, width, height, color):
"""return a single bar"""
title = self.absValuesPrefix + str(value) + self.absValuesSuffix
bg = self.__img_pattern.search(color) and 'background' or 'bgcolor'
bar = '<table border=0 cellspacing=0 cellpadding=0><tr>'
bar += '<td style="' + self.__cssBAR + '" ' + bg + '="' + color + '"'
bar += (value != '') and ' title="' + title + '">' or '>'
bar += '<div style="width:' + str(width) + 'px; height:' + str(height) + 'px;'
bar += ' line-height:1px; font-size:1px;"></div>'
bar += '</td></tr></table>'
return bar
def build_fader(self, value, width, height, x, color):
"""return a single fader"""
fader = '<table border=0 cellspacing=0 cellpadding=0><tr>'
x -= int(round(width / 2))
if x > 0: fader += '<td width=' + str(x) + '></td>'
fader += '<td>' + self.build_bar(value, width, height, color) + '</td>'
fader += '</tr></table>'
return fader
def build_value(self, val, max_dec, sum = 0, align = ''):
"""return a single bar/fader value"""
val = _number_format(val, max_dec)
if sum: sum = _number_format(sum, max_dec)
value = '<td style="' + self.__cssABSVALUES + '"'
if align: value += ' align=' + align
value += ' nowrap>'
value += ' ' + self.absValuesPrefix + str(val) + self.absValuesSuffix
if sum: value += ' / ' + self.absValuesPrefix + str(sum) + self.absValuesSuffix
value += ' </td>'
return value
def build_legend(self, barColors):
"""return the legend"""
if hasattr(self.legend, 'split'): self.legend = self.legend.split(',')
legend = '<table border=0 cellspacing=0 cellpadding=0><tr>'
legend += '<td style="' + self.__cssLEGENDBG + '">'
legend += '<table border=0 cellspacing=4 cellpadding=0>'
i = 0
for color in barColors:
if len(self.legend) >= i+1:
text = hasattr(self.legend[i], 'strip') and self.legend[i].strip() or str(self.legend[i])
else: text = ''
legend += '<tr>'
legend += '<td>' + self.build_bar('', self.barWidth, self.barWidth, color) + '</td>'
legend += '<td style="' + self.__cssLEGEND + '" nowrap>' + text + '</td>'
legend += '</tr>'
i += 1
legend += '</table></td></tr></table>'
return legend
def build_hTitle(self, titleLabel, titleValue, titleBar):
"""return horizontal titles"""
title = '<tr>'
title += '<td style="' + self.__cssTITLE + '">' + titleLabel + '</td>'
if titleValue != '': title += '<td style="' + self.__cssTITLE + '">' + titleValue + '</td>'
title += '<td style="' + self.__cssTITLE + '">' + titleBar + '</td>'
title += '</tr>'
return title
def create_hBar(self, value, percent, mPerc, mPerc_neg, max_neg, mul, valSpace, bColor, border, spacer, spacer_neg):
"""return a single horizontal bar with label and values (abs./perc.)"""
bar = '<table border=0 cellspacing=0 cellpadding=0 height=100%><tr>'
if percent < 0:
percent *= -1
bar += '<td style="' + self.__cssLABELBG + '" height=' + str(self.barWidth) + ' width=' + str(int(round((mPerc_neg - percent) * mul + valSpace))) + ' align=right nowrap>'
if self.showValues < 2: bar += '<span style="' + self.__cssPERCVALUES + '">' + str(_number_format(percent, self.percValuesDecimals)) + '%</span>'
bar += ' </td><td style="' + self.__cssLABELBG + '">'
bar += self.build_bar(value, int(round(percent * mul)), self.barWidth, bColor)
bar += '</td><td width=' + str(spacer) + '></td>'
else:
if max_neg:
bar += '<td style="' + self.__cssLABELBG + '" width=' + str(spacer_neg) + '>'
bar += '<table border=0 cellspacing=0 cellpadding=0><tr><td></td></tr></table></td>'
if percent:
bar += '<td>'
bar += self.build_bar(value, int(round(percent * mul)), self.barWidth, bColor)
bar += '</td>'
else: bar += '<td width=1 height=' + str(self.barWidth + (border * 2)) + '></td>'
bar += '<td style="' + self.__cssPERCVALUES + '" width=' + str(int(round((mPerc - percent) * mul + valSpace))) + ' align=left nowrap>'
if self.showValues < 2: bar += ' ' + str(_number_format(percent, self.percValuesDecimals)) + '%'
bar += ' </td>'
bar += '</tr></table>'
return bar
def create_vBar(self, value, percent, mPerc, mPerc_neg, max_neg, mul, valSpace, bColor, border, spacer, spacer_neg):
"""return a single vertical bar with label and values (abs./perc.)"""
bar = '<table border=0 cellspacing=0 cellpadding=0 width=100%><tr align=center>'
if percent < 0:
percent *= -1
bar += '<td height=' + str(spacer) + '></td></tr><tr align=center valign=top><td style="' + self.__cssLABELBG + '">'
bar += self.build_bar(value, self.barWidth, int(round(percent * mul)), bColor)
bar += '</td></tr><tr align=center valign=top>'
bar += '<td style="' + self.__cssLABELBG + '" height=' + str(int(round((mPerc_neg - percent) * mul + valSpace))) + ' nowrap>'
bar += (self.showValues < 2) and '<span style="' + self.__cssPERCVALUES + '">' + str(_number_format(percent, self.percValuesDecimals)) + '%</span>' or ' '
bar += '</td>'
else:
bar += '<td style="' + self.__cssPERCVALUES + '" valign=bottom height=' + str(int(round((mPerc - percent) * mul + valSpace))) + ' nowrap>'
if self.showValues < 2: bar += str(_number_format(percent, self.percValuesDecimals)) + '%'
bar += '</td>'
if percent:
bar += '</tr><tr align=center valign=bottom><td>'
bar += self.build_bar(value, self.barWidth, int(round(percent * mul)), bColor)
bar += '</td>'
else: bar += '</tr><tr><td width=' + str(self.barWidth + (border * 2)) + ' height=1></td>'
if max_neg:
bar += '</tr><tr><td style="' + self.__cssLABELBG + '" height=' + str(spacer_neg) + '>'
bar += '<table border=0 cellspacing=0 cellpadding=0><tr><td></td></tr></table></td>'
bar += '</tr></table>'
return bar
def create(self):
"""create a complete bar graph (horizontal, vertical, progress, or fader)"""
self.type = self.type.lower()
d = self.values
t = hasattr(self.titles, 'split') and self.titles.split(',') or self.titles
r = hasattr(self.labels, 'split') and self.labels.split(',') or self.labels
drc = hasattr(self.barColors, 'split') and self.barColors.split(',') or self.barColors
val = []
bc = []
if self.barLength < 0.1: self.barLength = 0.1
elif self.barLength > 2.9: self.barLength = 2.9
labels = (len(d) > len(r)) and len(d) or len(r)
if self.type == 'pbar' or self.type == 'fader':
if not self.barBGColor: self.barBGColor = self.labelBGColor
if self.labelBGColor == self.barBGColor and len(t) == 0:
self.labelBGColor = ''
self.labelBorder = ''
self.set_styles()
graph = '<table border=0 cellspacing=0 cellpadding=' + str(self.graphPadding) + '><tr>'
graph += '<td' + (self.__cssGRAPH and ' style="' + self.__cssGRAPH + '"' or '') + '>'
if self.legend and self.type != 'pbar' and self.type != 'fader':
graph += '<table border=0 cellspacing=0 cellpadding=0><tr><td>'
if self.charts > 1:
divide = math.ceil(labels / self.charts)
graph += '<table border=0 cellspacing=0 cellpadding=6><tr valign=top><td>'
else: divide = 0
sum = 0
max = 0
max_neg = 0
max_dec = 0
ccnt = 0
lcnt = 0
chart = 0
for i in range(labels):
if divide and i and not i % divide:
lcnt = 0
chart += 1
try: drv = len(d[i]) and [e for e in d[i]] or [d[i]]
except: drv = [d[i]]
j = 0
dec = 0
if len(val) <= chart: val.append([])
for v in drv:
s = str(v)
if s.find('.') != -1:
dec = len(s[s.find('.') + 1:])
if dec > max_dec: max_dec = dec
if len(val[chart]) <= lcnt: val[chart].append([])
val[chart][lcnt].append(v)
if v != 0: v -= self.baseValue
if v > max: max = v
elif v < max_neg: max_neg = v
if v < 0: v *= -1
sum += v
if len(bc) <= j:
if ccnt >= len(self.__colors): ccnt = 0
if len(drc) <= j or len(drc[j]) < 3:
bc.append(self.__colors[ccnt])
ccnt += 1
else: bc.append(drc[j].strip())
j += 1
lcnt += 1
border = int(self.barBorder[0])
mPerc = sum and int(round(max * 100.0 / sum)) or 0
if self.type == 'pbar' or self.type == 'fader': mul = 2
else: mul = mPerc and 100.0 / mPerc or 1
mul *= self.barLength
if self.showValues < 2:
if self.type == 'hbar':
valSpace = (self.percValuesDecimals * (self.percValuesSize / 1.6)) + (self.percValuesSize * 3.2)
else: valSpace = self.percValuesSize * 1.2
else: valSpace = self.percValuesSize
spacer = maxSize = int(round(mPerc * mul + valSpace + border * 2))
if max_neg:
mPerc_neg = sum and int(round(-max_neg * 100.0 / sum)) or 0
if mPerc_neg > mPerc and self.type != 'pbar' and self.type != 'fader':
mul = 100.0 / mPerc_neg * self.barLength
spacer_neg = int(round(mPerc_neg * mul + valSpace + border * 2))
maxSize += spacer_neg
else: mPerc_neg = spacer_neg = 0
titleLabel = ''
titleValue = ''
titleBar = ''
if len(t) > 0:
titleLabel = (t[0] == '') and ' ' or t[0]
if self.showValues == 1 or self.showValues == 2:
titleValue = (t[1] == '') and ' ' or t[1]
titleBar = (t[2] == '') and ' ' or t[2]
else: titleBar = (t[1] == '') and ' ' or t[1]
chart = 0
lcnt = 0
for v in val:
graph += '<table border=0 cellspacing=2 cellpadding=0>'
if self.type == 'hbar':
if len(t) > 0: graph += self.build_hTitle(titleLabel, titleValue, titleBar)
for i in range(len(v)):
label = (lcnt < len(r)) and r[lcnt].strip() or str(lcnt + 1)
rowspan = len(v[i])
graph += '<tr><td style="' + self.__cssLABEL + '"' + ((rowspan > 1) and ' rowspan=' + str(rowspan) or '') + '>'
graph += ' ' + label + ' </td>'
for j in range(len(v[i])):
value = v[i][j] and v[i][j] - self.baseValue or 0
percent = sum and value * 100.0 / sum or 0
value = _number_format(v[i][j], max_dec)
bColor = self.level_color(v[i][j], bc[j])
if self.showValues == 1 or self.showValues == 2:
graph += self.build_value(v[i][j], max_dec, 0, 'right')
graph += '<td' + (self.__cssBARBG and ' style="' + self.__cssBARBG + '"' or '') + ' height=100% width=' + str(maxSize) + '>'
graph += self.create_hBar(value, percent, mPerc, mPerc_neg, max_neg, mul, valSpace, bColor, border, spacer, spacer_neg)
graph += '</td></tr>'
if j < len(v[i]) - 1: graph += '<tr>'
if self.labelSpace and i < len(v)-1: graph += '<tr><td colspan=3 height=' + str(self.labelSpace) + '></td></tr>'
lcnt += 1
elif self.type == 'vbar':
graph += '<tr align=center valign=bottom>'
if titleBar != '':
titleBar = titleBar.replace('-', '-<br>')
graph += '<td style="' + self.__cssTITLE + '" valign=middle>' + titleBar + '</td>'
for i in range(len(v)):
for j in range(len(v[i])):
value = v[i][j] and v[i][j] - self.baseValue or 0
percent = sum and value * 100.0 / sum or 0
value = _number_format(v[i][j], max_dec)
bColor = self.level_color(v[i][j], bc[j])
graph += '<td' + (self.__cssBARBG and ' style="' + self.__cssBARBG + '"' or '') + '>'
graph += self.create_vBar(value, percent, mPerc, mPerc_neg, max_neg, mul, valSpace, bColor, border, spacer, spacer_neg)
graph += '</td>'
if self.labelSpace: graph += '<td width=' + str(self.labelSpace) + '></td>'
if self.showValues == 1 or self.showValues == 2:
graph += '</tr><tr align=center>'
if titleValue != '': graph += '<td style="' + self.__cssTITLE + '">' + titleValue + '</td>'
for i in range(len(v)):
for j in range(len(v[i])):
graph += self.build_value(v[i][j], max_dec)
if self.labelSpace: graph += '<td width=' + str(self.labelSpace) + '></td>'
graph += '</tr><tr>'
if titleLabel != '': graph += '<td style="' + self.__cssTITLE + '">' + titleLabel + '</td>'
for i in range(len(v)):
label = (lcnt < len(r)) and r[lcnt].strip() or str(lcnt + 1)
colspan = len(v[i])
graph += '<td style="' + self.__cssLABEL + '"' + ((colspan > 1) and ' colspan=' + str(colspan) or '') + '>'
graph += ' ' + label + ' </td>'
if self.labelSpace: graph += '<td width=' + str(self.labelSpace) + '></td>'
lcnt += 1
graph += '</tr>'
elif self.type == 'pbar' or self.type == 'fader':
if len(t) > 0: graph += self.build_hTitle(titleLabel, titleValue, titleBar)
for i in range(len(v)):
try: m = (len(v[i]) > 1) and True or False
except: m = False
if m or not i:
label = (lcnt < len(r)) and r[lcnt].strip() or str(i + 1)
graph += '<tr>'
if len(r):
graph += '<td style="' + self.__cssLABEL + '">'
graph += ' ' + label + ' </td>'
try: sum = v[i][1] and v[i][1] or v[-1][0]
except: sum = v[-1][0]
percent = sum and v[i][0] * 100.0 / sum or 0
value = _number_format(v[i][0], max_dec)
if self.showValues == 1 or self.showValues == 2:
graph += self.build_value(v[i][0], max_dec, sum, 'right')
graph += '<td' + (self.__cssBARBG and ' style="' + self.__cssBARBG + '"' or '') + '>'
self.barColors = (len(drc) >= i+1) and drc[i].strip() or self.__colors[0]
bColor = self.level_color(v[i][0], self.barColors)
graph += '<table border=0 cellspacing=0 cellpadding=0><tr><td>'
if self.type == 'fader':
graph += self.build_fader(value, int(round(self.barWidth / 2)), self.barWidth, int(round(percent * mul)), bColor)
else: graph += self.build_bar(value, int(round(percent * mul)), self.barWidth, bColor)
graph += '</td><td width=' + str(int(round((100 - percent) * mul))) + '></td>'
graph += '</tr></table></td>'
if self.showValues < 2: graph += '<td style="' + self.__cssPERCVALUES + '" nowrap> ' + str(_number_format(percent, self.percValuesDecimals)) + '%</td>'
graph += '</tr>'
if self.labelSpace and i < len(v)-1: graph += '<td colspan=3 height=' + str(self.labelSpace) + '></td>'
lcnt += 1
else: graph += '<tr><td>' + self.__err_type + '</td></tr>'
graph += '</table>'
if chart < self.charts - 1 and len(val[chart+1]):
graph += '</td>'
if self.type == 'vbar': graph += '</tr><tr valign=top>'
graph += '<td>'
chart += 1
if self.charts > 1: graph += '</td></tr></table>'
if self.legend and self.type != 'pbar' and self.type != 'fader':
graph += '</td><td width=10> </td><td' + (self.legendAlign and ' valign=' + self.legendAlign or '') + '>'
graph += self.build_legend(bc)
graph += '</td></tr></table>'
if self.debug:
graph += "<br>sum=%s max=%s max_neg=%s max_dec=%s " % (sum, max, max_neg, max_dec)
graph += "mPerc=%s mPerc_neg=%s mul=%s valSpace=%s" % (mPerc, mPerc_neg, mul, valSpace)
graph += '</td></tr></table>'
return graph
def _number_format(val, dec):
"""return float with dec decimals; if dec is 0, return integer"""
return dec and ('%.' + str(dec) + 'f') % val or int(round(val))
if __name__ == '__main__':
print __doc__
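    # a quick hBar demo mirroring the docstring example (sample data only)
    g = BarGraph('hBar')
    g.values = [234, 125, 289, 147, 190]
    g.labels = 'mon,tue,wed,thu,fri'
    print g.create()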
| gpl-2.0 |
adamchainz/ansible | test/units/plugins/strategy/test_strategy_base.py | 69 | 21292 | # (c) 2012-2014, Michael DeHaan <[email protected]>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
from units.mock.loader import DictDataLoader
import uuid
from ansible.compat.tests import unittest
from ansible.compat.tests.mock import patch, MagicMock
from ansible.errors import AnsibleError, AnsibleParserError
from ansible.executor.process.worker import WorkerProcess
from ansible.executor.task_queue_manager import TaskQueueManager
from ansible.executor.task_result import TaskResult
from ansible.inventory.host import Host
from ansible.module_utils.six.moves import queue as Queue
from ansible.playbook.block import Block
from ansible.playbook.handler import Handler
from ansible.plugins.strategy import StrategyBase
class TestStrategyBase(unittest.TestCase):
def setUp(self):
pass
def tearDown(self):
pass
def test_strategy_base_init(self):
queue_items = []
def _queue_empty(*args, **kwargs):
return len(queue_items) == 0
def _queue_get(*args, **kwargs):
if len(queue_items) == 0:
raise Queue.Empty
else:
return queue_items.pop()
def _queue_put(item, *args, **kwargs):
queue_items.append(item)
mock_queue = MagicMock()
mock_queue.empty.side_effect = _queue_empty
mock_queue.get.side_effect = _queue_get
mock_queue.put.side_effect = _queue_put
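        # mock_queue now stands in for the TaskQueueManager's final result
        # queue (_final_q), backed by the plain queue_items list above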
mock_tqm = MagicMock(TaskQueueManager)
mock_tqm._final_q = mock_queue
mock_tqm._options = MagicMock()
mock_tqm._notified_handlers = {}
mock_tqm._listening_handlers = {}
strategy_base = StrategyBase(tqm=mock_tqm)
strategy_base.cleanup()
def test_strategy_base_run(self):
queue_items = []
def _queue_empty(*args, **kwargs):
return len(queue_items) == 0
def _queue_get(*args, **kwargs):
if len(queue_items) == 0:
raise Queue.Empty
else:
return queue_items.pop()
def _queue_put(item, *args, **kwargs):
queue_items.append(item)
mock_queue = MagicMock()
mock_queue.empty.side_effect = _queue_empty
mock_queue.get.side_effect = _queue_get
mock_queue.put.side_effect = _queue_put
mock_tqm = MagicMock(TaskQueueManager)
mock_tqm._final_q = mock_queue
mock_tqm._stats = MagicMock()
mock_tqm._notified_handlers = {}
mock_tqm._listening_handlers = {}
mock_tqm.send_callback.return_value = None
for attr in ('RUN_OK', 'RUN_ERROR', 'RUN_FAILED_HOSTS', 'RUN_UNREACHABLE_HOSTS'):
setattr(mock_tqm, attr, getattr(TaskQueueManager, attr))
mock_iterator = MagicMock()
mock_iterator._play = MagicMock()
mock_iterator._play.handlers = []
mock_play_context = MagicMock()
mock_tqm._failed_hosts = dict()
mock_tqm._unreachable_hosts = dict()
mock_tqm._options = MagicMock()
mock_tqm._notified_handlers = {}
mock_tqm._listening_handlers = {}
strategy_base = StrategyBase(tqm=mock_tqm)
mock_host = MagicMock()
mock_host.name = 'host1'
self.assertEqual(strategy_base.run(iterator=mock_iterator, play_context=mock_play_context), mock_tqm.RUN_OK)
self.assertEqual(strategy_base.run(iterator=mock_iterator, play_context=mock_play_context, result=TaskQueueManager.RUN_ERROR), mock_tqm.RUN_ERROR)
mock_tqm._failed_hosts = dict(host1=True)
mock_iterator.get_failed_hosts.return_value = [mock_host]
self.assertEqual(strategy_base.run(iterator=mock_iterator, play_context=mock_play_context, result=False), mock_tqm.RUN_FAILED_HOSTS)
mock_tqm._unreachable_hosts = dict(host1=True)
mock_iterator.get_failed_hosts.return_value = []
self.assertEqual(strategy_base.run(iterator=mock_iterator, play_context=mock_play_context, result=False), mock_tqm.RUN_UNREACHABLE_HOSTS)
strategy_base.cleanup()
def test_strategy_base_get_hosts(self):
queue_items = []
def _queue_empty(*args, **kwargs):
return len(queue_items) == 0
def _queue_get(*args, **kwargs):
if len(queue_items) == 0:
raise Queue.Empty
else:
return queue_items.pop()
def _queue_put(item, *args, **kwargs):
queue_items.append(item)
mock_queue = MagicMock()
mock_queue.empty.side_effect = _queue_empty
mock_queue.get.side_effect = _queue_get
mock_queue.put.side_effect = _queue_put
mock_hosts = []
for i in range(0, 5):
mock_host = MagicMock()
mock_host.name = "host%02d" % (i + 1)
mock_host.has_hostkey = True
mock_hosts.append(mock_host)
mock_inventory = MagicMock()
mock_inventory.get_hosts.return_value = mock_hosts
mock_tqm = MagicMock()
mock_tqm._final_q = mock_queue
mock_tqm._notified_handlers = {}
mock_tqm._listening_handlers = {}
mock_tqm.get_inventory.return_value = mock_inventory
mock_play = MagicMock()
mock_play.hosts = ["host%02d" % (i + 1) for i in range(0, 5)]
strategy_base = StrategyBase(tqm=mock_tqm)
mock_tqm._failed_hosts = []
mock_tqm._unreachable_hosts = []
self.assertEqual(strategy_base.get_hosts_remaining(play=mock_play), mock_hosts)
mock_tqm._failed_hosts = ["host01"]
self.assertEqual(strategy_base.get_hosts_remaining(play=mock_play), mock_hosts[1:])
self.assertEqual(strategy_base.get_failed_hosts(play=mock_play), [mock_hosts[0]])
mock_tqm._unreachable_hosts = ["host02"]
self.assertEqual(strategy_base.get_hosts_remaining(play=mock_play), mock_hosts[2:])
strategy_base.cleanup()
@patch.object(WorkerProcess, 'run')
def test_strategy_base_queue_task(self, mock_worker):
def fake_run(self):
return
mock_worker.run.side_effect = fake_run
fake_loader = DictDataLoader()
mock_var_manager = MagicMock()
mock_host = MagicMock()
mock_host.get_vars.return_value = dict()
mock_host.has_hostkey = True
mock_inventory = MagicMock()
mock_inventory.get.return_value = mock_host
mock_options = MagicMock()
mock_options.module_path = None
tqm = TaskQueueManager(
inventory=mock_inventory,
variable_manager=mock_var_manager,
loader=fake_loader,
options=mock_options,
passwords=None,
)
tqm._initialize_processes(3)
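        # with three workers, the _queue_task calls below should round-robin
        # _cur_worker through 1, 2, 0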
tqm.hostvars = dict()
try:
strategy_base = StrategyBase(tqm=tqm)
strategy_base._queue_task(host=mock_host, task=MagicMock(), task_vars=dict(), play_context=MagicMock())
self.assertEqual(strategy_base._cur_worker, 1)
self.assertEqual(strategy_base._pending_results, 1)
strategy_base._queue_task(host=mock_host, task=MagicMock(), task_vars=dict(), play_context=MagicMock())
self.assertEqual(strategy_base._cur_worker, 2)
self.assertEqual(strategy_base._pending_results, 2)
strategy_base._queue_task(host=mock_host, task=MagicMock(), task_vars=dict(), play_context=MagicMock())
self.assertEqual(strategy_base._cur_worker, 0)
self.assertEqual(strategy_base._pending_results, 3)
finally:
tqm.cleanup()
def test_strategy_base_process_pending_results(self):
mock_tqm = MagicMock()
mock_tqm._terminated = False
mock_tqm._failed_hosts = dict()
mock_tqm._unreachable_hosts = dict()
mock_tqm.send_callback.return_value = None
mock_tqm._notified_handlers = {}
mock_tqm._listening_handlers = {}
queue_items = []
def _queue_empty(*args, **kwargs):
return len(queue_items) == 0
def _queue_get(*args, **kwargs):
if len(queue_items) == 0:
raise Queue.Empty
else:
return queue_items.pop()
def _queue_put(item, *args, **kwargs):
queue_items.append(item)
mock_queue = MagicMock()
mock_queue.empty.side_effect = _queue_empty
mock_queue.get.side_effect = _queue_get
mock_queue.put.side_effect = _queue_put
mock_tqm._final_q = mock_queue
mock_tqm._stats = MagicMock()
mock_tqm._stats.increment.return_value = None
mock_play = MagicMock()
mock_host = MagicMock()
mock_host.name = 'test01'
mock_host.vars = dict()
mock_host.get_vars.return_value = dict()
mock_host.has_hostkey = True
mock_task = MagicMock()
mock_task._role = None
mock_task._parent = None
mock_task.ignore_errors = False
mock_task._uuid = uuid.uuid4()
mock_task.loop = None
mock_task.copy.return_value = mock_task
mock_handler_task = MagicMock(Handler)
mock_handler_task.name = 'test handler'
mock_handler_task.action = 'foo'
mock_handler_task._parent = None
mock_handler_task.get_name.return_value = "test handler"
mock_handler_task.has_triggered.return_value = False
mock_handler_task._uuid = 'xxxxxxxxxxxxx'
mock_handler_task.copy.return_value = mock_handler_task
mock_iterator = MagicMock()
mock_iterator._play = mock_play
mock_iterator.mark_host_failed.return_value = None
mock_iterator.get_next_task_for_host.return_value = (None, None)
mock_iterator.get_original_task.return_value = mock_task
mock_handler_block = MagicMock()
mock_handler_block.block = [mock_handler_task]
mock_handler_block.rescue = []
mock_handler_block.always = []
mock_play.handlers = [mock_handler_block]
mock_tqm._notified_handlers = {mock_handler_task._uuid: []}
mock_tqm._listening_handlers = {}
mock_group = MagicMock()
mock_group.add_host.return_value = None
def _get_host(host_name):
if host_name == 'test01':
return mock_host
return None
def _get_group(group_name):
if group_name in ('all', 'foo'):
return mock_group
return None
mock_inventory = MagicMock()
mock_inventory._hosts_cache = dict()
mock_inventory.hosts.return_value = mock_host
mock_inventory.get_host.side_effect = _get_host
mock_inventory.get_group.side_effect = _get_group
mock_inventory.clear_pattern_cache.return_value = None
mock_inventory.get_host_vars.return_value = {}
mock_inventory.hosts.get.return_value = mock_host
mock_var_mgr = MagicMock()
mock_var_mgr.set_host_variable.return_value = None
mock_var_mgr.set_host_facts.return_value = None
mock_var_mgr.get_vars.return_value = dict()
strategy_base = StrategyBase(tqm=mock_tqm)
strategy_base._inventory = mock_inventory
strategy_base._variable_manager = mock_var_mgr
strategy_base._blocked_hosts = dict()
def _has_dead_workers():
return False
strategy_base._tqm.has_dead_workers.side_effect = _has_dead_workers
results = strategy_base._wait_on_pending_results(iterator=mock_iterator)
self.assertEqual(len(results), 0)
task_result = TaskResult(host=mock_host.name, task=mock_task._uuid, return_data=dict(changed=True))
queue_items.append(task_result)
strategy_base._blocked_hosts['test01'] = True
strategy_base._pending_results = 1
results = strategy_base._wait_on_pending_results(iterator=mock_iterator)
self.assertEqual(len(results), 1)
self.assertEqual(results[0], task_result)
self.assertEqual(strategy_base._pending_results, 0)
self.assertNotIn('test01', strategy_base._blocked_hosts)
task_result = TaskResult(host=mock_host.name, task=mock_task._uuid, return_data='{"failed":true}')
queue_items.append(task_result)
strategy_base._blocked_hosts['test01'] = True
strategy_base._pending_results = 1
mock_iterator.is_failed.return_value = True
results = strategy_base._wait_on_pending_results(iterator=mock_iterator)
self.assertEqual(len(results), 1)
self.assertEqual(results[0], task_result)
self.assertEqual(strategy_base._pending_results, 0)
self.assertNotIn('test01', strategy_base._blocked_hosts)
# self.assertIn('test01', mock_tqm._failed_hosts)
# del mock_tqm._failed_hosts['test01']
mock_iterator.is_failed.return_value = False
task_result = TaskResult(host=mock_host.name, task=mock_task._uuid, return_data='{"unreachable": true}')
queue_items.append(task_result)
strategy_base._blocked_hosts['test01'] = True
strategy_base._pending_results = 1
results = strategy_base._wait_on_pending_results(iterator=mock_iterator)
self.assertEqual(len(results), 1)
self.assertEqual(results[0], task_result)
self.assertEqual(strategy_base._pending_results, 0)
self.assertNotIn('test01', strategy_base._blocked_hosts)
self.assertIn('test01', mock_tqm._unreachable_hosts)
del mock_tqm._unreachable_hosts['test01']
task_result = TaskResult(host=mock_host.name, task=mock_task._uuid, return_data='{"skipped": true}')
queue_items.append(task_result)
strategy_base._blocked_hosts['test01'] = True
strategy_base._pending_results = 1
results = strategy_base._wait_on_pending_results(iterator=mock_iterator)
self.assertEqual(len(results), 1)
self.assertEqual(results[0], task_result)
self.assertEqual(strategy_base._pending_results, 0)
self.assertNotIn('test01', strategy_base._blocked_hosts)
queue_items.append(TaskResult(host=mock_host.name, task=mock_task._uuid, return_data=dict(add_host=dict(host_name='newhost01', new_groups=['foo']))))
strategy_base._blocked_hosts['test01'] = True
strategy_base._pending_results = 1
results = strategy_base._wait_on_pending_results(iterator=mock_iterator)
self.assertEqual(len(results), 1)
self.assertEqual(strategy_base._pending_results, 0)
self.assertNotIn('test01', strategy_base._blocked_hosts)
queue_items.append(TaskResult(host=mock_host.name, task=mock_task._uuid, return_data=dict(add_group=dict(group_name='foo'))))
strategy_base._blocked_hosts['test01'] = True
strategy_base._pending_results = 1
results = strategy_base._wait_on_pending_results(iterator=mock_iterator)
self.assertEqual(len(results), 1)
self.assertEqual(strategy_base._pending_results, 0)
self.assertNotIn('test01', strategy_base._blocked_hosts)
queue_items.append(TaskResult(host=mock_host.name, task=mock_task._uuid, return_data=dict(changed=True, _ansible_notify=['test handler'])))
strategy_base._blocked_hosts['test01'] = True
strategy_base._pending_results = 1
results = strategy_base._wait_on_pending_results(iterator=mock_iterator)
self.assertEqual(len(results), 1)
self.assertEqual(strategy_base._pending_results, 0)
self.assertNotIn('test01', strategy_base._blocked_hosts)
self.assertIn(mock_handler_task._uuid, strategy_base._notified_handlers)
self.assertIn(mock_host, strategy_base._notified_handlers[mock_handler_task._uuid])
# queue_items.append(('set_host_var', mock_host, mock_task, None, 'foo', 'bar'))
# results = strategy_base._process_pending_results(iterator=mock_iterator)
# self.assertEqual(len(results), 0)
# self.assertEqual(strategy_base._pending_results, 1)
# queue_items.append(('set_host_facts', mock_host, mock_task, None, 'foo', dict()))
# results = strategy_base._process_pending_results(iterator=mock_iterator)
# self.assertEqual(len(results), 0)
# self.assertEqual(strategy_base._pending_results, 1)
# queue_items.append(('bad'))
# self.assertRaises(AnsibleError, strategy_base._process_pending_results, iterator=mock_iterator)
strategy_base.cleanup()
def test_strategy_base_load_included_file(self):
fake_loader = DictDataLoader({
"test.yml": """
- debug: msg='foo'
""",
"bad.yml": """
""",
})
queue_items = []
def _queue_empty(*args, **kwargs):
return len(queue_items) == 0
def _queue_get(*args, **kwargs):
if len(queue_items) == 0:
raise Queue.Empty
else:
return queue_items.pop()
def _queue_put(item, *args, **kwargs):
queue_items.append(item)
mock_queue = MagicMock()
mock_queue.empty.side_effect = _queue_empty
mock_queue.get.side_effect = _queue_get
mock_queue.put.side_effect = _queue_put
mock_tqm = MagicMock()
mock_tqm._final_q = mock_queue
mock_tqm._notified_handlers = {}
mock_tqm._listening_handlers = {}
strategy_base = StrategyBase(tqm=mock_tqm)
strategy_base._loader = fake_loader
strategy_base.cleanup()
mock_play = MagicMock()
mock_block = MagicMock()
mock_block._play = mock_play
mock_block.vars = dict()
mock_task = MagicMock()
mock_task._block = mock_block
mock_task._role = None
mock_task._parent = None
mock_iterator = MagicMock()
mock_iterator.mark_host_failed.return_value = None
mock_inc_file = MagicMock()
mock_inc_file._task = mock_task
mock_inc_file._filename = "test.yml"
res = strategy_base._load_included_file(included_file=mock_inc_file, iterator=mock_iterator)
mock_inc_file._filename = "bad.yml"
res = strategy_base._load_included_file(included_file=mock_inc_file, iterator=mock_iterator)
self.assertEqual(res, [])
@patch.object(WorkerProcess, 'run')
def test_strategy_base_run_handlers(self, mock_worker):
def fake_run(*args):
return
mock_worker.side_effect = fake_run
mock_play_context = MagicMock()
mock_handler_task = MagicMock(Handler)
mock_handler_task.action = 'foo'
mock_handler_task.get_name.return_value = "test handler"
mock_handler_task.has_triggered.return_value = False
mock_handler_task.listen = None
mock_handler_task._role = None
mock_handler_task._parent = None
mock_handler_task._uuid = 'xxxxxxxxxxxxxxxx'
mock_handler = MagicMock()
mock_handler.block = [mock_handler_task]
mock_handler.flag_for_host.return_value = False
mock_play = MagicMock()
mock_play.handlers = [mock_handler]
mock_host = MagicMock(Host)
mock_host.name = "test01"
mock_host.has_hostkey = True
mock_inventory = MagicMock()
mock_inventory.get_hosts.return_value = [mock_host]
mock_inventory.get.return_value = mock_host
mock_var_mgr = MagicMock()
mock_var_mgr.get_vars.return_value = dict()
mock_iterator = MagicMock()
mock_iterator._play = mock_play
mock_iterator.get_original_task.return_value = mock_handler_task
fake_loader = DictDataLoader()
mock_options = MagicMock()
mock_options.module_path = None
tqm = TaskQueueManager(
inventory=mock_inventory,
variable_manager=mock_var_mgr,
loader=fake_loader,
options=mock_options,
passwords=None,
)
tqm._initialize_processes(3)
tqm._initialize_notified_handlers(mock_play)
tqm.hostvars = dict()
try:
strategy_base = StrategyBase(tqm=tqm)
strategy_base._inventory = mock_inventory
strategy_base._notified_handlers = {mock_handler_task._uuid: [mock_host]}
task_result = TaskResult(Host('host01'), Handler(), dict(changed=False))
tqm._final_q.put(task_result)
result = strategy_base.run_handlers(iterator=mock_iterator, play_context=mock_play_context)
finally:
strategy_base.cleanup()
tqm.cleanup()
| gpl-3.0 |
carpyncho/feets | doc/source/JSAnimation/examples.py | 4 | 3126 | import numpy as np
from matplotlib import pyplot as plt
from matplotlib import animation
from JSAnimation import IPython_display
def basic_animation(frames=100, interval=30):
"""Plot a basic sine wave with oscillating amplitude"""
fig = plt.figure()
ax = plt.axes(xlim=(0, 10), ylim=(-2, 2))
line, = ax.plot([], [], lw=2)
x = np.linspace(0, 10, 1000)
def init():
line.set_data([], [])
return line,
def animate(i):
y = np.cos(i * 0.02 * np.pi) * np.sin(x - i * 0.02 * np.pi)
line.set_data(x, y)
return line,
return animation.FuncAnimation(fig, animate, init_func=init,
frames=frames, interval=interval)
def lorenz_animation(N_trajectories=20, rseed=1, frames=200, interval=30):
"""Plot a 3D visualization of the dynamics of the Lorenz system"""
from scipy import integrate
from mpl_toolkits.mplot3d import Axes3D
from matplotlib.colors import cnames
def lorentz_deriv(coords, t0, sigma=10., beta=8./3, rho=28.0):
"""Compute the time-derivative of a Lorentz system."""
x, y, z = coords
return [sigma * (y - x), x * (rho - z) - y, x * y - beta * z]
# Choose random starting points, uniformly distributed from -15 to 15
np.random.seed(rseed)
x0 = -15 + 30 * np.random.random((N_trajectories, 3))
# Solve for the trajectories
t = np.linspace(0, 2, 500)
x_t = np.asarray([integrate.odeint(lorentz_deriv, x0i, t)
for x0i in x0])
# Set up figure & 3D axis for animation
fig = plt.figure()
ax = fig.add_axes([0, 0, 1, 1], projection='3d')
ax.axis('off')
# choose a different color for each trajectory
colors = plt.cm.jet(np.linspace(0, 1, N_trajectories))
# set up lines and points
lines = sum([ax.plot([], [], [], '-', c=c)
for c in colors], [])
pts = sum([ax.plot([], [], [], 'o', c=c, ms=4)
for c in colors], [])
# prepare the axes limits
ax.set_xlim((-25, 25))
ax.set_ylim((-35, 35))
ax.set_zlim((5, 55))
# set point-of-view: specified by (altitude degrees, azimuth degrees)
ax.view_init(30, 0)
# initialization function: plot the background of each frame
def init():
for line, pt in zip(lines, pts):
line.set_data([], [])
line.set_3d_properties([])
pt.set_data([], [])
pt.set_3d_properties([])
return lines + pts
# animation function: called sequentially
def animate(i):
# we'll step two time-steps per frame. This leads to nice results.
i = (2 * i) % x_t.shape[1]
for line, pt, xi in zip(lines, pts, x_t):
x, y, z = xi[:i + 1].T
line.set_data(x, y)
line.set_3d_properties(z)
pt.set_data(x[-1:], y[-1:])
pt.set_3d_properties(z[-1:])
ax.view_init(30, 0.3 * i)
fig.canvas.draw()
return lines + pts
return animation.FuncAnimation(fig, animate, init_func=init,
frames=frames, interval=interval)
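# Typical notebook usage, as a sketch: the IPython_display import above is
# what registers an HTML repr for matplotlib animations, so simply evaluating
# the returned object renders the interactive JS player:
#
#     anim = lorenz_animation(N_trajectories=10)
#     anim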
| mit |
welliam/data-structures | src/test_traversal.py | 1 | 5030 | '''Test traversal of graph.py.'''
import pytest
from .graph import Graph
@pytest.fixture
def self_looped():
g = Graph()
g.add_edge('a', 'a', 0)
return g
# The following fixtures for graphs have two associated variables:
# <NAME>_DEPTH and <NAME>_BREADTH
# which are lists of tuples of two values. The second of which must
# come after the first when the test is being run with the proper
# traversal method for the variable started at 'a'
def comesbefore(t, a, b):
"""Used in testing traversal methods.
Because which branch we traverse first is not guaranteed (or
relevant), we can't test simple equality on the output of
traversal methods-- this method is used instead."""
return b in t[t.index(a):]
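# e.g. comesbefore(['a', 'b', 'c'], 'a', 'c') is True, while
# comesbefore(['a', 'b', 'c'], 'c', 'a') is False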
@pytest.fixture
def simple():
"""A simple, non-looped graph."""
g = Graph()
g.add_edge('a', 'b', 0)
g.add_edge('b', 'c', 0)
g.add_edge('b', 'd', 0)
return g
SIMPLE_DEPTH = [('a', 'b'), ('b', 'c'), ('b', 'd')]
SIMPLE_BREADTH = SIMPLE_DEPTH # same in this case
@pytest.fixture
def complex():
"""A graph with a non-self referential loop."""
g = Graph()
g.add_edge('a', 'b', 0)
g.add_edge('b', 'c', 0)
g.add_edge('c', 'a', 0)
g.add_edge('a', 'dead end', 0)
return g
COMPLEX_DEPTH = [('a', 'b'), ('b', 'c'), ('a', 'dead end')]
COMPLEX_BREADTH = [('a', 'b'), ('b', 'c'), ('dead end', 'c')]
@pytest.fixture
def complex_2():
"""A complex graph with multiple loops."""
g = Graph()
g.add_edge('a', 'b', 0)
g.add_edge('b', 'c', 0)
g.add_edge('c', 'a', 0)
g.add_edge('c', 'b', 0)
g.add_edge('a', 'dead end', 0)
return g
# the same variables as for complex are relevant
@pytest.fixture
def tree():
"""A graph which resembles a binary tree."""
g = Graph()
g.add_edge('0-0', '1-0', 0)
g.add_edge('0-0', '1-1', 0)
g.add_edge('1-0', '2-0', 0)
g.add_edge('1-0', '2-1', 0)
g.add_edge('1-1', '2-2', 0)
g.add_edge('1-1', '2-3', 0)
return g
TREE_DEPTH = [
('0-0', '1-0'),
('1-0', '2-0'),
('1-0', '2-1'),
('0-0', '1-1'),
('1-1', '2-2'),
('1-1', '2-3')
]
TREE_BREADTH = [
('0-0', '1-0'),
('0-0', '1-1'),
('1-0', '2-0'),
('1-0', '2-1'),
('1-0', '2-2'),
('1-0', '2-3'),
('1-1', '2-0'),
('1-1', '2-1'),
('1-1', '2-2'),
('1-1', '2-3')
]
# depth first
def test_depth_traversal_empty(self_looped):
"""Test that depth first traversal throws error on an absent node."""
with pytest.raises(KeyError):
self_looped.depth_first_traversal('b')
def test_depth_traversal_self_looped(self_looped):
"""Test that depth first traversal is traversing at all."""
assert self_looped.depth_first_traversal('a') == ['a']
@pytest.mark.parametrize('a, b', SIMPLE_DEPTH)
def test_depth_traversal_simple(simple, a, b):
"""Test that depth first traverses a nonlooped graph."""
assert comesbefore(simple.depth_first_traversal('a'), a, b)
@pytest.mark.parametrize('a, b', COMPLEX_DEPTH)
def test_depth_traversal_complex(complex, a, b):
"""Test that depth first traverses a more complex looped graph."""
assert comesbefore(complex.depth_first_traversal('a'), a, b)
@pytest.mark.parametrize('a, b', COMPLEX_DEPTH)
def test_depth_traversal_complex_2(complex_2, a, b):
"""Test that depth first traverses an even more complex graph."""
assert comesbefore(complex_2.depth_first_traversal('a'), a, b)
@pytest.mark.parametrize('a, b', TREE_DEPTH)
def test_depth_traversal_tree(tree, a, b):
"""Test that depth first traverses an even more complex graph."""
assert comesbefore(tree.depth_first_traversal('0-0'), a, b)
# breadth first
def test_breadth_traversal_empty(self_looped):
"""Test that breadth first traversal throws error on an absent node."""
with pytest.raises(KeyError):
self_looped.breadth_first_traversal('b')
def test_breadth_traversal_self_looped(self_looped):
"""Test that breadth first traversal is traversing at all."""
assert self_looped.breadth_first_traversal('a') == ['a']
@pytest.mark.parametrize('a, b', SIMPLE_BREADTH)
def test_breadth_traversal_simple(simple, a, b):
"""Test that breadth first traverses a nonlooped graph."""
assert comesbefore(simple.breadth_first_traversal('a'), a, b)
@pytest.mark.parametrize('a, b', COMPLEX_BREADTH)
def test_breadth_traversal_complex(complex, a, b):
"""Test that breadth first traverses a more complex looped graph."""
assert comesbefore(complex.breadth_first_traversal('a'), a, b)
@pytest.mark.parametrize('a, b', COMPLEX_BREADTH)
def test_breadth_traversal_complex_2(complex_2, a, b):
"""Test that breadth first traverses an even more complex graph."""
assert comesbefore(complex_2.breadth_first_traversal('a'), a, b)
@pytest.mark.parametrize('a, b', TREE_BREADTH)
def test_breadth_traversal_tree(tree, a, b):
"""Test that breadth first traverses an even more complex graph."""
assert comesbefore(tree.breadth_first_traversal('0-0'), a, b)
| mit |
seanaedmiston/gensim | gensim/corpora/lowcorpus.py | 73 | 7185 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright (C) 2010 Radim Rehurek <[email protected]>
# Licensed under the GNU LGPL v2.1 - http://www.gnu.org/licenses/lgpl.html
"""
Corpus in GibbsLda++ format of List-Of-Words.
"""
from __future__ import with_statement
import logging
from gensim import utils
from gensim.corpora import IndexedCorpus
from six import iteritems, iterkeys
from six.moves import xrange, zip as izip
logger = logging.getLogger('gensim.corpora.lowcorpus')
def split_on_space(s):
return [word for word in utils.to_unicode(s).strip().split(' ') if word]
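# e.g. split_on_space('foo  bar baz\n') returns [u'foo', u'bar', u'baz']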
class LowCorpus(IndexedCorpus):
"""
List_Of_Words corpus handles input in GibbsLda++ format.
Quoting http://gibbslda.sourceforge.net/#3.2_Input_Data_Format::
Both data for training/estimating the model and new data (i.e., previously
unseen data) have the same format as follows:
[M]
[document1]
[document2]
...
[documentM]
in which the first line is the total number for documents [M]. Each line
after that is one document. [documenti] is the ith document of the dataset
that consists of a list of Ni words/terms.
[documenti] = [wordi1] [wordi2] ... [wordiNi]
in which all [wordij] (i=1..M, j=1..Ni) are text strings and they are separated
by the blank character.
"""
def __init__(self, fname, id2word=None, line2words=split_on_space):
"""
Initialize the corpus from a file.
`id2word` and `line2words` are optional parameters.
If provided, `id2word` is a dictionary mapping between word_ids (integers)
and words (strings). If not provided, the mapping is constructed from
the documents.
`line2words` is a function which converts lines into tokens. Defaults to
simple splitting on spaces.
"""
IndexedCorpus.__init__(self, fname)
logger.info("loading corpus from %s" % fname)
self.fname = fname # input file, see class doc for format
self.line2words = line2words # how to translate lines into words (simply split on space by default)
self.num_docs = self._calculate_num_docs()
if not id2word:
# build a list of all word types in the corpus (distinct words)
logger.info("extracting vocabulary from the corpus")
all_terms = set()
self.use_wordids = False # return documents as (word, wordCount) 2-tuples
for doc in self:
all_terms.update(word for word, wordCnt in doc)
all_terms = sorted(all_terms) # sort the list of all words; rank in that list = word's integer id
self.id2word = dict(izip(xrange(len(all_terms)), all_terms)) # build a mapping of word id(int) -> word (string)
else:
logger.info("using provided word mapping (%i ids)" % len(id2word))
self.id2word = id2word
self.num_terms = len(self.word2id)
self.use_wordids = True # return documents as (wordIndex, wordCount) 2-tuples
logger.info("loaded corpus with %i documents and %i terms from %s" %
(self.num_docs, self.num_terms, fname))
def _calculate_num_docs(self):
# the first line in input data is the number of documents (integer). throws exception on bad input.
with utils.smart_open(self.fname) as fin:
try:
result = int(next(fin))
except StopIteration:
result = 0
return result
def __len__(self):
return self.num_docs
def line2doc(self, line):
words = self.line2words(line)
if self.use_wordids:
# get all distinct terms in this document, ignore unknown words
uniq_words = set(words).intersection(iterkeys(self.word2id))
# the following creates a unique list of words *in the same order*
# as they were in the input. when iterating over the documents,
# the (word, count) pairs will appear in the same order as they
# were in the input (bar duplicates), which looks better.
            # if this was not needed, we might as well have used use_words = set(words)
use_words, marker = [], set()
for word in words:
if (word in uniq_words) and (word not in marker):
use_words.append(word)
marker.add(word)
# construct a list of (wordIndex, wordFrequency) 2-tuples
doc = list(zip(map(self.word2id.get, use_words),
map(words.count, use_words)))
else:
uniq_words = set(words)
# construct a list of (word, wordFrequency) 2-tuples
doc = list(zip(uniq_words, map(words.count, uniq_words)))
# return the document, then forget it and move on to the next one
# note that this way, only one doc is stored in memory at a time, not the whole corpus
return doc
def __iter__(self):
"""
Iterate over the corpus, returning one bag-of-words vector at a time.
"""
with utils.smart_open(self.fname) as fin:
for lineno, line in enumerate(fin):
if lineno > 0: # ignore the first line = number of documents
yield self.line2doc(line)
@staticmethod
def save_corpus(fname, corpus, id2word=None, metadata=False):
"""
Save a corpus in the List-of-words format.
This function is automatically called by `LowCorpus.serialize`; don't
call it directly, call `serialize` instead.
"""
if id2word is None:
logger.info("no word id mapping provided; initializing from corpus")
id2word = utils.dict_from_corpus(corpus)
logger.info("storing corpus in List-Of-Words format into %s" % fname)
truncated = 0
offsets = []
with utils.smart_open(fname, 'wb') as fout:
fout.write(utils.to_utf8('%i\n' % len(corpus)))
for doc in corpus:
words = []
for wordid, value in doc:
if abs(int(value) - value) > 1e-6:
truncated += 1
words.extend([utils.to_unicode(id2word[wordid])] * int(value))
offsets.append(fout.tell())
fout.write(utils.to_utf8('%s\n' % ' '.join(words)))
if truncated:
logger.warning("List-of-words format can only save vectors with "
"integer elements; %i float entries were truncated to integer value" %
truncated)
return offsets
def docbyoffset(self, offset):
"""
Return the document stored at file position `offset`.
"""
with utils.smart_open(self.fname) as f:
f.seek(offset)
return self.line2doc(f.readline())
@property
def id2word(self):
return self._id2word
@id2word.setter
def id2word(self, val):
self._id2word = val
self.word2id = dict((v, k) for k, v in iteritems(val))
# endclass LowCorpus
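# A minimal round-trip sketch (file name is illustrative):
#
#   corpus = [[(0, 2), (1, 1)], [(1, 1)]]
#   id2word = {0: u'machine', 1: u'learning'}
#   LowCorpus.serialize('/tmp/corpus.low', corpus, id2word)
#   docs = list(LowCorpus('/tmp/corpus.low', id2word=id2word))
#   # each document comes back as a list of (word_id, count) 2-tuples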
| gpl-3.0 |
BeATz-UnKNoWN/python-for-android | python3-alpha/python3-src/Lib/abc.py | 52 | 7650 | # Copyright 2007 Google, Inc. All Rights Reserved.
# Licensed to PSF under a Contributor Agreement.
"""Abstract Base Classes (ABCs) according to PEP 3119."""
from _weakrefset import WeakSet
def abstractmethod(funcobj):
"""A decorator indicating abstract methods.
Requires that the metaclass is ABCMeta or derived from it. A
class that has a metaclass derived from ABCMeta cannot be
instantiated unless all of its abstract methods are overridden.
The abstract methods can be called using any of the normal
'super' call mechanisms.
Usage:
class C(metaclass=ABCMeta):
@abstractmethod
def my_abstract_method(self, ...):
...
"""
funcobj.__isabstractmethod__ = True
return funcobj
class abstractclassmethod(classmethod):
"""A decorator indicating abstract classmethods.
Similar to abstractmethod.
Usage:
class C(metaclass=ABCMeta):
@abstractclassmethod
def my_abstract_classmethod(cls, ...):
...
"""
__isabstractmethod__ = True
def __init__(self, callable):
callable.__isabstractmethod__ = True
super().__init__(callable)
class abstractstaticmethod(staticmethod):
"""A decorator indicating abstract staticmethods.
Similar to abstractmethod.
Usage:
class C(metaclass=ABCMeta):
@abstractstaticmethod
def my_abstract_staticmethod(...):
...
"""
__isabstractmethod__ = True
def __init__(self, callable):
callable.__isabstractmethod__ = True
super().__init__(callable)
class abstractproperty(property):
"""A decorator indicating abstract properties.
Requires that the metaclass is ABCMeta or derived from it. A
class that has a metaclass derived from ABCMeta cannot be
instantiated unless all of its abstract properties are overridden.
The abstract properties can be called using any of the normal
'super' call mechanisms.
Usage:
class C(metaclass=ABCMeta):
@abstractproperty
def my_abstract_property(self):
...
This defines a read-only property; you can also define a read-write
abstract property using the 'long' form of property declaration:
class C(metaclass=ABCMeta):
def getx(self): ...
def setx(self, value): ...
x = abstractproperty(getx, setx)
"""
__isabstractmethod__ = True
class ABCMeta(type):
"""Metaclass for defining Abstract Base Classes (ABCs).
Use this metaclass to create an ABC. An ABC can be subclassed
directly, and then acts as a mix-in class. You can also register
unrelated concrete classes (even built-in classes) and unrelated
ABCs as 'virtual subclasses' -- these and their descendants will
be considered subclasses of the registering ABC by the built-in
issubclass() function, but the registering ABC won't show up in
their MRO (Method Resolution Order) nor will method
implementations defined by the registering ABC be callable (not
even via super()).
"""
# A global counter that is incremented each time a class is
# registered as a virtual subclass of anything. It forces the
# negative cache to be cleared before its next use.
_abc_invalidation_counter = 0
def __new__(mcls, name, bases, namespace):
cls = super().__new__(mcls, name, bases, namespace)
# Compute set of abstract method names
abstracts = {name
for name, value in namespace.items()
if getattr(value, "__isabstractmethod__", False)}
for base in bases:
for name in getattr(base, "__abstractmethods__", set()):
value = getattr(cls, name, None)
if getattr(value, "__isabstractmethod__", False):
abstracts.add(name)
cls.__abstractmethods__ = frozenset(abstracts)
# Set up inheritance registry
cls._abc_registry = WeakSet()
cls._abc_cache = WeakSet()
cls._abc_negative_cache = WeakSet()
cls._abc_negative_cache_version = ABCMeta._abc_invalidation_counter
return cls
def register(cls, subclass):
"""Register a virtual subclass of an ABC."""
if not isinstance(subclass, type):
raise TypeError("Can only register classes")
if issubclass(subclass, cls):
return # Already a subclass
# Subtle: test for cycles *after* testing for "already a subclass";
# this means we allow X.register(X) and interpret it as a no-op.
if issubclass(cls, subclass):
# This would create a cycle, which is bad for the algorithm below
raise RuntimeError("Refusing to create an inheritance cycle")
cls._abc_registry.add(subclass)
ABCMeta._abc_invalidation_counter += 1 # Invalidate negative cache
def _dump_registry(cls, file=None):
"""Debug helper to print the ABC registry."""
print("Class: %s.%s" % (cls.__module__, cls.__name__), file=file)
print("Inv.counter: %s" % ABCMeta._abc_invalidation_counter, file=file)
for name in sorted(cls.__dict__.keys()):
if name.startswith("_abc_"):
value = getattr(cls, name)
print("%s: %r" % (name, value), file=file)
def __instancecheck__(cls, instance):
"""Override for isinstance(instance, cls)."""
# Inline the cache checking
subclass = instance.__class__
if subclass in cls._abc_cache:
return True
subtype = type(instance)
if subtype is subclass:
if (cls._abc_negative_cache_version ==
ABCMeta._abc_invalidation_counter and
subclass in cls._abc_negative_cache):
return False
# Fall back to the subclass check.
return cls.__subclasscheck__(subclass)
return any(cls.__subclasscheck__(c) for c in {subclass, subtype})
def __subclasscheck__(cls, subclass):
"""Override for issubclass(subclass, cls)."""
# Check cache
if subclass in cls._abc_cache:
return True
# Check negative cache; may have to invalidate
if cls._abc_negative_cache_version < ABCMeta._abc_invalidation_counter:
# Invalidate the negative cache
cls._abc_negative_cache = WeakSet()
cls._abc_negative_cache_version = ABCMeta._abc_invalidation_counter
elif subclass in cls._abc_negative_cache:
return False
# Check the subclass hook
ok = cls.__subclasshook__(subclass)
if ok is not NotImplemented:
assert isinstance(ok, bool)
if ok:
cls._abc_cache.add(subclass)
else:
cls._abc_negative_cache.add(subclass)
return ok
# Check if it's a direct subclass
if cls in getattr(subclass, '__mro__', ()):
cls._abc_cache.add(subclass)
return True
# Check if it's a subclass of a registered class (recursive)
for rcls in cls._abc_registry:
if issubclass(subclass, rcls):
cls._abc_cache.add(subclass)
return True
# Check if it's a subclass of a subclass (recursive)
for scls in cls.__subclasses__():
if issubclass(subclass, scls):
cls._abc_cache.add(subclass)
return True
# No dice; update negative cache
cls._abc_negative_cache.add(subclass)
return False
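# A short illustration of virtual subclassing (not part of this module):
#
#   class Sized(metaclass=ABCMeta):
#       @abstractmethod
#       def __len__(self):
#           ...
#
#   Sized.register(tuple)
#   assert issubclass(tuple, Sized) and isinstance((), Sized)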
| apache-2.0 |
shaufi10/odoo | addons/l10n_fr/__init__.py | 424 | 1447 | # -*- coding: utf-8 -*-
##############################################################################
#
# Copyright (c) 2008 JAILLET Simon - CrysaLEAD - www.crysalead.fr
#
# WARNING: This program as such is intended to be used by professional
# programmers who take the whole responsibility of assessing all potential
# consequences resulting from its eventual inadequacies and bugs.
# End users who are looking for a ready-to-use solution with commercial
# guarantees and support are strongly advised to contract a Free Software
# Service Company.
#
# This program is Free Software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
#
##############################################################################
import l10n_fr
import report
import wizard
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
phillxnet/rockstor-core | src/rockstor/storageadmin/models/user.py | 2 | 3142 | """
Copyright (c) 2012-2021 RockStor, Inc. <http://rockstor.com>
This file is part of RockStor.
RockStor is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published
by the Free Software Foundation; either version 2 of the License,
or (at your option) any later version.
RockStor is distributed in the hope that it will be useful, but
WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
import grp
import chardet
from django.conf import settings
from django.contrib.auth.models import User as DjangoUser
from django.core.validators import validate_email
from django.db import models
from storageadmin.models import Group
from system.users import ifp_get_groupname
class User(models.Model):
user = models.OneToOneField(DjangoUser, null=True, blank=True, related_name="suser")
username = models.CharField(max_length=4096, unique=True, default="")
uid = models.IntegerField(default=settings.START_UID)
gid = models.IntegerField(default=settings.START_UID)
public_key = models.CharField(max_length=4096, null=True, blank=True)
smb_shares = models.ManyToManyField("SambaShare", related_name="admin_users")
shell = models.CharField(max_length=1024, null=True)
homedir = models.CharField(max_length=1024, null=True)
email = models.CharField(
max_length=1024, null=True, blank=True, validators=[validate_email]
)
# 'admin' field represents indicator of Rockstor web admin capability.
admin = models.BooleanField(default=True)
group = models.ForeignKey(Group, null=True, blank=True)
@property
def groupname(self, *args, **kwargs):
if self.group is not None:
return self.group.groupname
if self.gid is not None:
try:
groupname = grp.getgrgid(self.gid).gr_name
charset = chardet.detect(groupname)
groupname = groupname.decode(charset["encoding"])
return groupname
except Exception:
                # Failed to fetch the group via grp, so try InfoPipe (ifp)
return ifp_get_groupname(self.gid)
return None
@property
def managed_user(self, *args, **kwargs):
return getattr(self, "editable", True)
@managed_user.setter
def managed_user(self, val, *args, **kwargs):
self.editable = val
@property
def has_pincard(self, *args, **kwargs):
return getattr(self, "pincard_exist", False)
@has_pincard.setter
def has_pincard(self, val, *args, **kwargs):
self.pincard_exist = val
@property
def pincard_allowed(self, *args, **kwargs):
return getattr(self, "pincard_enabled", "no")
@pincard_allowed.setter
def pincard_allowed(self, val, *args, **kwargs):
self.pincard_enabled = val
class Meta:
app_label = "storageadmin"
| gpl-3.0 |
juniorh/dummyDbGen | postgresql/genDocDb.postgres.testData.py | 1 | 3203 | #!/usr/bin/env python
# How to use:
# >python genDocDb.postgres.testData.py -h localhost -P 5432 -u username -p password -d database -t table -i keys.txt -r 1
# library:
# pip install psycopg2
import argparse
import psycopg2
import random
import math
import time
import sys
def get_args_parser():
parser = argparse.ArgumentParser(add_help=False)
parser.add_argument(
"-h", "--host",
default="localhost",
nargs='?',
type=str,
help="Connect to host.")
parser.add_argument(
"-P", "--port",
default=5432,
nargs='?',
type=int,
help="Port number to use for connection.")
parser.add_argument(
"-u", "--username",
default=None,
nargs='?',
type=str,
help="Username for login to server.")
parser.add_argument(
"-p", "--password",
default=None,
nargs='?',
type=str,
help="Password for login to server.")
parser.add_argument(
"-d", "--database",
default=None,
nargs='?',
type=str,
help="Select database.")
parser.add_argument(
"-t", "--table",
default=None,
nargs='?',
type=str,
help="Select table")
parser.add_argument(
"-r", "--report",
default=0,
nargs='?',
type=int,
help="Print report every r second")
parser.add_argument(
"-i", "--input",
default=None,
nargs='?',
type=str,
    help="Read lookup keys from this file")
parser.add_argument(
"-v", "--verbose",
default=False,
action='store_true',
help="Verbose query")
parser.add_argument(
"--help",
default=False,
action='store_true',
help="Show this help"
)
return parser
scheme = "public"
defaultdb = "postgres"
if __name__ == '__main__':
parser = get_args_parser()
args = parser.parse_args()
conn = None
db = None
t_start = None
r_ok = 0
r_fail = 0
r_multi = 0
f = None
  if (args.help or not args.database or not args.table
      or not args.username or not args.password or not args.input):
    parser.print_help()
    parser.exit()
try:
conn = psycopg2.connect(host=args.host,port=int(args.port),user=args.username,password=args.password,database=args.database)
db = conn.cursor()
#print "Connection: "+str(conn.status)
except Exception, err:
print err
sys.exit()
if args.input:
try:
f = open(args.input,"r")
except Exception, err:
print err
sys.exit()
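  # Assumed input file format (inferred from the split(';') below): one record
  # per line, semicolon-separated, with the lookup name in the first field,
  # e.g. "name0001;field;field".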
  # Read back and verify the previously generated data
t = time.time()
t_start = t
while True:
line = f.readline()
if not line:
break
keys = line.split(';')
query = "select * from "+args.table+" where name = '"+keys[0]+"';"
# print query
db.execute(query)
res = db.fetchall()
if args.verbose:
print query
print res
if len(res):
r_ok = r_ok+1
if len(res) > 1:
r_multi = r_multi+1
else:
r_fail = r_fail+1
if args.report:
if time.time() - t > args.report:
t = time.time()
print "r_ok:"+str(r_ok)+" r_fail:"+str(r_fail)+" r_multi:"+str(r_multi)+" current_value:"+str(res)
conn.close()
print "Last_value:"+str(res)+"\n"
print "Finish test read from postgres : "+"r_ok:"+str(r_ok)+" r_fail:"+str(r_fail)+" r_multi:"+str(r_multi)+" time:"+str(time.time()-t_start)
| gpl-2.0 |
asimshankar/tensorflow | tensorflow/contrib/graph_editor/tests/edit_test.py | 132 | 3104 | # Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for tensorflow.contrib.graph_editor."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow.contrib import graph_editor as ge
from tensorflow.contrib.graph_editor.tests import match
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import ops
from tensorflow.python.ops import math_ops
from tensorflow.python.platform import test
class EditTest(test.TestCase):
"""edit module test.
Generally the tests are in two steps:
- modify an existing graph.
- then make sure it has the expected topology using the graph matcher.
"""
def setUp(self):
self.graph = ops.Graph()
with self.graph.as_default():
self.a = constant_op.constant([1., 1.], shape=[2], name="a")
with ops.name_scope("foo"):
self.b = constant_op.constant([2., 2.], shape=[2], name="b")
self.c = math_ops.add(self.a, self.b, name="c")
self.d = constant_op.constant([3., 3.], shape=[2], name="d")
with ops.name_scope("bar"):
self.e = math_ops.add(self.c, self.d, name="e")
self.f = math_ops.add(self.c, self.d, name="f")
self.g = math_ops.add(self.c, self.a, name="g")
with ops.control_dependencies([self.c.op]):
self.h = math_ops.add(self.f, self.g, name="h")
def test_detach(self):
"""Test for ge.detach."""
sgv = ge.sgv(self.c.op, self.a.op)
control_outputs = ge.ControlOutputs(self.graph)
ge.detach(sgv, control_ios=control_outputs)
# make sure the detached graph is as expected.
self.assertTrue(
match.OpMatcher("^foo/c$").input_ops("a", "geph__b_0")(self.c.op))
def test_connect(self):
"""Test for ge.connect."""
with self.graph.as_default():
x = constant_op.constant([1., 1.], shape=[2], name="x")
y = constant_op.constant([2., 2.], shape=[2], name="y")
z = math_ops.add(x, y, name="z")
sgv = ge.sgv(x.op, y.op, z.op)
ge.connect(sgv, ge.sgv(self.e.op).remap_inputs([0]))
self.assertTrue(
match.OpMatcher("^foo/bar/e$").input_ops("^z$", "foo/d$")(self.e.op))
def test_bypass(self):
"""Test for ge.bypass."""
ge.bypass(ge.sgv(self.f.op).remap_inputs([0]))
self.assertTrue(
match.OpMatcher("^foo/bar/h$").input_ops("^foo/c$", "foo/bar/g$")(
self.h.op))
if __name__ == "__main__":
test.main()
| apache-2.0 |
yatish27/mase | src/stockflow.py | 9 | 2560 | from __future__ import print_function, division
import sys
sys.dont_write_bytecode = True
from ok import *
import random
r = random.random
isa = isinstance
"""
# Compartmental Modeling
## Diapers
q +-----+ r +-----+
---->| C |---->| D |--> s
^ +-----+ +-+---+
| |
+-----------------+
C = stock of clean diapers
D = stock of dirty diapers
q = inflow of clean diapers
r = flow of clean diapers to dirty diapers
s = out-flow of dirty diapers
"""
class o:
"""Emulate Javascript's uber simple objects.
Note my convention: I use "`i`" not "`this`."""
def has(i) : return i.__dict__
def __init__(i,**d) : i.has().update(d)
def __setitem__(i,k,v) : i.has()[k] = v
def __getitem__(i,k) : return i.has()[k]
def __repr__(i) : return 'o'+str(i.has())
def copy(i):
j = o()
for k in i.has(): j[k] = i[k]
return j
def asList(i,keys=[]):
    keys = keys or sorted(i.has().keys())
return [i[k] for k in keys]
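# Quick illustration (not in the original): `o` objects mix attribute and
# item access over the same __dict__.
#   x = o(a=1); x["b"] = 2
#   assert x.a == 1 and x["b"] == 2
#   assert x.copy().asList(["a", "b"]) == [1, 2]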
class Has:
def __init__(i,init,lo=0,hi=100):
i.init,i.lo,i.hi = init,lo,hi
def restrain(i,x):
return max(i.lo,
min(i.hi, x))
def rank(i):
if isa(i,Flow) : return 3
if isa(i,Stock): return 1
if isa(i,Aux) : return 2
def __repr__(i):
return str(dict(what=i.__class__.__name__,
name= i.name,init= i.init,
lo = i.lo, hi = i.hi))
class Flow(Has) : pass
class Stock(Has): pass
class Aux(Has) : pass
F,S,A=Flow,Stock,Aux
class Model:
def about(i):
tmp=i.have()
for k,v in tmp.has().items():
v.name = k
return tmp
def run(i,dt=1,tmax=100):
    t,u,keep = 0, o(), []
    about = i.about()
    # Stocks first, then Auxes, then Flows (see Has.rank)
    keys = sorted(about.has().keys(),
                  key=lambda z: about[z].rank())
    for k in keys:
      u[k] = about[k].init
    keep = [["t"] + keys,
            [0] + u.asList(keys)]
    while t < tmax:
      v = u.copy()
      i.step(dt,t,u,v)
      for k in keys:
        v[k] = about[k].restrain(v[k])
      t += dt
      keep += [[t] + v.asList(keys)]
      u = v  # the updated state becomes the current state
    return keep
class Diapers(Model):
def have(i):
return o(C = S(20), D = S(0),
q = F(0), r = F(8), s = F(0))
def step(i,dt,t,u,v):
def saturday(x): return int(x) % 7 == 6
v.C += dt*(u.q - u.r)
v.D += dt*(u.r - u.s)
v.q = 70 if saturday(t) else 0
v.s = u.D if saturday(t) else 0
if t == 27: # special case (the day i forget)
v.s = 0
@ok
def _diapers1():
print(Diapers().about()) | unlicense |
tonybaloney/st2 | contrib/linux/sensors/file_watch_sensor.py | 1 | 1965 | import os
from logshipper.tail import Tail
from st2reactor.sensor.base import Sensor
class FileWatchSensor(Sensor):
def __init__(self, sensor_service, config=None):
super(FileWatchSensor, self).__init__(sensor_service=sensor_service,
config=config)
self._trigger_ref = 'linux.file_watch.line'
self._logger = self._sensor_service.get_logger(__name__)
self._file_paths = [] # stores a list of file paths we are monitoring
self._tail = None
def setup(self):
self._tail = Tail(filenames=[])
self._tail.handler = self._handle_line
self._tail.should_run = True
def run(self):
self._tail.run()
def cleanup(self):
if self._tail:
self._tail.should_run = False
try:
self._tail.notifier.stop()
except Exception:
pass
def add_trigger(self, trigger):
file_path = trigger['parameters'].get('file_path', None)
if not file_path:
self._logger.error('Received trigger type without "file_path" field.')
return
self._tail.add_file(filename=file_path)
self._logger.info('Added file "%s"' % (file_path))
def update_trigger(self, trigger):
pass
def remove_trigger(self, trigger):
file_path = trigger['parameters'].get('file_path', None)
if not file_path:
self._logger.error('Received trigger type without "file_path" field.')
return
self._tail.remove_file(filename=file_path)
self._logger.info('Removed file "%s"' % (file_path))
def _handle_line(self, file_path, line):
trigger = self._trigger_ref
payload = {
'file_path': file_path,
'file_name': os.path.basename(file_path),
'line': line
}
self.sensor_service.dispatch(trigger=trigger, payload=payload)
| apache-2.0 |
skumar07/Air-Share-Real | boilerplate/external/babel/messages/tests/data/setup.py | 19 | 1044 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# vim: sw=4 ts=4 fenc=utf-8
# =============================================================================
# $Id: setup.py 114 2007-06-14 21:17:14Z palgarvio $
# =============================================================================
# $URL: http://svn.edgewall.org/repos/babel/tags/0.9.6/babel/messages/test/data/setup.py $
# $LastChangedDate: 2007-06-14 23:17:14 +0200 (do, 14 jun 2007) $
# $Rev: 114 $
# $LastChangedBy: palgarvio $
# =============================================================================
# Copyright (C) 2006 Ufsoft.org - Pedro Algarvio <[email protected]>
#
# Please view LICENSE for additional licensing information.
# =============================================================================
# THIS IS A BOGUS PROJECT
from setuptools import setup, find_packages
setup(
name = 'TestProject',
version = '0.1',
license = 'BSD',
author = 'Foo Bar',
author_email = '[email protected]',
packages = find_packages(),
)
| lgpl-3.0 |
mlc0202/ssdb | deps/cpy/antlr3/tokens.py | 99 | 12016 | """ANTLR3 runtime package"""
# begin[licence]
#
# [The "BSD licence"]
# Copyright (c) 2005-2008 Terence Parr
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# 3. The name of the author may not be used to endorse or promote products
# derived from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR
# IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
# OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
# IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
# NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
# THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# end[licence]
from antlr3.constants import EOF, DEFAULT_CHANNEL, INVALID_TOKEN_TYPE
############################################################################
#
# basic token interface
#
############################################################################
class Token(object):
"""@brief Abstract token baseclass."""
def getText(self):
"""@brief Get the text of the token.
Using setter/getter methods is deprecated. Use o.text instead.
"""
raise NotImplementedError
def setText(self, text):
"""@brief Set the text of the token.
Using setter/getter methods is deprecated. Use o.text instead.
"""
raise NotImplementedError
def getType(self):
"""@brief Get the type of the token.
Using setter/getter methods is deprecated. Use o.type instead."""
raise NotImplementedError
def setType(self, ttype):
"""@brief Get the type of the token.
Using setter/getter methods is deprecated. Use o.type instead."""
raise NotImplementedError
def getLine(self):
"""@brief Get the line number on which this token was matched
Lines are numbered 1..n
Using setter/getter methods is deprecated. Use o.line instead."""
raise NotImplementedError
def setLine(self, line):
"""@brief Set the line number on which this token was matched
Using setter/getter methods is deprecated. Use o.line instead."""
raise NotImplementedError
def getCharPositionInLine(self):
"""@brief Get the column of the tokens first character,
Columns are numbered 0..n-1
Using setter/getter methods is deprecated. Use o.charPositionInLine instead."""
raise NotImplementedError
def setCharPositionInLine(self, pos):
"""@brief Set the column of the tokens first character,
Using setter/getter methods is deprecated. Use o.charPositionInLine instead."""
raise NotImplementedError
def getChannel(self):
"""@brief Get the channel of the token
Using setter/getter methods is deprecated. Use o.channel instead."""
raise NotImplementedError
def setChannel(self, channel):
"""@brief Set the channel of the token
Using setter/getter methods is deprecated. Use o.channel instead."""
raise NotImplementedError
def getTokenIndex(self):
"""@brief Get the index in the input stream.
An index from 0..n-1 of the token object in the input stream.
This must be valid in order to use the ANTLRWorks debugger.
Using setter/getter methods is deprecated. Use o.index instead."""
raise NotImplementedError
def setTokenIndex(self, index):
"""@brief Set the index in the input stream.
Using setter/getter methods is deprecated. Use o.index instead."""
raise NotImplementedError
def getInputStream(self):
"""@brief From what character stream was this token created.
You don't have to implement but it's nice to know where a Token
comes from if you have include files etc... on the input."""
raise NotImplementedError
def setInputStream(self, input):
"""@brief From what character stream was this token created.
You don't have to implement but it's nice to know where a Token
comes from if you have include files etc... on the input."""
raise NotImplementedError
############################################################################
#
# token implementations
#
# Token
# +- CommonToken
# \- ClassicToken
#
############################################################################
class CommonToken(Token):
"""@brief Basic token implementation.
This implementation does not copy the text from the input stream upon
creation, but keeps start/stop pointers into the stream to avoid
unnecessary copy operations.
"""
def __init__(self, type=None, channel=DEFAULT_CHANNEL, text=None,
input=None, start=None, stop=None, oldToken=None):
Token.__init__(self)
if oldToken is not None:
self.type = oldToken.type
self.line = oldToken.line
self.charPositionInLine = oldToken.charPositionInLine
self.channel = oldToken.channel
self.index = oldToken.index
self._text = oldToken._text
if isinstance(oldToken, CommonToken):
self.input = oldToken.input
self.start = oldToken.start
self.stop = oldToken.stop
else:
self.type = type
self.input = input
self.charPositionInLine = -1 # set to invalid position
self.line = 0
self.channel = channel
#What token number is this from 0..n-1 tokens; < 0 implies invalid index
self.index = -1
# We need to be able to change the text once in a while. If
# this is non-null, then getText should return this. Note that
# start/stop are not affected by changing this.
self._text = text
# The char position into the input buffer where this token starts
self.start = start
# The char position into the input buffer where this token stops
# This is the index of the last char, *not* the index after it!
self.stop = stop
def getText(self):
if self._text is not None:
return self._text
if self.input is None:
return None
return self.input.substring(self.start, self.stop)
def setText(self, text):
"""
Override the text for this token. getText() will return this text
rather than pulling from the buffer. Note that this does not mean
that start/stop indexes are not valid. It means that that input
was converted to a new string in the token object.
"""
self._text = text
text = property(getText, setText)
def getType(self):
return self.type
def setType(self, ttype):
self.type = ttype
def getLine(self):
return self.line
def setLine(self, line):
self.line = line
def getCharPositionInLine(self):
return self.charPositionInLine
def setCharPositionInLine(self, pos):
self.charPositionInLine = pos
def getChannel(self):
return self.channel
def setChannel(self, channel):
self.channel = channel
def getTokenIndex(self):
return self.index
def setTokenIndex(self, index):
self.index = index
def getInputStream(self):
return self.input
def setInputStream(self, input):
self.input = input
def __str__(self):
if self.type == EOF:
return "<EOF>"
channelStr = ""
if self.channel > 0:
channelStr = ",channel=" + str(self.channel)
txt = self.text
if txt is not None:
txt = txt.replace("\n","\\\\n")
txt = txt.replace("\r","\\\\r")
txt = txt.replace("\t","\\\\t")
else:
txt = "<no text>"
return "[@%d,%d:%d=%r,<%d>%s,%d:%d]" % (
self.index,
self.start, self.stop,
txt,
self.type, channelStr,
self.line, self.charPositionInLine
)
class ClassicToken(Token):
"""@brief Alternative token implementation.
A Token object like we'd use in ANTLR 2.x; has an actual string created
and associated with this object. These objects are needed for imaginary
tree nodes that have payload objects. We need to create a Token object
that has a string; the tree node will point at this token. CommonToken
has indexes into a char stream and hence cannot be used to introduce
new strings.
"""
def __init__(self, type=None, text=None, channel=DEFAULT_CHANNEL,
oldToken=None
):
Token.__init__(self)
if oldToken is not None:
self.text = oldToken.text
self.type = oldToken.type
self.line = oldToken.line
self.charPositionInLine = oldToken.charPositionInLine
self.channel = oldToken.channel
self.text = text
self.type = type
self.line = None
self.charPositionInLine = None
self.channel = channel
self.index = None
def getText(self):
return self.text
def setText(self, text):
self.text = text
def getType(self):
return self.type
def setType(self, ttype):
self.type = ttype
def getLine(self):
return self.line
def setLine(self, line):
self.line = line
def getCharPositionInLine(self):
return self.charPositionInLine
def setCharPositionInLine(self, pos):
self.charPositionInLine = pos
def getChannel(self):
return self.channel
def setChannel(self, channel):
self.channel = channel
def getTokenIndex(self):
return self.index
def setTokenIndex(self, index):
self.index = index
def getInputStream(self):
return None
def setInputStream(self, input):
pass
def toString(self):
channelStr = ""
if self.channel > 0:
channelStr = ",channel=" + str(self.channel)
txt = self.text
if txt is None:
txt = "<no text>"
return "[@%r,%r,<%r>%s,%r:%r]" % (self.index,
txt,
self.type,
channelStr,
self.line,
self.charPositionInLine
)
__str__ = toString
__repr__ = toString
EOF_TOKEN = CommonToken(type=EOF)
INVALID_TOKEN = CommonToken(type=INVALID_TOKEN_TYPE)
# In an action, a lexer rule can set token to this SKIP_TOKEN and ANTLR
# will avoid creating a token for this symbol and try to fetch another.
SKIP_TOKEN = CommonToken(type=INVALID_TOKEN_TYPE)
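# Illustrative sketch (not part of the runtime): CommonToken defers text
# extraction to its input stream, so no string is copied until needed.
#   tok = CommonToken(type=1, start=0, stop=4)
#   tok.input = stream        # hypothetical stream offering substring()
#   tok.text                  # -> stream.substring(0, 4)
#   tok.text = "override"     # pins the text; start/stop stay untouched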
| bsd-3-clause |
paradoxxxzero/pyjade | pyjade/parser.py | 1 | 10534 | from __future__ import absolute_import
from .lexer import Lexer
from . import nodes
import six
class Parser(object):
def __init__(self,str,filename=None,**options):
self.input = str
self.lexer = Lexer(str,**options)
self.filename = filename
        self.blocks = {}
self.options = options
self.contexts = [self]
self.extending = False
self._spaces = None
def context(self,parser):
if parser: self.context.append(parser)
else: self.contexts.pop()
def advance(self):
return self.lexer.advance()
def skip(self,n):
        while n > 0:
self.advance()
n -= 1
def peek(self):
p = self.lookahead(1)
return p
def line(self):
return self.lexer.lineno
def lookahead(self,n):
return self.lexer.lookahead(n)
def parse(self):
block = nodes.Block()
parser = None
block.line = self.line()
while 'eos' != self.peek().type:
if 'newline' == self.peek().type: self.advance()
else: block.append(self.parseExpr())
parser = self.extending
if parser:
self.context(parser)
ast = parser.parse()
self.context()
return ast
return block
def expect(self,type):
t = self.peek().type
if t == type: return self.advance()
else:
raise Exception('expected "%s" but got "%s" in file %s on line %d' %
(type, t, self.filename, self.line()))
def accept(self,type):
if self.peek().type == type: return self.advance()
def parseExpr(self):
t = self.peek().type
if 'yield' == t:
self.advance()
block = nodes.Block()
block._yield = True
return block
elif t in ('id','class'):
tok = self.advance()
new_div = self.lexer.tok('tag','div')
new_div.inline_level = tok.inline_level
self.lexer.stash.append(new_div)
self.lexer.stash.append(tok)
return self.parseExpr()
funcName = 'parse%s'%t.capitalize()
if hasattr(self,funcName):
return getattr(self,funcName)()
else:
raise Exception('unexpected token "%s" in file %s on line %d' %
(t, self.filename, self.line()))
def parseString(self):
tok = self.expect('string')
node = nodes.String(tok.val, inline=tok.inline_level > 0)
node.line = self.line()
return node
def parseText(self):
tok = self.expect('text')
node = nodes.Text(tok.val)
node.line = self.line()
return node
def parseBlockExpansion(self):
if ':'== self.peek().type:
self.advance()
return nodes.Block(self.parseExpr())
else:
return self.block()
def parseAssignment(self):
tok = self.expect('assignment')
return nodes.Assignment(tok.name,tok.val)
def parseCode(self):
tok = self.expect('code')
if not tok.val and 'indent'==self.peek().type:
node = nodes.BlockCode(
tok.val, self.parseTextBlock(), tok.buffer, tok.escape)
node.line = self.line()
else:
node = nodes.Code(tok.val,tok.buffer,tok.escape) #tok.escape
block,i = None,1
node.line = self.line()
while self.lookahead(i) and 'newline'==self.lookahead(i).type:
i+= 1
block = 'indent' == self.lookahead(i).type
if block:
self.skip(i-1)
node.block = self.block()
return node
def parseComment(self):
tok = self.expect('comment')
if 'indent'==self.peek().type:
node = nodes.BlockComment(tok.val, self.block(), tok.buffer)
else:
node = nodes.Comment(tok.val,tok.buffer)
node.line = self.line()
return node
def parseDoctype(self):
tok = self.expect('doctype')
node = nodes.Doctype(tok.val)
node.line = self.line()
return node
def parseFilter(self):
tok = self.expect('filter')
attrs = self.accept('attrs')
self.lexer.pipeless = True
block = self.parseTextBlock()
self.lexer.pipeless = False
node = nodes.Filter(tok.val, block, attrs and attrs.attrs)
node.line = self.line()
return node
def parseASTFilter(self):
tok = self.expect('tag')
attrs = self.accept('attrs')
self.expect(':')
block = self.block()
node = nodes.Filter(tok.val, block, attrs and attrs.attrs)
node.line = self.line()
return node
def parseEach(self):
tok = self.expect('each')
node = nodes.Each(tok.code, tok.keys)
node.line = self.line()
node.block = self.block()
return node
def parseConditional(self):
tok = self.expect('conditional')
node = nodes.Conditional(tok.val, tok.sentence)
node.line = self.line()
node.block = self.block()
while True:
t = self.peek()
if 'conditional' == t.type and node.can_append(t.val):
node.append(self.parseConditional())
else:
break
return node
def parseExtends(self):
path = self.expect('extends').val.strip('"\'')
return nodes.Extends(path)
def parseCall(self):
tok = self.expect('call')
name = tok.val
args = tok.args
if args is None:
args = ""
block = self.block() if 'indent' == self.peek().type else None
return nodes.Mixin(name,args,block,True)
def parseMixin(self):
tok = self.expect('mixin')
name = tok.val
args = tok.args
if args is None:
args = ""
block = self.block() if 'indent' == self.peek().type else None
return nodes.Mixin(name,args,block,block is None)
def parseBlock(self):
block = self.expect('block')
mode = block.mode
name = block.val.strip()
block = self.block(cls=nodes.CodeBlock) if 'indent'==self.peek().type else nodes.CodeBlock(nodes.Literal(''))
block.mode = mode
block.name = name
return block
def parseInclude(self):
path = self.expect('include').val.strip()
return nodes.Include(path)
def parseTextBlock(self, tag=None):
text = nodes.Text()
text.line = self.line()
        if tag:
            text.parent = tag
spaces = self.expect('indent').val
if not self._spaces: self._spaces = spaces
indent = ' '*(spaces-self._spaces)
while 'outdent' != self.peek().type:
t = self.peek().type
if 'newline'==t:
text.append('\n')
self.advance()
elif 'indent'==t:
text.append('\n')
for node in self.parseTextBlock().nodes: text.append(node)
text.append('\n')
else:
text.append(indent+self.advance().val)
if spaces == self._spaces: self._spaces = None
self.expect('outdent')
return text
def block(self,cls=nodes.Block):
block = cls()
block.line = self.line()
self.expect('indent')
while 'outdent' != self.peek().type:
if 'newline'== self.peek().type:
self.advance()
else:
block.append(self.parseExpr())
self.expect('outdent')
return block
def processInline(self, current_tag, current_level):
next_level = current_level + 1
while self.peek().inline_level == next_level:
current_tag.block.append(self.parseExpr())
if self.peek().inline_level > next_level:
self.processInline(current_tag, next_level)
def processTagText(self, tag):
if self.peek().inline_level < tag.inline_level:
return
if not self.lookahead(2).inline_level > tag.inline_level:
tag.text = self.parseText()
return
while self.peek().inline_level == tag.inline_level and self.peek().type == 'string':
tag.block.append(self.parseExpr())
if self.peek().inline_level > tag.inline_level:
self.processInline(tag, tag.inline_level)
def parseTag(self):
i = 2
if 'attrs'==self.lookahead(i).type: i += 1
if ':'==self.lookahead(i).type:
if 'indent' == self.lookahead(i+1).type:
raise Exception('unexpected token "indent" in file %s on line %d' %
(self.filename, self.line()))
tok = self.advance()
tag = nodes.Tag(tok.val)
tag.inline_level = tok.inline_level
dot = None
tag.line = self.line()
while True:
t = self.peek().type
if t in ('id','class'):
tok = self.advance()
tag.setAttribute(tok.type,'"%s"'%tok.val,True)
continue
elif 'attrs'==t:
tok = self.advance()
for n,v in six.iteritems(tok.attrs):
tag.setAttribute(n,v,n in tok.static_attrs)
continue
else:
break
v = self.peek().val
if '.'== v:
dot = tag.textOnly = True
self.advance()
elif '<'== v: # For inline elements
tag.inline = True
self.advance()
t = self.peek().type
if 'code'==t: tag.code = self.parseCode()
elif ':'==t:
self.advance()
tag.block = nodes.Block()
tag.block.append(self.parseExpr())
elif 'string'==t: self.processTagText(tag)
elif 'text'==t: tag.text = self.parseText()
while 'newline' == self.peek().type: self.advance()
if 'indent' == self.peek().type:
if tag.textOnly:
self.lexer.pipeless = True
tag.block = self.parseTextBlock(tag)
self.lexer.pipeless = False
else:
block = self.block()
if tag.block:
for node in block.nodes:
tag.block.append(node)
else:
tag.block = block
return tag
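# Minimal usage sketch (illustrative; compiling the AST to HTML is handled
# elsewhere in the package):
#   from pyjade.parser import Parser
#   ast = Parser('div#main\n  p Hello').parse()   # returns a nodes.Block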
| mit |
highlando/krypy | krypy/recycling/linsys.py | 1 | 5723 | # -*- coding: utf8 -*-
import numpy
from .. import utils, deflation, linsys
from . import factories, evaluators
class _RecyclingSolver(object):
'''Base class for recycling solvers.'''
def __init__(self, DeflatedSolver,
vector_factory=None
):
'''Initialize recycling solver base.
:param DeflatedSolver: a deflated solver from
:py:mod:`~krypy.deflation`.
:param vector_factory: (optional) An instance of a subclass of
:py:class:`krypy.recycling.factories._DeflationVectorFactory`
that constructs deflation vectors for recycling. Defaults to `None`
which means that no recycling is used.
Also the following strings are allowed as shortcuts:
* ``'RitzApproxKrylov'``: uses the approximate Krylov subspace bound
evaluator :py:class:`krypy.recycling.evaluators.RitzApproxKrylov`.
* ``'RitzAprioriCg'``: uses the CG :math:`\kappa`-bound
(:py:class:`krypy.utils.BoundCG`) as an a priori bound with
:py:class:`krypy.recycling.evaluators.RitzApriori`.
* ``'RitzAprioriMinres'``: uses the MINRES bound
(:py:class:`krypy.utils.BoundMinres`) as an a priori bound with
:py:class:`krypy.recycling.evaluators.RitzApriori`.
After a run of the provided ``DeflatedSolver`` via :py:meth:`solve`,
the resulting instance of the ``DeflatedSolver`` is available in the
attribute ``last_solver``.
'''
self._DeflatedSolver = DeflatedSolver
self._vector_factory = vector_factory
self.timings = utils.Timings()
'''Timings from last run of :py:meth:`solve`.
Timings of the vector factory runs and the actual solution processes.
'''
self.last_solver = None
'''``DeflatedSolver`` instance from last run of :py:meth:`solve`.
Instance of ``DeflatedSolver`` that resulted from the last call to
:py:meth:`solve`. Initialized with ``None`` before the first run.'''
def solve(self, linear_system,
vector_factory=None,
*args, **kwargs):
'''Solve the given linear system with recycling.
The provided `vector_factory` determines which vectors are used for
deflation.
:param linear_system: the :py:class:`~krypy.linsys.LinearSystem` that
is about to be solved.
:param vector_factory: (optional) see description in constructor.
All remaining arguments are passed to the ``DeflatedSolver``.
:returns: instance of ``DeflatedSolver`` which was used to obtain the
approximate solution. The approximate solution is available under the
attribute ``xk``.
'''
# replace linear_system with equivalent TimedLinearSystem on demand
if not isinstance(linear_system, linsys.TimedLinearSystem):
linear_system = linsys.ConvertedTimedLinearSystem(linear_system)
with self.timings['vector_factory']:
if vector_factory is None:
vector_factory = self._vector_factory
# construct vector_factory if strings are provided
if vector_factory == 'RitzApproxKrylov':
vector_factory = factories.RitzFactory(
subset_evaluator=evaluators.RitzApproxKrylov()
)
elif vector_factory == 'RitzAprioriCg':
vector_factory = factories.RitzFactory(
subset_evaluator=evaluators.RitzApriori(
Bound=utils.BoundCG
)
)
elif vector_factory == 'RitzAprioriMinres':
vector_factory = factories.RitzFactory(
subset_evaluator=evaluators.RitzApriori(
Bound=utils.BoundMinres
)
)
# get deflation vectors
if self.last_solver is None or vector_factory is None:
U = numpy.zeros((linear_system.N, 0))
else:
U = vector_factory.get(self.last_solver)
with self.timings['solve']:
# solve deflated linear system
self.last_solver = self._DeflatedSolver(linear_system,
U=U,
store_arnoldi=True,
*args, **kwargs)
# return solver instance
return self.last_solver
class RecyclingCg(_RecyclingSolver):
'''Recycling preconditioned CG method.
See :py:class:`~krypy.recycling.linsys._RecyclingSolver` for the
documentation of the available parameters.
'''
def __init__(self, *args, **kwargs):
super(RecyclingCg, self).__init__(deflation.DeflatedCg,
*args, **kwargs)
class RecyclingMinres(_RecyclingSolver):
'''Recycling preconditioned MINRES method.
See :py:class:`~krypy.recycling.linsys._RecyclingSolver` for the
documentation of the available parameters.
'''
def __init__(self, *args, **kwargs):
super(RecyclingMinres, self).__init__(deflation.DeflatedMinres,
*args, **kwargs)
class RecyclingGmres(_RecyclingSolver):
'''Recycling preconditioned GMRES method.
See :py:class:`~krypy.recycling.linsys._RecyclingSolver` for the
documentation of the available parameters.
'''
def __init__(self, *args, **kwargs):
super(RecyclingGmres, self).__init__(deflation.DeflatedGmres,
*args, **kwargs)
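# Illustrative usage sketch (the operator and right-hand sides are made up):
#
#   import numpy
#   from krypy.linsys import LinearSystem
#   from krypy.recycling import RecyclingMinres
#
#   A = numpy.diag(numpy.linspace(1., 2., 20))
#   rminres = RecyclingMinres(vector_factory='RitzApproxKrylov')
#   for i in (1., 2.):
#       ls = LinearSystem(A, i * numpy.ones((20, 1)), self_adjoint=True)
#       sol = rminres.solve(ls)   # later solves recycle Ritz vectors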
| mit |
apagac/cfme_tests | cfme/common/vm.py | 1 | 41764 | # -*- coding: utf-8 -*-
"""Module containing classes with common behaviour for both VMs and Instances of all types."""
import json
from datetime import date
from datetime import datetime
from datetime import timedelta
import attr
from cached_property import cached_property
from riggerlib import recursive_update
from cfme.base.login import BaseLoggedInPage
from cfme.common import CustomButtonEventsMixin
from cfme.common import PolicyProfileAssignable
from cfme.common import Taggable
from cfme.common.vm_console import ConsoleMixin
from cfme.common.vm_views import DriftAnalysis
from cfme.common.vm_views import DriftHistory
from cfme.common.vm_views import VMPropertyDetailView
from cfme.exceptions import CFMEException
from cfme.exceptions import ItemNotFound
from cfme.exceptions import OptionNotAvailable
from cfme.modeling.base import BaseCollection
from cfme.modeling.base import BaseEntity
from cfme.services.requests import RequestsView
from cfme.utils import ParamClassName
from cfme.utils.appliance.implementations.ui import navigate_to
from cfme.utils.appliance.implementations.ui import navigator
from cfme.utils.blockers import BZ
from cfme.utils.log import logger
from cfme.utils.net import find_pingable
from cfme.utils.pretty import Pretty
from cfme.utils.rest import assert_response
from cfme.utils.timeutil import parsetime
from cfme.utils.update import Updateable
from cfme.utils.version import LOWEST
from cfme.utils.version import VersionPicker
from cfme.utils.virtual_machines import deploy_template
from cfme.utils.wait import wait_for
def base_types(template=False):
from pkg_resources import iter_entry_points
search = "template" if template else "vm"
return {
ep.name: ep.resolve() for ep in iter_entry_points('manageiq.{}_categories'.format(search))
}
def instance_types(category, template=False):
from pkg_resources import iter_entry_points
search = "template" if template else "vm"
return {
ep.name: ep.resolve() for ep in iter_entry_points(
'manageiq.{}_types.{}'.format(search, category))
}
def all_types(template=False):
all_types = base_types(template)
    for category in list(all_types):  # copy the keys; the dict grows below
all_types.update(instance_types(category, template))
return all_types
class _TemplateMixin(object):
pass
@attr.s
class BaseVM(
BaseEntity,
Pretty,
Updateable,
PolicyProfileAssignable,
Taggable,
ConsoleMixin,
CustomButtonEventsMixin,
):
"""Base VM and Template class that holds the largest common functionality between VMs,
instances, templates and images.
In order to inherit these, you have to implement the ``on_details`` method.
"""
pretty_attrs = ['name', 'provider', 'template_name']
###
# To be set or implemented
#
ALL_LIST_LOCATION = None
TO_OPEN_EDIT = None # Name of the item in Configuration that puts you in the form
QUADICON_TYPE = "vm"
# Titles of the delete buttons in configuration
REMOVE_SELECTED = 'Remove selected items from Inventory'
REMOVE_SINGLE = 'Remove Virtual Machine from Inventory'
RETIRE_DATE_FMT = parsetime.saved_report_title_format
_param_name = ParamClassName('name')
DETAILS_VIEW_CLASS = None
###
# Shared behaviour
#
PROVISION_CANCEL = 'Add of new VM Provision Request was cancelled by the user'
PROVISION_START = ('VM Provision Request was Submitted, you will be notified when your VMs '
'are ready')
name = attr.ib()
provider = attr.ib()
def __new__(cls, *args, **kwargs):
if cls in [BaseVM, VM, Template]:
raise NotImplementedError('This class cannot be instantiated.')
else:
# magic {waves hands}
return object.__new__(cls)
###
# Properties
#
@property
def is_vm(self):
return not isinstance(self, _TemplateMixin)
@property
def quadicon_type(self):
return self.QUADICON_TYPE
###
# Methods
#
def check_compliance(self, timeout=240):
"""Initiates compliance check and waits for it to finish."""
view = navigate_to(self, "Details")
original_state = self.compliance_status
view.toolbar.policy.item_select("Check Compliance of Last Known Configuration",
handle_alert=True)
view.flash.assert_no_error()
wait_for(
lambda: self.compliance_status != original_state,
num_sec=timeout, delay=5, message="compliance of {} checked".format(self.name)
)
@property
def compliance_status(self):
"""Returns the title of the compliance SummaryTable. The title contains datetime so it can
be compared.
Returns:
:py:class:`NoneType` if no title is present (no compliance checks before), otherwise str
"""
view = navigate_to(self, "Details")
view.toolbar.reload.click()
return view.entities.summary("Compliance").get_text_of("Status")
@property
def compliant(self):
"""Check if the VM is compliant.
Returns:
:py:class:`bool`
"""
text = self.compliance_status.strip().lower()
if text.startswith("non-compliant"):
return False
elif text.startswith("compliant"):
return True
else:
raise ValueError("{} is not a known state for compliance".format(text))
def delete(self, cancel=False, from_details=False):
"""Deletes the VM/Instance from the VMDB.
Args:
cancel: Whether to cancel the action in the alert.
from_details: Whether to use the details view or list view.
"""
if from_details:
view = navigate_to(self, 'Details')
view.toolbar.configuration.item_select(self.REMOVE_SINGLE,
handle_alert=not cancel)
else:
view = navigate_to(self.parent, 'All')
self.find_quadicon().check()
view.toolbar.configuration.item_select(self.REMOVE_SELECTED, handle_alert=not cancel)
@property
def ip_address(self):
"""Fetches IP Address of VM
First looks to see if any of the mgmt ips returned by 'all_ips' are pingable
Then defaults to whatever mgmt.ip returns
"""
return find_pingable(self.mgmt)
@property
def all_ip_addresses(self):
"""Fetches all IP Addresses of a VM, pingable or otherwise."""
# TODO: Implement sentaku for this property with ViaMGMT impl
view = navigate_to(self, "Details", use_resetter=False)
try:
return view.entities.summary('Properties').get_text_of("IP Address")
except NameError:
# since some providers have plural 'Addresses'.
return view.entities.summary('Properties').get_text_of("IP Addresses").split(", ")
@property
def mac_address(self):
"""Fetches MAC Address of VM"""
# TODO: We should update this with wrapanapi method when it becomes available.
view = navigate_to(self, "Details", use_resetter=False)
try:
return view.entities.summary('Properties').get_text_of("MAC Address")
except NameError:
# since some providers have plural 'Addresses'.
return view.entities.summary('Properties').get_text_of("MAC Addresses")
@property
def is_retired(self):
"""Check retirement status of vm"""
view = navigate_to(self, "Details", use_resetter=False)
if view.entities.summary('Lifecycle').get_text_of('Retirement Date').lower() != 'never':
try:
retirement_state = VersionPicker({
LOWEST: 'Retirement state',
'5.10': 'Retirement State'
})
status = view.entities.summary('Lifecycle').get_text_of(retirement_state).lower()
return status == 'retired'
except NameError:
return False
else:
return False
def find_quadicon(self, from_any_provider=False, from_archived_all=False,
from_orphaned_all=False, use_search=True):
"""Find and return a quadicon belonging to a specific vm
Args:
from_any_provider: Whether to look for it anywhere (root of the tree). Useful when
looking up archived or orphaned VMs
Returns: entity of appropriate type
Raises: ItemNotFound
"""
# TODO(all): Refactor this method replace it with vm methods like get_state
if from_any_provider:
view = navigate_to(self.parent, 'All')
elif from_archived_all:
view = navigate_to(self.appliance.provider_based_collection(self.provider),
'ArchivedAll')
elif from_orphaned_all:
view = navigate_to(self.appliance.provider_based_collection(self.provider),
'OrphanedAll')
else:
view = navigate_to(self, 'AllForProvider', use_resetter=False)
view.toolbar.view_selector.select('Grid View')
try:
return view.entities.get_entity(name=self.name, surf_pages=True, use_search=use_search)
except ItemNotFound:
raise ItemNotFound("VM '{}' not found in UI!".format(self.name))
def open_console(self, console='VM Console', invokes_alert=None):
"""
Initiates the opening of one of the console types supported by the Access
button. Presently we only support VM Console, which is the HTML5 Console.
In case of VMware provider it could be VMRC, VNC/HTML5, WebMKS, but we only
support VNC/HTML5.
Possible values for 'console' could be 'VM Console' and 'Web Console', but Web
Console is not supported as well.
Args:
console: one of the supported console types given by the Access button.
invokes_alert: If the particular console will invoke a CFME popup/alert
setting this to true will handle this.
"""
# TODO: implement vmrc vm console
if console not in ['VM Console']:
raise NotImplementedError('Not supported console type: {}'.format(console))
view = navigate_to(self, 'Details')
# Click console button given by type
view.toolbar.access.item_select(console, handle_alert=invokes_alert)
self.vm_console
def open_details(self, properties=None):
"""Clicks on details infoblock"""
view = navigate_to(self, 'Details')
view.entities.summary(properties[0]).click_at(properties[1])
return self.create_view(VMPropertyDetailView)
@property
def last_analysed(self):
"""Returns the contents of the ``Last Analysed`` field in summary"""
view = navigate_to(self, "Details")
view.toolbar.reload.click()
return view.entities.summary("Lifecycle").get_text_of("Last Analyzed").strip()
def load_details(self, refresh=False, from_any_provider=False):
"""Navigates to an VM's details page.
Args:
refresh: Refreshes the VM page if already there
from_any_provider: Archived/Orphaned VMs need this
"""
if from_any_provider:
view = navigate_to(self, 'AnyProviderDetails', use_resetter=False)
else:
view = navigate_to(self, 'Details', use_resetter=False)
if refresh:
view.toolbar.reload.click()
view.wait_displayed()
return view
def open_edit(self):
"""Loads up the edit page of the object."""
return navigate_to(self, 'Edit')
def open_timelines(self):
"""Navigates to an VM's timeline page.
Returns:
:py:class:`TimelinesView` object
"""
return navigate_to(self, 'Timelines')
def rediscover(self):
"""Deletes the VM from the provider and lets it discover again"""
self.delete(from_details=True)
self.wait_for_delete()
self.provider.refresh_provider_relationships()
self.wait_to_appear()
def rediscover_if_analysis_data_present(self):
"""Rediscovers the object if it has some analysis data present.
Returns:
Boolean if the rediscovery happened.
"""
if self.last_analysed.lower() != 'never':
self.rediscover()
return True
return False
def refresh_relationships(self, from_details=False, cancel=False, from_any_provider=False):
"""Executes a refresh of relationships.
Args:
from_details: Whether or not to perform action from instance details page
cancel: Whether or not to cancel the refresh relationships action
"""
if from_details:
view = navigate_to(self, 'Details', use_resetter=False)
else:
view = navigate_to(self.parent, 'All')
self.find_quadicon(from_any_provider=from_any_provider).check()
view.toolbar.configuration.item_select("Refresh Relationships and Power States",
handle_alert=not cancel)
@property
def retirement_date(self):
"""Returns the retirement date of the selected machine, or 'Never'
Returns:
:py:class:`str` object
"""
view = navigate_to(self, "Details")
return view.entities.summary("Lifecycle").get_text_of("Retirement Date").strip()
def smartstate_scan(self, cancel=False, from_details=False, wait_for_task_result=False):
"""Initiates fleecing from the UI.
Args:
cancel: Whether or not to cancel the refresh relationships action
from_details: Whether or not to perform action from instance details page
"""
if from_details:
view = navigate_to(self, 'Details', use_resetter=False)
else:
view = navigate_to(self.parent, 'All')
self.find_quadicon().check()
view.toolbar.configuration.item_select('Perform SmartState Analysis',
handle_alert=not cancel)
if wait_for_task_result:
task = self.appliance.collections.tasks.instantiate(
name='Scan from Vm {}'.format(self.name), tab='AllTasks')
task.wait_for_finished()
return task
def wait_to_disappear(self, timeout=600):
"""Wait for a VM to disappear within CFME
Args:
timeout: time (in seconds) to wait for it to appear
"""
wait_for(
lambda: self.exists,
num_sec=timeout, delay=5, fail_func=self.browser.refresh, fail_condition=True,
message="wait for vm to not exist")
wait_for_delete = wait_to_disappear # An alias for more fitting verbosity
def wait_to_appear(self, timeout=600, load_details=True):
"""Wait for a VM to appear within CFME
Args:
timeout: time (in seconds) to wait for it to appear
load_details: when found, should it load the vm details
"""
def _refresh():
self.provider.refresh_provider_relationships()
self.appliance.browser.widgetastic.browser.refresh() # strange because ViaUI
wait_for(
lambda: self.exists,
num_sec=timeout, delay=5, fail_func=_refresh,
message="wait for vm to appear")
if load_details:
navigate_to(self, "Details", use_resetter=False)
def set_ownership(self, user=None, group=None, click_cancel=False, click_reset=False):
"""Set instance ownership
Args:
user (User): user object for ownership
group (Group): group object for ownership
click_cancel (bool): Whether to cancel form submission
click_reset (bool): Whether to reset form after filling
"""
view = navigate_to(self, 'SetOwnership', wait_for_view=0)
fill_result = view.form.fill({
'user_name': user.name if user else None,
            'group_name': group.description if group else None})
if not fill_result:
view.form.cancel_button.click()
view = self.create_view(navigator.get_class(self, 'Details').VIEW)
view.flash.assert_success_message('Set Ownership was cancelled by the user')
return
# Only if the form changed
if click_reset:
view.form.reset_button.click()
view.flash.assert_message('All changes have been reset', 'warning')
# Cancel after reset
assert view.form.is_displayed
view.form.cancel_button.click()
elif click_cancel:
view.form.cancel_button.click()
view.flash.assert_success_message('Set Ownership was cancelled by the user')
else:
# save the form
view.form.save_button.click()
view = self.create_view(navigator.get_class(self, 'Details').VIEW)
view.flash.assert_success_message('Ownership saved for selected {}'
.format(self.VM_TYPE))
def unset_ownership(self):
"""Remove user ownership and return group to EvmGroup-Administrator"""
view = navigate_to(self, 'SetOwnership', wait_for_view=0)
fill_result = view.form.fill({
'user_name': '<No Owner>', 'group_name': 'EvmGroup-administrator'
})
if fill_result:
view.form.save_button.click()
msg = 'Ownership saved for selected {}'.format(self.VM_TYPE)
else:
view.form.cancel_button.click()
logger.warning('No change during unset_ownership')
msg = 'Set Ownership was cancelled by the user'
view = self.create_view(navigator.get_class(self, 'Details').VIEW)
view.flash.assert_success_message(msg)
def rename(self, new_vm_name, cancel=False, reset=False):
"""Rename the VM
Args:
new_vm_name: object for renaming vm
cancel (bool): Whether to cancel form submission
reset (bool): Whether to reset form after filling
"""
view = navigate_to(self, 'Rename')
changed = view.vm_name.fill(new_vm_name)
if changed:
if reset:
view.reset_button.click()
view.flash.assert_no_error()
view.cancel_button.click()
else:
# save the form
view.save_button.click()
view.flash.assert_no_error()
self.name = new_vm_name
return self
if cancel:
view.cancel_button.click()
view.flash.assert_no_error()
@attr.s
class BaseVMCollection(BaseCollection):
ENTITY = BaseVM
def instantiate(self, name, provider, template_name=None):
"""Factory class method that determines the correct subclass for given provider.
For reference how does that work, refer to the entrypoints in the setup.py
Args:
name: Name of the VM/Instance as it appears in the UI
provider: The provider object (not the string!)
template_name: Source template name. Useful when the VM/Instance does not exist and you
want to create it.
"""
# When this collection is filtered and used for instantiation, the ENTITY attribute
# points to BaseVM instead of a specific VM type ENTITY class
# For this reason we don't use self.ENTITY, but instead lookup the entity class
# through the provider's attributes
if isinstance(self, TemplateCollection):
# This is a Template derived class, not a VM
return provider.template_class.from_collection(self, name, provider)
else:
return provider.vm_class.from_collection(self, name, provider, template_name)
def create(self, vm_name, provider, form_values=None, cancel=False, check_existing=False,
find_in_cfme=False, wait=True, request_description=None, auto_approve=False,
override=False):
"""Provisions an vm/instance with the given properties through CFME
Args:
vm_name: the vm/instance's name
provider: provider object
form_values: dictionary of form values for provisioning, structured into tabs
cancel: boolean, whether or not to cancel form filling
check_existing: verify if such vm_name exists
find_in_cfme: verify that vm was created and appeared in CFME
wait: wait for vm provision request end
request_description: request description that test needs to search in request table.
auto_approve: if true the request is approved before waiting for completion.
override: To override any failure related exception
Note:
Calling create on a sub-class of instance will generate the properly formatted
dictionary when the correct fields are supplied.
"""
vm = self.instantiate(vm_name, provider)
if check_existing and vm.exists:
return vm
if not provider.is_refreshed():
provider.refresh_provider_relationships()
wait_for(provider.is_refreshed, func_kwargs={'refresh_delta': 10}, timeout=600)
if not form_values:
form_values = vm.vm_default_args
else:
inst_args = vm.vm_default_args
form_values = recursive_update(inst_args, form_values)
env = form_values.get('environment') or {}
if env.get('automatic_placement'):
form_values['environment'] = {'automatic_placement': True}
form_values.update({'provider_name': provider.name})
if not form_values.get('template_name'):
template_name = (provider.data.get('provisioning').get('image', {}).get('name') or
provider.data.get('provisioning').get('template'))
vm.template_name = template_name
form_values.update({'template_name': template_name})
view = navigate_to(self, 'Provision')
view.form.fill(form_values)
if cancel:
view.form.cancel_button.click()
view = self.browser.create_view(BaseLoggedInPage)
view.flash.assert_success_message(self.ENTITY.PROVISION_CANCEL)
view.flash.assert_no_error()
else:
view.form.submit_button.click()
view = vm.appliance.browser.create_view(RequestsView)
            # This flash message is not flashed in 5.10 (BZ 1608967), so only
            # wait for it on older streams.
            if not BZ(1608967, forced_streams=['5.10']).blocks:
                wait_for(lambda: view.flash.messages, fail_condition=[], timeout=10, delay=2,
                         message='wait for Flash Success')
view.flash.assert_no_error()
if wait:
if request_description is None:
request_description = 'Provision from [{}] to [{}]'.format(
form_values.get('template_name'), vm.name)
provision_request = vm.appliance.collections.requests.instantiate(
request_description)
logger.info('Waiting for cfme provision request for vm %s', vm.name)
if auto_approve:
provision_request.approve_request(method='ui', reason="Approved")
provision_request.wait_for_request(method='ui', num_sec=1200)
if provision_request.is_succeeded(method='ui'):
logger.info('Waiting for vm %s to appear on provider %s', vm.name,
provider.key)
wait_for(provider.mgmt.does_vm_exist, [vm.name],
handle_exception=True, num_sec=600)
elif override:
logger.info('Overriding exception to check failure condition.')
else:
raise Exception(
"Provisioning vm {} failed with: {}"
.format(vm.name, provision_request.row.last_message.text)
)
if find_in_cfme:
vm.wait_to_appear(timeout=800)
return vm
def create_rest(self, vm_name, provider, form_values=None, check_existing=False):
"""Provisions a VM/Instance with the default self.vm_default_args_rest.
self.vm_default_args_rest may be overridden by form_values.
For more details about rest attributes please check:
https://access.redhat.com/documentation/en-us/red_hat_cloudforms/4.6/html-single/
red_hat_cloudforms_rest_api/index#provision-request-supported-attributes or
http://manageiq.org/docs/reference/fine/api/appendices/provision_attributes
NOTE: placement_auto defaults to True for requests made from the API or CloudForms Automate.
Args:
vm_name: vm name
provider: provider object
form_values: overrides default provision arguments or extends it.
            check_existing: if True, skip provisioning and return the VM when it already exists
Return: Instance object
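        Example (illustrative only; see the linked docs for the supported attributes):
            vm = collection.create_rest('test-vm-1', provider,
                                        form_values={'vm_fields': {'vm_memory': '4096'}})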
"""
vm = self.instantiate(vm_name, provider)
if check_existing and vm.exists:
return vm
else:
if not provider.is_refreshed():
provider.refresh_provider_relationships()
wait_for(provider.is_refreshed, func_kwargs={'refresh_delta': 10}, timeout=600)
if not form_values:
form_values = vm.vm_default_args_rest
else:
inst_args = vm.vm_default_args_rest
form_values = recursive_update(inst_args, form_values)
response = self.appliance.rest_api.collections.provision_requests.action.create(
**form_values)[0]
assert_response(self.appliance)
provision_request = vm.appliance.collections.requests.instantiate(
description=response.description)
provision_request.wait_for_request(num_sec=900)
if provision_request.is_succeeded():
wait_for(lambda: provider.mgmt.does_vm_exist(vm.name), num_sec=1000, delay=5,
message="VM {} becomes visible".format(vm.name))
else:
logger.error("Provisioning failed with the message {}".
format(provision_request.rest.message))
raise CFMEException(provision_request.rest.message)
return vm
@attr.s
class VM(BaseVM):
template_name = attr.ib(default=None)
TO_RETIRE = None
    # May be overridden by implementors of BaseVM
STATE_ON = "on"
STATE_OFF = "off"
STATE_PAUSED = "paused"
STATE_SUSPENDED = "suspended"
@cached_property
def mgmt(self):
"""
Returns the wrapanapi VM entity object to manipulate this VM directly via the provider API
"""
return self.provider.mgmt.get_vm(self.name)
@property
def exists_on_provider(self):
return self.provider.mgmt.does_vm_exist(self.name)
def retire(self):
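        """Retire this VM/Instance from its Details page via the Lifecycle menu."""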
view = navigate_to(self, 'Details', use_resetter=False)
view.toolbar.reload.click()
view.toolbar.lifecycle.item_select(self.TO_RETIRE, handle_alert=True)
view.flash.assert_no_error()
def power_control_from_cfme(self, option, cancel=True, from_details=False):
"""Power controls a VM from within CFME
Args:
option: corresponds to option values under the power button
cancel: Whether or not to cancel the power operation on confirmation
from_details: Whether or not to perform action from instance details page
Raises:
OptionNotAvailable: option param is not visible or enabled
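        Example (hypothetical option label; valid labels depend on the provider):
            vm.power_control_from_cfme(option='Power Off', cancel=False, from_details=True)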
"""
if from_details:
view = navigate_to(self, 'Details', use_resetter=False)
else:
view = navigate_to(self.parent, 'All')
if self.is_pwr_option_available_in_cfme(option=option, from_details=from_details):
view.toolbar.power.item_select(option, handle_alert=not cancel)
logger.info(
"Power control action of VM/instance %s, option %s, cancel %s executed",
self.name, option, str(cancel))
else:
raise OptionNotAvailable(option + " is not visible or enabled")
def wait_candu_data_available(self, timeout=600):
"""Waits until C&U data are available for this VM/Instance
Args:
timeout: Timeout passed to :py:func:`utils.wait.wait_for`
"""
view = navigate_to(self, 'Details', use_resetter=False)
view.toolbar.reload.click()
wait_for(
lambda: view.toolbar.monitoring.item_enabled("Utilization"),
delay=10, handle_exception=True, num_sec=timeout,
fail_func=view.toolbar.reload.click)
def capture_historical_data(self, interval="hourly", back="6.days"):
"""Capture historical utilization data for this VM/Instance
Args:
            interval: data interval ('hourly' or 'daily')
            back: how far back to collect data from, e.g. '6.days'
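        Example (illustrative):
            vm.capture_historical_data(interval='daily', back='2.days')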
"""
ret = self.appliance.ssh_client.run_rails_command(
"'vm = Vm.where(:ems_id => {prov_id}).where(:name => {vm_name})[0];\
vm.perf_capture({interval}, {back}.ago.utc, Time.now.utc)'".format(
prov_id=self.provider.id,
vm_name=json.dumps(self.name),
interval=json.dumps(interval),
back=back,
)
)
return ret.success
def wait_for_vm_state_change(self, desired_state=None, timeout=300, from_details=False,
with_relationship_refresh=True, from_any_provider=False):
"""Wait for VM to come to desired state in the UI.
This function waits just the needed amount of time thanks to wait_for.
Args:
desired_state: on, off, suspended... for available states, see
:py:class:`EC2Instance` and :py:class:`OpenStackInstance`
timeout: Specify amount of time (in seconds) to wait
from_any_provider: Archived/Orphaned vms need this
Raises:
TimedOutError:
When instance does not come up to desired state in specified period of time.
ItemNotFound:
When unable to find the instance passed
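        Example (illustrative; the state constants are defined on the VM class):
            vm.wait_for_vm_state_change(desired_state=vm.STATE_OFF, timeout=720)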
"""
def _looking_for_state_change():
if from_details:
view = navigate_to(self, "Details", use_resetter=False)
view.toolbar.reload.click()
current_state = view.entities.summary("Power Management").get_text_of("Power State")
return current_state == desired_state
else:
return self.find_quadicon(
from_any_provider=from_any_provider).data['state'] == desired_state
return wait_for(
_looking_for_state_change,
num_sec=timeout,
delay=30,
fail_func=lambda: self.refresh_relationships(from_details=from_details,
from_any_provider=from_any_provider) if
with_relationship_refresh else None)
def is_pwr_option_available_in_cfme(self, option, from_details=False):
"""Checks to see if a power option is available on the VM
Args:
option: corresponds to option values under the power button,
see :py:class:`EC2Instance` and :py:class:`OpenStackInstance`
from_details: Whether or not to perform action from instance details page
"""
if from_details:
view = navigate_to(self, 'Details', use_resetter=False)
view.toolbar.reload.click()
else:
view = navigate_to(self.parent, "All")
entity = self.find_quadicon()
entity.check()
if view.toolbar.power.has_item(option):
return view.toolbar.power.item_enabled(option)
else:
return False
def create_on_provider(self, timeout=900, find_in_cfme=False, delete_on_failure=True, **kwargs):
"""Create the VM on the provider via MgmtSystem. `deploy_template` handles errors during
VM provision on MgmtSystem sideNS deletes VM if provisioned incorrectly
Args:
timeout: Number of seconds to wait for the VM to appear in CFME
                Will not wait at all if set to 0 (defaults to ``900``)
find_in_cfme: Verifies that VM exists in CFME UI
delete_on_failure: Attempts to remove VM on UI navigation failure
"""
vm = deploy_template(self.provider.key, self.name, self.template_name, **kwargs)
try:
if find_in_cfme:
self.wait_to_appear(timeout=timeout, load_details=False)
except Exception:
logger.warning("Couldn't find VM or Instance '%s' in CFME", self.name)
if delete_on_failure:
logger.info("Removing VM or Instance from mgmt system")
self.cleanup_on_provider()
raise
return vm
def cleanup_on_provider(self):
"""Clean up entity on the provider if it has been created on the provider
        Helper method to avoid NotFoundError exceptions during test case teardown.
"""
if self.exists_on_provider:
self.mgmt.cleanup()
else:
logger.debug('cleanup_on_provider: entity "%s" does not exist', self.name)
def set_retirement_date(self, when=None, offset=None, warn=None):
"""Overriding common method to use widgetastic views/widgets properly
Args:
when: :py:class:`datetime.datetime` object, when to retire (date in future)
            offset: :py:class:`dict` with months, weeks, days, hours keys. Other keys are ignored.
warn: When to warn, fills the select in the form in case the ``when`` is specified.
Note: this should be moved up to the common VM class when infra+cloud+common are all WT
If when and offset are both None, this removes retirement date
Examples:
# To set a specific retirement date 2 days from today
two_days_later = datetime.date.today() + datetime.timedelta(days=2)
vm.set_retirement_date(when=two_days_later)
# To set a retirement offset 2 weeks from now
            vm.set_retirement_date(offset={'weeks': 2})
            Offset is a dict to remove ambiguity between timedelta/datetime and months/weeks/days/hours
timedelta supports creation with weeks, but not months
timedelta supports days attr, but not weeks or months
timedelta days attr will report a total summary, not the component that was passed to it
For these reasons timedelta isn't appropriate for offset
An enhancement to cfme.utils.timeutil extending timedelta would be great for making this a
bit cleaner
"""
view = navigate_to(self, 'SetRetirement')
fill_date = None
fill_offset = None
# explicit is/not None use here because of empty strings and dicts
if when is not None and offset is not None:
raise ValueError('set_retirement_date takes when or offset, but not both')
if when is not None and not isinstance(when, (datetime, date)):
raise ValueError('when argument must be a datetime object')
# due to major differences between the forms and their interaction, I'm splitting this
# method into two major blocks, one for each version. As a result some patterns will be
# repeated in both blocks
# This will allow for making changes to one version or the other without strange
# interaction in the logic
# format the date
# needs 4 digit year for fill
# displayed 2 digit year for flash message
# 59z/G-release retirement
changed = False # just in case it isn't set in logic
if when is not None and offset is None:
# Specific datetime retire, H+M are 00:00 by default if just date passed
fill_date = when.strftime('%m/%d/%Y %H:%M') # 4 digit year
msg_date = when.strftime('%m/%d/%y %H:%M UTC') # two digit year and timestamp
msg = 'Retirement date set to {}'.format(msg_date)
elif when is None and offset is None:
# clearing retirement date with space in textinput,
# using space here as with empty string calendar input is not cleared correctly
fill_date = ' '
msg = 'Retirement date removed'
elif offset is not None:
# retirement by offset
fill_date = None
fill_offset = {k: v
for k, v in offset.items()
if k in ['months', 'weeks', 'days', 'hours']}
# hack together an offset
# timedelta can take weeks, but not months
# copy and pop, only used to generate message, not used for form fill
offset_copy = fill_offset.copy()
if 'months' in offset_copy:
new_weeks = offset_copy.get('weeks', 0) + int(offset_copy.pop('months', 0)) * 4
offset_copy.update({'weeks': new_weeks})
msg_date = datetime.utcnow() + timedelta(**offset_copy)
msg = 'Retirement date set to {}'.format(msg_date.strftime('%m/%d/%y %H:%M UTC'))
# TODO move into before_fill when no need to click away from datetime picker
view.form.fill({
'retirement_mode':
'Time Delay from Now' if fill_offset else 'Specific Date and Time'})
view.flush_widget_cache() # since retirement_date is conditional widget
if fill_date is not None: # specific check because of empty string
# two part fill, widget seems to block warn selection when open
changed_date = view.form.fill({
'retirement_date': {'datetime_select': fill_date}})
view.title.click() # close datetime widget
changed_warn = view.form.fill({'retirement_warning': warn})
changed = changed_date or changed_warn
elif fill_offset:
changed = view.form.fill({
'retirement_date': fill_offset, 'retirement_warning': warn})
# Form save and flash messages are the same between versions
if changed:
view.form.save.click()
else:
logger.info('No form changes for setting retirement, clicking cancel')
view.form.cancel.click()
msg = 'Set/remove retirement date was cancelled by the user'
if self.DETAILS_VIEW_CLASS is not None:
view = self.create_view(self.DETAILS_VIEW_CLASS, wait='5s')
view.flash.assert_success_message(msg)
def equal_drift_results(self, drift_section, section, *indexes):
"""Compares drift analysis results of a row specified by it's title text.
Args:
drift_section (str): Title text of the row to compare
section (str): Accordion section where the change happened
indexes: Indexes of results to compare starting with 1 for first row (latest result).
Compares all available drifts, if left empty (default)
Note:
There have to be at least 2 drift results available for this to work.
Returns:
:py:class:`bool`
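        Example (illustrative values):
            # compare the two latest drift results for the 'Name' row
            vm.equal_drift_results('Name', 'Properties', 1, 2)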
"""
def _select_rows(indexes):
for i in indexes:
drift_history_view.history_table[i][0].click()
# mark by indexes or mark all
details_view = navigate_to(self, "Details")
details_view.entities.summary("Relationships").click_at("Drift History")
drift_history_view = self.create_view(DriftHistory, wait='10s')
if indexes:
_select_rows(indexes)
else:
# We can't compare more than 10 drift results at once
# so when selecting all, we have to limit it to the latest 10
rows_number = len(list(drift_history_view.history_table.rows()))
if rows_number > 10:
_select_rows(list(range(10)))
else:
_select_rows(list(range(rows_number)))
drift_history_view.analyze_button.click()
drift_analysis_view = self.create_view(DriftAnalysis, wait='10s')
drift_analysis_view.drift_sections.check_node(section)
drift_analysis_view.apply_button.click()
if not drift_analysis_view.toolbar.all_attributes.active:
drift_analysis_view.toolbar.all_attributes.click()
return drift_analysis_view.drift_analysis.is_changed(drift_section)
@attr.s
class VMCollection(BaseVMCollection):
ENTITY = VM
@attr.s
class Template(BaseVM, _TemplateMixin):
"""A base class for all templates.
"""
@cached_property
def mgmt(self):
"""Holds wrapanapi template entity object for this template."""
return self.provider.mgmt.get_template(self.name)
@property
def exists_on_provider(self):
return self.provider.mgmt.does_template_exist(self.name)
@attr.s
class TemplateCollection(BaseVMCollection):
ENTITY = Template
| gpl-2.0 |
petewarden/tensorflow | tensorflow/python/autograph/pyct/ast_util_test.py | 8 | 8400 | # Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for ast_util module."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import ast
import collections
import textwrap
import gast
from tensorflow.python.autograph.pyct import anno
from tensorflow.python.autograph.pyct import ast_util
from tensorflow.python.autograph.pyct import loader
from tensorflow.python.autograph.pyct import parser
from tensorflow.python.autograph.pyct import qual_names
from tensorflow.python.platform import test
class AstUtilTest(test.TestCase):
def setUp(self):
super(AstUtilTest, self).setUp()
self._invocation_counts = collections.defaultdict(lambda: 0)
def test_rename_symbols_basic(self):
node = parser.parse('a + b')
node = qual_names.resolve(node)
node = ast_util.rename_symbols(
node, {qual_names.QN('a'): qual_names.QN('renamed_a')})
self.assertIsInstance(node.value.left.id, str)
source = parser.unparse(node, include_encoding_marker=False)
self.assertEqual(source.strip(), '(renamed_a + b)')
def test_rename_symbols_attributes(self):
node = parser.parse('b.c = b.c.d')
node = qual_names.resolve(node)
node = ast_util.rename_symbols(
node, {qual_names.from_str('b.c'): qual_names.QN('renamed_b_c')})
source = parser.unparse(node, include_encoding_marker=False)
self.assertEqual(source.strip(), 'renamed_b_c = renamed_b_c.d')
def test_rename_symbols_nonlocal(self):
node = parser.parse('nonlocal a, b, c')
node = qual_names.resolve(node)
node = ast_util.rename_symbols(
node, {qual_names.from_str('b'): qual_names.QN('renamed_b')})
source = parser.unparse(node, include_encoding_marker=False)
self.assertEqual(source.strip(), 'nonlocal a, renamed_b, c')
def test_rename_symbols_global(self):
node = parser.parse('global a, b, c')
node = qual_names.resolve(node)
node = ast_util.rename_symbols(
node, {qual_names.from_str('b'): qual_names.QN('renamed_b')})
source = parser.unparse(node, include_encoding_marker=False)
self.assertEqual(source.strip(), 'global a, renamed_b, c')
def test_rename_symbols_annotations(self):
node = parser.parse('a[i]')
node = qual_names.resolve(node)
anno.setanno(node, 'foo', 'bar')
orig_anno = anno.getanno(node, 'foo')
node = ast_util.rename_symbols(node,
{qual_names.QN('a'): qual_names.QN('b')})
self.assertIs(anno.getanno(node, 'foo'), orig_anno)
def test_rename_symbols_function(self):
node = parser.parse('def f():\n pass')
node = ast_util.rename_symbols(node,
{qual_names.QN('f'): qual_names.QN('f1')})
source = parser.unparse(node, include_encoding_marker=False)
self.assertEqual(source.strip(), 'def f1():\n pass')
def test_copy_clean(self):
node = parser.parse(
textwrap.dedent("""
def f(a):
return a + 1
"""))
setattr(node, '__foo', 'bar')
new_node = ast_util.copy_clean(node)
self.assertIsNot(new_node, node)
self.assertFalse(hasattr(new_node, '__foo'))
def test_copy_clean_preserves_annotations(self):
node = parser.parse(
textwrap.dedent("""
def f(a):
return a + 1
"""))
anno.setanno(node, 'foo', 'bar')
anno.setanno(node, 'baz', 1)
new_node = ast_util.copy_clean(node, preserve_annos={'foo'})
self.assertEqual(anno.getanno(new_node, 'foo'), 'bar')
self.assertFalse(anno.hasanno(new_node, 'baz'))
def test_keywords_to_dict(self):
keywords = parser.parse_expression('f(a=b, c=1, d=\'e\')').keywords
d = ast_util.keywords_to_dict(keywords)
# Make sure we generate a usable dict node by attaching it to a variable and
# compiling everything.
node = parser.parse('def f(b): pass')
node.body.append(ast.Return(d))
result, _, _ = loader.load_ast(node)
self.assertDictEqual(result.f(3), {'a': 3, 'c': 1, 'd': 'e'})
def assertMatch(self, target_str, pattern_str):
node = parser.parse_expression(target_str)
pattern = parser.parse_expression(pattern_str)
self.assertTrue(ast_util.matches(node, pattern))
def assertNoMatch(self, target_str, pattern_str):
node = parser.parse_expression(target_str)
pattern = parser.parse_expression(pattern_str)
self.assertFalse(ast_util.matches(node, pattern))
def test_matches_symbols(self):
self.assertMatch('foo', '_')
self.assertNoMatch('foo()', '_')
self.assertMatch('foo + bar', 'foo + _')
self.assertNoMatch('bar + bar', 'foo + _')
self.assertNoMatch('foo - bar', 'foo + _')
def test_matches_function_args(self):
self.assertMatch('super(Foo, self).__init__(arg1, arg2)',
'super(_).__init__(_)')
self.assertMatch('super().__init__()', 'super(_).__init__(_)')
self.assertNoMatch('super(Foo, self).bar(arg1, arg2)',
'super(_).__init__(_)')
self.assertMatch('super(Foo, self).__init__()', 'super(Foo, _).__init__(_)')
self.assertNoMatch('super(Foo, self).__init__()',
'super(Bar, _).__init__(_)')
def _mock_apply_fn(self, target, source):
target = parser.unparse(target, include_encoding_marker=False)
source = parser.unparse(source, include_encoding_marker=False)
self._invocation_counts[(target.strip(), source.strip())] += 1
def test_apply_to_single_assignments_dynamic_unpack(self):
node = parser.parse('a, b, c = d')
ast_util.apply_to_single_assignments(node.targets, node.value,
self._mock_apply_fn)
self.assertDictEqual(self._invocation_counts, {
('a', 'd[0]'): 1,
('b', 'd[1]'): 1,
('c', 'd[2]'): 1,
})
def test_apply_to_single_assignments_static_unpack(self):
node = parser.parse('a, b, c = d, e, f')
ast_util.apply_to_single_assignments(node.targets, node.value,
self._mock_apply_fn)
self.assertDictEqual(self._invocation_counts, {
('a', 'd'): 1,
('b', 'e'): 1,
('c', 'f'): 1,
})
def test_parallel_walk(self):
src = """
def f(a):
return a + 1
"""
node = parser.parse(textwrap.dedent(src))
for child_a, child_b in ast_util.parallel_walk(node, node):
self.assertEqual(child_a, child_b)
def test_parallel_walk_string_leaves(self):
src = """
def f(a):
global g
"""
node = parser.parse(textwrap.dedent(src))
for child_a, child_b in ast_util.parallel_walk(node, node):
self.assertEqual(child_a, child_b)
def test_parallel_walk_inconsistent_trees(self):
node_1 = parser.parse(
textwrap.dedent("""
def f(a):
return a + 1
"""))
node_2 = parser.parse(
textwrap.dedent("""
def f(a):
return a + (a * 2)
"""))
node_3 = parser.parse(
textwrap.dedent("""
def f(a):
return a + 2
"""))
with self.assertRaises(ValueError):
for _ in ast_util.parallel_walk(node_1, node_2):
pass
    # There is no particular reason to reject trees that differ only in the
    # value of a constant.
# TODO(mdan): This should probably be allowed.
with self.assertRaises(ValueError):
for _ in ast_util.parallel_walk(node_1, node_3):
pass
def assertLambdaNodes(self, matching_nodes, expected_bodies):
self.assertEqual(len(matching_nodes), len(expected_bodies))
for node in matching_nodes:
self.assertIsInstance(node, gast.Lambda)
self.assertIn(
parser.unparse(node.body, include_encoding_marker=False).strip(),
expected_bodies)
if __name__ == '__main__':
test.main()
| apache-2.0 |
balazssimon/ml-playground | udemy/lazyprogrammer/reinforcement-learning-python/grid_world.py | 1 | 2827 | class Grid: # Environment
def __init__(self, width, height, start):
self.width = width
self.height = height
self.i = start[0]
self.j = start[1]
def set(self, rewards, actions):
    # rewards: dict mapping (i, j) positions, i.e. (row, col), to rewards
    # actions: dict mapping (i, j) positions, i.e. (row, col), to lists of possible actions
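    # e.g. (values taken from standard_grid below):
    #   rewards = {(0, 3): 1, (1, 3): -1}
    #   actions = {(2, 0): ('U', 'R'), ...}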
self.rewards = rewards
self.actions = actions
def set_state(self, s):
self.i = s[0]
self.j = s[1]
def current_state(self):
return (self.i, self.j)
def is_terminal(self, s):
return s not in self.actions
def move(self, action):
# check if legal move first
if action in self.actions[(self.i, self.j)]:
if action == 'U':
self.i -= 1
elif action == 'D':
self.i += 1
elif action == 'R':
self.j += 1
elif action == 'L':
self.j -= 1
# return a reward (if any)
return self.rewards.get((self.i, self.j), 0)
def undo_move(self, action):
# these are the opposite of what U/D/L/R should normally do
if action == 'U':
self.i += 1
elif action == 'D':
self.i -= 1
elif action == 'R':
self.j -= 1
elif action == 'L':
self.j += 1
# raise an exception if we arrive somewhere we shouldn't be
# should never happen
assert(self.current_state() in self.all_states())
def game_over(self):
# returns true if game is over, else false
# true if we are in a state where no actions are possible
return (self.i, self.j) not in self.actions
def all_states(self):
# possibly buggy but simple way to get all states
# either a position that has possible next actions
# or a position that yields a reward
return set(self.actions.keys()) | set(self.rewards.keys())
def standard_grid():
# define a grid that describes the reward for arriving at each state
# and possible actions at each state
# the grid looks like this
# x means you can't go there
# s means start position
# number means reward at that state
# . . . 1
# . x . -1
# s . . .
g = Grid(3, 4, (2, 0))
rewards = {(0, 3): 1, (1, 3): -1}
actions = {
(0, 0): ('D', 'R'),
(0, 1): ('L', 'R'),
(0, 2): ('L', 'D', 'R'),
(1, 0): ('U', 'D'),
(1, 2): ('U', 'D', 'R'),
(2, 0): ('U', 'R'),
(2, 1): ('L', 'R'),
(2, 2): ('L', 'R', 'U'),
(2, 3): ('L', 'U'),
}
g.set(rewards, actions)
return g
def negative_grid(step_cost=-0.1):
# in this game we want to try to minimize the number of moves
# so we will penalize every move
g = standard_grid()
g.rewards.update({
(0, 0): step_cost,
(0, 1): step_cost,
(0, 2): step_cost,
(1, 0): step_cost,
(1, 2): step_cost,
(2, 0): step_cost,
(2, 1): step_cost,
(2, 2): step_cost,
(2, 3): step_cost,
})
return g
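if __name__ == '__main__':
  # Minimal usage sketch (added for illustration, not part of the original
  # module): walk the agent from the start state to the +1 terminal state.
  g = standard_grid()
  for a in ['U', 'U', 'R', 'R', 'R']:
    r = g.move(a)
    print('action=%s state=%s reward=%s' % (a, g.current_state(), r))
  print('game over: %s' % g.game_over())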
| apache-2.0 |
nordri/check_domains | lib/python2.7/site-packages/django/db/backends/postgresql_psycopg2/base.py | 31 | 7885 | """
PostgreSQL database backend for Django.
Requires psycopg 2: http://initd.org/projects/psycopg2
"""
from django.conf import settings
from django.db.backends import (BaseDatabaseFeatures, BaseDatabaseWrapper,
BaseDatabaseValidation)
from django.db.backends.postgresql_psycopg2.operations import DatabaseOperations
from django.db.backends.postgresql_psycopg2.client import DatabaseClient
from django.db.backends.postgresql_psycopg2.creation import DatabaseCreation
from django.db.backends.postgresql_psycopg2.version import get_version
from django.db.backends.postgresql_psycopg2.introspection import DatabaseIntrospection
from django.db.backends.postgresql_psycopg2.schema import DatabaseSchemaEditor
from django.db.utils import InterfaceError
from django.utils.encoding import force_str
from django.utils.functional import cached_property
from django.utils.safestring import SafeText, SafeBytes
from django.utils.timezone import utc
try:
import psycopg2 as Database
import psycopg2.extensions
except ImportError as e:
from django.core.exceptions import ImproperlyConfigured
raise ImproperlyConfigured("Error loading psycopg2 module: %s" % e)
DatabaseError = Database.DatabaseError
IntegrityError = Database.IntegrityError
psycopg2.extensions.register_type(psycopg2.extensions.UNICODE)
psycopg2.extensions.register_type(psycopg2.extensions.UNICODEARRAY)
psycopg2.extensions.register_adapter(SafeBytes, psycopg2.extensions.QuotedString)
psycopg2.extensions.register_adapter(SafeText, psycopg2.extensions.QuotedString)
def utc_tzinfo_factory(offset):
if offset != 0:
raise AssertionError("database connection isn't set to UTC")
return utc
class DatabaseFeatures(BaseDatabaseFeatures):
needs_datetime_string_cast = False
can_return_id_from_insert = True
requires_rollback_on_dirty_transaction = True
has_real_datatype = True
can_defer_constraint_checks = True
has_select_for_update = True
has_select_for_update_nowait = True
has_bulk_insert = True
uses_savepoints = True
supports_tablespaces = True
supports_transactions = True
can_introspect_ip_address_field = True
can_introspect_small_integer_field = True
can_distinct_on_fields = True
can_rollback_ddl = True
supports_combined_alters = True
nulls_order_largest = True
closed_cursor_error_class = InterfaceError
has_case_insensitive_like = False
requires_sqlparse_for_splitting = False
class DatabaseWrapper(BaseDatabaseWrapper):
vendor = 'postgresql'
operators = {
'exact': '= %s',
'iexact': '= UPPER(%s)',
'contains': 'LIKE %s',
'icontains': 'LIKE UPPER(%s)',
'regex': '~ %s',
'iregex': '~* %s',
'gt': '> %s',
'gte': '>= %s',
'lt': '< %s',
'lte': '<= %s',
'startswith': 'LIKE %s',
'endswith': 'LIKE %s',
'istartswith': 'LIKE UPPER(%s)',
'iendswith': 'LIKE UPPER(%s)',
}
pattern_ops = {
'startswith': "LIKE %s || '%%%%'",
'istartswith': "LIKE UPPER(%s) || '%%%%'",
}
Database = Database
def __init__(self, *args, **kwargs):
super(DatabaseWrapper, self).__init__(*args, **kwargs)
opts = self.settings_dict["OPTIONS"]
RC = psycopg2.extensions.ISOLATION_LEVEL_READ_COMMITTED
self.isolation_level = opts.get('isolation_level', RC)
self.features = DatabaseFeatures(self)
self.ops = DatabaseOperations(self)
self.client = DatabaseClient(self)
self.creation = DatabaseCreation(self)
self.introspection = DatabaseIntrospection(self)
self.validation = BaseDatabaseValidation(self)
def get_connection_params(self):
settings_dict = self.settings_dict
# None may be used to connect to the default 'postgres' db
if settings_dict['NAME'] == '':
from django.core.exceptions import ImproperlyConfigured
raise ImproperlyConfigured(
"settings.DATABASES is improperly configured. "
"Please supply the NAME value.")
conn_params = {
'database': settings_dict['NAME'] or 'postgres',
}
conn_params.update(settings_dict['OPTIONS'])
if 'autocommit' in conn_params:
del conn_params['autocommit']
if 'isolation_level' in conn_params:
del conn_params['isolation_level']
if settings_dict['USER']:
conn_params['user'] = settings_dict['USER']
if settings_dict['PASSWORD']:
conn_params['password'] = force_str(settings_dict['PASSWORD'])
if settings_dict['HOST']:
conn_params['host'] = settings_dict['HOST']
if settings_dict['PORT']:
conn_params['port'] = settings_dict['PORT']
return conn_params
def get_new_connection(self, conn_params):
return Database.connect(**conn_params)
def init_connection_state(self):
settings_dict = self.settings_dict
self.connection.set_client_encoding('UTF8')
tz = 'UTC' if settings.USE_TZ else settings_dict.get('TIME_ZONE')
if tz:
try:
get_parameter_status = self.connection.get_parameter_status
except AttributeError:
# psycopg2 < 2.0.12 doesn't have get_parameter_status
conn_tz = None
else:
conn_tz = get_parameter_status('TimeZone')
if conn_tz != tz:
cursor = self.connection.cursor()
try:
cursor.execute(self.ops.set_time_zone_sql(), [tz])
finally:
cursor.close()
# Commit after setting the time zone (see #17062)
if not self.get_autocommit():
self.connection.commit()
def create_cursor(self):
cursor = self.connection.cursor()
cursor.tzinfo_factory = utc_tzinfo_factory if settings.USE_TZ else None
return cursor
def _set_isolation_level(self, isolation_level):
assert isolation_level in range(1, 5) # Use set_autocommit for level = 0
if self.psycopg2_version >= (2, 4, 2):
self.connection.set_session(isolation_level=isolation_level)
else:
self.connection.set_isolation_level(isolation_level)
def _set_autocommit(self, autocommit):
with self.wrap_database_errors:
if self.psycopg2_version >= (2, 4, 2):
self.connection.autocommit = autocommit
else:
if autocommit:
level = psycopg2.extensions.ISOLATION_LEVEL_AUTOCOMMIT
else:
level = self.isolation_level
self.connection.set_isolation_level(level)
def check_constraints(self, table_names=None):
"""
        To check constraints, we set constraints to immediate. Then, when we're done,
        we must ensure they are returned to deferred.
"""
self.cursor().execute('SET CONSTRAINTS ALL IMMEDIATE')
self.cursor().execute('SET CONSTRAINTS ALL DEFERRED')
def is_usable(self):
try:
# Use a psycopg cursor directly, bypassing Django's utilities.
self.connection.cursor().execute("SELECT 1")
except Database.Error:
return False
else:
return True
def schema_editor(self, *args, **kwargs):
"Returns a new instance of this backend's SchemaEditor"
return DatabaseSchemaEditor(self, *args, **kwargs)
@cached_property
def psycopg2_version(self):
version = psycopg2.__version__.split(' ', 1)[0]
return tuple(int(v) for v in version.split('.'))
@cached_property
def pg_version(self):
with self.temporary_connection():
return get_version(self.connection)
| gpl-3.0 |
knifenomad/django | tests/delete/tests.py | 222 | 18346 | from __future__ import unicode_literals
from math import ceil
from django.db import IntegrityError, connection, models
from django.db.models.sql.constants import GET_ITERATOR_CHUNK_SIZE
from django.test import TestCase, skipIfDBFeature, skipUnlessDBFeature
from django.utils.six.moves import range
from .models import (
A, M, MR, R, S, T, Avatar, Base, Child, HiddenUser, HiddenUserProfile,
M2MFrom, M2MTo, MRNull, Parent, RChild, User, create_a, get_default_r,
)
class OnDeleteTests(TestCase):
def setUp(self):
self.DEFAULT = get_default_r()
def test_auto(self):
a = create_a('auto')
a.auto.delete()
self.assertFalse(A.objects.filter(name='auto').exists())
def test_auto_nullable(self):
a = create_a('auto_nullable')
a.auto_nullable.delete()
self.assertFalse(A.objects.filter(name='auto_nullable').exists())
def test_setvalue(self):
a = create_a('setvalue')
a.setvalue.delete()
a = A.objects.get(pk=a.pk)
self.assertEqual(self.DEFAULT, a.setvalue.pk)
def test_setnull(self):
a = create_a('setnull')
a.setnull.delete()
a = A.objects.get(pk=a.pk)
self.assertIsNone(a.setnull)
def test_setdefault(self):
a = create_a('setdefault')
a.setdefault.delete()
a = A.objects.get(pk=a.pk)
self.assertEqual(self.DEFAULT, a.setdefault.pk)
def test_setdefault_none(self):
a = create_a('setdefault_none')
a.setdefault_none.delete()
a = A.objects.get(pk=a.pk)
self.assertIsNone(a.setdefault_none)
def test_cascade(self):
a = create_a('cascade')
a.cascade.delete()
self.assertFalse(A.objects.filter(name='cascade').exists())
def test_cascade_nullable(self):
a = create_a('cascade_nullable')
a.cascade_nullable.delete()
self.assertFalse(A.objects.filter(name='cascade_nullable').exists())
def test_protect(self):
a = create_a('protect')
self.assertRaises(IntegrityError, a.protect.delete)
def test_do_nothing(self):
# Testing DO_NOTHING is a bit harder: It would raise IntegrityError for a normal model,
# so we connect to pre_delete and set the fk to a known value.
replacement_r = R.objects.create()
def check_do_nothing(sender, **kwargs):
obj = kwargs['instance']
obj.donothing_set.update(donothing=replacement_r)
models.signals.pre_delete.connect(check_do_nothing)
a = create_a('do_nothing')
a.donothing.delete()
a = A.objects.get(pk=a.pk)
self.assertEqual(replacement_r, a.donothing)
models.signals.pre_delete.disconnect(check_do_nothing)
def test_do_nothing_qscount(self):
"""
Test that a models.DO_NOTHING relation doesn't trigger a query.
"""
b = Base.objects.create()
with self.assertNumQueries(1):
# RelToBase should not be queried.
b.delete()
self.assertEqual(Base.objects.count(), 0)
def test_inheritance_cascade_up(self):
child = RChild.objects.create()
child.delete()
self.assertFalse(R.objects.filter(pk=child.pk).exists())
def test_inheritance_cascade_down(self):
child = RChild.objects.create()
parent = child.r_ptr
parent.delete()
self.assertFalse(RChild.objects.filter(pk=child.pk).exists())
def test_cascade_from_child(self):
a = create_a('child')
a.child.delete()
self.assertFalse(A.objects.filter(name='child').exists())
self.assertFalse(R.objects.filter(pk=a.child_id).exists())
def test_cascade_from_parent(self):
a = create_a('child')
R.objects.get(pk=a.child_id).delete()
self.assertFalse(A.objects.filter(name='child').exists())
self.assertFalse(RChild.objects.filter(pk=a.child_id).exists())
def test_setnull_from_child(self):
a = create_a('child_setnull')
a.child_setnull.delete()
self.assertFalse(R.objects.filter(pk=a.child_setnull_id).exists())
a = A.objects.get(pk=a.pk)
self.assertIsNone(a.child_setnull)
def test_setnull_from_parent(self):
a = create_a('child_setnull')
R.objects.get(pk=a.child_setnull_id).delete()
self.assertFalse(RChild.objects.filter(pk=a.child_setnull_id).exists())
a = A.objects.get(pk=a.pk)
self.assertIsNone(a.child_setnull)
def test_o2o_setnull(self):
a = create_a('o2o_setnull')
a.o2o_setnull.delete()
a = A.objects.get(pk=a.pk)
self.assertIsNone(a.o2o_setnull)
class DeletionTests(TestCase):
def test_m2m(self):
m = M.objects.create()
r = R.objects.create()
MR.objects.create(m=m, r=r)
r.delete()
self.assertFalse(MR.objects.exists())
r = R.objects.create()
MR.objects.create(m=m, r=r)
m.delete()
self.assertFalse(MR.objects.exists())
m = M.objects.create()
r = R.objects.create()
m.m2m.add(r)
r.delete()
through = M._meta.get_field('m2m').remote_field.through
self.assertFalse(through.objects.exists())
r = R.objects.create()
m.m2m.add(r)
m.delete()
self.assertFalse(through.objects.exists())
m = M.objects.create()
r = R.objects.create()
MRNull.objects.create(m=m, r=r)
r.delete()
        self.assertTrue(MRNull.objects.exists())
self.assertFalse(m.m2m_through_null.exists())
def test_bulk(self):
s = S.objects.create(r=R.objects.create())
for i in range(2 * GET_ITERATOR_CHUNK_SIZE):
T.objects.create(s=s)
# 1 (select related `T` instances)
# + 1 (select related `U` instances)
# + 2 (delete `T` instances in batches)
# + 1 (delete `s`)
self.assertNumQueries(5, s.delete)
self.assertFalse(S.objects.exists())
def test_instance_update(self):
deleted = []
related_setnull_sets = []
def pre_delete(sender, **kwargs):
obj = kwargs['instance']
deleted.append(obj)
if isinstance(obj, R):
related_setnull_sets.append(list(a.pk for a in obj.setnull_set.all()))
models.signals.pre_delete.connect(pre_delete)
a = create_a('update_setnull')
a.setnull.delete()
a = create_a('update_cascade')
a.cascade.delete()
for obj in deleted:
self.assertIsNone(obj.pk)
for pk_list in related_setnull_sets:
for a in A.objects.filter(id__in=pk_list):
self.assertIsNone(a.setnull)
models.signals.pre_delete.disconnect(pre_delete)
def test_deletion_order(self):
pre_delete_order = []
post_delete_order = []
def log_post_delete(sender, **kwargs):
pre_delete_order.append((sender, kwargs['instance'].pk))
def log_pre_delete(sender, **kwargs):
post_delete_order.append((sender, kwargs['instance'].pk))
models.signals.post_delete.connect(log_post_delete)
models.signals.pre_delete.connect(log_pre_delete)
r = R.objects.create(pk=1)
s1 = S.objects.create(pk=1, r=r)
s2 = S.objects.create(pk=2, r=r)
T.objects.create(pk=1, s=s1)
T.objects.create(pk=2, s=s2)
r.delete()
self.assertEqual(
pre_delete_order, [(T, 2), (T, 1), (S, 2), (S, 1), (R, 1)]
)
self.assertEqual(
post_delete_order, [(T, 1), (T, 2), (S, 1), (S, 2), (R, 1)]
)
models.signals.post_delete.disconnect(log_post_delete)
models.signals.pre_delete.disconnect(log_pre_delete)
def test_relational_post_delete_signals_happen_before_parent_object(self):
deletions = []
def log_post_delete(instance, **kwargs):
self.assertTrue(R.objects.filter(pk=instance.r_id))
self.assertIs(type(instance), S)
deletions.append(instance.id)
r = R.objects.create(pk=1)
S.objects.create(pk=1, r=r)
models.signals.post_delete.connect(log_post_delete, sender=S)
try:
r.delete()
finally:
models.signals.post_delete.disconnect(log_post_delete)
self.assertEqual(len(deletions), 1)
self.assertEqual(deletions[0], 1)
@skipUnlessDBFeature("can_defer_constraint_checks")
def test_can_defer_constraint_checks(self):
u = User.objects.create(
avatar=Avatar.objects.create()
)
a = Avatar.objects.get(pk=u.avatar_id)
# 1 query to find the users for the avatar.
# 1 query to delete the user
# 1 query to delete the avatar
# The important thing is that when we can defer constraint checks there
# is no need to do an UPDATE on User.avatar to null it out.
# Attach a signal to make sure we will not do fast_deletes.
calls = []
def noop(*args, **kwargs):
calls.append('')
models.signals.post_delete.connect(noop, sender=User)
self.assertNumQueries(3, a.delete)
self.assertFalse(User.objects.exists())
self.assertFalse(Avatar.objects.exists())
self.assertEqual(len(calls), 1)
models.signals.post_delete.disconnect(noop, sender=User)
@skipIfDBFeature("can_defer_constraint_checks")
def test_cannot_defer_constraint_checks(self):
u = User.objects.create(
avatar=Avatar.objects.create()
)
# Attach a signal to make sure we will not do fast_deletes.
calls = []
def noop(*args, **kwargs):
calls.append('')
models.signals.post_delete.connect(noop, sender=User)
a = Avatar.objects.get(pk=u.avatar_id)
# The below doesn't make sense... Why do we need to null out
# user.avatar if we are going to delete the user immediately after it,
# and there are no more cascades.
# 1 query to find the users for the avatar.
# 1 query to delete the user
# 1 query to null out user.avatar, because we can't defer the constraint
# 1 query to delete the avatar
self.assertNumQueries(4, a.delete)
self.assertFalse(User.objects.exists())
self.assertFalse(Avatar.objects.exists())
self.assertEqual(len(calls), 1)
models.signals.post_delete.disconnect(noop, sender=User)
def test_hidden_related(self):
r = R.objects.create()
h = HiddenUser.objects.create(r=r)
HiddenUserProfile.objects.create(user=h)
r.delete()
self.assertEqual(HiddenUserProfile.objects.count(), 0)
def test_large_delete(self):
TEST_SIZE = 2000
objs = [Avatar() for i in range(0, TEST_SIZE)]
Avatar.objects.bulk_create(objs)
# Calculate the number of queries needed.
batch_size = connection.ops.bulk_batch_size(['pk'], objs)
# The related fetches are done in batches.
batches = int(ceil(float(len(objs)) / batch_size))
# One query for Avatar.objects.all() and then one related fast delete for
# each batch.
fetches_to_mem = 1 + batches
# The Avatar objects are going to be deleted in batches of GET_ITERATOR_CHUNK_SIZE
queries = fetches_to_mem + TEST_SIZE // GET_ITERATOR_CHUNK_SIZE
self.assertNumQueries(queries, Avatar.objects.all().delete)
self.assertFalse(Avatar.objects.exists())
def test_large_delete_related(self):
TEST_SIZE = 2000
s = S.objects.create(r=R.objects.create())
for i in range(TEST_SIZE):
T.objects.create(s=s)
batch_size = max(connection.ops.bulk_batch_size(['pk'], range(TEST_SIZE)), 1)
# TEST_SIZE // batch_size (select related `T` instances)
# + 1 (select related `U` instances)
# + TEST_SIZE // GET_ITERATOR_CHUNK_SIZE (delete `T` instances in batches)
# + 1 (delete `s`)
expected_num_queries = (ceil(TEST_SIZE // batch_size) +
ceil(TEST_SIZE // GET_ITERATOR_CHUNK_SIZE) + 2)
self.assertNumQueries(expected_num_queries, s.delete)
self.assertFalse(S.objects.exists())
self.assertFalse(T.objects.exists())
def test_delete_with_keeping_parents(self):
child = RChild.objects.create()
parent_id = child.r_ptr_id
child.delete(keep_parents=True)
self.assertFalse(RChild.objects.filter(id=child.id).exists())
self.assertTrue(R.objects.filter(id=parent_id).exists())
def test_queryset_delete_returns_num_rows(self):
"""
QuerySet.delete() should return the number of deleted rows and a
dictionary with the number of deletions for each object type.
"""
Avatar.objects.bulk_create([Avatar(desc='a'), Avatar(desc='b'), Avatar(desc='c')])
avatars_count = Avatar.objects.count()
deleted, rows_count = Avatar.objects.all().delete()
self.assertEqual(deleted, avatars_count)
# more complex example with multiple object types
r = R.objects.create()
h1 = HiddenUser.objects.create(r=r)
HiddenUser.objects.create(r=r)
HiddenUserProfile.objects.create(user=h1)
existed_objs = {
R._meta.label: R.objects.count(),
HiddenUser._meta.label: HiddenUser.objects.count(),
A._meta.label: A.objects.count(),
MR._meta.label: MR.objects.count(),
HiddenUserProfile._meta.label: HiddenUserProfile.objects.count(),
}
deleted, deleted_objs = R.objects.all().delete()
for k, v in existed_objs.items():
self.assertEqual(deleted_objs[k], v)
def test_model_delete_returns_num_rows(self):
"""
Model.delete() should return the number of deleted rows and a
dictionary with the number of deletions for each object type.
"""
r = R.objects.create()
h1 = HiddenUser.objects.create(r=r)
h2 = HiddenUser.objects.create(r=r)
HiddenUser.objects.create(r=r)
HiddenUserProfile.objects.create(user=h1)
HiddenUserProfile.objects.create(user=h2)
m1 = M.objects.create()
m2 = M.objects.create()
MR.objects.create(r=r, m=m1)
r.m_set.add(m1)
r.m_set.add(m2)
r.save()
existed_objs = {
R._meta.label: R.objects.count(),
HiddenUser._meta.label: HiddenUser.objects.count(),
A._meta.label: A.objects.count(),
MR._meta.label: MR.objects.count(),
HiddenUserProfile._meta.label: HiddenUserProfile.objects.count(),
M.m2m.through._meta.label: M.m2m.through.objects.count(),
}
deleted, deleted_objs = r.delete()
self.assertEqual(deleted, sum(existed_objs.values()))
for k, v in existed_objs.items():
self.assertEqual(deleted_objs[k], v)
class FastDeleteTests(TestCase):
def test_fast_delete_fk(self):
u = User.objects.create(
avatar=Avatar.objects.create()
)
a = Avatar.objects.get(pk=u.avatar_id)
# 1 query to fast-delete the user
# 1 query to delete the avatar
self.assertNumQueries(2, a.delete)
self.assertFalse(User.objects.exists())
self.assertFalse(Avatar.objects.exists())
def test_fast_delete_m2m(self):
t = M2MTo.objects.create()
f = M2MFrom.objects.create()
f.m2m.add(t)
# 1 to delete f, 1 to fast-delete m2m for f
self.assertNumQueries(2, f.delete)
def test_fast_delete_revm2m(self):
t = M2MTo.objects.create()
f = M2MFrom.objects.create()
f.m2m.add(t)
# 1 to delete t, 1 to fast-delete t's m_set
self.assertNumQueries(2, f.delete)
def test_fast_delete_qs(self):
u1 = User.objects.create()
u2 = User.objects.create()
self.assertNumQueries(1, User.objects.filter(pk=u1.pk).delete)
self.assertEqual(User.objects.count(), 1)
self.assertTrue(User.objects.filter(pk=u2.pk).exists())
def test_fast_delete_joined_qs(self):
a = Avatar.objects.create(desc='a')
User.objects.create(avatar=a)
u2 = User.objects.create()
expected_queries = 1 if connection.features.update_can_self_select else 2
self.assertNumQueries(expected_queries,
User.objects.filter(avatar__desc='a').delete)
self.assertEqual(User.objects.count(), 1)
self.assertTrue(User.objects.filter(pk=u2.pk).exists())
def test_fast_delete_inheritance(self):
c = Child.objects.create()
p = Parent.objects.create()
# 1 for self, 1 for parent
# However, this doesn't work as child.parent access creates a query,
# and this means we will be generating extra queries (a lot for large
# querysets). This is not a fast-delete problem.
# self.assertNumQueries(2, c.delete)
c.delete()
self.assertFalse(Child.objects.exists())
self.assertEqual(Parent.objects.count(), 1)
self.assertEqual(Parent.objects.filter(pk=p.pk).count(), 1)
# 1 for self delete, 1 for fast delete of empty "child" qs.
self.assertNumQueries(2, p.delete)
self.assertFalse(Parent.objects.exists())
# 1 for self delete, 1 for fast delete of empty "child" qs.
c = Child.objects.create()
p = c.parent_ptr
self.assertNumQueries(2, p.delete)
self.assertFalse(Parent.objects.exists())
self.assertFalse(Child.objects.exists())
def test_fast_delete_large_batch(self):
User.objects.bulk_create(User() for i in range(0, 2000))
# No problems here - we aren't going to cascade, so we will fast
# delete the objects in a single query.
self.assertNumQueries(1, User.objects.all().delete)
a = Avatar.objects.create(desc='a')
User.objects.bulk_create(User(avatar=a) for i in range(0, 2000))
# We don't hit parameter amount limits for a, so just one query for
# that + fast delete of the related objs.
self.assertNumQueries(2, a.delete)
self.assertEqual(User.objects.count(), 0)
| bsd-3-clause |
IngenicSemiconductor/KERNEL-WARRIOR | Documentation/target/tcm_mod_builder.py | 3119 | 42754 | #!/usr/bin/python
# The TCM v4 multi-protocol fabric module generation script for drivers/target/$NEW_MOD
#
# Copyright (c) 2010 Rising Tide Systems
# Copyright (c) 2010 Linux-iSCSI.org
#
# Author: [email protected]
#
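#
# Example invocation (illustrative; see the accompanying tcm_mod_builder.txt
# for the documented flags):
#
#   python tcm_mod_builder.py -p iSCSI -m tcm_nab5000
#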
import os, sys
import subprocess as sub
import string
import re
import optparse
tcm_dir = ""
fabric_ops = []
fabric_mod_dir = ""
fabric_mod_port = ""
fabric_mod_init_port = ""
def tcm_mod_err(msg):
print msg
sys.exit(1)
def tcm_mod_create_module_subdir(fabric_mod_dir_var):
if os.path.isdir(fabric_mod_dir_var) == True:
return 1
print "Creating fabric_mod_dir: " + fabric_mod_dir_var
ret = os.mkdir(fabric_mod_dir_var)
if ret:
tcm_mod_err("Unable to mkdir " + fabric_mod_dir_var)
return
def tcm_mod_build_FC_include(fabric_mod_dir_var, fabric_mod_name):
global fabric_mod_port
global fabric_mod_init_port
buf = ""
f = fabric_mod_dir_var + "/" + fabric_mod_name + "_base.h"
print "Writing file: " + f
p = open(f, 'w');
if not p:
tcm_mod_err("Unable to open file: " + f)
buf = "#define " + fabric_mod_name.upper() + "_VERSION \"v0.1\"\n"
buf += "#define " + fabric_mod_name.upper() + "_NAMELEN 32\n"
buf += "\n"
buf += "struct " + fabric_mod_name + "_nacl {\n"
buf += " /* Binary World Wide unique Port Name for FC Initiator Nport */\n"
buf += " u64 nport_wwpn;\n"
buf += " /* ASCII formatted WWPN for FC Initiator Nport */\n"
buf += " char nport_name[" + fabric_mod_name.upper() + "_NAMELEN];\n"
buf += " /* Returned by " + fabric_mod_name + "_make_nodeacl() */\n"
buf += " struct se_node_acl se_node_acl;\n"
buf += "};\n"
buf += "\n"
buf += "struct " + fabric_mod_name + "_tpg {\n"
buf += " /* FC lport target portal group tag for TCM */\n"
buf += " u16 lport_tpgt;\n"
buf += " /* Pointer back to " + fabric_mod_name + "_lport */\n"
buf += " struct " + fabric_mod_name + "_lport *lport;\n"
buf += " /* Returned by " + fabric_mod_name + "_make_tpg() */\n"
buf += " struct se_portal_group se_tpg;\n"
buf += "};\n"
buf += "\n"
buf += "struct " + fabric_mod_name + "_lport {\n"
buf += " /* SCSI protocol the lport is providing */\n"
buf += " u8 lport_proto_id;\n"
buf += " /* Binary World Wide unique Port Name for FC Target Lport */\n"
buf += " u64 lport_wwpn;\n"
buf += " /* ASCII formatted WWPN for FC Target Lport */\n"
buf += " char lport_name[" + fabric_mod_name.upper() + "_NAMELEN];\n"
buf += " /* Returned by " + fabric_mod_name + "_make_lport() */\n"
buf += " struct se_wwn lport_wwn;\n"
buf += "};\n"
ret = p.write(buf)
if ret:
tcm_mod_err("Unable to write f: " + f)
p.close()
fabric_mod_port = "lport"
fabric_mod_init_port = "nport"
return
def tcm_mod_build_SAS_include(fabric_mod_dir_var, fabric_mod_name):
global fabric_mod_port
global fabric_mod_init_port
buf = ""
f = fabric_mod_dir_var + "/" + fabric_mod_name + "_base.h"
print "Writing file: " + f
p = open(f, 'w');
if not p:
tcm_mod_err("Unable to open file: " + f)
buf = "#define " + fabric_mod_name.upper() + "_VERSION \"v0.1\"\n"
buf += "#define " + fabric_mod_name.upper() + "_NAMELEN 32\n"
buf += "\n"
buf += "struct " + fabric_mod_name + "_nacl {\n"
buf += " /* Binary World Wide unique Port Name for SAS Initiator port */\n"
buf += " u64 iport_wwpn;\n"
buf += " /* ASCII formatted WWPN for Sas Initiator port */\n"
buf += " char iport_name[" + fabric_mod_name.upper() + "_NAMELEN];\n"
buf += " /* Returned by " + fabric_mod_name + "_make_nodeacl() */\n"
buf += " struct se_node_acl se_node_acl;\n"
buf += "};\n\n"
buf += "struct " + fabric_mod_name + "_tpg {\n"
buf += " /* SAS port target portal group tag for TCM */\n"
buf += " u16 tport_tpgt;\n"
buf += " /* Pointer back to " + fabric_mod_name + "_tport */\n"
buf += " struct " + fabric_mod_name + "_tport *tport;\n"
buf += " /* Returned by " + fabric_mod_name + "_make_tpg() */\n"
buf += " struct se_portal_group se_tpg;\n"
buf += "};\n\n"
buf += "struct " + fabric_mod_name + "_tport {\n"
buf += " /* SCSI protocol the tport is providing */\n"
buf += " u8 tport_proto_id;\n"
buf += " /* Binary World Wide unique Port Name for SAS Target port */\n"
buf += " u64 tport_wwpn;\n"
buf += " /* ASCII formatted WWPN for SAS Target port */\n"
buf += " char tport_name[" + fabric_mod_name.upper() + "_NAMELEN];\n"
buf += " /* Returned by " + fabric_mod_name + "_make_tport() */\n"
buf += " struct se_wwn tport_wwn;\n"
buf += "};\n"
ret = p.write(buf)
if ret:
tcm_mod_err("Unable to write f: " + f)
p.close()
fabric_mod_port = "tport"
fabric_mod_init_port = "iport"
return
def tcm_mod_build_iSCSI_include(fabric_mod_dir_var, fabric_mod_name):
global fabric_mod_port
global fabric_mod_init_port
buf = ""
f = fabric_mod_dir_var + "/" + fabric_mod_name + "_base.h"
print "Writing file: " + f
p = open(f, 'w');
if not p:
tcm_mod_err("Unable to open file: " + f)
buf = "#define " + fabric_mod_name.upper() + "_VERSION \"v0.1\"\n"
buf += "#define " + fabric_mod_name.upper() + "_NAMELEN 32\n"
buf += "\n"
buf += "struct " + fabric_mod_name + "_nacl {\n"
buf += " /* ASCII formatted InitiatorName */\n"
buf += " char iport_name[" + fabric_mod_name.upper() + "_NAMELEN];\n"
buf += " /* Returned by " + fabric_mod_name + "_make_nodeacl() */\n"
buf += " struct se_node_acl se_node_acl;\n"
buf += "};\n\n"
buf += "struct " + fabric_mod_name + "_tpg {\n"
buf += " /* iSCSI target portal group tag for TCM */\n"
buf += " u16 tport_tpgt;\n"
buf += " /* Pointer back to " + fabric_mod_name + "_tport */\n"
buf += " struct " + fabric_mod_name + "_tport *tport;\n"
buf += " /* Returned by " + fabric_mod_name + "_make_tpg() */\n"
buf += " struct se_portal_group se_tpg;\n"
buf += "};\n\n"
buf += "struct " + fabric_mod_name + "_tport {\n"
buf += " /* SCSI protocol the tport is providing */\n"
buf += " u8 tport_proto_id;\n"
buf += " /* ASCII formatted TargetName for IQN */\n"
buf += " char tport_name[" + fabric_mod_name.upper() + "_NAMELEN];\n"
buf += " /* Returned by " + fabric_mod_name + "_make_tport() */\n"
buf += " struct se_wwn tport_wwn;\n"
buf += "};\n"
ret = p.write(buf)
if ret:
tcm_mod_err("Unable to write f: " + f)
p.close()
fabric_mod_port = "tport"
fabric_mod_init_port = "iport"
return
def tcm_mod_build_base_includes(proto_ident, fabric_mod_dir_val, fabric_mod_name):
if proto_ident == "FC":
tcm_mod_build_FC_include(fabric_mod_dir_val, fabric_mod_name)
elif proto_ident == "SAS":
tcm_mod_build_SAS_include(fabric_mod_dir_val, fabric_mod_name)
elif proto_ident == "iSCSI":
tcm_mod_build_iSCSI_include(fabric_mod_dir_val, fabric_mod_name)
else:
print "Unsupported proto_ident: " + proto_ident
sys.exit(1)
return
def tcm_mod_build_configfs(proto_ident, fabric_mod_dir_var, fabric_mod_name):
buf = ""
f = fabric_mod_dir_var + "/" + fabric_mod_name + "_configfs.c"
print "Writing file: " + f
p = open(f, 'w');
if not p:
tcm_mod_err("Unable to open file: " + f)
buf = "#include <linux/module.h>\n"
buf += "#include <linux/moduleparam.h>\n"
buf += "#include <linux/version.h>\n"
buf += "#include <generated/utsrelease.h>\n"
buf += "#include <linux/utsname.h>\n"
buf += "#include <linux/init.h>\n"
buf += "#include <linux/slab.h>\n"
buf += "#include <linux/kthread.h>\n"
buf += "#include <linux/types.h>\n"
buf += "#include <linux/string.h>\n"
buf += "#include <linux/configfs.h>\n"
buf += "#include <linux/ctype.h>\n"
buf += "#include <asm/unaligned.h>\n\n"
buf += "#include <target/target_core_base.h>\n"
buf += "#include <target/target_core_transport.h>\n"
buf += "#include <target/target_core_fabric_ops.h>\n"
buf += "#include <target/target_core_fabric_configfs.h>\n"
buf += "#include <target/target_core_fabric_lib.h>\n"
buf += "#include <target/target_core_device.h>\n"
buf += "#include <target/target_core_tpg.h>\n"
buf += "#include <target/target_core_configfs.h>\n"
buf += "#include <target/target_core_base.h>\n"
buf += "#include <target/configfs_macros.h>\n\n"
buf += "#include \"" + fabric_mod_name + "_base.h\"\n"
buf += "#include \"" + fabric_mod_name + "_fabric.h\"\n\n"
buf += "/* Local pointer to allocated TCM configfs fabric module */\n"
buf += "struct target_fabric_configfs *" + fabric_mod_name + "_fabric_configfs;\n\n"
buf += "static struct se_node_acl *" + fabric_mod_name + "_make_nodeacl(\n"
buf += " struct se_portal_group *se_tpg,\n"
buf += " struct config_group *group,\n"
buf += " const char *name)\n"
buf += "{\n"
buf += " struct se_node_acl *se_nacl, *se_nacl_new;\n"
buf += " struct " + fabric_mod_name + "_nacl *nacl;\n"
if proto_ident == "FC" or proto_ident == "SAS":
buf += " u64 wwpn = 0;\n"
buf += " u32 nexus_depth;\n\n"
buf += " /* " + fabric_mod_name + "_parse_wwn(name, &wwpn, 1) < 0)\n"
buf += " return ERR_PTR(-EINVAL); */\n"
buf += " se_nacl_new = " + fabric_mod_name + "_alloc_fabric_acl(se_tpg);\n"
buf += " if (!(se_nacl_new))\n"
buf += " return ERR_PTR(-ENOMEM);\n"
buf += "//#warning FIXME: Hardcoded nexus depth in " + fabric_mod_name + "_make_nodeacl()\n"
buf += " nexus_depth = 1;\n"
buf += " /*\n"
buf += " * se_nacl_new may be released by core_tpg_add_initiator_node_acl()\n"
buf += " * when converting a NodeACL from demo mode -> explict\n"
buf += " */\n"
buf += " se_nacl = core_tpg_add_initiator_node_acl(se_tpg, se_nacl_new,\n"
buf += " name, nexus_depth);\n"
buf += " if (IS_ERR(se_nacl)) {\n"
buf += " " + fabric_mod_name + "_release_fabric_acl(se_tpg, se_nacl_new);\n"
buf += " return se_nacl;\n"
buf += " }\n"
buf += " /*\n"
buf += " * Locate our struct " + fabric_mod_name + "_nacl and set the FC Nport WWPN\n"
buf += " */\n"
buf += " nacl = container_of(se_nacl, struct " + fabric_mod_name + "_nacl, se_node_acl);\n"
if proto_ident == "FC" or proto_ident == "SAS":
buf += " nacl->" + fabric_mod_init_port + "_wwpn = wwpn;\n"
buf += " /* " + fabric_mod_name + "_format_wwn(&nacl->" + fabric_mod_init_port + "_name[0], " + fabric_mod_name.upper() + "_NAMELEN, wwpn); */\n\n"
buf += " return se_nacl;\n"
buf += "}\n\n"
buf += "static void " + fabric_mod_name + "_drop_nodeacl(struct se_node_acl *se_acl)\n"
buf += "{\n"
buf += " struct " + fabric_mod_name + "_nacl *nacl = container_of(se_acl,\n"
buf += " struct " + fabric_mod_name + "_nacl, se_node_acl);\n"
buf += " core_tpg_del_initiator_node_acl(se_acl->se_tpg, se_acl, 1);\n"
buf += " kfree(nacl);\n"
buf += "}\n\n"
buf += "static struct se_portal_group *" + fabric_mod_name + "_make_tpg(\n"
buf += " struct se_wwn *wwn,\n"
buf += " struct config_group *group,\n"
buf += " const char *name)\n"
buf += "{\n"
buf += " struct " + fabric_mod_name + "_" + fabric_mod_port + "*" + fabric_mod_port + " = container_of(wwn,\n"
buf += " struct " + fabric_mod_name + "_" + fabric_mod_port + ", " + fabric_mod_port + "_wwn);\n\n"
buf += " struct " + fabric_mod_name + "_tpg *tpg;\n"
buf += " unsigned long tpgt;\n"
buf += " int ret;\n\n"
buf += " if (strstr(name, \"tpgt_\") != name)\n"
buf += " return ERR_PTR(-EINVAL);\n"
buf += " if (strict_strtoul(name + 5, 10, &tpgt) || tpgt > UINT_MAX)\n"
buf += " return ERR_PTR(-EINVAL);\n\n"
buf += " tpg = kzalloc(sizeof(struct " + fabric_mod_name + "_tpg), GFP_KERNEL);\n"
buf += " if (!(tpg)) {\n"
buf += " printk(KERN_ERR \"Unable to allocate struct " + fabric_mod_name + "_tpg\");\n"
buf += " return ERR_PTR(-ENOMEM);\n"
buf += " }\n"
buf += " tpg->" + fabric_mod_port + " = " + fabric_mod_port + ";\n"
buf += " tpg->" + fabric_mod_port + "_tpgt = tpgt;\n\n"
buf += " ret = core_tpg_register(&" + fabric_mod_name + "_fabric_configfs->tf_ops, wwn,\n"
buf += " &tpg->se_tpg, (void *)tpg,\n"
buf += " TRANSPORT_TPG_TYPE_NORMAL);\n"
buf += " if (ret < 0) {\n"
buf += " kfree(tpg);\n"
buf += " return NULL;\n"
buf += " }\n"
buf += " return &tpg->se_tpg;\n"
buf += "}\n\n"
buf += "static void " + fabric_mod_name + "_drop_tpg(struct se_portal_group *se_tpg)\n"
buf += "{\n"
buf += " struct " + fabric_mod_name + "_tpg *tpg = container_of(se_tpg,\n"
buf += " struct " + fabric_mod_name + "_tpg, se_tpg);\n\n"
buf += " core_tpg_deregister(se_tpg);\n"
buf += " kfree(tpg);\n"
buf += "}\n\n"
buf += "static struct se_wwn *" + fabric_mod_name + "_make_" + fabric_mod_port + "(\n"
buf += " struct target_fabric_configfs *tf,\n"
buf += " struct config_group *group,\n"
buf += " const char *name)\n"
buf += "{\n"
buf += " struct " + fabric_mod_name + "_" + fabric_mod_port + " *" + fabric_mod_port + ";\n"
if proto_ident == "FC" or proto_ident == "SAS":
buf += " u64 wwpn = 0;\n\n"
buf += " /* if (" + fabric_mod_name + "_parse_wwn(name, &wwpn, 1) < 0)\n"
buf += " return ERR_PTR(-EINVAL); */\n\n"
buf += " " + fabric_mod_port + " = kzalloc(sizeof(struct " + fabric_mod_name + "_" + fabric_mod_port + "), GFP_KERNEL);\n"
buf += " if (!(" + fabric_mod_port + ")) {\n"
buf += " printk(KERN_ERR \"Unable to allocate struct " + fabric_mod_name + "_" + fabric_mod_port + "\");\n"
buf += " return ERR_PTR(-ENOMEM);\n"
buf += " }\n"
if proto_ident == "FC" or proto_ident == "SAS":
buf += " " + fabric_mod_port + "->" + fabric_mod_port + "_wwpn = wwpn;\n"
buf += " /* " + fabric_mod_name + "_format_wwn(&" + fabric_mod_port + "->" + fabric_mod_port + "_name[0], " + fabric_mod_name.upper() + "__NAMELEN, wwpn); */\n\n"
buf += " return &" + fabric_mod_port + "->" + fabric_mod_port + "_wwn;\n"
buf += "}\n\n"
buf += "static void " + fabric_mod_name + "_drop_" + fabric_mod_port + "(struct se_wwn *wwn)\n"
buf += "{\n"
buf += " struct " + fabric_mod_name + "_" + fabric_mod_port + " *" + fabric_mod_port + " = container_of(wwn,\n"
buf += " struct " + fabric_mod_name + "_" + fabric_mod_port + ", " + fabric_mod_port + "_wwn);\n"
buf += " kfree(" + fabric_mod_port + ");\n"
buf += "}\n\n"
buf += "static ssize_t " + fabric_mod_name + "_wwn_show_attr_version(\n"
buf += " struct target_fabric_configfs *tf,\n"
buf += " char *page)\n"
buf += "{\n"
buf += " return sprintf(page, \"" + fabric_mod_name.upper() + " fabric module %s on %s/%s\"\n"
buf += " \"on \"UTS_RELEASE\"\\n\", " + fabric_mod_name.upper() + "_VERSION, utsname()->sysname,\n"
buf += " utsname()->machine);\n"
buf += "}\n\n"
buf += "TF_WWN_ATTR_RO(" + fabric_mod_name + ", version);\n\n"
buf += "static struct configfs_attribute *" + fabric_mod_name + "_wwn_attrs[] = {\n"
buf += " &" + fabric_mod_name + "_wwn_version.attr,\n"
buf += " NULL,\n"
buf += "};\n\n"
buf += "static struct target_core_fabric_ops " + fabric_mod_name + "_ops = {\n"
buf += " .get_fabric_name = " + fabric_mod_name + "_get_fabric_name,\n"
buf += " .get_fabric_proto_ident = " + fabric_mod_name + "_get_fabric_proto_ident,\n"
buf += " .tpg_get_wwn = " + fabric_mod_name + "_get_fabric_wwn,\n"
buf += " .tpg_get_tag = " + fabric_mod_name + "_get_tag,\n"
buf += " .tpg_get_default_depth = " + fabric_mod_name + "_get_default_depth,\n"
buf += " .tpg_get_pr_transport_id = " + fabric_mod_name + "_get_pr_transport_id,\n"
buf += " .tpg_get_pr_transport_id_len = " + fabric_mod_name + "_get_pr_transport_id_len,\n"
buf += " .tpg_parse_pr_out_transport_id = " + fabric_mod_name + "_parse_pr_out_transport_id,\n"
buf += " .tpg_check_demo_mode = " + fabric_mod_name + "_check_false,\n"
buf += " .tpg_check_demo_mode_cache = " + fabric_mod_name + "_check_true,\n"
buf += " .tpg_check_demo_mode_write_protect = " + fabric_mod_name + "_check_true,\n"
buf += " .tpg_check_prod_mode_write_protect = " + fabric_mod_name + "_check_false,\n"
buf += " .tpg_alloc_fabric_acl = " + fabric_mod_name + "_alloc_fabric_acl,\n"
buf += " .tpg_release_fabric_acl = " + fabric_mod_name + "_release_fabric_acl,\n"
buf += " .tpg_get_inst_index = " + fabric_mod_name + "_tpg_get_inst_index,\n"
buf += " .release_cmd_to_pool = " + fabric_mod_name + "_release_cmd,\n"
buf += " .release_cmd_direct = " + fabric_mod_name + "_release_cmd,\n"
buf += " .shutdown_session = " + fabric_mod_name + "_shutdown_session,\n"
buf += " .close_session = " + fabric_mod_name + "_close_session,\n"
buf += " .stop_session = " + fabric_mod_name + "_stop_session,\n"
buf += " .fall_back_to_erl0 = " + fabric_mod_name + "_reset_nexus,\n"
buf += " .sess_logged_in = " + fabric_mod_name + "_sess_logged_in,\n"
buf += " .sess_get_index = " + fabric_mod_name + "_sess_get_index,\n"
buf += " .sess_get_initiator_sid = NULL,\n"
buf += " .write_pending = " + fabric_mod_name + "_write_pending,\n"
buf += " .write_pending_status = " + fabric_mod_name + "_write_pending_status,\n"
buf += " .set_default_node_attributes = " + fabric_mod_name + "_set_default_node_attrs,\n"
buf += " .get_task_tag = " + fabric_mod_name + "_get_task_tag,\n"
buf += " .get_cmd_state = " + fabric_mod_name + "_get_cmd_state,\n"
buf += " .new_cmd_failure = " + fabric_mod_name + "_new_cmd_failure,\n"
buf += " .queue_data_in = " + fabric_mod_name + "_queue_data_in,\n"
buf += " .queue_status = " + fabric_mod_name + "_queue_status,\n"
buf += " .queue_tm_rsp = " + fabric_mod_name + "_queue_tm_rsp,\n"
buf += " .get_fabric_sense_len = " + fabric_mod_name + "_get_fabric_sense_len,\n"
buf += " .set_fabric_sense_len = " + fabric_mod_name + "_set_fabric_sense_len,\n"
buf += " .is_state_remove = " + fabric_mod_name + "_is_state_remove,\n"
buf += " .pack_lun = " + fabric_mod_name + "_pack_lun,\n"
buf += " /*\n"
buf += " * Setup function pointers for generic logic in target_core_fabric_configfs.c\n"
buf += " */\n"
buf += " .fabric_make_wwn = " + fabric_mod_name + "_make_" + fabric_mod_port + ",\n"
buf += " .fabric_drop_wwn = " + fabric_mod_name + "_drop_" + fabric_mod_port + ",\n"
buf += " .fabric_make_tpg = " + fabric_mod_name + "_make_tpg,\n"
buf += " .fabric_drop_tpg = " + fabric_mod_name + "_drop_tpg,\n"
buf += " .fabric_post_link = NULL,\n"
buf += " .fabric_pre_unlink = NULL,\n"
buf += " .fabric_make_np = NULL,\n"
buf += " .fabric_drop_np = NULL,\n"
buf += " .fabric_make_nodeacl = " + fabric_mod_name + "_make_nodeacl,\n"
buf += " .fabric_drop_nodeacl = " + fabric_mod_name + "_drop_nodeacl,\n"
buf += "};\n\n"
buf += "static int " + fabric_mod_name + "_register_configfs(void)\n"
buf += "{\n"
buf += " struct target_fabric_configfs *fabric;\n"
buf += " int ret;\n\n"
buf += " printk(KERN_INFO \"" + fabric_mod_name.upper() + " fabric module %s on %s/%s\"\n"
buf += " \" on \"UTS_RELEASE\"\\n\"," + fabric_mod_name.upper() + "_VERSION, utsname()->sysname,\n"
buf += " utsname()->machine);\n"
buf += " /*\n"
buf += " * Register the top level struct config_item_type with TCM core\n"
buf += " */\n"
buf += " fabric = target_fabric_configfs_init(THIS_MODULE, \"" + fabric_mod_name[4:] + "\");\n"
buf += " if (!(fabric)) {\n"
buf += " printk(KERN_ERR \"target_fabric_configfs_init() failed\\n\");\n"
buf += " return -ENOMEM;\n"
buf += " }\n"
buf += " /*\n"
buf += " * Setup fabric->tf_ops from our local " + fabric_mod_name + "_ops\n"
buf += " */\n"
buf += " fabric->tf_ops = " + fabric_mod_name + "_ops;\n"
buf += " /*\n"
buf += " * Setup default attribute lists for various fabric->tf_cit_tmpl\n"
buf += " */\n"
buf += " TF_CIT_TMPL(fabric)->tfc_wwn_cit.ct_attrs = " + fabric_mod_name + "_wwn_attrs;\n"
buf += " TF_CIT_TMPL(fabric)->tfc_tpg_base_cit.ct_attrs = NULL;\n"
buf += " TF_CIT_TMPL(fabric)->tfc_tpg_attrib_cit.ct_attrs = NULL;\n"
buf += " TF_CIT_TMPL(fabric)->tfc_tpg_param_cit.ct_attrs = NULL;\n"
buf += " TF_CIT_TMPL(fabric)->tfc_tpg_np_base_cit.ct_attrs = NULL;\n"
buf += " TF_CIT_TMPL(fabric)->tfc_tpg_nacl_base_cit.ct_attrs = NULL;\n"
buf += " TF_CIT_TMPL(fabric)->tfc_tpg_nacl_attrib_cit.ct_attrs = NULL;\n"
buf += " TF_CIT_TMPL(fabric)->tfc_tpg_nacl_auth_cit.ct_attrs = NULL;\n"
buf += " TF_CIT_TMPL(fabric)->tfc_tpg_nacl_param_cit.ct_attrs = NULL;\n"
buf += " /*\n"
buf += " * Register the fabric for use within TCM\n"
buf += " */\n"
buf += " ret = target_fabric_configfs_register(fabric);\n"
buf += " if (ret < 0) {\n"
buf += " printk(KERN_ERR \"target_fabric_configfs_register() failed\"\n"
buf += " \" for " + fabric_mod_name.upper() + "\\n\");\n"
buf += " return ret;\n"
buf += " }\n"
buf += " /*\n"
buf += " * Setup our local pointer to *fabric\n"
buf += " */\n"
buf += " " + fabric_mod_name + "_fabric_configfs = fabric;\n"
buf += " printk(KERN_INFO \"" + fabric_mod_name.upper() + "[0] - Set fabric -> " + fabric_mod_name + "_fabric_configfs\\n\");\n"
buf += " return 0;\n"
buf += "};\n\n"
buf += "static void " + fabric_mod_name + "_deregister_configfs(void)\n"
buf += "{\n"
buf += " if (!(" + fabric_mod_name + "_fabric_configfs))\n"
buf += " return;\n\n"
buf += " target_fabric_configfs_deregister(" + fabric_mod_name + "_fabric_configfs);\n"
buf += " " + fabric_mod_name + "_fabric_configfs = NULL;\n"
buf += " printk(KERN_INFO \"" + fabric_mod_name.upper() + "[0] - Cleared " + fabric_mod_name + "_fabric_configfs\\n\");\n"
buf += "};\n\n"
buf += "static int __init " + fabric_mod_name + "_init(void)\n"
buf += "{\n"
buf += " int ret;\n\n"
buf += " ret = " + fabric_mod_name + "_register_configfs();\n"
buf += " if (ret < 0)\n"
buf += " return ret;\n\n"
buf += " return 0;\n"
buf += "};\n\n"
buf += "static void " + fabric_mod_name + "_exit(void)\n"
buf += "{\n"
buf += " " + fabric_mod_name + "_deregister_configfs();\n"
buf += "};\n\n"
buf += "#ifdef MODULE\n"
buf += "MODULE_DESCRIPTION(\"" + fabric_mod_name.upper() + " series fabric driver\");\n"
buf += "MODULE_LICENSE(\"GPL\");\n"
buf += "module_init(" + fabric_mod_name + "_init);\n"
buf += "module_exit(" + fabric_mod_name + "_exit);\n"
buf += "#endif\n"
# file.write() returns None in Python 2, so the old "if ret:" check could
# never fire; rely on the IOError raised by a failed write instead.
try:
    p.write(buf)
except IOError:
    tcm_mod_err("Unable to write f: " + f)
p.close()
return
def tcm_mod_scan_fabric_ops(tcm_dir):
fabric_ops_api = tcm_dir + "include/target/target_core_fabric_ops.h"
print "Using tcm_mod_scan_fabric_ops: " + fabric_ops_api
process_fo = 0
p = open(fabric_ops_api, 'r')
line = p.readline()
while line:
if process_fo == 0 and re.search('struct target_core_fabric_ops {', line):
line = p.readline()
continue
if process_fo == 0:
process_fo = 1
line = p.readline()
# Search for function pointer
if not re.search('\(\*', line):
continue
fabric_ops.append(line.rstrip())
continue
line = p.readline()
# Search for function pointer
if not re.search('\(\*', line):
continue
fabric_ops.append(line.rstrip())
p.close()
return
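# Illustrative note (added; not part of the original script): the scan above
# keeps every raw line of target_core_fabric_ops.h containing a "(*" function
# pointer declaration, e.g. lines of the form:
#
#   char *(*get_fabric_name)(void);
#   u32 (*sess_get_index)(struct se_session *);
#
# tcm_mod_dump_fabric_ops() below then pattern-matches these raw strings with
# re.search() to decide which stub functions to emit.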
def tcm_mod_dump_fabric_ops(proto_ident, fabric_mod_dir_var, fabric_mod_name):
buf = ""
bufi = ""
f = fabric_mod_dir_var + "/" + fabric_mod_name + "_fabric.c"
print "Writing file: " + f
p = open(f, 'w')
if not p:
tcm_mod_err("Unable to open file: " + f)
fi = fabric_mod_dir_var + "/" + fabric_mod_name + "_fabric.h"
print "Writing file: " + fi
pi = open(fi, 'w')
if not pi:
tcm_mod_err("Unable to open file: " + fi)
buf = "#include <linux/slab.h>\n"
buf += "#include <linux/kthread.h>\n"
buf += "#include <linux/types.h>\n"
buf += "#include <linux/list.h>\n"
buf += "#include <linux/types.h>\n"
buf += "#include <linux/string.h>\n"
buf += "#include <linux/ctype.h>\n"
buf += "#include <asm/unaligned.h>\n"
buf += "#include <scsi/scsi.h>\n"
buf += "#include <scsi/scsi_host.h>\n"
buf += "#include <scsi/scsi_device.h>\n"
buf += "#include <scsi/scsi_cmnd.h>\n"
buf += "#include <scsi/libfc.h>\n\n"
buf += "#include <target/target_core_base.h>\n"
buf += "#include <target/target_core_transport.h>\n"
buf += "#include <target/target_core_fabric_ops.h>\n"
buf += "#include <target/target_core_fabric_lib.h>\n"
buf += "#include <target/target_core_device.h>\n"
buf += "#include <target/target_core_tpg.h>\n"
buf += "#include <target/target_core_configfs.h>\n\n"
buf += "#include \"" + fabric_mod_name + "_base.h\"\n"
buf += "#include \"" + fabric_mod_name + "_fabric.h\"\n\n"
buf += "int " + fabric_mod_name + "_check_true(struct se_portal_group *se_tpg)\n"
buf += "{\n"
buf += " return 1;\n"
buf += "}\n\n"
bufi += "int " + fabric_mod_name + "_check_true(struct se_portal_group *);\n"
buf += "int " + fabric_mod_name + "_check_false(struct se_portal_group *se_tpg)\n"
buf += "{\n"
buf += " return 0;\n"
buf += "}\n\n"
bufi += "int " + fabric_mod_name + "_check_false(struct se_portal_group *);\n"
# Emit a stub for each function-pointer declaration collected by
# tcm_mod_scan_fabric_ops() above.
for fo in fabric_ops:
# print "fabric_ops: " + fo
if re.search('get_fabric_name', fo):
buf += "char *" + fabric_mod_name + "_get_fabric_name(void)\n"
buf += "{\n"
buf += " return \"" + fabric_mod_name[4:] + "\";\n"
buf += "}\n\n"
bufi += "char *" + fabric_mod_name + "_get_fabric_name(void);\n"
continue
if re.search('get_fabric_proto_ident', fo):
buf += "u8 " + fabric_mod_name + "_get_fabric_proto_ident(struct se_portal_group *se_tpg)\n"
buf += "{\n"
buf += " struct " + fabric_mod_name + "_tpg *tpg = container_of(se_tpg,\n"
buf += " struct " + fabric_mod_name + "_tpg, se_tpg);\n"
buf += " struct " + fabric_mod_name + "_" + fabric_mod_port + " *" + fabric_mod_port + " = tpg->" + fabric_mod_port + ";\n"
buf += " u8 proto_id;\n\n"
buf += " switch (" + fabric_mod_port + "->" + fabric_mod_port + "_proto_id) {\n"
if proto_ident == "FC":
buf += " case SCSI_PROTOCOL_FCP:\n"
buf += " default:\n"
buf += " proto_id = fc_get_fabric_proto_ident(se_tpg);\n"
buf += " break;\n"
elif proto_ident == "SAS":
buf += " case SCSI_PROTOCOL_SAS:\n"
buf += " default:\n"
buf += " proto_id = sas_get_fabric_proto_ident(se_tpg);\n"
buf += " break;\n"
elif proto_ident == "iSCSI":
buf += " case SCSI_PROTOCOL_ISCSI:\n"
buf += " default:\n"
buf += " proto_id = iscsi_get_fabric_proto_ident(se_tpg);\n"
buf += " break;\n"
buf += " }\n\n"
buf += " return proto_id;\n"
buf += "}\n\n"
bufi += "u8 " + fabric_mod_name + "_get_fabric_proto_ident(struct se_portal_group *);\n"
if re.search('get_wwn', fo):
buf += "char *" + fabric_mod_name + "_get_fabric_wwn(struct se_portal_group *se_tpg)\n"
buf += "{\n"
buf += " struct " + fabric_mod_name + "_tpg *tpg = container_of(se_tpg,\n"
buf += " struct " + fabric_mod_name + "_tpg, se_tpg);\n"
buf += " struct " + fabric_mod_name + "_" + fabric_mod_port + " *" + fabric_mod_port + " = tpg->" + fabric_mod_port + ";\n\n"
buf += " return &" + fabric_mod_port + "->" + fabric_mod_port + "_name[0];\n"
buf += "}\n\n"
bufi += "char *" + fabric_mod_name + "_get_fabric_wwn(struct se_portal_group *);\n"
if re.search('get_tag', fo):
buf += "u16 " + fabric_mod_name + "_get_tag(struct se_portal_group *se_tpg)\n"
buf += "{\n"
buf += " struct " + fabric_mod_name + "_tpg *tpg = container_of(se_tpg,\n"
buf += " struct " + fabric_mod_name + "_tpg, se_tpg);\n"
buf += " return tpg->" + fabric_mod_port + "_tpgt;\n"
buf += "}\n\n"
bufi += "u16 " + fabric_mod_name + "_get_tag(struct se_portal_group *);\n"
if re.search('get_default_depth', fo):
buf += "u32 " + fabric_mod_name + "_get_default_depth(struct se_portal_group *se_tpg)\n"
buf += "{\n"
buf += " return 1;\n"
buf += "}\n\n"
bufi += "u32 " + fabric_mod_name + "_get_default_depth(struct se_portal_group *);\n"
if re.search('get_pr_transport_id\)\(', fo):
buf += "u32 " + fabric_mod_name + "_get_pr_transport_id(\n"
buf += " struct se_portal_group *se_tpg,\n"
buf += " struct se_node_acl *se_nacl,\n"
buf += " struct t10_pr_registration *pr_reg,\n"
buf += " int *format_code,\n"
buf += " unsigned char *buf)\n"
buf += "{\n"
buf += " struct " + fabric_mod_name + "_tpg *tpg = container_of(se_tpg,\n"
buf += " struct " + fabric_mod_name + "_tpg, se_tpg);\n"
buf += " struct " + fabric_mod_name + "_" + fabric_mod_port + " *" + fabric_mod_port + " = tpg->" + fabric_mod_port + ";\n"
buf += " int ret = 0;\n\n"
buf += " switch (" + fabric_mod_port + "->" + fabric_mod_port + "_proto_id) {\n"
if proto_ident == "FC":
buf += " case SCSI_PROTOCOL_FCP:\n"
buf += " default:\n"
buf += " ret = fc_get_pr_transport_id(se_tpg, se_nacl, pr_reg,\n"
buf += " format_code, buf);\n"
buf += " break;\n"
elif proto_ident == "SAS":
buf += " case SCSI_PROTOCOL_SAS:\n"
buf += " default:\n"
buf += " ret = sas_get_pr_transport_id(se_tpg, se_nacl, pr_reg,\n"
buf += " format_code, buf);\n"
buf += " break;\n"
elif proto_ident == "iSCSI":
buf += " case SCSI_PROTOCOL_ISCSI:\n"
buf += " default:\n"
buf += " ret = iscsi_get_pr_transport_id(se_tpg, se_nacl, pr_reg,\n"
buf += " format_code, buf);\n"
buf += " break;\n"
buf += " }\n\n"
buf += " return ret;\n"
buf += "}\n\n"
bufi += "u32 " + fabric_mod_name + "_get_pr_transport_id(struct se_portal_group *,\n"
bufi += " struct se_node_acl *, struct t10_pr_registration *,\n"
bufi += " int *, unsigned char *);\n"
if re.search('get_pr_transport_id_len\)\(', fo):
buf += "u32 " + fabric_mod_name + "_get_pr_transport_id_len(\n"
buf += " struct se_portal_group *se_tpg,\n"
buf += " struct se_node_acl *se_nacl,\n"
buf += " struct t10_pr_registration *pr_reg,\n"
buf += " int *format_code)\n"
buf += "{\n"
buf += " struct " + fabric_mod_name + "_tpg *tpg = container_of(se_tpg,\n"
buf += " struct " + fabric_mod_name + "_tpg, se_tpg);\n"
buf += " struct " + fabric_mod_name + "_" + fabric_mod_port + " *" + fabric_mod_port + " = tpg->" + fabric_mod_port + ";\n"
buf += " int ret = 0;\n\n"
buf += " switch (" + fabric_mod_port + "->" + fabric_mod_port + "_proto_id) {\n"
if proto_ident == "FC":
buf += " case SCSI_PROTOCOL_FCP:\n"
buf += " default:\n"
buf += " ret = fc_get_pr_transport_id_len(se_tpg, se_nacl, pr_reg,\n"
buf += " format_code);\n"
buf += " break;\n"
elif proto_ident == "SAS":
buf += " case SCSI_PROTOCOL_SAS:\n"
buf += " default:\n"
buf += " ret = sas_get_pr_transport_id_len(se_tpg, se_nacl, pr_reg,\n"
buf += " format_code);\n"
buf += " break;\n"
elif proto_ident == "iSCSI":
buf += " case SCSI_PROTOCOL_ISCSI:\n"
buf += " default:\n"
buf += " ret = iscsi_get_pr_transport_id_len(se_tpg, se_nacl, pr_reg,\n"
buf += " format_code);\n"
buf += " break;\n"
buf += " }\n\n"
buf += " return ret;\n"
buf += "}\n\n"
bufi += "u32 " + fabric_mod_name + "_get_pr_transport_id_len(struct se_portal_group *,\n"
bufi += " struct se_node_acl *, struct t10_pr_registration *,\n"
bufi += " int *);\n"
if re.search('parse_pr_out_transport_id\)\(', fo):
buf += "char *" + fabric_mod_name + "_parse_pr_out_transport_id(\n"
buf += " struct se_portal_group *se_tpg,\n"
buf += " const char *buf,\n"
buf += " u32 *out_tid_len,\n"
buf += " char **port_nexus_ptr)\n"
buf += "{\n"
buf += " struct " + fabric_mod_name + "_tpg *tpg = container_of(se_tpg,\n"
buf += " struct " + fabric_mod_name + "_tpg, se_tpg);\n"
buf += " struct " + fabric_mod_name + "_" + fabric_mod_port + " *" + fabric_mod_port + " = tpg->" + fabric_mod_port + ";\n"
buf += " char *tid = NULL;\n\n"
buf += " switch (" + fabric_mod_port + "->" + fabric_mod_port + "_proto_id) {\n"
if proto_ident == "FC":
buf += " case SCSI_PROTOCOL_FCP:\n"
buf += " default:\n"
buf += " tid = fc_parse_pr_out_transport_id(se_tpg, buf, out_tid_len,\n"
buf += " port_nexus_ptr);\n"
elif proto_ident == "SAS":
buf += " case SCSI_PROTOCOL_SAS:\n"
buf += " default:\n"
buf += " tid = sas_parse_pr_out_transport_id(se_tpg, buf, out_tid_len,\n"
buf += " port_nexus_ptr);\n"
elif proto_ident == "iSCSI":
buf += " case SCSI_PROTOCOL_ISCSI:\n"
buf += " default:\n"
buf += " tid = iscsi_parse_pr_out_transport_id(se_tpg, buf, out_tid_len,\n"
buf += " port_nexus_ptr);\n"
buf += " }\n\n"
buf += " return tid;\n"
buf += "}\n\n"
bufi += "char *" + fabric_mod_name + "_parse_pr_out_transport_id(struct se_portal_group *,\n"
bufi += " const char *, u32 *, char **);\n"
if re.search('alloc_fabric_acl\)\(', fo):
buf += "struct se_node_acl *" + fabric_mod_name + "_alloc_fabric_acl(struct se_portal_group *se_tpg)\n"
buf += "{\n"
buf += " struct " + fabric_mod_name + "_nacl *nacl;\n\n"
buf += " nacl = kzalloc(sizeof(struct " + fabric_mod_name + "_nacl), GFP_KERNEL);\n"
buf += " if (!(nacl)) {\n"
buf += " printk(KERN_ERR \"Unable to alocate struct " + fabric_mod_name + "_nacl\\n\");\n"
buf += " return NULL;\n"
buf += " }\n\n"
buf += " return &nacl->se_node_acl;\n"
buf += "}\n\n"
bufi += "struct se_node_acl *" + fabric_mod_name + "_alloc_fabric_acl(struct se_portal_group *);\n"
if re.search('release_fabric_acl\)\(', fo):
buf += "void " + fabric_mod_name + "_release_fabric_acl(\n"
buf += " struct se_portal_group *se_tpg,\n"
buf += " struct se_node_acl *se_nacl)\n"
buf += "{\n"
buf += " struct " + fabric_mod_name + "_nacl *nacl = container_of(se_nacl,\n"
buf += " struct " + fabric_mod_name + "_nacl, se_node_acl);\n"
buf += " kfree(nacl);\n"
buf += "}\n\n"
bufi += "void " + fabric_mod_name + "_release_fabric_acl(struct se_portal_group *,\n"
bufi += " struct se_node_acl *);\n"
if re.search('tpg_get_inst_index\)\(', fo):
buf += "u32 " + fabric_mod_name + "_tpg_get_inst_index(struct se_portal_group *se_tpg)\n"
buf += "{\n"
buf += " return 1;\n"
buf += "}\n\n"
bufi += "u32 " + fabric_mod_name + "_tpg_get_inst_index(struct se_portal_group *);\n"
if re.search('release_cmd_to_pool', fo):
buf += "void " + fabric_mod_name + "_release_cmd(struct se_cmd *se_cmd)\n"
buf += "{\n"
buf += " return;\n"
buf += "}\n\n"
bufi += "void " + fabric_mod_name + "_release_cmd(struct se_cmd *);\n"
if re.search('shutdown_session\)\(', fo):
buf += "int " + fabric_mod_name + "_shutdown_session(struct se_session *se_sess)\n"
buf += "{\n"
buf += " return 0;\n"
buf += "}\n\n"
bufi += "int " + fabric_mod_name + "_shutdown_session(struct se_session *);\n"
if re.search('close_session\)\(', fo):
buf += "void " + fabric_mod_name + "_close_session(struct se_session *se_sess)\n"
buf += "{\n"
buf += " return;\n"
buf += "}\n\n"
bufi += "void " + fabric_mod_name + "_close_session(struct se_session *);\n"
if re.search('stop_session\)\(', fo):
buf += "void " + fabric_mod_name + "_stop_session(struct se_session *se_sess, int sess_sleep , int conn_sleep)\n"
buf += "{\n"
buf += " return;\n"
buf += "}\n\n"
bufi += "void " + fabric_mod_name + "_stop_session(struct se_session *, int, int);\n"
if re.search('fall_back_to_erl0\)\(', fo):
buf += "void " + fabric_mod_name + "_reset_nexus(struct se_session *se_sess)\n"
buf += "{\n"
buf += " return;\n"
buf += "}\n\n"
bufi += "void " + fabric_mod_name + "_reset_nexus(struct se_session *);\n"
if re.search('sess_logged_in\)\(', fo):
buf += "int " + fabric_mod_name + "_sess_logged_in(struct se_session *se_sess)\n"
buf += "{\n"
buf += " return 0;\n"
buf += "}\n\n"
bufi += "int " + fabric_mod_name + "_sess_logged_in(struct se_session *);\n"
if re.search('sess_get_index\)\(', fo):
buf += "u32 " + fabric_mod_name + "_sess_get_index(struct se_session *se_sess)\n"
buf += "{\n"
buf += " return 0;\n"
buf += "}\n\n"
bufi += "u32 " + fabric_mod_name + "_sess_get_index(struct se_session *);\n"
if re.search('write_pending\)\(', fo):
buf += "int " + fabric_mod_name + "_write_pending(struct se_cmd *se_cmd)\n"
buf += "{\n"
buf += " return 0;\n"
buf += "}\n\n"
bufi += "int " + fabric_mod_name + "_write_pending(struct se_cmd *);\n"
if re.search('write_pending_status\)\(', fo):
buf += "int " + fabric_mod_name + "_write_pending_status(struct se_cmd *se_cmd)\n"
buf += "{\n"
buf += " return 0;\n"
buf += "}\n\n"
bufi += "int " + fabric_mod_name + "_write_pending_status(struct se_cmd *);\n"
if re.search('set_default_node_attributes\)\(', fo):
buf += "void " + fabric_mod_name + "_set_default_node_attrs(struct se_node_acl *nacl)\n"
buf += "{\n"
buf += " return;\n"
buf += "}\n\n"
bufi += "void " + fabric_mod_name + "_set_default_node_attrs(struct se_node_acl *);\n"
if re.search('get_task_tag\)\(', fo):
buf += "u32 " + fabric_mod_name + "_get_task_tag(struct se_cmd *se_cmd)\n"
buf += "{\n"
buf += " return 0;\n"
buf += "}\n\n"
bufi += "u32 " + fabric_mod_name + "_get_task_tag(struct se_cmd *);\n"
if re.search('get_cmd_state\)\(', fo):
buf += "int " + fabric_mod_name + "_get_cmd_state(struct se_cmd *se_cmd)\n"
buf += "{\n"
buf += " return 0;\n"
buf += "}\n\n"
bufi += "int " + fabric_mod_name + "_get_cmd_state(struct se_cmd *);\n"
if re.search('new_cmd_failure\)\(', fo):
buf += "void " + fabric_mod_name + "_new_cmd_failure(struct se_cmd *se_cmd)\n"
buf += "{\n"
buf += " return;\n"
buf += "}\n\n"
bufi += "void " + fabric_mod_name + "_new_cmd_failure(struct se_cmd *);\n"
if re.search('queue_data_in\)\(', fo):
buf += "int " + fabric_mod_name + "_queue_data_in(struct se_cmd *se_cmd)\n"
buf += "{\n"
buf += " return 0;\n"
buf += "}\n\n"
bufi += "int " + fabric_mod_name + "_queue_data_in(struct se_cmd *);\n"
if re.search('queue_status\)\(', fo):
buf += "int " + fabric_mod_name + "_queue_status(struct se_cmd *se_cmd)\n"
buf += "{\n"
buf += " return 0;\n"
buf += "}\n\n"
bufi += "int " + fabric_mod_name + "_queue_status(struct se_cmd *);\n"
if re.search('queue_tm_rsp\)\(', fo):
buf += "int " + fabric_mod_name + "_queue_tm_rsp(struct se_cmd *se_cmd)\n"
buf += "{\n"
buf += " return 0;\n"
buf += "}\n\n"
bufi += "int " + fabric_mod_name + "_queue_tm_rsp(struct se_cmd *);\n"
if re.search('get_fabric_sense_len\)\(', fo):
buf += "u16 " + fabric_mod_name + "_get_fabric_sense_len(void)\n"
buf += "{\n"
buf += " return 0;\n"
buf += "}\n\n"
bufi += "u16 " + fabric_mod_name + "_get_fabric_sense_len(void);\n"
if re.search('set_fabric_sense_len\)\(', fo):
buf += "u16 " + fabric_mod_name + "_set_fabric_sense_len(struct se_cmd *se_cmd, u32 sense_length)\n"
buf += "{\n"
buf += " return 0;\n"
buf += "}\n\n"
bufi += "u16 " + fabric_mod_name + "_set_fabric_sense_len(struct se_cmd *, u32);\n"
if re.search('is_state_remove\)\(', fo):
buf += "int " + fabric_mod_name + "_is_state_remove(struct se_cmd *se_cmd)\n"
buf += "{\n"
buf += " return 0;\n"
buf += "}\n\n"
bufi += "int " + fabric_mod_name + "_is_state_remove(struct se_cmd *);\n"
if re.search('pack_lun\)\(', fo):
buf += "u64 " + fabric_mod_name + "_pack_lun(unsigned int lun)\n"
buf += "{\n"
buf += " WARN_ON(lun >= 256);\n"
buf += " /* Caller wants this byte-swapped */\n"
buf += " return cpu_to_le64((lun & 0xff) << 8);\n"
buf += "}\n\n"
bufi += "u64 " + fabric_mod_name + "_pack_lun(unsigned int);\n"
# As above: write() returns None in Python 2, so catch IOError instead.
try:
    p.write(buf)
except IOError:
    tcm_mod_err("Unable to write f: " + f)
p.close()
try:
    pi.write(bufi)
except IOError:
    tcm_mod_err("Unable to write fi: " + fi)
pi.close()
return
def tcm_mod_build_kbuild(fabric_mod_dir_var, fabric_mod_name):
buf = ""
f = fabric_mod_dir_var + "/Makefile"
print "Writing file: " + f
p = open(f, 'w')
if not p:
tcm_mod_err("Unable to open file: " + f)
buf += fabric_mod_name + "-objs := " + fabric_mod_name + "_fabric.o \\\n"
buf += " " + fabric_mod_name + "_configfs.o\n"
buf += "obj-$(CONFIG_" + fabric_mod_name.upper() + ") += " + fabric_mod_name + ".o\n"
try:
    p.write(buf)
except IOError:
    tcm_mod_err("Unable to write f: " + f)
p.close()
return
def tcm_mod_build_kconfig(fabric_mod_dir_var, fabric_mod_name):
buf = ""
f = fabric_mod_dir_var + "/Kconfig"
print "Writing file: " + f
p = open(f, 'w')
if not p:
tcm_mod_err("Unable to open file: " + f)
buf = "config " + fabric_mod_name.upper() + "\n"
buf += " tristate \"" + fabric_mod_name.upper() + " fabric module\"\n"
buf += " depends on TARGET_CORE && CONFIGFS_FS\n"
buf += " default n\n"
buf += " ---help---\n"
buf += " Say Y here to enable the " + fabric_mod_name.upper() + " fabric module\n"
try:
    p.write(buf)
except IOError:
    tcm_mod_err("Unable to write f: " + f)
p.close()
return
def tcm_mod_add_kbuild(tcm_dir, fabric_mod_name):
buf = "obj-$(CONFIG_" + fabric_mod_name.upper() + ") += " + fabric_mod_name.lower() + "/\n"
kbuild = tcm_dir + "/drivers/target/Makefile"
f = open(kbuild, 'a')
f.write(buf)
f.close()
return
def tcm_mod_add_kconfig(tcm_dir, fabric_mod_name):
buf = "source \"drivers/target/" + fabric_mod_name.lower() + "/Kconfig\"\n"
kconfig = tcm_dir + "/drivers/target/Kconfig"
f = open(kconfig, 'a')
f.write(buf)
f.close()
return
def main(modname, proto_ident):
# proto_ident = "FC"
# proto_ident = "SAS"
# proto_ident = "iSCSI"
tcm_dir = os.getcwd()
tcm_dir += "/../../"
print "tcm_dir: " + tcm_dir
fabric_mod_name = modname
fabric_mod_dir = tcm_dir + "drivers/target/" + fabric_mod_name
print "Set fabric_mod_name: " + fabric_mod_name
print "Set fabric_mod_dir: " + fabric_mod_dir
print "Using proto_ident: " + proto_ident
if proto_ident != "FC" and proto_ident != "SAS" and proto_ident != "iSCSI":
print "Unsupported proto_ident: " + proto_ident
sys.exit(1)
ret = tcm_mod_create_module_subdir(fabric_mod_dir)
if ret:
print "tcm_mod_create_module_subdir() failed because module already exists!"
sys.exit(1)
tcm_mod_build_base_includes(proto_ident, fabric_mod_dir, fabric_mod_name)
tcm_mod_scan_fabric_ops(tcm_dir)
tcm_mod_dump_fabric_ops(proto_ident, fabric_mod_dir, fabric_mod_name)
tcm_mod_build_configfs(proto_ident, fabric_mod_dir, fabric_mod_name)
tcm_mod_build_kbuild(fabric_mod_dir, fabric_mod_name)
tcm_mod_build_kconfig(fabric_mod_dir, fabric_mod_name)
input = raw_input("Would you like to add " + fabric_mod_name + "to drivers/target/Makefile..? [yes,no]: ")
if input == "yes" or input == "y":
tcm_mod_add_kbuild(tcm_dir, fabric_mod_name)
input = raw_input("Would you like to add " + fabric_mod_name + "to drivers/target/Kconfig..? [yes,no]: ")
if input == "yes" or input == "y":
tcm_mod_add_kconfig(tcm_dir, fabric_mod_name)
return
parser = optparse.OptionParser()
parser.add_option('-m', '--modulename', help='Module name', dest='modname',
action='store', nargs=1, type='string')
parser.add_option('-p', '--protoident', help='Protocol Ident', dest='protoident',
action='store', nargs=1, type='string')
(opts, args) = parser.parse_args()
mandatories = ['modname', 'protoident']
for m in mandatories:
    if not opts.__dict__[m]:
        print "mandatory option '%s' is missing\n" % m
        parser.print_help()
        sys.exit(-1)
if __name__ == "__main__":
main(str(opts.modname), opts.protoident)
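# Example invocation (illustrative; the script name and layout are assumed):
# run from drivers/target/ inside a kernel tree, since tcm_dir is derived
# from os.getcwd() + "/../../", and use a module name carrying the "tcm_"
# prefix, which fabric_mod_name[4:] strips when naming the fabric:
#
#   python tcm_mod_builder.py -m tcm_nab5000 -p iSCSI
#
# This generates drivers/target/tcm_nab5000/ with Kconfig, Makefile and the
# generated *_base.h, *_fabric.{c,h} and configfs skeletons, and optionally
# wires the new directory into drivers/target/Kconfig and Makefile.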
| gpl-2.0 |
joshmoore/openmicroscopy | components/tools/OmeroWeb/omeroweb/webclient/webclient_gateway.py | 1 | 76419 | #!/usr/bin/env python
#
# webclient_gateway
#
# Copyright (c) 2008-2011 University of Dundee.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# Author: Aleksandra Tarkowska <A(dot)Tarkowska(at)dundee(dot)ac(dot)uk>, 2008.
# Carlos Neves <carlos(at)glencoesoftware(dot)com>, 2008
#
# Version: 1.0
#
import cStringIO
import traceback
import logging
logger = logging.getLogger('webclient_gateway')
try:
from PIL import Image, ImageDraw # see ticket:2597
except ImportError:
try:
import Image, ImageDraw # see ticket:2597
except:
logger.error("You need to install the Python Imaging Library. Get it at http://www.pythonware.com/products/pil/")
logger.error(traceback.format_exc())
from StringIO import StringIO
import time
from datetime import datetime
from types import IntType, ListType, TupleType, UnicodeType, StringType
import Ice
import Glacier2
import omero.gateway
import omero.scripts
from omero.rtypes import *
from omero.model import FileAnnotationI, TagAnnotationI, \
DatasetI, ProjectI, ImageI, ScreenI, PlateI, \
DetectorI, FilterI, ObjectiveI, InstrumentI, \
LaserI
from omero.gateway import TagAnnotationWrapper, ExperimenterWrapper, \
ExperimenterGroupWrapper, WellWrapper, AnnotationWrapper, \
OmeroGatewaySafeCallWrapper
from omero.sys import ParametersI
from django.utils.encoding import smart_str
from django.utils.translation import ugettext as _
from django.conf import settings
from django.core.mail import send_mail
from django.core.mail import EmailMultiAlternatives
try:
PAGE = settings.PAGE
except:
PAGE = 200
class OmeroWebGateway (omero.gateway.BlitzGateway):
def __init__ (self, *args, **kwargs):
"""
Create the connection wrapper. Does not attempt to connect at this stage
Initialises the omero.client
@param username: User name. If not specified, use 'omero.gateway.anon_user'
@type username: String
@param passwd: Password.
@type passwd: String
@param client_obj: omero.client
@param group: name of group to try to connect to
@type group: String
@param clone: If True, overwrite anonymous with False
@type clone: Boolean
@param try_super: Try to log on as super user ('system' group)
@type try_super: Boolean
@param host: Omero server host.
@type host: String
@param port: Omero server port.
@type port: Integer
@param extra_config: Dictionary of extra configuration
@type extra_config: Dict
@param secure: Initial underlying omero.client connection type (True=SSL/False=insecure)
@type secure: Boolean
@param anonymous:
@type anonymous: Boolean
@param useragent: Log which python clients use this connection. E.g. 'OMERO.webadmin'
@type useragent: String
@param _shareId: Active share ID
@type _shareId: Long
"""
super(OmeroWebGateway, self).__init__(*args, **kwargs)
self._shareId = None
def connect (self, *args, **kwargs):
"""
Creates or retrieves connection for the given sessionUuid and
removes some groups from the event context
Returns True if connected.
@param sUuid: session uuid
@type sUuid: omero_model_SessionI
@return: Boolean
"""
rv = super(OmeroWebGateway, self).connect(*args,**kwargs)
if rv: # No _ctx available otherwise #3218
if self._ctx.userName!="guest":
self.removeGroupFromContext()
return rv
def attachToShare (self, share_id):
"""
Turns on the access control lists attached to the given share for the
current session. Warning: this will slow down the execution of the
current session for all database reads. Writing to the database will not
be allowed. If share does not exist or is not accessible (non-members) or
is disabled, then a ValidationException is thrown.
@param share_id: share id
@type share_id: Long
"""
sh = self._proxies['share'].getShare(long(share_id))
if self._shareId is None:
self._proxies['share'].activate(sh.id.val)
self._shareId = sh.id.val
def getShareId(self):
"""
Returns the active share id.
@return: Share ID
@rtype: Long
"""
if self.getEventContext().shareId is not None:
if self.getEventContext().shareId != self._shareId and self._shareId > 0:
self._shareId = self.getEventContext().shareId
return self._shareId
def removeGroupFromContext (self):
"""
Removes group "User" from the current context.
"""
a = self.getAdminService()
gr_u = a.lookupGroup('user')
try:
self._ctx.memberOfGroups.remove(gr_u.id.val)
self._ctx.leaderOfGroups.remove(gr_u.id.val)
except:
pass
##############################################
# Session methods #
def changeActiveGroup(self, gid): # TODO: should be moved to ISession
"""
Every time a session is created, the default group becomes the active
group and is loaded with the security for the current user and thread.
Public data has to be created in the context of the group in which the
user who wants to view the data is a member. Public data is visible
only to members of that group and its owners.
@param gid: New active group ID
@type gid: Long
@return: Boolean
"""
try:
for k in self._proxies.keys():
self._proxies[k].close()
self.c.sf.setSecurityContext(omero.model.ExperimenterGroupI(gid, False))
self.getAdminService().setDefaultGroup(self.getUser()._obj, omero.model.ExperimenterGroupI(gid, False))
self._ctx = self.getAdminService().getEventContext()
return True
except omero.SecurityViolation:
logger.error(traceback.format_exc())
return False
except:
logger.error(traceback.format_exc())
return False
##############################################
## Forgotten password ##
def isForgottenPasswordSet(self):
"""
Retrieves a configuration value "omero.resetpassword.config" for
Forgotten password form from the backend store.
@return: Boolean
"""
conf = self.getConfigService()
try:
return bool(conf.getConfigValue("omero.resetpassword.config").title())
except:
logger.error(traceback.format_exc())
return False
def reportForgottenPassword(self, username, email):
"""
Allows to reset the password (temporary password is sent). The
given email must match the email for the user listed under the name
argument.
@param username: omename
@type username: String
@param email: email address
@type email: String
"""
admin_serv = self.getAdminService()
admin_serv.reportForgottenPassword(username, email)
##############################################
## IAdmin ##
def isAnythingCreated(self):
"""
Checks whether any experimenter groups other than the defaults
('user', 'system', 'guest') have been created; returns True if none have.
@return: Boolean
"""
q = self.getQueryService()
p = omero.sys.Parameters()
p.map = {}
p.map["default_names"] = rlist([rstring("user"), rstring("system"), rstring("guest")])
f = omero.sys.Filter()
f.limit = rint(1)
p.theFilter = f
sql = "select g from ExperimenterGroup as g where g.name not in (:default_names)"
return len(q.findAllByQuery(sql, p)) == 0
def listLdapAuthExperimenters(self):
"""
Lists all IDs of experimenters who are authenticated by LDAP
(have a DN set in the password table).
@return: List of experimenter IDs
@rtype: L{Dict of String: Long}
"""
admin_serv = self.getAdminService()
return admin_serv.lookupLdapAuthExperimenters()
def getLdapAuthExperimenter(self, eid):
"""
Return the DN of the given experimenter if he/she uses LDAP
authentication (has a DN set in the password table), or None.
@param eid: experimenter ID
@type eid: L{Long}
@return: Distinguished Name
@rtype: String
"""
admin_serv = self.getAdminService()
return admin_serv.lookupLdapAuthExperimenter(long(eid))
def getExperimenters(self):
"""
Return all experimenters apart from current user.
@return: Generator yielding experimenters
@rtype: L{ExperimenterWrapper} generator
"""
q = self.getQueryService()
p = omero.sys.Parameters()
p.map = {}
p.map["id"] = rlong(self.getEventContext().userId)
sql = "select e from Experimenter as e where e.id != :id "
for e in q.findAllByQuery(sql, p):
yield ExperimenterWrapper(self, e)
#def getCurrentSupervisor(self):
# """
# Gets the owner of a group for current user.
#
# @return: ExperimenterWrapper
# """
#
# p = omero.sys.ParametersI()
# p.map = {}
# p.map["id"] = rlong(self.getEventContext().groupId)
# # TODO: there can now be multiple supervisors
# p.page(0,1)
# supervisor = self.getQueryService().findByQuery(\
# """select e from ExperimenterGroup as g
# join g.groupExperimenterMap as m join m.child as e
# where m.owner = true and g.id = :id""", p)
# return ExperimenterWrapper(self, supervisor)
#def getScriptwithDetails(self, sid):
# script_serv = self.getScriptService()
# return script_serv.getScriptWithDetails(long(sid))
#def lookupScripts(self):
# script_serv = self.getScriptService()
# return script_serv.getScripts()
def getServerVersion(self):
"""
Retrieves a configuration value "omero.version" from the backend store.
@return: String
"""
conf = self.getConfigService()
return conf.getConfigValue("omero.version")
#########################################################
## From Bram b(dot)gerritsen(at)nki(dot)nl ##
def findWellInPlate (self, plate_name, row, column):
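"""
Locates a Well by plate name, row and column, eagerly loading its plate,
well samples and images. (Docstring added for clarity; behaviour inferred
from the query below.)
@param plate_name: Name of the Plate
@param row: Well row index
@param column: Well column index
@return: L{WellWrapper} or None if no well matches
"""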
q = self.getQueryService()
p = omero.sys.Parameters()
p.map = {}
p.map['pname'] = rstring(str(plate_name))
p.map['row'] = rint(int(row))
p.map['column'] = rint(int(column))
sql = """select well from Well as well
left outer join fetch well.plate as pt
left outer join fetch well.wellSamples as ws
inner join fetch ws.image as img
where well.plate.name = :pname and well.row = :row
and well.column = :column"""
well = q.findByQuery(sql, p)
if well is None:
return None
else:
return WellWrapper(self, well, None)
####################################################################################
## Container Queries ###
####################################################################################
def listTags(self, eid=None):
params = omero.sys.ParametersI()
params.orphan()
params.map = {}
params.map['ns'] = rstring(omero.constants.metadata.NSINSIGHTTAGSET)
sql = "select tg from TagAnnotation tg where ((ns=:ns) or (ns is null and not exists ( select aal from AnnotationAnnotationLink as aal where aal.child=tg.id))) "
if eid is not None:
params.map["eid"] = rlong(long(eid))
sql+=" and tg.details.owner.id = :eid"
q = self.getQueryService()
for ann in q.findAllByQuery(sql, params):
yield TagAnnotationWrapper(self, ann)
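# Usage sketch (illustrative; assumes `conn` is a connected OmeroWebGateway):
#
#   for tag in conn.listTags(eid=conn.getEventContext().userId):
#       print tag.getValue()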
def countOrphans (self, obj_type, eid=None):
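"""
Counts orphaned Datasets, Images or Plates, i.e. objects not linked to any
parent container, optionally filtered by experimenter 'eid'. (Docstring
added for clarity; mirrors listOrphans below.)
@param obj_type: 'Dataset', 'Image' or 'Plate'
@param eid: experimenter id
@type eid: Long
@return: Count of orphaned objects
@rtype: Long
"""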
links = {'Dataset':('ProjectDatasetLink', DatasetWrapper),
'Image':('DatasetImageLink', ImageWrapper),
'Plate':('ScreenPlateLink', PlateWrapper)}
if obj_type not in links:
raise TypeError("'%s' is not valid object type. Must use one of %s" % (obj_type, links.keys()) )
q = self.getQueryService()
p = omero.sys.Parameters()
p.map = {}
if eid is not None:
p.map["eid"] = rlong(long(eid))
eidFilter = "obj.details.owner.id=:eid and "
eidWsFilter = " and ws.details.owner.id=:eid"
else:
eidFilter = ""
eidWsFilter = ""
sql = "select count(obj.id) from %s as obj " \
"join obj.details.creationEvent "\
"join obj.details.owner join obj.details.group " \
"where %s" \
"not exists (select obl from %s as obl where " \
"obl.child=obj.id)" % (obj_type, eidFilter, links[obj_type][0])
if obj_type == 'Image':
sql += "and not exists ( "\
"select ws from WellSample as ws "\
"where ws.image=obj.id %s)" % eidWsFilter
rslt = q.projection(sql, p)
if len(rslt) > 0:
if len(rslt[0]) > 0:
return rslt[0][0].val
return 0
def listOrphans (self, obj_type, eid=None, page=None):
"""
List orphaned Datasets, Images, Plates controlled by the security system,
Optionally filter by experimenter 'eid'
@param obj_type: 'Dataset', 'Image', 'Plate'
@param eid: experimenter id
@type eid: Long
@param page: page number
@type page: Long
@return: Generator yielding Datasets
@rtype: L{DatasetWrapper} generator
"""
links = {'Dataset':('ProjectDatasetLink', DatasetWrapper),
'Image':('DatasetImageLink', ImageWrapper),
'Plate':('ScreenPlateLink', PlateWrapper)}
if obj_type not in links:
raise TypeError("'%s' is not valid object type. Must use one of %s" % (obj_type, links.keys()) )
q = self.getQueryService()
p = omero.sys.Parameters()
p.map = {}
if eid is not None:
p.map["eid"] = rlong(long(eid))
eidFilter = "obj.details.owner.id=:eid and "
eidWsFilter = " and ws.details.owner.id=:eid"
else:
eidFilter = ""
eidWsFilter = ""
if page is not None:
f = omero.sys.Filter()
f.limit = rint(PAGE)
f.offset = rint((int(page)-1)*PAGE)
p.theFilter = f
sql = "select obj from %s as obj " \
"join fetch obj.details.creationEvent "\
"join fetch obj.details.owner join fetch obj.details.group " % (obj_type)
sql += "where %s" \
"not exists (select obl from %s as obl where " \
"obl.child=obj.id)" % (eidFilter, links[obj_type][0])
if obj_type == 'Image':
sql += "and not exists ( "\
"select ws from WellSample as ws "\
"where ws.image=obj.id %s)" % eidWsFilter
for e in q.findAllByQuery(sql, p):
yield links[obj_type][1](self, e)
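# Usage sketch (illustrative; `conn` is a connected gateway, `uid` a user id):
#
#   total = conn.countOrphans('Image', eid=uid)
#   for image in conn.listOrphans('Image', eid=uid, page=1):
#       ... # at most PAGE results per page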
def listImagesInDataset (self, oid, eid=None, page=None):
"""
List Images in the given Dataset.
Optionally filter by experimenter 'eid'.
@param eid: experimenter id
@type eid: Long
@param page: page number
@type page: Long
@return: Generator yielding Images
@rtype: L{ImageWrapper} generator
"""
q = self.getQueryService()
p = omero.sys.Parameters()
p.map = {}
p.map["oid"] = rlong(long(oid))
if page is not None:
f = omero.sys.Filter()
f.limit = rint(PAGE)
f.offset = rint((int(page)-1)*PAGE)
p.theFilter = f
sql = "select im from Image im "\
"join fetch im.details.creationEvent "\
"join fetch im.details.owner join fetch im.details.group " \
"left outer join fetch im.datasetLinks dil "\
"left outer join fetch dil.parent d " \
"where d.id = :oid"
if eid is not None:
p.map["eid"] = rlong(long(eid))
sql += " and im.details.owner.id=:eid"
sql+=" order by im.name ASC"
for e in q.findAllByQuery(sql, p):
kwargs = {'link': omero.gateway.BlitzObjectWrapper(self, e.copyDatasetLinks()[0])}
yield ImageWrapper(self, e, None, **kwargs)
# DATA RETRIVAL BY TAGs
def findTag (self, name, desc=None):
"""
Retrieves Tag by given Name and description
@param name name of tag
@type name String
@param desc description of tag
@type desc String
@return: TagAnnotation
@rtype: AnnotationWrapper
"""
"""TODO: #1015
It does not support SPW"""
query_serv = self.getQueryService()
res = list()
p = omero.sys.Parameters()
p.map = {}
p.map["text"] = rstring(str(name))
if desc is not None:
p.map["desc"] = rstring(str(desc))
#p.map["eid"] = rlong(self.getEventContext().userId)
f = omero.sys.Filter()
f.limit = rint(1)
p.theFilter = f
sql = "select tg from TagAnnotation tg " \
"where tg.textValue=:text"
if desc is not None:
sql+= " and tg.description=:desc"
sql+=" and tg.ns is null order by tg.textValue"
res = query_serv.findAllByQuery(sql, p)
if len(res) > 0:
return TagAnnotationWrapper(self, res[0])
return None
# AVATAR #
def uploadMyUserPhoto(self, filename, format, data):
"""
Uploads a photo for the user which will be displayed on his/her profile.
This photo will be saved as an OriginalFile object
with the given format, and attached to the user's Experimenter
object via an File Annotation with
the namespace: "openmicroscopy.org/omero/experimenter/photo" (NSEXPERIMENTERPHOTO).
If such an OriginalFile instance already exists,
it will be overwritten. If more than one photo is present, the oldest
version will be modified (i.e. the highest updateEvent id).
Note: as outlined in ticket:1794, this photo will be placed in the "user"
group and therefore will be visible to everyone on the system.
@param filename name which will be used.
@type filename String
@param format Format.value string. 'image/jpeg' and 'image/png' are common values.
@type format String
@param data Data from the image. This will be written to disk.
@type data String
@return ID of the overwritten or newly created user photo OriginalFile object.
@rtype Long
"""
admin_serv = self.getAdminService()
pid = admin_serv.uploadMyUserPhoto(filename, format, data)
if pid is not None:
return pid
def hasExperimenterPhoto(self, oid=None):
"""
Check if File annotation with the namespace:
"openmicroscopy.org/omero/experimenter/photo" (NSEXPERIMENTERPHOTO) is linked
to the given user ID. If user id not set, owned by the current user.
@param oid experimenter ID
@type oid Long
@return True or False
@rtype Boolean
"""
photo = None
meta = self.getMetadataService()
try:
if oid is None:
ann = meta.loadAnnotations("Experimenter", [self.getEventContext().userId], None, None, None).get(self.getEventContext().userId, [])[0]
else:
ann = meta.loadAnnotations("Experimenter", [long(oid)], None, None, None).get(long(oid), [])[0]
return ann is not None
except:
return False
def getExperimenterPhoto(self, oid=None):
"""
Get File annotation with the namespace:
"openmicroscopy.org/omero/experimenter/photo" (NSEXPERIMENTERPHOTO) linked
to the given user ID. If user id not set, owned by the current user.
@param oid experimenter ID
@type oid Long
@return Data from the image.
@rtype String
"""
photo = None
meta = self.getMetadataService()
try:
if oid is None:
ann = meta.loadAnnotations("Experimenter", [self.getEventContext().userId], None, None, None).get(self.getEventContext().userId, [])
else:
ann = meta.loadAnnotations("Experimenter", [long(oid)], None, None, None).get(long(oid), [])
if len(ann) > 0:
ann = ann[0]
store = self.createRawFileStore()
store.setFileId(ann.file.id.val)
photo = store.read(0,long(ann.file.size.val))
else:
photo = self.getExperimenterDefaultPhoto()
except:
logger.error(traceback.format_exc())
photo = self.getExperimenterDefaultPhoto()
if photo is None:
photo = self.getExperimenterDefaultPhoto()
return photo
def getExperimenterPhotoSize(self, oid=None):
"""
Get size of File annotation with the namespace:
"openmicroscopy.org/omero/experimenter/photo" (NSEXPERIMENTERPHOTO) linked
to the given user ID. If user id not set, owned by the current user.
@param oid experimenter ID
@type oid Long
@return Tuple of the image dimensions and the file size
@rtype Tuple
"""
photo = None
meta = self.getMetadataService()
try:
if oid is None:
ann = meta.loadAnnotations("Experimenter", [self.getEventContext().userId], None, None, None).get(self.getEventContext().userId, [])[0]
else:
ann = meta.loadAnnotations("Experimenter", [long(oid)], None, None, None).get(long(oid), [])[0]
store = self.createRawFileStore()
store.setFileId(ann.file.id.val)
photo = store.read(0,long(ann.file.size.val))
try:
im = Image.open(StringIO(photo))
except:
logger.error(traceback.format_exc())
return None
else:
return (im.size, ann.file.size.val)
except:
return None
def cropExperimenterPhoto(self, box, oid=None):
"""
Crop File annotation with the namespace:
"openmicroscopy.org/omero/experimenter/photo" (NSEXPERIMENTERPHOTO) linked
to the given user ID. If user id not set, owned by the current user.
New dimensions are defined by the crop box positions box = (x1,y1,x2,y2)
@param box tuple of crop box positions
@type box Tuple
@param oid experimenter ID
@type oid Long
"""
# TODO: crop method could be moved to the server side
photo = None
meta = self.getMetadataService()
ann = None
try:
if oid is None:
ann = meta.loadAnnotations("Experimenter", [self.getEventContext().userId], None, None, None).get(self.getEventContext().userId, [])[0]
else:
ann = meta.loadAnnotations("Experimenter", [long(oid)], None, None, None).get(long(oid), [])[0]
store = self.createRawFileStore()
store.setFileId(ann.file.id.val)
photo = store.read(0,long(ann.file.size.val))
except:
logger.error(traceback.format_exc())
raise IOError("Photo does not exist.")
else:
region = None
try:
im = Image.open(StringIO(photo))
region = im.crop(box)
except IOError:
logger.error(traceback.format_exc())
raise IOError("Cannot open that photo.")
else:
imdata=StringIO()
region.save(imdata, format=im.format)
self.uploadMyUserPhoto(ann.file.name.val, ann.file.mimetype.val, imdata.getvalue())
def getExperimenterDefaultPhoto(self):
"""
If file annotation with the namespace:
"openmicroscopy.org/omero/experimenter/photo" (NSEXPERIMENTERPHOTO)
is not linked to the experimenter, this method generates a default picture of the person.
@return Data from the image.
@rtype String
"""
img = Image.open(settings.DEFAULT_USER)
img.thumbnail((32,32), Image.ANTIALIAS)
draw = ImageDraw.Draw(img)
f = cStringIO.StringIO()
img.save(f, "PNG")
f.seek(0)
return f.read()
def getFileFormat(self, format):
"""
Get file annotation format for the given value.
@return Omero File format
@rtype String
"""
query_serv = self.getQueryService()
return query_serv.findByString("Format", "value", format).getValue().val;
################################################
## Counters
def getCollectionCount(self, parent, child, ids):
"""
Counts the number of members in a collection for a given object.
@param parent The fully-qualified classname of the object to be tested
@type parent String
@param child Name of the property on that class, omitting getters and setters.
@type child String
@param ids Set of Longs, the ids of the objects to test
@type ids L{Long}
@return A map from id integer to count integer
@rtype L{(Long, Long)}
"""
container = self.getContainerService()
return container.getCollectionCount(parent, child, ids, None)
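# Usage sketch (illustrative; assumes a connected gateway `conn` and an
# existing dataset id `did`; the fully-qualified classname follows the
# IContainer convention described above):
#
#   counts = conn.getCollectionCount("ome.model.containers.Dataset",
#                                    "imageLinks", [did])
#   n_images = counts.get(did, 0)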
################################################
## Validators
def checkOmeName(self, ome_name, old_omeName=None):
if ome_name == old_omeName:
return False
query_serv = self.getQueryService()
p = omero.sys.Parameters()
p.map = {}
p.map["omeName"] = rstring(smart_str(ome_name))
sql = "select e from Experimenter as e where e.omeName = (:omeName)"
exps = query_serv.findAllByQuery(sql, p)
return len(exps) > 0
def checkGroupName(self, name, old_name=None):
if name == old_name:
return False
query_serv = self.getQueryService()
p = omero.sys.Parameters()
p.map = {}
p.map["name"] = rstring(smart_str(name))
sql = "select g from ExperimenterGroup as g where g.name = (:name)"
grs = query_serv.findAllByQuery(sql, p)
return len(grs) > 0
def checkEmail(self, email, old_email=None):
if email == "":
return False
if email == old_email:
return False
query_serv = self.getQueryService()
p = omero.sys.Parameters()
p.map = {}
p.map["email"] = rstring(smart_str(email))
sql = "select e from Experimenter as e where e.email = (:email)"
exps = query_serv.findAllByQuery(sql, p)
return len(exps) > 0
def defaultThumbnail(self, size=(120,120)):
if isinstance(size, int):
size = (size,size)
if len(size) == 1:
size = (size[0],size[0])
img = Image.open(settings.DEFAULT_IMG)
img.thumbnail(size, Image.ANTIALIAS)
draw = ImageDraw.Draw(img)
f = cStringIO.StringIO()
img.save(f, "PNG")
f.seek(0)
return f.read()
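# Sketch (illustrative): the PNG bytes returned above can be served directly
# as a fallback thumbnail, e.g.:
#
#   data = conn.defaultThumbnail((96, 96))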
##############################################
## Sets methods ##
def changeUserPassword(self, omeName, password, my_password):
"""
Change the password for the a given user.
@param omeName Experimenter omename
@type omeName String
@param password Must pass validation in the security sub-system.
@type password String
@param my_password Must pass validation in the security sub-system.
@type my_password String
"""
admin_serv = self.getAdminService()
self.c.sf.setSecurityPassword(my_password)
admin_serv.changeUserPassword(omeName, rstring(str(password)))
def changeMyPassword(self, password, old_password):
"""
Change the password for the current user by passing the old password.
@param password Must pass validation in the security sub-system.
@type password String
@param old_password Old password
@type old_password String
@return None or error message if password could not be changed
@rtype String
"""
admin_serv = self.getAdminService()
admin_serv.changePasswordWithOldPassword(rstring(str(old_password)), rstring(str(password)))
def createExperimenter(self, experimenter, defaultGroup, otherGroups, password):
"""
Create and return a new user in the given groups with password.
@param experimenter A new Experimenter instance.
@type experimenter ExperimenterI
@param defaultGroup Instance of ExperimenterGroup selected as a first active group.
@type defaultGroup ExperimenterGroupI
@param otherGroups List of ExperimenterGroup instances. Can be empty.
@type otherGroups L{ExperimenterGroupI}
@param password Must pass validation in the security sub-system.
@type password String
@return ID of the newly created Experimenter Not null.
@rtype Long
"""
admin_serv = self.getAdminService()
return admin_serv.createExperimenterWithPassword(experimenter, rstring(str(password)), defaultGroup, otherGroups)
def updateExperimenter(self, experimenter, defaultGroup, addGroups, rmGroups):
"""
Update an existing user including groups user is a member of.
Password cannot be changed by calling that method.
@param experimenter An existing Experimenter instance.
@type experimenter ExperimenterI
@param defaultGroup Instance of ExperimenterGroup selected as a new active group.
@type defaultGroup ExperimenterGroupI
@param addGroups List of new ExperimenterGroup instances user will be a member of. Can be empty.
@type addGroups L{ExperimenterGroupI}
@param rmGroups List of old ExperimenterGroup instances user no longer be a member of. Can be empty.
@type rmGroups L{ExperimenterGroupI}
"""
admin_serv = self.getAdminService()
admin_serv.updateExperimenter(experimenter)
if len(addGroups) > 0:
admin_serv.addGroups(experimenter, addGroups)
admin_serv.setDefaultGroup(experimenter, defaultGroup)
if len(rmGroups) > 0:
admin_serv.removeGroups(experimenter, rmGroups)
def setMembersOfGroup(self, group, add_exps, rm_exps):
"""
Change members of the group.
@param group An existing ExperimenterGroup instance.
@type group ExperimenterGroupI
@param add_exps List of new Experimenters instances. Can be empty.
@type add_exps L{ExperimenterI}
@param rm_exps List of old Experimenters instances no longer be a member of that group. Can be empty.
@type rm_exps L{ExperimenterI}
"""
admin_serv = self.getAdminService()
for e in add_exps:
admin_serv.addGroups(e, [group])
for e in rm_exps:
admin_serv.removeGroups(e, [group])
#def deleteExperimenter(self, experimenter):
# """
# Removes a user by removing the password information for that user as well
# as all GroupExperimenterMap instances.
#
# @param user Experimenter to be deleted. Not null.
# @type user ExperimenterI
# """
# admin_serv = self.getAdminService()
# admin_serv.deleteExperimenter(experimenter)
def createGroup(self, group, group_owners):
"""
Create and return a new group with the given owners.
@param group A new ExperimenterGroup instance.
@type group ExperimenterGroupI
@param group_owners List of Experimenter instances. Can be empty.
@type group_owners L{ExperimenterI}
@return ID of the newly created ExperimenterGroup Not null.
@rtype Long
"""
admin_serv = self.getAdminService()
gr_id = admin_serv.createGroup(group)
new_gr = admin_serv.getGroup(gr_id)
admin_serv.addGroupOwners(new_gr, group_owners)
return gr_id
def updateGroup(self, group, add_exps, rm_exps, perm=None):
"""
Update an existing group, including its owners and, optionally, its
permissions.
@param group An existing ExperimenterGroup instance.
@type group ExperimenterGroupI
@param add_exps List of new Experimenter instances. Can be empty.
@type add_exps L{ExperimenterI}
@param rm_exps List of old Experimenter instances who no longer will be a member of. Can be empty.
@type rm_exps L{ExperimenterI}
@param perm Permissions set on the given group
@type perm PermissionsI
"""
admin_serv = self.getAdminService()
# Should we update updateGroup so this would be atomic?
admin_serv.updateGroup(group)
if perm is not None:
logger.warning("WARNING: changePermissions was called!!!")
admin_serv.changePermissions(group, perm)
self._user = self.getObject("Experimenter", self._userid)
admin_serv.addGroupOwners(group, add_exps)
admin_serv.removeGroupOwners(group, rm_exps)
    def updateMyAccount(self, experimenter, defaultGroup):
        """
        Allows a user to update his/her own information and set the default group for a given user.
        @param experimenter A data transfer object. Only the fields: firstName, middleName,
                            lastName, email, and institution are checked. Not null.
        @type experimenter ExperimenterI
        @param defaultGroup The group which should be set as default group for this user. Not null.
        @type defaultGroup ExperimenterGroupI
        """
        admin_serv = self.getAdminService()
        admin_serv.updateSelf(experimenter)
        admin_serv.setDefaultGroup(experimenter, defaultGroup)
        self.changeActiveGroup(defaultGroup.id.val)
self._user = self.getObject("Experimenter", self._userid)
def updatePermissions(self, obj, perm):
"""
Allow to change the permission on the object.
@param obj An entity or an unloaded reference to an entity. Not null.
@type obj ObjectI
@param perm The permissions value for this entity. Not null.
@type perm PermissionsI
"""
admin_serv = self.getAdminService()
if perm is not None:
logger.warning("WARNING: changePermissions was called!!!")
admin_serv.changePermissions(obj, perm)
self._user = self.getObject("Experimenter", self._userid)
def saveObject (self, obj):
"""
Provide method for directly updating object graphs. Act recursively on
the entire object graph, replacing placeholders and details where necessary,
and then "merging" the final graph. This means that the objects that are
passed into methods are copied over to new instances which are then returned.
The original objects should be discarded.
@param obj An entity or an unloaded reference to an entity. Not null.
@type obj ObjectI
"""
u = self.getUpdateService()
u.saveObject(obj)
def saveArray (self, objs):
"""
Provide method for directly updating list of object graphs. Act recursively on
the entire object graph, replacing placeholders and details where necessary,
and then "merging" the final graph. This means that the objects that are
passed into methods are copied over to new instances which are then returned.
The original objects should be discarded.
        @param objs List of entities or unloaded references to entities. Not null.
        @type objs L{ObjectI}
"""
u = self.getUpdateService()
u.saveArray(objs)
def saveAndReturnObject (self, obj):
"""
Provide method for directly updating object graphs and return it. Act recursively on
the entire object graph, replacing placeholders and details where necessary,
and then "merging" the final graph. This means that the objects that are
passed into methods are copied over to new instances which are then returned.
The original objects should be discarded.
@param obj An entity or an unloaded reference to an entity. Not null.
@type obj ObjectI
@return Saved object
@rtype ObjectI
"""
u = self.getUpdateService()
res = u.saveAndReturnObject(obj)
res.unload()
obj = omero.gateway.BlitzObjectWrapper(self, res)
return obj
def saveAndReturnId (self, obj):
"""
Provide method for directly updating object graphs and return ID. Act recursively on
the entire object graph, replacing placeholders and details where necessary,
and then "merging" the final graph. This means that the objects that are
passed into methods are copied over to new instances which are then returned.
The original objects should be discarded.
@param obj An entity or an unloaded reference to an entity. Not null.
@type obj ObjectI
@return ID of saved object
@rtype Long
"""
u = self.getUpdateService()
res = u.saveAndReturnObject(obj)
res.unload()
return res.id.val
def saveAndReturnFile(self, binary, oFile_id):
"""
Provide method for directly updating a file object and return binary.
@param binary Binary. Not null.
@type binary String
@param oFile_id File Id in order to manage the state of the service. Not null.
@type oFile_id Long
@return Shallow copy of file.
"""
store = self.createRawFileStore()
        store.setFileId(oFile_id)
pos = 0
rlen = 0
for chunk in binary.chunks():
rlen = len(chunk)
store.write(chunk, pos, rlen)
pos = pos + rlen
return store.save()
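    # Usage sketch (assumes `upload` is a Django UploadedFile providing
    # chunks(), and `ofile_id` is the ID of an existing OriginalFile):
    #   conn.saveAndReturnFile(upload, ofile_id)
    # Each chunk is written at offset `pos`, which always equals the number
    # of bytes written so far.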
##############################################
## IShare
def getShare (self, oid):
"""
Gets share for the given share id.
@param oid: Share ID.
@type oid: Long
@return: ShareWrapper or None
@rtype: L{ShareWrapper}
"""
sh_serv = self.getShareService()
sh = sh_serv.getShare(long(oid))
if sh is not None:
return ShareWrapper(self, sh)
else:
return None
def getOwnShares(self):
"""
Gets all owned shares for the current user.
@return: Shares that user owns
@rtype: L{ShareWrapper} generator
"""
sh = self.getShareService()
for e in sh.getOwnShares(False):
yield ShareWrapper(self, e)
def getMemberShares(self):
"""
Gets all shares where current user is a member.
@return: Shares that user is a member of
@rtype: L{ShareWrapper} generator
"""
sh = self.getShareService()
for e in sh.getMemberShares(False):
yield ShareWrapper(self, e)
def getMemberCount(self, share_ids):
"""
Returns a map from share id to the count of total members (including the
owner). This is represented by ome.model.meta.ShareMember links.
@param share_ids: List of IDs
@type share_ids: List of Longs
@return: Dict of shareId: member-count
@rtype: Dict of long: long
"""
sh = self.getShareService()
return sh.getMemberCount(share_ids)
def getCommentCount(self, share_ids):
"""
Returns a map from share id to comment count.
@param share_ids: List of IDs
@type share_ids: List of Longs
@return: Dict of shareId: comment-count
@rtype: Dict of long: long
"""
sh = self.getShareService()
return sh.getCommentCount(share_ids)
def getContents(self, share_id):
"""
Looks up all items belonging to the share, wrapped in object wrapper
@param share_id: share ID
@type share_id: Long
@return: Share contents
@rtype: L{omero.gateway.BlitzObjectWrapper} generator
"""
sh = self.getShareService()
for e in sh.getContents(long(share_id)):
try:
obj = omero.gateway.BlitzObjectWrapper(self, e)
except:
obj = omero.gateway.BlitzObjectWrapper(self,None)
obj._obj = e
yield obj
def getComments(self, share_id):
"""
Looks up all comments which belong to the share, wrapped in object wrapper
@param share_id: share ID
@type share_id: Long
@return: Share comments
@rtype: L{AnnotationWrapper} generator
"""
sh = self.getShareService()
for e in sh.getComments(long(share_id)):
yield AnnotationWrapper(self, e)
def getAllMembers(self, share_id):
"""
Get all {@link Experimenter users} who are a member of the share.
@param share_id: share ID
@type share_id: Long
@return: Members of share
@rtype: L{ExperimenterWrapper} generator
"""
sh = self.getShareService()
for e in sh.getAllMembers(long(share_id)):
yield ExperimenterWrapper(self, e)
def getAllGuests(self, share_id):
"""
Get the email addresses for all share guests.
@param share_id: share ID
@type share_id: Long
@return: List of e-mail addresses
@rtype: List of Strings
"""
sh = self.getShareService()
return sh.getAllGuests(long(share_id))
def getAllUsers(self, share_id):
"""
        Get a single set containing the login names of the users as well as email addresses for guests.
@param share_id: share ID
@type share_id: Long
@return: List of usernames and e-mail addresses
@rtype: List of Strings
"""
sh = self.getShareService()
return sh.getAllUsers(long(share_id))
def prepareRecipients(self, recipients):
recps = list()
for m in recipients:
try:
                e = m.email.val if isinstance(m.email, omero.RString) else m.email
if e is not None and e!="":
recps.append(e)
except:
logger.error(traceback.format_exc())
logger.info(recps)
if len(recps) == 0:
raise AttributeError("Recipients list is empty")
return recps
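    # e.g. prepareRecipients(members) -> ['user@example.org', ...] (values
    # illustrative); raises AttributeError when no member has a usable
    # e-mail address, which the callers below rely on to skip sending mail.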
def addComment(self, host, blitz_id, share_id, comment):
sh = self.getShareService()
new_cm = sh.addComment(long(share_id), str(comment))
members = list(self.getAllMembers(long(share_id)))
sh = self.getShare(long(share_id))
if self.getEventContext().userId != sh.owner.id.val:
members.append(sh.getOwner())
if sh.active:
try:
                # rebuild the list instead of removing items from `members`
                # while iterating over it
                members = [m for m in members
                           if m.id != self.getEventContext().userId]
recipients = self.prepareRecipients(members)
except Exception, x:
logger.error(traceback.format_exc())
else:
blitz = settings.SERVER_LIST.get(pk=blitz_id)
t = settings.EMAIL_TEMPLATES["add_comment_to_share"]
message = t['text_content'] % (settings.APPLICATION_HOST, blitz_id)
message_html = t['html_content'] % (settings.APPLICATION_HOST, blitz_id, settings.APPLICATION_HOST, blitz_id)
try:
title = 'OMERO.web - new comment for share %i' % share_id
text_content = message
html_content = message_html
msg = EmailMultiAlternatives(title, text_content, settings.SERVER_EMAIL, recipients)
msg.attach_alternative(html_content, "text/html")
msg.send()
logger.error("Email was sent")
except:
logger.error(traceback.format_exc())
def removeImage(self, share_id, image_id):
sh = self.getShareService()
img = self.getObject("Image", image_id)
sh.removeObject(long(share_id), img._obj)
def createShare(self, host, blitz_id, image, message, members, enable, expiration=None):
sh = self.getShareService()
q = self.getQueryService()
items = list()
ms = list()
p = omero.sys.Parameters()
p.map = {}
#images
if len(image) > 0:
p.map["ids"] = rlist([rlong(long(a)) for a in image])
sql = "select im from Image im join fetch im.details.owner join fetch im.details.group where im.id in (:ids) order by im.name"
items.extend(q.findAllByQuery(sql, p))
#members
if members is not None:
p.map["ids"] = rlist([rlong(long(a)) for a in members])
sql = "select e from Experimenter e " \
"where e.id in (:ids) order by e.omeName"
ms = q.findAllByQuery(sql, p)
sid = sh.createShare(message, rtime(expiration), items, ms, [], enable)
sh.addObjects(sid, items)
        # send email if the share is active
if enable:
try:
recipients = self.prepareRecipients(ms)
except Exception, x:
logger.error(traceback.format_exc())
else:
t = settings.EMAIL_TEMPLATES["create_share"]
message = t['text_content'] % (settings.APPLICATION_HOST, blitz_id, self.getUser().getFullName())
message_html = t['html_content'] % (settings.APPLICATION_HOST, blitz_id, settings.APPLICATION_HOST, blitz_id, self.getUser().getFullName())
try:
title = 'OMERO.web - new share %i' % sid
text_content = message
html_content = message_html
msg = EmailMultiAlternatives(title, text_content, settings.SERVER_EMAIL, recipients)
msg.attach_alternative(html_content, "text/html")
msg.send()
logger.error("Email was sent")
except:
logger.error(traceback.format_exc())
def updateShareOrDiscussion (self, host, blitz_id, share_id, message, add_members, rm_members, enable, expiration=None):
sh = self.getShareService()
sh.setDescription(long(share_id), message)
sh.setExpiration(long(share_id), rtime(expiration))
sh.setActive(long(share_id), enable)
if len(add_members) > 0:
sh.addUsers(long(share_id), add_members)
if len(rm_members) > 0:
sh.removeUsers(long(share_id), rm_members)
        # send email if the share is active
if len(add_members) > 0:
try:
recipients = self.prepareRecipients(add_members)
except Exception, x:
logger.error(traceback.format_exc())
else:
blitz = settings.SERVER_LIST.get(pk=blitz_id)
t = settings.EMAIL_TEMPLATES["add_member_to_share"]
message = t['text_content'] % (settings.APPLICATION_HOST, blitz_id, self.getUser().getFullName())
message_html = t['html_content'] % (settings.APPLICATION_HOST, blitz_id, settings.APPLICATION_HOST, blitz_id, self.getUser().getFullName())
try:
title = 'OMERO.web - update share %i' % share_id
text_content = message
html_content = message_html
msg = EmailMultiAlternatives(title, text_content, settings.SERVER_EMAIL, recipients)
msg.attach_alternative(html_content, "text/html")
msg.send()
logger.error("Email was sent")
except:
logger.error(traceback.format_exc())
if len(rm_members) > 0:
try:
recipients = self.prepareRecipients(rm_members)
except Exception, x:
logger.error(traceback.format_exc())
else:
blitz = settings.SERVER_LIST.get(pk=blitz_id)
t = settings.EMAIL_TEMPLATES["remove_member_from_share"]
message = t['text_content'] % (settings.APPLICATION_HOST, blitz_id)
message_html = t['html_content'] % (settings.APPLICATION_HOST, blitz_id, settings.APPLICATION_HOST, blitz_id)
try:
title = 'OMERO.web - update share %i' % share_id
text_content = message
html_content = message_html
msg = EmailMultiAlternatives(title, text_content, settings.SERVER_EMAIL, recipients)
msg.attach_alternative(html_content, "text/html")
msg.send()
logger.error("Email was sent")
except:
logger.error(traceback.format_exc())
##############################################
## History methods ##
#def getLastAcquiredImages (self):
# tm = self.getTimelineService()
# p = omero.sys.Parameters()
# p.map = {}
# f = omero.sys.Filter()
# f.ownerId = rlong(self.getEventContext().userId)
# f.groupId = rlong(self.getEventContext().groupId)
# f.limit = rint(6)
# p.theFilter = f
# for e in tm.getMostRecentObjects(['Image'], p, False)["Image"]:
# yield ImageWrapper(self, e)
def listLastImportedImages (self):
"""
Retrieve most recent imported images
controlled by the security system.
@return: Generator yielding Images
@rtype: L{ImageWrapper} generator
"""
tm = self.getTimelineService()
p = omero.sys.Parameters()
p.map = {}
f = omero.sys.Filter()
f.ownerId = rlong(self.getEventContext().userId)
f.groupId = rlong(self.getEventContext().groupId)
f.limit = rint(10)
p.theFilter = f
for e in tm.getMostRecentObjects(['Image'], p, False)["Image"]:
yield ImageWrapper(self, e)
def listMostRecentShares (self):
"""
Retrieve most recent shares
controlled by the security system.
@return: Generator yielding SessionAnnotationLink
@rtype: L{ShareWrapper} generator
"""
tm = self.getTimelineService()
p = omero.sys.Parameters()
p.map = {}
f = omero.sys.Filter()
f.ownerId = rlong(self.getEventContext().userId)
f.limit = rint(10)
p.theFilter = f
for e in tm.getMostRecentShareCommentLinks(p):
yield ShareWrapper(self, e.parent)
def listMostRecentShareComments (self):
"""
Retrieve most recent share comments
controlled by the security system.
@return: Generator yielding SessionAnnotationLink
@rtype: L{SessionCommentWrapper} generator
"""
tm = self.getTimelineService()
p = omero.sys.Parameters()
p.map = {}
f = omero.sys.Filter()
f.ownerId = rlong(self.getEventContext().userId)
f.limit = rint(10)
p.theFilter = f
for e in tm.getMostRecentShareCommentLinks(p):
yield AnnotationWrapper(self, e.child, link=ShareWrapper(self, e.parent))
def listMostRecentComments (self):
"""
Retrieve most recent comment annotations
controlled by the security system.
@return: Generator yielding BlitzObjectWrapper
@rtype: L{BlitzObjectWrapper} generator
"""
tm = self.getTimelineService()
p = omero.sys.Parameters()
p.map = {}
f = omero.sys.Filter()
f.ownerId = rlong(self.getEventContext().userId)
f.groupId = rlong(self.getEventContext().groupId)
f.limit = rint(10)
p.theFilter = f
for e in tm.getMostRecentAnnotationLinks(None, ['CommentAnnotation'], None, p):
yield omero.gateway.BlitzObjectWrapper(self, e)
def listMostRecentTags (self):
"""
Retrieve most recent tag annotations
controlled by the security system.
@return: Generator yielding BlitzObjectWrapper
@rtype: L{BlitzObjectWrapper} generator
"""
tm = self.getTimelineService()
p = omero.sys.Parameters()
p.map = {}
f = omero.sys.Filter()
#f.ownerId = rlong(self.getEventContext().userId)
f.groupId = rlong(self.getEventContext().groupId)
f.limit = rint(200)
p.theFilter = f
for e in tm.getMostRecentAnnotationLinks(None, ['TagAnnotation'], None, p):
yield omero.gateway.BlitzObjectWrapper(self, e.child)
def getDataByPeriod (self, start, end, eid, otype=None, page=None):
"""
Retrieve given data objects by the given period of time
controlled by the security system.
        @param start Starting date
        @type start Long
        @param end Finishing date
@type end Long
@param otype Data type: Project, Dataset, Image
@type otype String
@return: Map of project, dataset and image lists
@rtype: Map
"""
tm = self.getTimelineService()
p = omero.sys.Parameters()
p.map = {}
f = omero.sys.Filter()
f.ownerId = rlong(eid)
f.groupId = rlong(self.getEventContext().groupId)
if page is not None:
f.limit = rint(PAGE)
f.offset = rint((int(page)-1)*PAGE)
else:
f.limit = rint(100)
p.theFilter = f
im_list = list()
ds_list = list()
pr_list = list()
if otype == 'image':
try:
for e in tm.getByPeriod(['Image'], rtime(long(start)), rtime(long(end)), p, True)['Image']:
im_list.append(ImageWrapper(self, e))
except:
pass
elif otype == 'dataset':
try:
for e in tm.getByPeriod(['Dataset'], rtime(long(start)), rtime(long(end)), p, True)['Dataset']:
ds_list.append(DatasetWrapper(self, e))
except:
pass
elif otype == 'project':
try:
for e in tm.getByPeriod(['Project'], rtime(long(start)), rtime(long(end)), p, True)['Project']:
                    pr_list.append(ProjectWrapper(self, e))
except:
pass
else:
res = tm.getByPeriod(['Image', 'Dataset', 'Project'], rtime(long(start)), rtime(long(end)), p, True)
try:
for e in res['Image']:
im_list.append(ImageWrapper(self, e))
except:
pass
try:
for e in res['Dataset']:
ds_list.append(DatasetWrapper(self, e))
except:
pass
try:
for e in res['Project']:
pr_list.append(ProjectWrapper(self, e))
except:
pass
return {'project': pr_list, 'dataset':ds_list, 'image':im_list}
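    # Example shape of the result (illustrative):
    #   {'project': [ProjectWrapper, ...],
    #    'dataset': [DatasetWrapper, ...],
    #    'image':   [ImageWrapper, ...]}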
def countDataByPeriod (self, start, end, eid, otype=None):
"""
Counts given data objects by the given period of time
controlled by the security system.
        @param start Starting date
        @type start Long
        @param end Finishing date
@type end Long
@param otype Data type: Project, Dataset, Image
@type otype String
@return: Counter
@rtype: Long
"""
tm = self.getTimelineService()
p = omero.sys.Parameters()
p.map = {}
f = omero.sys.Filter()
f.ownerId = rlong(eid)
f.groupId = rlong(self.getEventContext().groupId)
p.theFilter = f
if otype == 'image':
return tm.countByPeriod(['Image'], rtime(long(start)), rtime(long(end)), p)['Image']
elif otype == 'dataset':
return tm.countByPeriod(['Dataset'], rtime(long(start)), rtime(long(end)), p)['Dataset']
elif otype == 'project':
return tm.countByPeriod(['Project'], rtime(long(start)), rtime(long(end)), p)['Project']
else:
c = tm.countByPeriod(['Image', 'Dataset', 'Project'], rtime(long(start)), rtime(long(end)), p)
return c['Image']+c['Dataset']+c['Project']
def getEventsByPeriod (self, start, end, eid):
"""
Retrieve event log objects by the given period of time
controlled by the security system.
        @param start Starting date
        @type start Long
        @param end Finishing date
@type end Long
@return: List of event logs
@rtype: List
"""
tm = self.getTimelineService()
p = omero.sys.Parameters()
p.map = {}
f = omero.sys.Filter()
f.limit = rint(100000)
f.ownerId = rlong(eid)
f.groupId = rlong(self.getEventContext().groupId)
p.theFilter = f
return tm.getEventLogsByPeriod(rtime(start), rtime(end), p)
#yield EventLogWrapper(self, e)
omero.gateway.BlitzGateway = OmeroWebGateway
class OmeroWebSafeCallWrapper(OmeroGatewaySafeCallWrapper): #pragma: no cover
"""
Function or method wrapper that handles L{Ice.ObjectNotExistException}
by re-creating the server side proxy.
"""
def handle_exception(self, e, *args, **kwargs):
if e.__class__ is Ice.ObjectNotExistException:
# Restored proxy object re-creation logic from the pre-#5835
# version of # _safeCallWrap() from omero.gateway. (See #6365)
logger.warn('Attempting to re-create proxy and re-call method.')
try:
self.proxyObjectWrapper._obj = \
self.proxyObjectWrapper._create_func()
func = getattr(self.proxyObjectWrapper._obj, self.attr)
return func(*args, **kwargs)
except Exception, e:
self.debug(e.__class__.__name__, args, kwargs)
raise
else:
super(OmeroWebSafeCallWrapper, self).handle_exception(
e, *args, **kwargs)
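# Assumed failure flow this wrapper guards against: a cached service proxy
# dies server-side, the next call raises Ice.ObjectNotExistException, the
# proxy is rebuilt via _create_func() and the original call is replayed once
# with the same arguments; any other exception falls through to the default
# handler.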
omero.gateway.SafeCallWrapper = OmeroWebSafeCallWrapper
class OmeroWebObjectWrapper (object):
annotation_counter = None
def countParents (self):
l = self.listParents()
if l is not None:
return len(l)
def countAnnotations (self):
"""
        Counts the annotations linked to the object and caches the value
        in the custom field 'annotation_counter'.
@return Counter
"""
if self.annotation_counter is not None:
return self.annotation_counter
else:
container = self._conn.getContainerService()
m = container.getCollectionCount(self._obj.__class__.__name__, type(self._obj).ANNOTATIONLINKS, [self._oid], None)
if m[self._oid] > 0:
self.annotation_counter = m[self._oid]
return self.annotation_counter
else:
return None
def warpName(self):
"""
        Wraps the object's name onto multiple lines if it is longer than
        30 characters.
        @return Wrapped string.
"""
try:
            l = len(self.name)
            if l < 30:
                return self.name
            chunks = []
            for v in range(0, l, 30):
                chunks.append(self.name[v:v + 30] + "\n")
            return "".join(chunks)
except:
logger.info(traceback.format_exc())
return self.name
class ExperimenterWrapper (OmeroWebObjectWrapper, omero.gateway.ExperimenterWrapper):
"""
    omero_model_ExperimenterI class wrapper overwrites omero.gateway.ExperimenterWrapper
    and extends OmeroWebObjectWrapper.
"""
def isEditable(self):
        return self.omeName.lower() not in ('guest',)
omero.gateway.ExperimenterWrapper = ExperimenterWrapper
class ExperimenterGroupWrapper (OmeroWebObjectWrapper, omero.gateway.ExperimenterGroupWrapper):
"""
    omero_model_ExperimenterGroupI class wrapper overwrites omero.gateway.ExperimenterGroupWrapper
    and extends OmeroWebObjectWrapper.
"""
def isEditable(self):
return self.name.lower() not in ('guest', 'user')
omero.gateway.ExperimenterGroupWrapper = ExperimenterGroupWrapper
class ProjectWrapper (OmeroWebObjectWrapper, omero.gateway.ProjectWrapper):
"""
    omero_model_ProjectI class wrapper overwrites omero.gateway.ProjectWrapper
    and extends OmeroWebObjectWrapper.
"""
annotation_counter = None
def __prepare__ (self, **kwargs):
super(ProjectWrapper, self).__prepare__(**kwargs)
if kwargs.has_key('annotation_counter'):
self.annotation_counter = kwargs['annotation_counter']
omero.gateway.ProjectWrapper = ProjectWrapper
class DatasetWrapper (OmeroWebObjectWrapper, omero.gateway.DatasetWrapper):
"""
    omero_model_DatasetI class wrapper overwrites omero.gateway.DatasetWrapper
and extends OmeroWebObjectWrapper.
"""
annotation_counter = None
def __prepare__ (self, **kwargs):
super(DatasetWrapper, self).__prepare__(**kwargs)
if kwargs.has_key('annotation_counter'):
self.annotation_counter = kwargs['annotation_counter']
if kwargs.has_key('link'):
self.link = kwargs.has_key('link') and kwargs['link'] or None
omero.gateway.DatasetWrapper = DatasetWrapper
class ImageWrapper (OmeroWebObjectWrapper, omero.gateway.ImageWrapper):
"""
    omero_model_ImageI class wrapper overwrites omero.gateway.ImageWrapper
and extends OmeroWebObjectWrapper.
"""
annotation_counter = None
def __prepare__ (self, **kwargs):
super(ImageWrapper, self).__prepare__(**kwargs)
if kwargs.has_key('annotation_counter'):
self.annotation_counter = kwargs['annotation_counter']
if kwargs.has_key('link'):
self.link = kwargs.has_key('link') and kwargs['link'] or None
"""
This override standard omero.gateway.ImageWrapper.getChannels
and catch exceptions.
"""
def getChannels (self):
try:
return super(ImageWrapper, self).getChannels()
except Exception, x:
logger.error('Failed to load channels:', exc_info=True)
return None
omero.gateway.ImageWrapper = ImageWrapper
class PlateWrapper (OmeroWebObjectWrapper, omero.gateway.PlateWrapper):
"""
    omero_model_PlateI class wrapper overwrites omero.gateway.PlateWrapper
and extends OmeroWebObjectWrapper.
"""
annotation_counter = None
def __prepare__ (self, **kwargs):
super(PlateWrapper, self).__prepare__(**kwargs)
if kwargs.has_key('annotation_counter'):
self.annotation_counter = kwargs['annotation_counter']
if kwargs.has_key('link'):
self.link = kwargs.has_key('link') and kwargs['link'] or None
def _loadPlateAcquisitions(self):
p = omero.sys.Parameters()
p.map = {}
p.map["pid"] = self._obj.id
sql = "select pa from PlateAcquisition as pa join fetch pa.plate as p where p.id=:pid"
self._obj._plateAcquisitionsSeq = self._conn.getQueryService().findAllByQuery(sql, p)
self._obj._plateAcquisitionsLoaded = True
def countPlateAcquisitions(self):
if self._obj.sizeOfPlateAcquisitions() < 0:
self._loadPlateAcquisitions()
return self._obj.sizeOfPlateAcquisitions()
def listPlateAcquisitions(self):
if not self._obj._plateAcquisitionsLoaded:
self._loadPlateAcquisitions()
for pa in self._obj.copyPlateAcquisitions():
yield PlateAcquisitionWrapper(self._conn, pa)
def getFields (self, pid=None):
"""
        Returns a tuple of the minimum and maximum indices of the well
        samples collection, optionally restricted to one plate acquisition.
"""
q = self._conn.getQueryService()
sql = "select minIndex(ws), maxIndex(ws) from Well w " \
"join w.wellSamples ws where w.plate.id=:oid"
p = omero.sys.Parameters()
p.map = {}
p.map["oid"] = self._obj.id
if pid is not None:
sql += " and ws.plateAcquisition.id=:pid"
p.map["pid"] = rlong(pid)
fields = None
try:
            res = [r for r in unwrap(q.projection(sql, p))[0] if r is not None]
if len(res) == 2:
fields = tuple(res)
except:
pass
return fields
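    # The projection yields a single row like [minIndex, maxIndex]; e.g. a
    # plate whose wells contain fields 0..3 returns (0, 3) (illustrative
    # values). `fields` stays None when the query fails or a bound is NULL.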
omero.gateway.PlateWrapper = PlateWrapper
class WellWrapper (OmeroWebObjectWrapper, omero.gateway.WellWrapper):
"""
    omero_model_WellI class wrapper overwrites omero.gateway.WellWrapper
    and extends OmeroWebObjectWrapper.
"""
annotation_counter = None
def __prepare__ (self, **kwargs):
super(WellWrapper, self).__prepare__(**kwargs)
if kwargs.has_key('annotation_counter'):
self.annotation_counter = kwargs['annotation_counter']
if kwargs.has_key('link'):
self.link = kwargs.has_key('link') and kwargs['link'] or None
omero.gateway.WellWrapper = WellWrapper
class PlateAcquisitionWrapper (OmeroWebObjectWrapper, omero.gateway.BlitzObjectWrapper):
"""
    omero_model_PlateAcquisitionI class wrapper extends
    omero.gateway.BlitzObjectWrapper and OmeroWebObjectWrapper.
"""
annotation_counter = None
def __bstrap__ (self):
self.OMERO_CLASS = 'PlateAcquisition'
def __prepare__ (self, **kwargs):
super(PlateAcquisitionWrapper, self).__prepare__(**kwargs)
if kwargs.has_key('annotation_counter'):
self.annotation_counter = kwargs['annotation_counter']
def getName (self):
name = super(PlateAcquisitionWrapper, self).getName()
if name is None:
if self.startTime is not None and self.endTime is not None:
name = "%s - %s" % (datetime.fromtimestamp(self.startTime/1000), datetime.fromtimestamp(self.endTime/1000))
else:
name = "Plate %i" % self.id
return name
name = property(getName)
def getFields (self):
"""
        Returns the number of fields (the maximum index of the well samples
        collection + 1), or -1 if the query fails.
"""
p = omero.sys.Parameters()
p.map = {}
p.map["oid"] = self._obj.id
q = self._conn.getQueryService()
sql = "select maxIndex(pa.wellSamples)+1 from PlateAcquisition as pa "\
"where pa.id=:oid"
try:
index = unwrap(q.projection(sql, p))[0][0]
except:
index = -1
return index
class ScreenWrapper (OmeroWebObjectWrapper, omero.gateway.ScreenWrapper):
"""
    omero_model_ScreenI class wrapper overwrites omero.gateway.ScreenWrapper
and extends OmeroWebObjectWrapper.
"""
annotation_counter = None
def __prepare__ (self, **kwargs):
super(ScreenWrapper, self).__prepare__(**kwargs)
if kwargs.has_key('annotation_counter'):
self.annotation_counter = kwargs['annotation_counter']
omero.gateway.ScreenWrapper = ScreenWrapper
class EventLogWrapper (omero.gateway.BlitzObjectWrapper):
"""
omero_model_EventLogI class wrapper extends omero.gateway.BlitzObjectWrapper.
"""
LINK_CLASS = "EventLog"
class ShareWrapper (omero.gateway.BlitzObjectWrapper):
"""
omero_model_ShareI class wrapper extends BlitzObjectWrapper.
"""
def getShareType(self):
if self.itemCount == 0:
return "Discussion"
else:
return "Share"
    def isEmpty(self):
        return self.itemCount == 0
def getExpireDate(self):
        #workaround for the year 2038 problem
try:
d = self.started+self.timeToLive
if d > 2051222400000:
return datetime(2035, 1, 1, 0, 0, 0)
return datetime.fromtimestamp(d / 1000)
except:
logger.info(traceback.format_exc())
return None
def getStartDate(self):
"""
Gets the start date of the share
@return: Start Date-time
@rtype: datetime object
"""
return datetime.fromtimestamp(self.getStarted()/1000)
def getExpirationDate(self):
"""
Gets the end date for the share
@return: End Date-time
@rtype: datetime object
"""
        #workaround for the year 2038 problem
        try:
            d = self.started+self.timeToLive
            if d > 2051222400000: # d is in milliseconds, matching getExpireDate
return datetime(2035, 1, 1, 0, 0, 0)
return datetime.fromtimestamp(d / 1000)
except:
logger.info(traceback.format_exc())
return None
def isExpired(self):
"""
Returns True if we are past the end date of the share
@return: True if share expired
@rtype: Boolean
"""
        #workaround for the year 2038 problem
        now = time.time()
        try:
            d = long(self.started + self.timeToLive)
            return (d / 1000) <= now
except:
logger.info(traceback.format_exc())
return None
def isOwned(self):
"""
Returns True if share is owned by the current user
@return: True if owned
@rtype: Boolean
"""
try:
if self.owner.id.val == self._conn.getEventContext().userId:
return True
except:
logger.error(traceback.format_exc())
return False
def getOwner(self):
"""
The owner of this share
@return: Owner
@rtype: L{ExperimenterWrapper}
"""
        return omero.gateway.ExperimenterWrapper(self._conn, self.owner)
# IMPORTANT to update the map of wrappers 'project', 'dataset', 'image' etc. returned by getObjects()
omero.gateway.refreshWrappers()
omero.gateway.KNOWN_WRAPPERS.update({"plateacquisition":PlateAcquisitionWrapper})
| gpl-2.0 |
ayushagrawal288/zamboni | mkt/commonplace/urls.py | 7 | 3562 | from django.conf import settings
from django.conf.urls import include, patterns, url
import mkt
from . import views
def fireplace_route(path, name=None):
"""
Helper function for building Fireplace URLs. `path` is the URL route,
and `name` (if specified) is the name given to the route.
"""
kwargs = {}
if name:
kwargs['name'] = name
return url('^%s$' % path, views.commonplace, {'repo': 'fireplace'},
**kwargs)
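# For example (illustrative only), fireplace_route('settings', 'settings')
# expands to:
#   url('^settings$', views.commonplace, {'repo': 'fireplace'},
#       name='settings')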
fireplace_reviews_patterns = patterns(
'',
fireplace_route('flag', 'ratings.flag'),
fireplace_route('delete', 'ratings.delete'),
)
fireplace_app_patterns = patterns(
'',
fireplace_route('', 'detail'),
fireplace_route('abuse', 'detail.abuse'),
fireplace_route('privacy', 'detail.privacy'),
fireplace_route('recommended', 'recommended'),
fireplace_route('reviews/', 'ratings.list'),
fireplace_route('reviews/add', 'ratings.add'),
    url(r'^(?P<review_id>\d+)/', include(fireplace_reviews_patterns)),
)
fireplace_website_patterns = patterns(
'',
fireplace_route('', 'website.detail'),
)
urlpatterns = patterns(
'',
# Fireplace:
url('^$', views.commonplace, {'repo': 'fireplace'}, name='home'),
url('^server.html$', views.commonplace, {'repo': 'fireplace'},
name='commonplace.fireplace'),
url('^fxa-authorize$', views.fxa_authorize,
name='commonplace.fxa_authorize'),
(r'^app/%s/' % mkt.APP_SLUG, include(fireplace_app_patterns)),
(r'^website/(?P<pk>\d+)', include(fireplace_website_patterns)),
url(r'^iframe-install.html/?$', views.iframe_install,
name='commonplace.iframe-install'),
url(r'^potatolytics.html$', views.potatolytics,
name='commonplace.potatolytics'),
# Commbadge:
url('^comm/app/%s$' % mkt.APP_SLUG, views.commonplace,
{'repo': 'commbadge'},
name='commonplace.commbadge.app_dashboard'),
    url(r'^comm/thread/(?P<thread_id>\d+)$', views.commonplace,
{'repo': 'commbadge'},
name='commonplace.commbadge.show_thread'),
url('^comm/.*$', views.commonplace, {'repo': 'commbadge'},
name='commonplace.commbadge'),
# Transonic:
url('^curate/.*$', views.commonplace, {'repo': 'transonic'},
name='commonplace.transonic'),
# Stats:
url('^statistics/app/%s$' % mkt.APP_SLUG, views.commonplace,
{'repo': 'marketplace-stats'},
name='commonplace.stats.app_dashboard'),
url('^statistics/.*$', views.commonplace, {'repo': 'marketplace-stats'},
name='commonplace.stats'),
# Operator Dashboard:
url('^operators/.*$', views.commonplace,
{'repo': 'marketplace-operator-dashboard'},
name='commonplace.operatordashboard'),
# Submission:
url('^submission/.*$', views.commonplace,
{'repo': 'marketplace-submission'},
name='commonplace.submission'),
)
if settings.DEBUG:
# More Fireplace stuff, only for local dev:
urlpatterns += patterns(
'',
fireplace_route('category/.*'),
fireplace_route('categories'),
fireplace_route('collection/.*'),
fireplace_route('debug'),
fireplace_route('feed/.*'),
fireplace_route('feedback'),
fireplace_route('fxa-authorize'),
fireplace_route('new'),
fireplace_route('popular'),
fireplace_route('privacy-policy'),
fireplace_route('purchases'),
fireplace_route('search/?'),
fireplace_route('settings'),
fireplace_route('terms-of-use'),
fireplace_route('tests'),
)
| bsd-3-clause |
graingert/isort | kate_plugin/isort_plugin.py | 12 | 3174 | """ Sorts Python import definitions, and groups them based on type (stdlib, third-party, local).
isort/isort_kate_plugin.py
Provides a simple kate plugin that enables the use of isort to sort Python imports
in the currently open kate file.
Copyright (C) 2013 Timothy Edmund Crosley
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated
documentation files (the "Software"), to deal in the Software without restriction, including without limitation
the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and
to permit persons to whom the Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all copies or
substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED
TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF
CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
OTHER DEALINGS IN THE SOFTWARE.
"""
import os
import kate
from isort import SortImports
try:
from PySide import QtGui
except ImportError:
from PyQt4 import QtGui
def sort_kate_imports(add_imports=(), remove_imports=()):
"""Sorts imports within Kate while maintaining cursor position and selection, even if length of file changes."""
document = kate.activeDocument()
view = document.activeView()
position = view.cursorPosition()
selection = view.selectionRange()
sorter = SortImports(file_contents=document.text(), add_imports=add_imports, remove_imports=remove_imports,
settings_path=os.path.dirname(os.path.abspath(str(document.url().path()))))
document.setText(sorter.output)
position.setLine(position.line() + sorter.length_change)
if selection:
start = selection.start()
start.setLine(start.line() + sorter.length_change)
end = selection.end()
end.setLine(end.line() + sorter.length_change)
selection.setRange(start, end)
view.setSelection(selection)
view.setCursorPosition(position)
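# Note: sorter.length_change is the line-count delta produced by isort;
# adding it to the cursor and selection lines keeps the view anchored to the
# same code even when the import block grows or shrinks.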
@kate.action
def sort_imports():
"""Sort Imports"""
sort_kate_imports()
@kate.action
def add_imports():
"""Add Imports"""
text, ok = QtGui.QInputDialog.getText(None,
'Add Import',
'Enter an import line to add (example: from os import path or os.path):')
if ok:
sort_kate_imports(add_imports=text.split(";"))
@kate.action
def remove_imports():
"""Remove Imports"""
text, ok = QtGui.QInputDialog.getText(None,
'Remove Import',
'Enter an import line to remove (example: os.path or from os import path):')
if ok:
sort_kate_imports(remove_imports=text.split(";"))
| mit |
baldengineers/mapper | main.py | 1 | 70755 | #easy cs:go mapper: counter-strike: global offensive port of the easy tf2 mapper
#
#in development, not at a working stage.
#DIFFERENCES:
#more prefabrications
#more sections (subsections?)
#improved UI
#improved file count
#multi-game system
# program boots up and variables are set which change what game the program utilizes
# (set up after dialog with radio button + grid size is chosen)
# grid size of createprefab, how skybox renderings, skybox textures, light vars, window titles, file directories, etc.
#move all prefabs on grid
# if we can make a new grid system widget
#
#important:
#move all variable definitions that need changing based off game selection
#to a separate function which runs after dialog
#make the grid size dialog run before everything else. make it its own separate class that
#runs before mainwindow
import sys
#move this to after initial dialog
import os.path
import os
from PySide.QtCore import *
from PySide.QtGui import *
import importlib
import createPrefab
import pf
from PIL import Image
from PIL.ImageQt import ImageQt
import generateSkybox
import light_create
import export
import subprocess
import pickle
import pprint
import random
import glob
import webbrowser
import wave
import zipfile
import shutil
import winsound
import GridWidget
class GridBtn(QWidget):
def __init__(self, parent, x, y, btn_id):
super(GridBtn, self).__init__()
self.button = QPushButton("", parent)
self.x,self.y = x,y
self.btn_id = btn_id
self.button.resize(32,32)
self.button.setFixedSize(32, 32)
self.button.pressed.connect(lambda: self.click_func(parent, x, y,btn_id))
self.button.installEventFilter(self)
self.button.show()
self.icons = None
parent.progress += 100/(parent.grid_x*parent.grid_y)
parent.progressBar.setValue(parent.progress)
def reset_icon(self):
self.button.setIcon(QIcon(""))
    def click_func(self, parent, x, y, btn_id, clicked=True, h_moduleName="None", h_icon=''): # h_moduleName and h_icon are used when undoing/redoing
current_list = eval('parent.tile_list%s' % str(parent.list_tab_widget.currentIndex()+1))
#format | history.append((x,y,moduleName,self.icon,level))
if clicked:
parent.redo_history=[]
if self.icons:
moduleName = eval(parent.prefab_list[parent.list_tab_widget.currentIndex()][parent.current_list.currentRow()])
templist=[(x,y,moduleName,self.icons,None)]
else:
templist=[(x,y,None,None,None)]
def clear_btn(btn_id):
self.button.setIcon(QIcon())
for l in [parent.totalblocks,parent.entity_list,parent.stored_info_list]:
l[btn_id] = ''
parent.iconlist[btn_id] = ('','')
self.icons = None
if self.checkForCtrl(clicked):
clear_btn(btn_id)
else:
if clicked:
if parent.ymin == None or parent.xmin == None:
parent.ymin,parent.xmin = y,x
else:
if y < parent.ymin:
parent.ymin = y
if x < parent.xmin:
parent.xmin = x
if y > parent.ymax:
parent.ymax = y
if x > parent.xmax:
parent.xmax = x
moduleName = eval(parent.prefab_list[parent.list_tab_widget.currentIndex()][parent.current_list.currentRow()])
else:
                moduleName = h_moduleName if h_moduleName is not None else clear_btn(btn_id)
            if h_moduleName is not None:
if clicked:
icon = parent.cur_icon
else:
icon = h_icon
self.button.setIcon(QIcon(icon))
self.button.setIconSize(QSize(32,32))
parent.iconlist[btn_id] = [icon]
parent.stored_info_list[btn_id] = [moduleName,x,y,parent.id_num,parent.world_id_num,parent.entity_num,parent.placeholder_list,parent.rotation]
self.icons = icon
else:
parent.stored_info_list[btn_id] = ""
if "*" not in parent.windowTitle():
parent.setWindowTitle("Easy "+parent.gameVar+" Mapper* - ["+parent.currentfilename+"]")
if clicked:
templist.append((x,y,moduleName,self.icons,None))
parent.history.append(templist)
def checkForCtrl(self, clicked):
if clicked:
modifiers = QApplication.keyboardModifiers()
if modifiers == Qt.ControlModifier:
return True
else:
return False
else:
return False
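    # e.g. a Ctrl+click on a placed tile makes click_func() take the
    # clear_btn() branch, erasing that grid square instead of placing the
    # currently selected prefab.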
class MainWindow(QMainWindow):
def __init__(self):
super(MainWindow, self).__init__()
#QApplication.setStyle(QStyleFactory.create("Cleanlooks")) #comment out if unwanted
#define some variables used throughout the class
self.level = 0
self.levels = 0
self.id_num = 1
self.world_id_num = 2
self.rotation = 0
self.entity_num = 1
self.btn_id_count = 0
self.grid_list=[]
self.totalblocks = []
self.skybox_list=[]
self.last_tuple = 'First'
self.skybox_light_list=[]
self.iconlist = []
self.cur_icon = ""
self.rotation_icon_list=[]
self.skybox_angle_list=[]
self.skybox_icon_list=[]
self.gridsize = []
self.count_btns = 0
self.entity_list=[]
self.save_dict = {}
self.load_dict = {}
self.stored_info_list=[]
#tabs should be more reusable
#for example: the following lists should be this instead:
#self.prefab_list = [[] for i in self.tabs] where self.tabs is the # of tabs
#i.e. We should be able to create new tabs whenever we want, just by
#changing the self.tabs variable.
self.prefab_list = [[],[],[]]
self.prefab_text_list = [[],[],[]]
self.prefab_icon_list = [[],[],[]]
self.openblocks=[]
self.placeholder_list = []
self.history = []
self.redo_history = []
self.currentfilename='Untitled'
self.file_loaded = False
self.current_loaded = ''
self.latest_path='/'
self.isTF = True
self.TLBool = False
self.SLBool = False
self.BRBool = False
#initial startup/gridchange window
initWindow = GridChangeWindow(self, True)
values = initWindow.returnVal()
#tell which game was chosen on launch
if self.isTF:
self.gameVar,self.gameDirVar = "TF2","tf2/"
else:
self.gameVar,self.gameDirVar = "CS:GO","csgo/"
self.TFFormat() if self.isTF else self.CSFormat()
util_list = [createPrefab,light_create,generateSkybox,export]
for util in util_list:
util.setGameDirVar(self.gameDirVar)
#create the main window
self.setGeometry(100, 25, 875, 750)
self.setWindowTitle("Easy "+self.gameVar+" Mapper")
self.setWindowIcon(QIcon("icons\icon.ico"))
#removed for now to see how gui looks without it
## if self.isTF:
## namelist = ['gravelpit','2fort','upward','mvm']
## palette = QPalette()
## palette.setBrush(QPalette.Background,QBrush(QPixmap(self.gameDirVar+"icons/backgrounds/background_"+namelist[random.randint(0,3)]+".jpg")))
## self.setPalette(palette)
#create menubar
exitAction = QAction("&Exit", self)
exitAction.setShortcut("Ctrl+Q")
exitAction.setStatusTip("Exit Application")
exitAction.triggered.connect(self.close_application)
openAction = QAction("&Open", self)
openAction.setShortcut("Ctrl+O")
openAction.setStatusTip("Open .vmf file")
openAction.triggered.connect(self.file_open)
saveAction = QAction("&Save", self)
saveAction.setShortcut("Ctrl+S")
saveAction.setStatusTip("Save File as .ezm save, allowing for use by others/you later.")
saveAction.triggered.connect(self.file_save)
saveAsAction = QAction("&Save As", self)
saveAsAction.setShortcut("Ctrl+Shift+S")
saveAsAction.setStatusTip("Save File as .ezm save, allowing for use by others/you later.")
saveAsAction.triggered.connect(lambda: self.file_save(False, True))
helpAction = QAction("&Wiki",self)
helpAction.triggered.connect(lambda: webbrowser.open_new_tab('http://github.com/baldengineers/easytf2_mapper/wiki'))
tutorialAction = QAction("&Reference Guide",self)
tutorialAction.setStatusTip("Quick reference guide on the Mapper website.")
tutorialAction.triggered.connect(lambda: webbrowser.open_new_tab('http://tf2mapper.com/tutorial.html'))
newAction = QAction("&New", self)
newAction.setShortcut("Ctrl+n")
newAction.setStatusTip("Create a New File")
newAction.triggered.connect(self.grid_change)
hammerAction = QAction("&Open Hammer",self)
hammerAction.setShortcut("Ctrl+H")
hammerAction.setStatusTip("Opens up Hammer.")
hammerAction.triggered.connect(lambda: self.open_hammer(0,"null"))
changeHammer = QAction("&Change Hammer Directory",self)
changeHammer.setShortcut("Ctrl+Shift+H")
changeHammer.setStatusTip("Changes default hammer directory.")
changeHammer.triggered.connect(lambda: self.open_hammer(0,"null",True))
changeLightAction = QAction("&Change Lighting", self)
changeLightAction.setShortcut("Ctrl+J")
changeLightAction.setStatusTip("Change the environment lighting of the map.")
changeLightAction.triggered.connect(self.change_light)
exportAction = QAction("&as .VMF", self)
exportAction.setShortcut("Ctrl+E")
exportAction.setStatusTip("Export as .vmf")
exportAction.triggered.connect(self.file_export)
undoAction = QAction("&Undo", self)
undoAction.setShortcut("Ctrl+Z")
undoAction.setStatusTip("Undo previous action")
undoAction.triggered.connect(lambda: self.undo(True))
redoAction = QAction("&Redo", self)
redoAction.setShortcut("Ctrl+Shift+Z")
redoAction.setStatusTip("Redo previous action")
redoAction.triggered.connect(lambda: self.undo(False))
gridAction = QAction("&Set Grid Size", self)
gridAction.setShortcut("Ctrl+G")
gridAction.setStatusTip("Set Grid Height and Width. RESETS ALL BLOCKS.")
gridAction.triggered.connect(self.grid_change) #change so it just makes grid bigger/smaller, not erase all blocks, or else it would just do the same exact thing as making a new file
createPrefabAction = QAction("&Create Prefab", self)
createPrefabAction.setShortcut("Ctrl+I")
createPrefabAction.setStatusTip("View the readme for a good idea on formatting Hammer Prefabs.")
createPrefabAction.triggered.connect(self.create_prefab)
consoleAction = QAction("&Open Dev Console", self)
consoleAction.setShortcut("`")
consoleAction.setStatusTip("Run functions/print variables manually")
consoleAction.triggered.connect(self.open_console)
changeSkybox = QAction("&Change Skybox", self)
changeSkybox.setStatusTip("Change the skybox of the map.")
changeSkybox.setShortcut("Ctrl+B")
changeSkybox.triggered.connect(self.change_skybox)
importPrefab = QAction("&Prefab",self)
importPrefab.setStatusTip("Import a prefab in a .zip file. You can find some user-made ones at http://tf2mapper.com")
importPrefab.setShortcut("Ctrl+Shift+I")
importPrefab.triggered.connect(self.import_prefab)
bspExportAction = QAction("&as .BSP",self)
bspExportAction.setStatusTip("Export as .bsp")
bspExportAction.setShortcut("Ctrl+Shift+E")
bspExportAction.triggered.connect(self.file_export_bsp)
mainMenu = self.menuBar()
fileMenu = mainMenu.addMenu("&File")
editMenu = mainMenu.addMenu("&Edit")
optionsMenu = mainMenu.addMenu("&Options")
toolsMenu = mainMenu.addMenu("&Tools")
helpMenu = mainMenu.addMenu("&Help")
fileMenu.addAction(newAction)
fileMenu.addAction(openAction)
fileMenu.addAction(saveAction)
fileMenu.addAction(saveAsAction)
fileMenu.addSeparator()
importMenu = fileMenu.addMenu("&Import")
importMenu.addAction(importPrefab)
exportMenu = fileMenu.addMenu("&Export")
exportMenu.addAction(exportAction)
exportMenu.addAction(bspExportAction)
fileMenu.addSeparator()
editMenu.addAction(undoAction)
editMenu.addAction(redoAction)
fileMenu.addAction(exitAction)
optionsMenu.addAction(gridAction)
optionsMenu.addAction(changeSkybox)
optionsMenu.addAction(changeHammer)
toolsMenu.addAction(createPrefabAction)
toolsMenu.addAction(hammerAction)
toolsMenu.addSeparator()
toolsMenu.addAction(consoleAction)
helpMenu.addAction(tutorialAction)
helpMenu.addAction(helpAction)
#create the status bar
self.status = QStatusBar(self)
self.setStatusBar(self.status)
#perform some necessary functions for startup of program
self.home()
self.grid_change_func(values[0], values[1], values[2])
#self.change_skybox()
#self.level_select()
def TFFormat(self):
print('TF2 version of the mapper loading!')
sys.path.append(self.gameDirVar+"prefabs/")
self.currentlight = '''
entity
{
"id" "world_idnum"
"classname" "light_environment"
"_ambient" "255 255 255 100"
"_ambientHDR" "-1 -1 -1 1"
"_AmbientScaleHDR" "1"
"_light" "CURRENT_LIGHT"
"_lightHDR" "-1 -1 -1 1"
"_lightscaleHDR" "1"
"angles" "CURRENT_ANGLE"
"pitch" "0"
"SunSpreadAngle" "0"
"origin" "0 0 73"
editor
{
"color" "220 30 220"
"visgroupshown" "1"
"visgroupautoshown" "1"
"logicalpos" "[0 500]"
}
}
'''
#skybox default needs to be based off game chosen
self.skybox = 'sky_tf2_04'
#skyboxlight = '255 255 255 200'
#skyboxangle = '0 0 0'
#if the user does not change the lighting, it sticks with this.
#if the user does not choose a skybox it sticks with this
#self.prefab_file = open(self.gameDirVar+"prefab_template/prefab_list.txt")
#self.prefab_text_file = open(self.gameDirVar+"prefab_template/prefab_text_list.txt")
#self.prefab_icon_file = open(self.gameDirVar+"prefab_template/prefab_icon_list.txt")
self.prefab_file = pickle.load(open(self.gameDirVar+"prefabs/pfinfo.ezmd","rb"))
self.skybox_file = open(self.gameDirVar+"prefab_template/skybox_list.txt")
self.skybox_icon = open(self.gameDirVar+"prefab_template/skybox_icons.txt")
self.skybox_light = open(self.gameDirVar+"prefab_template/skybox_light.txt")
self.skybox_angle = open(self.gameDirVar+"prefab_template/skybox_angle.txt")
for main_index,file in enumerate(["prefab_list","prefab_icon_list","prefab_text_list"]):
for index,line in enumerate(self.prefab_file[main_index+1]):
eval("self."+file+"""[int(self.prefab_file[0][index])].append(line)""")# need to do this because reading the file generates a \n after every line
section = 0
self.rotation_icon_list = []
self.index_section_list = [0]
self.rotation_icon_list.append([])
#print(rotation_icon_list)
for line in self.skybox_file.readlines():
self.skybox_list.append(line[:-1] if line.endswith("\n") else line)# need to do this because reading the file generates a \n after every line
for line in self.skybox_icon.readlines():
self.skybox_icon_list.append(line[:-1] if line.endswith("\n") else line)
for line in self.skybox_light.readlines():
self.skybox_light_list.append(line[:-1] if line.endswith("\n") else line)
for line in self.skybox_angle.readlines():
self.skybox_angle_list.append(line[:-1] if line.endswith("\n") else line)
for file in [self.skybox_file,self.skybox_icon,self.skybox_angle,self.skybox_light]:
file.close()
print(self.prefab_list)
#imports that need prefab_list to be defined
for sec in self.prefab_list:
for item in sec:
if item:
globals()[item] = importlib.import_module(item)
print("import", item)
self.save_dict[item]=eval(item)
self.load_dict[eval(item)]=item
logo = open('logo.log','r+')
logo_f = logo.readlines()
for i in logo_f:
print(i[:-1])
logo.close()
print("\n~~~~~~~~~~~~~~~~~~~~~\nMapper loaded! You may have to alt-tab to find the input values dialog.\n")
def CSFormat(self):
#for cs area
pass
def open_hammer(self,loaded,file,reloc = False):
self.open_file()
if "loaded_first_time" not in self.files or reloc:
self.file.close()
self.open_file(True)
hammer_location = QFileDialog.getOpenFileName(self, "Find Hammer Location", "/","Hammer Executable (*.exe *.bat)")
hammer_location = str(hammer_location[0])
self.file.write("loaded_first_time\n")
self.file.write(hammer_location)
self.file.close()
if loaded == 1:
subprocess.Popen(hammer_location +" "+ file)
else:
subprocess.Popen(hammer_location)
else:
if os.path.isfile(self.fileloaded[1]):
if loaded == 1:
subprocess.Popen(self.fileloaded[1] + " "+file)
else:
subprocess.Popen(self.fileloaded[1])
else:
                # Hammer executable is missing; warn the user, clear the
                # cached path and re-prompt for its location
                self.notFound = QMessageBox()
                self.notFound.setText("ERROR!")
                self.notFound.setInformativeText("Hammer executable/batch moved or renamed! (or something else went wrong...)")
                self.notFound.exec_()
                self.file.close()
                os.remove(self.gameDirVar+"startupcache/startup.su")
self.open_hammer(0,"null")
def open_file(self,reloc = False):
if reloc:
os.remove(self.gameDirVar+"startupcache/startup.su")
if os.path.isfile(self.gameDirVar+"startupcache/startup.su"):
self.file = open(self.gameDirVar+"startupcache/startup.su", "r+")
else:
self.file = open(self.gameDirVar+"startupcache/startup.su", "w+")
self.fileloaded = self.file.readlines()
self.files = "".join(self.fileloaded)
def closeEvent(self, event):
#closeEvent runs close_application when the x button is pressed
event.ignore()
self.close_application()
def home(self):
global levels, current_list
self.xmin = None
self.ymin = None
self.xmax = 0
self.ymax = 0
self.central_widget = QWidget()
self.setCentralWidget(self.central_widget)
self.scrollArea = QScrollArea()
self.current = QPushButton("",self)
self.current.setIcon(QIcon(''))
self.current.setIconSize(QSize(40,40))
self.current.setFixedSize(QSize(40,40))
self.current.setFlat(True)
self.rotateCW = QToolButton(self)
self.rotateCW.setShortcut(QKeySequence(Qt.Key_Right))
self.rotateCW.setIcon(QIcon('icons/rotate_cw.png'))
self.rotateCW.setIconSize(QSize(40,40))
self.rotateCW.setFixedSize(QSize(40,40))
self.rotateCW.setAutoRaise(True)
self.rotateCCW = QToolButton(self)
self.rotateCCW.setShortcut(QKeySequence(Qt.Key_Left))
self.rotateCCW.setIcon(QIcon('icons/rotate_ccw.png'))
self.rotateCCW.setIconSize(QSize(40,40))
self.rotateCCW.setFixedSize(QSize(40,40))
self.rotateCCW.setAutoRaise(True)
#sets rotation value. 0 = right, 1 = down, 2 = left, 3 = right
self.rotateCW.clicked.connect(self.rotateCW_func)
self.rotateCCW.clicked.connect(self.rotateCCW_func)
self.button_rotate_layout = QHBoxLayout()
self.button_rotate_layout.addWidget(self.rotateCCW)
self.button_rotate_layout.addWidget(self.current)
self.button_rotate_layout.addWidget(self.rotateCW)
self.button_rotate_layout.addStretch(1)
#add the main tool bar
self.skyboxAction = QAction(QIcon('icons/sky.png'), "Change Skybox", self)
self.skyboxAction.triggered.connect(self.loadSkyboxList)
self.tileListAction = QAction(QIcon('icons/tile_list.png'), "Re-open Tile list", self)
self.tileListAction.triggered.connect(self.loadTileList)
self.rotateDockAction = QAction(QIcon('icons/rotate_dock.png'), "Re-open Rotation Dock", self)
self.rotateDockAction.triggered.connect(self.loadButtonRotate)
self.mainToolBar = self.addToolBar("Main")
self.mainToolBar.addAction(self.skyboxAction)
self.mainToolBar.addAction(self.tileListAction)
self.mainToolBar.addAction(self.rotateDockAction)
#add the many sections of the tile_list
self.tile_list1 = QListWidget()
self.tile_list2 = QListWidget()
self.tile_list3 = QListWidget()
self.current_list = self.tile_list1
for l in [self.tile_list1, self.tile_list2, self.tile_list3]:
l.setDragEnabled(True)
self.gui_skybox_list = QListWidget()
#print(self.skybox_icon_list)
self.gui_skybox_list.setIconSize(QSize(140, 20))
self.gui_skybox_list.setMaximumWidth(160)
for index, text in enumerate(self.skybox_list):
item = QListWidgetItem(QIcon(self.gameDirVar+self.skybox_icon_list[index]),'')
self.gui_skybox_list.addItem(item)
self.list_tab_widget = QTabWidget()
self.list_tab_widget.setMaximumWidth(200)
self.list_tab_widget.addTab(self.tile_list1,'Geometry')
self.list_tab_widget.addTab(self.tile_list2,'Map Layout')
self.list_tab_widget.addTab(self.tile_list3,'Fun')
self.list_tab_widget.currentChanged.connect(self.changeCurrentList)
print("len:", self.list_tab_widget.count())
#add the prefab tools
self.up_tool_btn = QToolButton(self)
self.up_tool_btn.setIcon(QIcon('icons/up.png'))
self.up_tool_btn.clicked.connect(self.prefab_list_up)
self.down_tool_btn = QToolButton(self)
self.down_tool_btn.setIcon(QIcon('icons/down.png'))
self.down_tool_btn.clicked.connect(self.prefab_list_down)
self.del_tool_btn = QToolButton(self)
self.del_tool_btn.setIcon(QIcon('icons/delete.png'))
self.del_tool_btn.clicked.connect(lambda: self.prefab_list_del(self.current_list.currentRow()))
self.add_tool_btn = QToolButton(self)
self.add_tool_btn.setIcon(QIcon('icons/add.png'))
self.add_tool_btn.clicked.connect(self.create_prefab)
self.tile_toolbar = QToolBar()
for t in [self.up_tool_btn,self.down_tool_btn,self.del_tool_btn,self.add_tool_btn]:
self.tile_toolbar.addWidget(t)
self.tile_toolbar.addSeparator()
for index, text in enumerate(self.prefab_text_list):
for ind, indiv in enumerate(text):
curr_list = eval("self.tile_list%d" % (index+1))
item = QListWidgetItem(QIcon(self.gameDirVar+self.prefab_icon_list[index][ind]), indiv)
curr_list.addItem(item)
for i in range(self.list_tab_widget.count()):
eval("self.tile_list%d" %(i+1)).currentItemChanged.connect(self.changeIcon)
#contains label and list vertically
self.tile_list_layout = QVBoxLayout()
#self.tile_list_layout.addWidget(self.listLabel)
self.tile_list_layout.addWidget(self.list_tab_widget)
#self.tile_list_layout.addWidget(self.toolsLabel)
self.tile_list_layout.addWidget(self.tile_toolbar)
self.button_grid_layout = QGridLayout()
self.button_grid_layout.setSpacing(0)
self.grid_widget = QWidget()
self.grid_widget.setLayout(self.button_grid_layout)
self.scrollArea.setWidget(self.grid_widget)
self.scrollArea.setWidgetResizable(True)
self.button_rotate_widget = QWidget()
self.button_rotate_widget.setLayout(self.button_rotate_layout)
self.tile_list_widget = QWidget()
self.tile_list_widget.setLayout(self.tile_list_layout)
self.loadTileList(True)
self.loadSkyboxList(True)
self.loadButtonRotate(True)
self.column = QHBoxLayout()
self.column.addWidget(self.scrollArea)
self.row = QVBoxLayout(self.central_widget)
self.row.addLayout(self.column)
#TESTING
from classes import PrefabItem, ListGroup
#grid for placing prefabs
self.grid = GridWidget.GridWidget(20,20,self)
self.grid_container = GridWidget.GridWidgetContainer(self.grid)
self.grid_dock = QDockWidget("Grid", self)
self.grid_dock.setWidget(self.grid_container)
self.grid_dock.setFloating(True)
#define various lists
self.tile_list1 = QListWidget()
self.tile_list2 = QListWidget()
self.tile_list3 = QListWidget()
#add items to self.tab_dict and everything will update
self.tab_dict = {"Geometry":self.tile_list1, "Map Layout":self.tile_list2, "Fun/Other":self.tile_list3}
self.list_group = ListGroup([l for _, l in self.tab_dict.items()])
def set_cur_prefab(item):
self.grid.cur_prefab = item.prefab
for _, tile_list in self.tab_dict.items():
tile_list.itemClicked.connect(set_cur_prefab)
#add prefabs to the lists
with open("tf2/prefabs.dat", "rb") as f:
l = pickle.load(f)
for p in l:
prefab = pf.Prefab(p)
self.tab_dict[prefab.section].addItem(PrefabItem(prefab))
#create tabwidget for the lists
self.list_tab_widget = QTabWidget()
self.list_tab_widget.addTab(self.tab_dict['Geometry'],'Geometry')
self.list_tab_widget.addTab(self.tab_dict['Map Layout'],'Map Layout')
self.list_tab_widget.addTab(self.tab_dict['Fun/Other'],'Fun/Other')
#create dock for the tab widget
self.prefab_dock = QDockWidget("Prefabs", self)
self.prefab_dock.setWidget(self.list_tab_widget)
self.prefab_dock.setFloating(True)
#create buttons for the tools
self.grid_tools_ag = QActionGroup(self)
self.add_prefab_action = QAction(QIcon("icons/add_prefab.png"), "Add a prefab to the grid", self.grid_tools_ag)
self.add_prefab_action.toggled.connect(self.grid.enableAddPrefab)
self.select_action = QAction(QIcon("icons/select_move.png"), "Select Prefabs", self.grid_tools_ag)
self.select_action.toggled.connect(self.grid.enableSelect)
self.grid_tools = QToolBar()
self.grid_tools.setOrientation(Qt.Vertical)
self.addToolBar(Qt.LeftToolBarArea, self.grid_tools)
for act in [self.add_prefab_action,self.select_action]:
act.setCheckable(True)
self.grid_tools.addAction(act)
self.add_prefab_action.setChecked(True) #set the default button checked
def file_export():
for p in self.grid.prefabs:
                p.prefab.create(p.posx, p.posy, self.grid.prefab_scale, self.rotation)
## self.grid_tool_dock = QDockWidget("Tools", self)
## self.grid_tool_dock.setWidget(self.grid_tools)
## self.grid_tool_dock.setFloating(True)
self.addDockWidget(Qt.LeftDockWidgetArea, self.skybox_list_dock)
#END TESTING
if os.path.isfile(self.gameDirVar+'startupcache/firsttime.su'):
f = open(self.gameDirVar+'startupcache/firsttime.su', 'r+')
lines = f.readlines()
else:
f = open(self.gameDirVar+'startupcache/firsttime.su','w+')
lines = f.readlines()
if "startup" not in lines:
QMessageBox.information(self, "First Launch", "First Launch!\n\nYou haven't launched this before! Try looking at the <a href=\"https://github.com/baldengineers/easytf2_mapper/wiki/Texture-bug\">wiki</a> for help!")
f.write("startup")
f.close()
#WILL ONLY WORK IN REDIST FORM
else:
pass
self.show()
def loadSkyboxList(self,startup=False):
if not self.SLBool:
self.skybox_list_dock = QDockWidget("Skybox List", self)
self.skybox_list_dock.visibilityChanged.connect(self.toggleSLBool)
self.skybox_list_dock.setWidget(self.gui_skybox_list)
self.skybox_list_dock.setFloating(False)
self.addDockWidget(Qt.LeftDockWidgetArea, self.skybox_list_dock)
    def toggleSLBool(self):
        self.SLBool = not self.SLBool
def loadTileList(self,startup=False):
if not self.TLBool:
self.tile_list_dock = QDockWidget("Prefab List", self)
self.tile_list_dock.visibilityChanged.connect(self.toggleTLBool)
self.tile_list_dock.setWidget(self.tile_list_widget)
self.tile_list_dock.setFloating(False)
self.addDockWidget(Qt.RightDockWidgetArea, self.tile_list_dock)
#if startup:
#self.TLBool = True
    def toggleTLBool(self):
        self.TLBool = not self.TLBool
def loadButtonRotate(self,startup = False):
if not self.BRBool:
self.button_rotate_dock = QDockWidget("Current Prefab", self)
self.button_rotate_dock.visibilityChanged.connect(self.toggleBRBool)
self.button_rotate_dock.setWidget(self.button_rotate_widget)
self.button_rotate_dock.setFloating(False)
self.addDockWidget(Qt.LeftDockWidgetArea,self.button_rotate_dock)
#if startup:
#self.BRBool = True
#i am.... the top dock
# ^
# |
#this comment is perfect and i will leave it in because the pun is wasted because it's no longer on the top dock widget area
    def toggleBRBool(self):
        self.BRBool = not self.BRBool
def changeCurrentList(self):
print("current list: tile_list%s" % str(self.list_tab_widget.currentIndex()+1))
        self.current_list = getattr(self, 'tile_list%d' % (self.list_tab_widget.currentIndex()+1))
    def rotateCW_func(self):
        self.rotation = (self.rotation + 1) % 4
        self.changeIcon()
    def rotateCCW_func(self):
        self.rotation = (self.rotation - 1) % 4
        self.changeIcon()
def prefab_list_up(self):
        self.current_list = getattr(self, 'tile_list%d' % (self.list_tab_widget.currentIndex()+1))
currentRow = self.current_list.currentRow()
if currentRow > 0:
currentItem = self.current_list.takeItem(currentRow)
self.current_list.insertItem(currentRow - 1, currentItem)
self.current_list.setCurrentRow(currentRow - 1)
self.update_list_file(currentRow, currentRow - 1)
self.changeIcon()
def prefab_list_down(self):
        self.current_list = getattr(self, 'tile_list%d' % (self.list_tab_widget.currentIndex()+1))
currentRow = self.current_list.currentRow()
if currentRow < self.current_list.count() - 1:
currentItem = self.current_list.takeItem(currentRow)
self.current_list.insertItem(currentRow + 1, currentItem)
self.current_list.setCurrentRow(currentRow + 1)
self.update_list_file(currentRow, currentRow + 1)
self.changeIcon()
def update_list_file(self, old_index, new_index):
file_list = [self.gameDirVar+"prefab_template/prefab_list.txt", self.gameDirVar+"prefab_template/prefab_icon_list.txt", self.gameDirVar+"prefab_template/prefab_text_list.txt"]
list_list = [prefab_list, prefab_icon_list, prefab_text_list]
        for idx, l in enumerate(list_list):
            l.insert(new_index, l.pop(old_index))
            rot_file = open(self.gameDirVar+"prefab_template/rot_prefab_list.txt", "w") if idx == 0 else None
            with open(file_list[idx], "w") as file:
                for item in l:
                    file.write(item + "\n")
                    if rot_file:
                        rot_file.write(item + "_icon_list.txt" + "\n")
            if rot_file:
                rot_file.close()
#stupid icon lists, making me add more lines of code to my already concise function
def prefab_list_del(self, currentprefab):
#NEEDS TO BE REDONE based off what mode
choice = QMessageBox.question(self,"Delete Prefab (DO NOT DELETE STOCK PREFABS)","Are you sure you want to delete \"%s\"?\nThis is mainly for developers." %(prefab_text_list[self.list_tab_widget.currentIndex()][currentprefab]),
QMessageBox.Yes | QMessageBox.No, QMessageBox.No)
if choice == QMessageBox.Yes:
text_list = [self.gameDirVar+'prefab_template/prefab_text_list.txt',self.gameDirVar+'prefab_template/rot_prefab_list.txt',
self.gameDirVar+'prefab_template/prefab_list.txt', self.gameDirVar+'prefab_template/prefab_icon_list.txt']
for cur in text_list:
file = open(cur, 'r+')
cur_list = file.readlines()
file.seek(0)
file.truncate()
print(cur_list[index_section_list[self.list_tab_widget.currentIndex()]+currentprefab+1])
del cur_list[index_section_list[self.list_tab_widget.currentIndex()]+currentprefab+1]
cur_str = "".join(cur_list)
file.write(cur_str)
file.close()
restart_btn = QPushButton("Restart")
later_btn = QPushButton("Later")
choice = QMessageBox(self)
choice.setIcon(QMessageBox.Question)
choice.setWindowTitle("Prefab Successfully Deleted")
choice.setText("Program must be restarted for changes to take effect.")
choice.setInformativeText("Restart? You will lose any unsaved progress.")
choice.addButton(restart_btn, QMessageBox.YesRole)
choice.addButton(later_btn, QMessageBox.NoRole)
choice.setDefaultButton(later_btn)
#needs to be redone-- final redist will not be called easytf2mapper as it is no longer just that
if choice.exec_() == 0:
if os.path.isfile('EasyTF2Mapper.exe'):
subprocess.Popen('EasyTF2Mapper.exe')
else:
subprocess.Popen('python main.py')
sys.exit()
else:
pass
else:
del choice
def changeIcon(self):
pixmap = QPixmap(self.gameDirVar+self.prefab_icon_list[self.list_tab_widget.currentIndex()][self.current_list.currentRow()])
transform = QTransform().rotate(90*self.rotation)
self.cur_icon = pixmap.transformed(transform, Qt.SmoothTransformation)
self.current.setIcon(QIcon(self.cur_icon))
self.current.setIconSize(QSize(32,32))
def file_open(self, tmp = False, first = False):
global stored_info_list, totalblocks,entity_list, currentfilename, file_loaded, latest_path,save_dict,load_dict
if not tmp:
name = QFileDialog.getOpenFileName(self, "Open File", latest_path,"*.ezm")
latest_path,file = str(name[0]),open(name[0], "rb")
self.level = 0
self.iconlist=[]
while True:
header = pickle.load(file)
if "levels" in header:
openlines = pickle.load(file)
levelcountload = openlines
elif "grid_size" in header:
openlines = pickle.load(file)
self.grid_change_func(openlines[0],openlines[1],openlines[2])
#print('grid changed')
elif "stored_info_list" in header:
stored_info_list=[]
stored_info_list_temp=[]
openlines = pickle.load(file)
for item in openlines:
stored_info_list_temp.append(item)
for index,lvl in enumerate(stored_info_list_temp):
stored_info_list.append([])
for info in lvl:
try:
temp = save_dict[info[0]]
info[0] = temp
stored_info_list[index].append(info)
except:
stored_info_list[index].append('')
elif "icon_list" in header:
self.iconlist=[]
openlines = pickle.load(file)
for item in openlines:
self.iconlist.append(item)
elif "GSList" in header:
openlines = pickle.load(file)
self.gui_skybox_list.setCurrentRow(openlines)
else:
break
            for i in range(levelcountload):
                tmpfile = open(self.gameDirVar+"leveltemp/level" + str(i)+".tmp", "wb")
                pickle.dump(self.iconlist[i], tmpfile)
                tmpfile.close()
#self.change_skybox()
file.close()
self.setWindowTitle("Easy "+gameVar+" Mapper - [" + str(name[0]) + "]")
currentfilename = str(name[0])
file_loaded = True
self.upd_icns()
else:
file = open(self.gameDirVar+"leveltemp/level.tmp", "rb")
self.iconlist = pickle.load(file)
file.close()
for index, icon in enumerate(self.iconlist):
self.grid_list[index].button.setIcon(QIcon(icon))
self.grid_list[index].button.setIconSize(QSize(32,32))
def upd_icns(self):
for index, icon in enumerate(self.iconlist[0]):
#if "icons" in icon:
#print(grid_list)
if icon != '':
#print("index: "+str(index)+" icon name: "+icon[0])
ptrans = QTransform().rotate(90*icon[1])
pmap = QPixmap(icon[0]).transformed(ptrans,Qt.SmoothTransformation)
self.grid_list[index].button.setIcon(QIcon(pmap))
self.grid_list[index].button.setIconSize(QSize(32,32))
else:
#print(str(e))
self.grid_list[index].button.setIcon(QIcon(''))
self.grid_list[index].button.setIconSize(QSize(32,32))
def file_save(self, tmp = False, saveAs = False):
global grid_x, grid_y, iconlist, levels, level, currentfilename, file_loaded, latest_path, stored_info_list, save_dict,load_dict,skybox2_list
print(latest_path)
self.gridsize = (grid_x,grid_y)
skybox_sav = self.gui_skybox_list.currentRow()
if not tmp:
if not file_loaded or saveAs:
name = QFileDialog.getSaveFileName(self, "Save File", latest_path, "*.ezm")[0]
latest_path = name
else:
if "*" in currentfilename:
name = currentfilename[:-1]
else:
name = currentfilename
file = open(name, "wb")
pickle.dump("<levels>",file)
pickle.dump(self.levels,file)
pickle.dump("<grid_size>", file)
pickle.dump(self.gridsize, file)
pickle.dump("<stored_info_list>", file)
stored_info_list_temp=[]
for index,lvl in enumerate(stored_info_list):
stored_info_list_temp.append([])
for info in lvl:
#print(info)
if info:
temp = load_dict[info[0]]
info[0] = temp
stored_info_list_temp[index].append(info)
else:
stored_info_list_temp[index].append('')
pickle.dump(stored_info_list_temp, file)
pickle.dump("<icon_list>", file)
pickle.dump(self.iconlist, file)
pickle.dump("<GSList>", file)
pickle.dump(skybox_sav, file)
file.close()
QMessageBox.information(self, "File Saved", "File saved as %s" %(name))
self.setWindowTitle("Easy "+gameVar+" Mapper - [" + name + "]")
currentfilename = name
file_loaded = True
else:
#writes tmp file to save the icons for each level
file = open(self.gameDirVar+"leveltemp/level.tmp", "wb")
pickle.dump(self.iconlist, file)
file.close()
def file_export(self,bsp=False):
global cur_vmf_location,id_num,stored_info_list, grid_y, grid_x, world_id_num, count_btns, currentlight, skybox, skybox2_list, entity_list, skybox_light_list, skybox_angle_list, latest_path
skyboxgeolist = []
#make recommended height based off tallest prefab in the map
        skyboxz = QInputDialog.getInt(self, "Set Skybox Height", "Skybox Height (hammer units, %d minimum recommended):" % 1024, 1024)
skyboxz = int(skyboxz[0])
#generate skybox stuff now
#needs to be redone to change how skyboxes are rendered
create = generateSkybox.createSkyboxLeft(grid_x,grid_y,skyboxz,self.id_num,world_id_num)
skyboxgeolist.append(create[0])
self.id_num = create[1]
self.world_id_num = create[2]
create = generateSkybox.createSkyboxNorth(grid_x,grid_y,skyboxz,self.id_num,world_id_num)
skyboxgeolist.append(create[0])
self.id_num = create[1]
self.world_id_num = create[2]
create = generateSkybox.createSkyboxRight(grid_x,grid_y,skyboxz,self.id_num,world_id_num)
skyboxgeolist.append(create[0])
self.id_num = create[1]
self.world_id_num = create[2]
create = generateSkybox.createSkyboxTop(grid_x,grid_y,skyboxz,self.id_num,world_id_num)
skyboxgeolist.append(create[0])
self.id_num = create[1]
self.world_id_num = create[2]
create = generateSkybox.createSkyboxSouth(grid_x,grid_y,skyboxz,self.id_num,world_id_num)
skyboxgeolist.append(create[0])
        #use the selected skybox; if none is chosen yet, fall back to defaults and prompt
        if self.gui_skybox_list.currentRow() == -1:
            skybox = 'sky_tf2_04'
            skyboxlight = '216 207 194 700'
            skyboxangle = '0 145 0'
            QMessageBox.critical(self, "Error", "Please choose a skybox.")
            self.change_skybox()
        else:
            skybox = self.skybox_list[self.gui_skybox_list.currentRow()]
            skyboxlight = self.skybox_light_list[self.gui_skybox_list.currentRow()]
            skyboxangle = self.skybox_angle_list[self.gui_skybox_list.currentRow()]
        currentlight = currentlight.replace("world_idnum",str(world_id_num))
        currentlight = currentlight.replace("CURRENT_LIGHT",skyboxlight)
        currentlight = currentlight.replace("CURRENT_ANGLE",skyboxangle)
light = currentlight
latest_path = latest_path.replace(".ezm",".vmf")
self.totalblocks =[]
self.entity_list=[]
for lvl in stored_info_list:
for prfb in lvl:
if prfb != '':
create = prfb[0].createTile(prfb[1], prfb[2], prfb[3], prfb[4], prfb[5], prfb[6], prfb[7], prfb[8])
self.id_num = create[1]
self.world_id_num = create[2]
self.totalblocks.append(create[0])
self.entity_num = create[3]
self.placeholder_list = create[5]
self.entity_list.append(create[4])
import export #export contains the code to compile/export the map
        wholething = export.execute(self.totalblocks, self.entity_list, skybox, skyboxgeolist, light)
if bsp:
with open(self.gameDirVar+'output/'+gameVar+'mapperoutput.vmf','w+') as f:
f.write(wholething)
self.cur_vmf_location = self.gameDirVar+'output/'+gameVar+'mapperoutput.vmf'
else:
name = QFileDialog.getSaveFileName(self, "Export .vmf", latest_path, "Valve Map File (*.vmf)")
with open(name[0], "w+") as f:
f.write(wholething)
            popup = QMessageBox(self)
            popup.setWindowTitle("File Exported")
            popup.setText("The .vmf has been outputted to %s" % (name[0]))
            popup.setInformativeText("Open it in hammer to compile as a .bsp and/or make some changes. Check out the wiki (https://github.com/baldengineers/easytf2_mapper/wiki/Texture-bug) for fixing errors with textures.")
hammerButton = popup.addButton("Open Hammer",QMessageBox.ActionRole)
exitButton = popup.addButton("OK",QMessageBox.ActionRole)
popup.exec_()
if popup.clickedButton() == hammerButton:
self.open_hammer(1,name[0])
if popup.clickedButton() == exitButton:
popup.deleteLater()
self.cur_vmf_location = name[0]
def file_export_bsp(self):
self.file_export(True)
#need to change for multi-game
#this is fine and can be used, just make an if/then with the cs:go version
tf2BinLoc = open(self.gameDirVar+'startupcache/vbsp.su','r+')
tf2BinLocFile = tf2BinLoc.readlines()[0].replace('\\','/') #wtf even is this!?!? why do you need it?!?!
tf2BinLoc.close()
if not os.path.isfile(tf2BinLocFile):
exc_type, exc_obj, exc_tb = sys.exc_info()
fname = os.path.split(exc_tb.tb_frame.f_code.co_filename)[1]
print(exc_type, fname, exc_tb.tb_lineno)
            tf2BinLoc = open(self.gameDirVar+'startupcache/vbsp.su', 'w+')
tf2BinLocFile = QFileDialog.getExistingDirectory(self,'LOCATE Team Fortress 2/bin, NOT IN DEFAULT LOCATION!')
tf2BinLocFile = str(tf2BinLocFile.replace('\\','/'))
tf2BinLoc.write(tf2BinLocFile)
tf2BinLoc.close()
subprocess.call('"'+tf2BinLocFile+'/vbsp.exe" "'+self.cur_vmf_location+'"')
subprocess.call('"'+tf2BinLocFile+'/vvis.exe" "'+self.cur_vmf_location.replace('.vmf','.bsp')+'"')
subprocess.call('"'+tf2BinLocFile+'/vrad.exe" "'+self.cur_vmf_location.replace('.vmf','.bsp')+'"')
        shutil.copyfile(self.cur_vmf_location.replace('.vmf','.bsp'),tf2BinLocFile.replace('/bin','/tf/maps/tf2mapperoutput.bsp'))
popup = QMessageBox(self)
popup.setWindowTitle("File Exported")
popup.setText("The .vmf has been outputted to %s" %(tf2BinLocFile.replace('/bin','/tf/maps/tf2mapperoutput.bsp')))
popup.setInformativeText("Open TF2 and in load up 'tf2outputmapper.bsp'! You can do this by typing 'map tf2mapperoutput' or by creating a server with that map.\n\nThere also is a .vmf file of your map stored in output/tf2mapperoutput.vmf.")
hammerButton = popup.addButton("Open TF2",QMessageBox.ActionRole)
exitButton = popup.addButton("OK",QMessageBox.ActionRole)
popup.exec_()
if popup.clickedButton() == hammerButton:
subprocess.Popen('"'+tf2BinLocFile.replace('steamapps/common/Team Fortress 2/bin','')+'steam.exe" "steam://run/440"')
if popup.clickedButton() == exitButton:
popup.deleteLater()
def removeButtons(self):
for i in reversed(range(self.button_grid_layout.count())):
widget = self.button_grid_layout.takeAt(i).widget()
if widget is not None:
widget.deleteLater()
def grid_change(self):
grid_dialog = GridChangeWindow(self)
values = grid_dialog.returnVal()
self.grid_change_func(values[0], values[1], values[2])
def grid_change_func(self,x,y,z):
#needs to be changed to accomodate grid widget
#basically: reset entitylist, totalblocks, and iconlist
#reset grid widget
#set mins and maxs to None
self.entity_list = []
self.iconlist = []
self.totalblocks = []
self.grid_list = []
self.xmin = None
self.ymin = None
self.xmax = None
self.ymax = None
#self.level = 0
self.count_btns = 0
self.file_loaded = False
self.grid_y = y
self.grid_x = x
self.levels = z
self.removeButtons()
#create the progress bar
self.progressBar = QProgressBar()
self.progress = 0 #how much progress is on the progressBar
self.status.addWidget(self.progressBar)
#self.totalblocks.append([])
#self.entity_list.append([])
#self.iconlist.append([])
self.stored_info_list.append([])
self.btn_id_count=0
self.count_btns=0
for x in range(self.grid_x):
for y in range(self.grid_y):
self.totalblocks.append("") #This is so that there are no problems with replacing list values
self.entity_list.append("")
self.iconlist.append(('',''))
self.stored_info_list.append('')
for x in range(self.grid_x):
for y in range(self.grid_y):
grid_btn = GridBtn(self, x, y, self.btn_id_count)
self.button_grid_layout.addWidget(grid_btn.button,y,x)
self.btn_id_count += 1
self.grid_list.append(grid_btn)
self.button_grid_layout.setRowStretch(self.grid_y + 1, 1)
self.button_grid_layout.setColumnStretch(self.grid_x + 1, 1)
self.entity_list.append("lighting slot")
self.count_btns = self.grid_x*self.grid_y
self.status.removeWidget(self.progressBar)
self.setWindowTitle("Easy "+self.gameVar+" Mapper ")
def change_light(self):
r_input = QInputDialog.getInt(self, ("Red light level 0-255"),
("Put in the red light ambiance level, 0-255:"))
g_input = QInputDialog.getInt(self, ("Green light level 0-255"),
("Put in the green light ambiance level, 0-255:"))
b_input = QInputDialog.getInt(self, ("Blue light level 0-255"),
("Put in the blue light ambiance level, 0-255:"))
light_input = QInputDialog.getInt(self, ("Brightness level"),
("Put in the brightness level desired:"))
r_input = int(r_input[0])
g_input = int(g_input[0])
b_input = int(b_input[0])
light_input = int(light_input[0])
        if r_input > 255 or g_input > 255 or b_input > 255:
            print("Error. Put in a number below 256 for each color input")
            return
self.currentlight = light_create.replacevalues(r_input,g_input,b_input,light_input,world_id_num)
def change_skybox(self):
self.window = QDialog(self)
skybox2_list = QListWidget()
skybox2_list.setIconSize(QSize(200, 25))
for index, text in enumerate(self.skybox_list):
item = QListWidgetItem(QIcon(self.gameDirVar+self.skybox_icon_list[index]), text)
skybox2_list.addItem(item)
self.layout = QHBoxLayout()
self.layout.addWidget(skybox2_list)
self.window.setGeometry(150,150,400,300)
self.window.setWindowTitle("Choose a skybox")
        self.window.setWindowIcon(QIcon("icons/icon.ico"))
self.window.setLayout(self.layout)
        #apply the clicked choice back to the main skybox list before closing
        #(assumption: the dialog is meant to drive self.gui_skybox_list)
        skybox2_list.itemClicked.connect(lambda item: self.gui_skybox_list.setCurrentRow(skybox2_list.row(item)))
        skybox2_list.itemClicked.connect(self.window.close)
self.window.exec_()
def close_application(self, restart = False):
if not restart:
close = True
if "*" in self.windowTitle():
print('are you sure')
choice = QMessageBox.warning(self, "Exit TF2Mapper",
"Some changes have not been saved.\nDo you really want to quit?",
QMessageBox.Ok | QMessageBox.Cancel,
QMessageBox.Cancel)
if choice != QMessageBox.Ok:
close = False
if close:
folder = self.gameDirVar+'leveltemp/'
for f in os.listdir(folder):
if "level" in f:
print("removing", f)
os.remove(folder+f)
sys.exit()
if restart:
choice = QMessageBox.question(self, "Restart",
"Are you sure you want to restart?",
QMessageBox.Yes | QMessageBox.No,
QMessageBox.No)
if choice == QMessageBox.Yes:
folder = self.gameDirVar+'leveltemp/'
for f in os.listdir(folder):
if "level" in f:
print("removing", f)
os.remove(folder+f)
#again the exe references need to be changed
                if os.path.isfile('./EasyTF2Mapper.exe'):
subprocess.Popen('EasyTF2Mapper.exe')
else:
subprocess.Popen('python main.py')
sys.exit()
def create_prefab(self):
self.window = QDialog(self)
self.textLineEdit = QLineEdit()
self.nameLineEdit = QLineEdit()
self.vmfTextEdit = QLineEdit()
self.iconTextEdit = QLineEdit()
self.vmfBrowse = QPushButton("Browse",self)
self.vmfBrowse.clicked.connect(lambda: self.vmfTextEdit.setText(QFileDialog.getOpenFileName(self, "Choose .vmf File", "/","*.vmf")[0]))
self.iconBrowse = QPushButton("Browse",self)
self.iconBrowse.clicked.connect(lambda: self.iconTextEdit.setText(QFileDialog.getOpenFileName(self, "Choose .jpg File", "/","*.jpg")[0]))
self.vmfLayout = QHBoxLayout()
self.vmfLayout.addWidget(self.vmfTextEdit)
self.vmfLayout.addWidget(self.vmfBrowse)
self.vmfBrowse.setWindowModality(Qt.NonModal)
self.iconLayout = QHBoxLayout()
self.iconLayout.addWidget(self.iconTextEdit)
self.iconLayout.addWidget(self.iconBrowse)
self.okay_btn = QPushButton("Create Prefab", self)
self.blankstring = QWidget()
self.okay_btn_layout = QHBoxLayout()
self.okay_btn_layout.addStretch(1)
self.okay_btn_layout.addWidget(self.okay_btn)
self.okay_btn.clicked.connect(self.create_run_func)
#self.rotCheckBox = QCheckBox()
self.expCheckBox = QCheckBox()
self.buggyText = QLabel("This is a pretty buggy tool at this point, and is mostly used by developers. Are you sure you want to do this? \n(exported prefabs can be found in the main directory, where the executable is.)")
self.sectionSelect = QComboBox()
#needs to have a cs:go version
if self.isTF:
self.sectionSelect.addItems(["Geometry","Map Layout","Fun/Other"])
else:
pass
self.radioLayout = QHBoxLayout()
self.radioTF2 = QRadioButton("TF2",self)
self.radioCSGO = QRadioButton("CS:GO",self)
        self.group = QButtonGroup()
        self.group.addButton(self.radioTF2)
self.group.addButton(self.radioCSGO)
self.group.setExclusive(True)
self.radioLayout.addWidget(self.radioTF2)
self.radioLayout.addWidget(self.radioCSGO)
self.form = QFormLayout()
self.form.addRow(self.buggyText)
self.form.addRow("Prefab Text:", self.textLineEdit)
self.form.addRow("Prefab Name:", self.nameLineEdit)
self.form.addRow("VMF file (.vmf):", self.vmfLayout)
self.form.addRow("Icon (.jpg):", self.iconLayout)
#self.form.addRow("Make Rotations?", self.rotCheckBox)
self.form.addRow("Export prefab?", self.expCheckBox)
self.form.addRow("Which section?",self.sectionSelect)
self.form.addRow("Which game?", self.radioLayout)
for i in range(5):
self.form.addRow(self.blankstring)
self.form.addRow(self.okay_btn_layout)
self.window.setGeometry(150,150,400,300)
self.window.setWindowTitle("Create Prefab")
        self.window.setWindowIcon(QIcon("icons/icon.ico"))
self.window.setLayout(self.form)
self.window.exec_()
def create_run_func(self):
if self.sectionSelect.currentIndex() == 2:
input_number = 'END'
else:
input_number = index_section_list[self.sectionSelect.currentIndex()+1]
name_str = self.nameLineEdit.displayText().replace(' ','_')
form_list,t_list = [self.vmfTextEdit.displayText(),self.textLineEdit.displayText(),self.iconTextEdit.displayText(),self.nameLineEdit.displayText()],[]
form_dict = {1:'Prefab Text',2:'Prefab Name',3:'VMF file',4:'Icon'}
if self.vmfTextEdit.displayText() != '' and self.textLineEdit.displayText() != '' and self.iconTextEdit.displayText() != '' and self.nameLineEdit.displayText() != '':
QMessageBox.information(self, "Files Created, restart to see the prefab.",createPrefab.create(self.vmfTextEdit.displayText(), name_str, self.textLineEdit.displayText(), self.iconTextEdit.displayText(),self.expCheckBox.isChecked(),input_number,self.sectionSelect.currentIndex(),self.radioTF2.isChecked()))
restart_btn = QPushButton("Restart")
later_btn = QPushButton("Later")
choice = QMessageBox(self)
choice.setIcon(QMessageBox.Question)
choice.setWindowTitle("Prefab Successfully Created")
choice.setText("Program must be restarted for changes to take effect.")
choice.setInformativeText("Restart? You will lose any unsaved progress.")
choice.addButton(restart_btn, QMessageBox.YesRole)
choice.addButton(later_btn, QMessageBox.NoRole)
choice.setDefaultButton(later_btn)
#exe name change
if choice.exec_() == 0:
                if os.path.isfile('./EasyTF2Mapper.exe'):
subprocess.Popen('EasyTF2Mapper.exe')
else:
subprocess.Popen('python main.py')
sys.exit()
else:
for index,box in enumerate(form_list):
if box == '':
t_list.append(form_dict[index+1])
err = ", ".join(t_list)
QMessageBox.critical(self, "Error", "Fill out all sections of the form. ("+err+")")
#self.importprefabs()
def import_prefab(self):
name = QFileDialog.getOpenFileName(self, "Import Zipped Prefab", latest_path,"*.zip")[0]
        zipfile.ZipFile(name).extractall("")
        with open(gameDirVar+'prefabs/pinfo.ezmd', 'rb') as pfile:
            lists = pickle.load(pfile)
        with open('info.pfb', 'rb') as pfile:
            lns = pickle.load(pfile)
#there need to be 4 items in the list that is info.pfb
#1) what section it is (int) [eg. 0]
#2) prefab name (str) [eg. "ground_prefab"]
#3) prefab icon dir (str) [eg. "icons/ground_prefab.png"]
#4) prefab text name (str) [eg. Ground Prefab]
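        #  e.g. a valid info.pfb payload (hypothetical values) would unpickle to:
        #      [0, "ground_prefab", "icons/ground_prefab.png", "Ground Prefab"]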
for list_index,line in enumerate(lns):
lists[list_index].append(line)
os.remove('info.pfb')
        with open(gameDirVar+'prefabs/pinfo.ezmd', "wb") as tfile:
            pickle.dump(lists,tfile)
restart_btn = QPushButton("Restart")
later_btn = QPushButton("Later")
choice = QMessageBox(self)
choice.setIcon(QMessageBox.Question)
choice.setWindowTitle("Prefab Successfully Imported")
choice.setText("Program must be restarted for changes to take effect.")
choice.setInformativeText("Restart? You will lose any unsaved progress.")
choice.addButton(restart_btn, QMessageBox.YesRole)
choice.addButton(later_btn, QMessageBox.NoRole)
choice.setDefaultButton(later_btn)
#rename exe
if choice.exec_() == 0:
            if os.path.isfile('./EasyTF2Mapper.exe'):
subprocess.Popen('EasyTF2Mapper.exe')
else:
subprocess.Popen('python main.py')
sys.exit()
def open_console(self):
#contains dev console where you can manually run functions
self.console = QDialog()
self.console.setWindowTitle("Developer Console")
self.prev_text = QTextEdit("<Bald Engineers Developer Console>")
self.prev_text.setText('''Developer console for Easy '''+gameVar+''' Mapper version r 1.0.1. Current commands are:
print <variable>, setlevel <int>, help, restart, exit, func <function>, wiki, py <python function>.\n''')
self.prev_text.setReadOnly(True)
self.curr_text = QLineEdit()
self.curr_text_btn = QPushButton("Enter")
self.curr_text_btn.clicked.connect(self.console_enter)
self.curr_text_layout = QHBoxLayout()
self.curr_text_layout.addWidget(self.curr_text)
self.curr_text_layout.addWidget(self.curr_text_btn)
self.console_close_btn = QPushButton("Close")
self.console_close_btn.clicked.connect(self.console.close)
self.console_form = QFormLayout()
self.console_form.addRow(self.prev_text)
self.console_form.addRow(self.curr_text_layout)
self.console_form.addRow(self.console_close_btn)
self.console.setLayout(self.console_form)
self.console.show()
def console_enter(self):
global level, levels
command = ""
char_num = 0
text = self.curr_text.displayText()
text_prefix = text + " --> "
command = text.split()[0]
try:
value = text.split()[1]
except IndexError:
value = ""
if command == "print":
try:
new_text = text_prefix + str(eval(value))
except Exception as e:
new_text = text_prefix + str(e)
elif command == "setlevel":
try:
if int(value)-1 < int(self.levels):
self.level = int(value)-1
                    self.levellist.setCurrentRow(self.level) #assumption: the level list widget tracks the active level (see undo())
                    new_text = text_prefix + "Level set to " + value + "."
                else:
                    new_text = text_prefix + "Level " + value + " is out of range."
except Exception as e:
new_text = text_prefix + str(e)
elif command == "help":
new_text = text_prefix + '''Developer console for Easy '''+gameVar+''' Mapper version r 1.0.1. Current commands are: print <variable>, func <function>, setlevel <int>, help, restart, exit, func <function>, wiki, py <python function>'''
elif command == "exit":
self.close_application()
elif command == "restart":
self.close_application(True)
elif command == "pootis":
new_text = '<img src="icons/thedoobs.jpg">'
elif command == "sterries" or command == "jerries":
new_text = text_prefix + "Gimme all those berries, berries, berries!"
elif command == "sideshow":
new_text = ''
self.sideshow()
elif command == "func":
try:
eval("self."+value + "()")
new_text = text_prefix + "Function "+value+" has been run."
except Exception as e:
new_text = text_prefix + str(e)
elif command == "wiki":
try:
webbrowser.open("http://github.com/baldengineers/easytf2_mapper/wiki")
new_text = text_prefix + "Wiki has been opened in your default browser"
except Exception as e:
print(str(e))
elif command == "py":
try:
new_text = text_prefix + str(eval(value))
except Exception as e:
new_text = text_prefix + str(e)
else:
new_text = text_prefix + "\"" + command + "\" is not a valid command"
self.prev_text.append(new_text)
self.curr_text.setText("")
def undo(self, undo):
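        #undo=True replays the previous ("before") state from self.history;
        #undo=False re-applies the next ("after") state from self.redo_history.
        #each entry is a (before, after) pair of (x, y, moduleName, icon, level) tuples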
        if self.history if undo else self.redo_history:
            entry = self.history[-1][0] if undo else self.redo_history[-1][1]
            x, y, h_moduleName, h_icon, h_level = entry[0], entry[1], entry[2], entry[3], entry[4]
if h_level == None:
for button in self.grid_list:
if button.x == x and button.y == y:
button.click_func(self, x, y, button.btn_id, False, h_moduleName, h_icon)
break
else:
#self.level.setText("Level: " + str(h_level+1))
self.levellist.setCurrentRow(h_level)
#self.change_level(False, False, True)
self.redo_history.append(self.history.pop(-1)) if undo else self.history.append(self.redo_history.pop(-1))
else:
winsound.MessageBeep(winsound.MB_ICONEXCLAMATION)
#format | click_func(parent, x, y, btn_id, clicked=True, h_moduleName="None", h_icon='')
#format | history.append((x,y,moduleName,self.icon,level), (x,y,moduleName,self.icon,level))
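        #e.g. one history entry (hypothetical values):
        #   ((3, 5, "ground_prefab", "icons/ground.png", 0), (3, 5, "wall_prefab", "icons/wall.png", 0))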
def sideshow(self):
self.gif("icons/sideshow.gif", (350,262,154,103), "SIDESHOW", "icons/ss.ico")
def heavy(self):
self.gif("icons/heavy.gif", (350,262,150,99), "DANCE HEAVY DANCE!")
    def gif(self, file, geo, title, icon="icons/icon.ico"):
        #store the label on its own attribute so it does not shadow this method
        self.gif_label = QLabel()
        movie = QMovie(file)
        self.gif_label.setMovie(movie)
        self.gif_label.setGeometry(geo[0],geo[1],geo[2],geo[3])
        self.gif_label.setWindowTitle(title)
        self.gif_label.setWindowIcon(QIcon(icon))
        self.gif_label.show()
        movie.start()
class GridChangeWindow(QDialog):
def __init__(self, parent, startup = False):
super(GridChangeWindow,self).__init__()
#parent - references the main window's attributes
#startup | Boolean | - if the window is being run when program starts up
self.startup = startup
if not self.startup:
parent.entity_list = []
parent.iconlist = []
parent.totalblocks = []
parent.grid_list = []
self.widthSpin = QSpinBox()
self.heightSpin = QSpinBox()
for spin in [self.widthSpin, self.heightSpin]:
spin.setRange(0,1000)
spin.setSingleStep(5)
spin.setValue(5)
self.okay_btn = QPushButton("OK",self)
self.okay_btn.clicked.connect(lambda: self.clickFunction(parent))
self.form = QFormLayout()
self.form.addRow("Set Grid Width:",self.widthSpin)
self.form.addRow("Set Grid Height:",self.heightSpin)
#self.form.addRow("Set Amount of Levels:",self.text3)
if self.startup:
self.radioTF2 = QRadioButton("&TF2",self)
self.radioTF2.setChecked(True)
self.radioTF2.setWhatsThis("TF2- The best game xd")
self.radioCSGO = QRadioButton("&CS:GO",self)
self.group = QButtonGroup()
self.group.addButton(self.radioTF2)
self.group.addButton(self.radioCSGO)
self.group.setExclusive(True)
self.radioLayout = QHBoxLayout()
self.radioLayout.addWidget(self.radioTF2)
self.radioLayout.addWidget(self.radioCSGO)
self.form.addRow("Choose game:",self.radioLayout)
self.form.addRow(self.okay_btn)
self.setLayout(self.form)
self.setWindowTitle("Set Grid Size")
        self.setWindowIcon(QIcon("icons/icon.ico"))
self.exec_()
def clickFunction(self, parent):
self.hide()
self.deleteLater()
if self.startup:
parent.isTF = self.radioTF2.isChecked()
def returnVal(self):
return (self.widthSpin.value(), self.heightSpin.value(), 1)
def closeEvent(self, event):
if self.startup:
sys.exit()
if __name__ == '__main__':
#Main Program
app = QApplication(sys.argv)
main = MainWindow()
sys.exit(app.exec_())
| gpl-3.0 |
javachengwc/hue | desktop/core/ext-py/tablib-0.10.0/tablib/packages/odf/table.py | 96 | 9406 | # -*- coding: utf-8 -*-
# Copyright (C) 2006-2007 Søren Roug, European Environment Agency
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
#
# Contributor(s):
#
from namespaces import TABLENS
from element import Element
# Autogenerated
def Body(**args):
return Element(qname = (TABLENS,'body'), **args)
def CalculationSettings(**args):
return Element(qname = (TABLENS,'calculation-settings'), **args)
def CellAddress(**args):
return Element(qname = (TABLENS,'cell-address'), **args)
def CellContentChange(**args):
return Element(qname = (TABLENS,'cell-content-change'), **args)
def CellContentDeletion(**args):
return Element(qname = (TABLENS,'cell-content-deletion'), **args)
def CellRangeSource(**args):
return Element(qname = (TABLENS,'cell-range-source'), **args)
def ChangeDeletion(**args):
return Element(qname = (TABLENS,'change-deletion'), **args)
def ChangeTrackTableCell(**args):
return Element(qname = (TABLENS,'change-track-table-cell'), **args)
def Consolidation(**args):
return Element(qname = (TABLENS,'consolidation'), **args)
def ContentValidation(**args):
return Element(qname = (TABLENS,'content-validation'), **args)
def ContentValidations(**args):
return Element(qname = (TABLENS,'content-validations'), **args)
def CoveredTableCell(**args):
return Element(qname = (TABLENS,'covered-table-cell'), **args)
def CutOffs(**args):
return Element(qname = (TABLENS,'cut-offs'), **args)
def DataPilotDisplayInfo(**args):
return Element(qname = (TABLENS,'data-pilot-display-info'), **args)
def DataPilotField(**args):
return Element(qname = (TABLENS,'data-pilot-field'), **args)
def DataPilotFieldReference(**args):
return Element(qname = (TABLENS,'data-pilot-field-reference'), **args)
def DataPilotGroup(**args):
return Element(qname = (TABLENS,'data-pilot-group'), **args)
def DataPilotGroupMember(**args):
return Element(qname = (TABLENS,'data-pilot-group-member'), **args)
def DataPilotGroups(**args):
return Element(qname = (TABLENS,'data-pilot-groups'), **args)
def DataPilotLayoutInfo(**args):
return Element(qname = (TABLENS,'data-pilot-layout-info'), **args)
def DataPilotLevel(**args):
return Element(qname = (TABLENS,'data-pilot-level'), **args)
def DataPilotMember(**args):
return Element(qname = (TABLENS,'data-pilot-member'), **args)
def DataPilotMembers(**args):
return Element(qname = (TABLENS,'data-pilot-members'), **args)
def DataPilotSortInfo(**args):
return Element(qname = (TABLENS,'data-pilot-sort-info'), **args)
def DataPilotSubtotal(**args):
return Element(qname = (TABLENS,'data-pilot-subtotal'), **args)
def DataPilotSubtotals(**args):
return Element(qname = (TABLENS,'data-pilot-subtotals'), **args)
def DataPilotTable(**args):
return Element(qname = (TABLENS,'data-pilot-table'), **args)
def DataPilotTables(**args):
return Element(qname = (TABLENS,'data-pilot-tables'), **args)
def DatabaseRange(**args):
return Element(qname = (TABLENS,'database-range'), **args)
def DatabaseRanges(**args):
return Element(qname = (TABLENS,'database-ranges'), **args)
def DatabaseSourceQuery(**args):
return Element(qname = (TABLENS,'database-source-query'), **args)
def DatabaseSourceSql(**args):
return Element(qname = (TABLENS,'database-source-sql'), **args)
def DatabaseSourceTable(**args):
return Element(qname = (TABLENS,'database-source-table'), **args)
def DdeLink(**args):
return Element(qname = (TABLENS,'dde-link'), **args)
def DdeLinks(**args):
return Element(qname = (TABLENS,'dde-links'), **args)
def Deletion(**args):
return Element(qname = (TABLENS,'deletion'), **args)
def Deletions(**args):
return Element(qname = (TABLENS,'deletions'), **args)
def Dependencies(**args):
return Element(qname = (TABLENS,'dependencies'), **args)
def Dependency(**args):
return Element(qname = (TABLENS,'dependency'), **args)
def Detective(**args):
return Element(qname = (TABLENS,'detective'), **args)
def ErrorMacro(**args):
return Element(qname = (TABLENS,'error-macro'), **args)
def ErrorMessage(**args):
return Element(qname = (TABLENS,'error-message'), **args)
def EvenColumns(**args):
return Element(qname = (TABLENS,'even-columns'), **args)
def EvenRows(**args):
return Element(qname = (TABLENS,'even-rows'), **args)
def Filter(**args):
return Element(qname = (TABLENS,'filter'), **args)
def FilterAnd(**args):
return Element(qname = (TABLENS,'filter-and'), **args)
def FilterCondition(**args):
return Element(qname = (TABLENS,'filter-condition'), **args)
def FilterOr(**args):
return Element(qname = (TABLENS,'filter-or'), **args)
def FirstColumn(**args):
return Element(qname = (TABLENS,'first-column'), **args)
def FirstRow(**args):
return Element(qname = (TABLENS,'first-row'), **args)
def HelpMessage(**args):
return Element(qname = (TABLENS,'help-message'), **args)
def HighlightedRange(**args):
return Element(qname = (TABLENS,'highlighted-range'), **args)
def Insertion(**args):
return Element(qname = (TABLENS,'insertion'), **args)
def InsertionCutOff(**args):
return Element(qname = (TABLENS,'insertion-cut-off'), **args)
def Iteration(**args):
return Element(qname = (TABLENS,'iteration'), **args)
def LabelRange(**args):
return Element(qname = (TABLENS,'label-range'), **args)
def LabelRanges(**args):
return Element(qname = (TABLENS,'label-ranges'), **args)
def LastColumn(**args):
return Element(qname = (TABLENS,'last-column'), **args)
def LastRow(**args):
return Element(qname = (TABLENS,'last-row'), **args)
def Movement(**args):
return Element(qname = (TABLENS,'movement'), **args)
def MovementCutOff(**args):
return Element(qname = (TABLENS,'movement-cut-off'), **args)
def NamedExpression(**args):
return Element(qname = (TABLENS,'named-expression'), **args)
def NamedExpressions(**args):
return Element(qname = (TABLENS,'named-expressions'), **args)
def NamedRange(**args):
return Element(qname = (TABLENS,'named-range'), **args)
def NullDate(**args):
return Element(qname = (TABLENS,'null-date'), **args)
def OddColumns(**args):
return Element(qname = (TABLENS,'odd-columns'), **args)
def OddRows(**args):
return Element(qname = (TABLENS,'odd-rows'), **args)
def Operation(**args):
return Element(qname = (TABLENS,'operation'), **args)
def Previous(**args):
return Element(qname = (TABLENS,'previous'), **args)
def Scenario(**args):
return Element(qname = (TABLENS,'scenario'), **args)
def Shapes(**args):
return Element(qname = (TABLENS,'shapes'), **args)
def Sort(**args):
return Element(qname = (TABLENS,'sort'), **args)
def SortBy(**args):
return Element(qname = (TABLENS,'sort-by'), **args)
def SortGroups(**args):
return Element(qname = (TABLENS,'sort-groups'), **args)
def SourceCellRange(**args):
return Element(qname = (TABLENS,'source-cell-range'), **args)
def SourceRangeAddress(**args):
return Element(qname = (TABLENS,'source-range-address'), **args)
def SourceService(**args):
return Element(qname = (TABLENS,'source-service'), **args)
def SubtotalField(**args):
return Element(qname = (TABLENS,'subtotal-field'), **args)
def SubtotalRule(**args):
return Element(qname = (TABLENS,'subtotal-rule'), **args)
def SubtotalRules(**args):
return Element(qname = (TABLENS,'subtotal-rules'), **args)
def Table(**args):
return Element(qname = (TABLENS,'table'), **args)
def TableCell(**args):
return Element(qname = (TABLENS,'table-cell'), **args)
def TableColumn(**args):
return Element(qname = (TABLENS,'table-column'), **args)
def TableColumnGroup(**args):
return Element(qname = (TABLENS,'table-column-group'), **args)
def TableColumns(**args):
return Element(qname = (TABLENS,'table-columns'), **args)
def TableHeaderColumns(**args):
return Element(qname = (TABLENS,'table-header-columns'), **args)
def TableHeaderRows(**args):
return Element(qname = (TABLENS,'table-header-rows'), **args)
def TableRow(**args):
return Element(qname = (TABLENS,'table-row'), **args)
def TableRowGroup(**args):
return Element(qname = (TABLENS,'table-row-group'), **args)
def TableRows(**args):
return Element(qname = (TABLENS,'table-rows'), **args)
def TableSource(**args):
return Element(qname = (TABLENS,'table-source'), **args)
def TableTemplate(**args):
return Element(qname = (TABLENS,'table-template'), **args)
def TargetRangeAddress(**args):
return Element(qname = (TABLENS,'target-range-address'), **args)
def TrackedChanges(**args):
return Element(qname = (TABLENS,'tracked-changes'), **args)
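
# Minimal usage sketch (assumes the sibling odf.opendocument and odf.text
# modules shipped with this package; the file name is illustrative):
#
#     from odf.opendocument import OpenDocumentSpreadsheet
#     from odf.text import P
#     doc = OpenDocumentSpreadsheet()
#     table = Table(name="Sheet1")
#     row = TableRow()
#     cell = TableCell(valuetype="string")
#     cell.addElement(P(text="hello"))
#     row.addElement(cell)
#     table.addElement(row)
#     doc.spreadsheet.addElement(table)
#     doc.save("example.ods")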
| apache-2.0 |
sgerhart/ansible | lib/ansible/modules/network/meraki/meraki_device.py | 43 | 15823 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# Copyright: (c) 2018, Kevin Breit (@kbreit) <[email protected]>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {
'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'
}
DOCUMENTATION = r'''
---
module: meraki_device
short_description: Manage devices in the Meraki cloud
version_added: "2.7"
description:
- Visibility into devices associated to a Meraki environment.
notes:
- This module does not support claiming of devices or licenses into a Meraki organization.
- More information about the Meraki API can be found at U(https://dashboard.meraki.com/api_docs).
- Some of the options are likely only used for developers within Meraki.
options:
state:
description:
- Query an organization.
choices: [absent, present, query]
default: query
org_name:
description:
- Name of organization.
- If C(clone) is specified, C(org_name) is the name of the new organization.
aliases: [ organization ]
org_id:
description:
- ID of organization.
net_name:
description:
- Name of a network.
aliases: [network]
net_id:
description:
- ID of a network.
serial:
description:
- Serial number of a device to query.
hostname:
description:
- Hostname of network device to search for.
aliases: [name]
model:
description:
- Model of network device to search for.
tags:
description:
- Space delimited list of tags to assign to device.
lat:
description:
- Latitude of device's geographic location.
- Use negative number for southern hemisphere.
aliases: [latitude]
lng:
description:
- Longitude of device's geographic location.
- Use negative number for western hemisphere.
aliases: [longitude]
address:
description:
- Postal address of device's location.
move_map_marker:
description:
- Whether or not to set the latitude and longitude of a device based on the new address.
- Only applies when C(lat) and C(lng) are not specified.
type: bool
serial_lldp_cdp:
description:
- Serial number of device to query LLDP/CDP information from.
lldp_cdp_timespan:
description:
- Timespan, in seconds, used to query LLDP and CDP information.
- Must be less than 1 month.
serial_uplink:
description:
- Serial number of device to query uplink information from.
author:
- Kevin Breit (@kbreit)
extends_documentation_fragment: meraki
'''
EXAMPLES = r'''
- name: Query all devices in an organization.
meraki_device:
auth_key: abc12345
org_name: YourOrg
state: query
delegate_to: localhost
- name: Query all devices in a network.
meraki_device:
auth_key: abc12345
org_name: YourOrg
net_name: YourNet
state: query
delegate_to: localhost
- name: Query a device by serial number.
meraki_device:
auth_key: abc12345
org_name: YourOrg
net_name: YourNet
serial: ABC-123
state: query
delegate_to: localhost
- name: Lookup uplink information about a device.
meraki_device:
auth_key: abc12345
org_name: YourOrg
net_name: YourNet
serial_uplink: ABC-123
state: query
delegate_to: localhost
- name: Lookup LLDP and CDP information about devices connected to specified device.
meraki_device:
auth_key: abc12345
org_name: YourOrg
net_name: YourNet
serial_lldp_cdp: ABC-123
state: query
delegate_to: localhost
- name: Lookup a device by hostname.
meraki_device:
auth_key: abc12345
org_name: YourOrg
net_name: YourNet
hostname: main-switch
state: query
delegate_to: localhost
- name: Query all devices of a specific model.
meraki_device:
auth_key: abc123
org_name: YourOrg
net_name: YourNet
model: MR26
state: query
delegate_to: localhost
- name: Update information about a device.
meraki_device:
auth_key: abc123
org_name: YourOrg
net_name: YourNet
state: present
serial: '{{serial}}'
name: mr26
address: 1060 W. Addison St., Chicago, IL
lat: 41.948038
lng: -87.65568
tags: recently-added
delegate_to: localhost
- name: Claim a device into a network.
meraki_device:
auth_key: abc123
org_name: YourOrg
net_name: YourNet
serial: ABC-123
state: present
delegate_to: localhost
- name: Remove a device from a network.
meraki_device:
auth_key: abc123
org_name: YourOrg
net_name: YourNet
serial: ABC-123
state: absent
delegate_to: localhost
'''
RETURN = r'''
response:
description: Data returned from Meraki dashboard.
type: dict
returned: info
'''
import os
from ansible.module_utils.basic import AnsibleModule, json, env_fallback
from ansible.module_utils._text import to_native
from ansible.module_utils.network.meraki.meraki import MerakiModule, meraki_argument_spec
def format_tags(tags):
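    # wrap the space-delimited tag string in leading/trailing spaces,
    # e.g. format_tags("tag1 tag2") -> " tag1 tag2 "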
return " {tags} ".format(tags=tags)
def is_device_valid(meraki, serial, data):
for device in data:
if device['serial'] == serial:
return True
return False
def get_org_devices(meraki, org_id):
path = meraki.construct_path('get_all_org', org_id=org_id)
response = meraki.request(path, method='GET')
if meraki.status != 200:
meraki.fail_json(msg='Failed to query all devices belonging to the organization')
return response
def main():
# define the available arguments/parameters that a user can pass to
# the module
argument_spec = meraki_argument_spec()
argument_spec.update(state=dict(type='str', choices=['absent', 'present', 'query'], default='query'),
net_name=dict(type='str', aliases=['network']),
net_id=dict(type='str'),
serial=dict(type='str'),
serial_uplink=dict(type='str'),
serial_lldp_cdp=dict(type='str'),
lldp_cdp_timespan=dict(type='int'),
hostname=dict(type='str', aliases=['name']),
model=dict(type='str'),
tags=dict(type='str'),
lat=dict(type='float', aliases=['latitude']),
lng=dict(type='float', aliases=['longitude']),
address=dict(type='str'),
move_map_marker=dict(type='bool'),
)
# seed the result dict in the object
# we primarily care about changed and state
# change is if this module effectively modified the target
# state will include any data that you want your module to pass back
# for consumption, for example, in a subsequent task
result = dict(
changed=False,
)
# the AnsibleModule object will be our abstraction working with Ansible
# this includes instantiation, a couple of common attr would be the
# args/params passed to the execution, as well as if the module
# supports check mode
module = AnsibleModule(argument_spec=argument_spec,
supports_check_mode=True,
)
meraki = MerakiModule(module, function='device')
if meraki.params['serial_lldp_cdp'] and not meraki.params['lldp_cdp_timespan']:
meraki.fail_json(msg='lldp_cdp_timespan is required when querying LLDP and CDP information')
if meraki.params['net_name'] and meraki.params['net_id']:
meraki.fail_json(msg='net_name and net_id are mutually exclusive')
meraki.params['follow_redirects'] = 'all'
query_urls = {'device': '/networks/{net_id}/devices'}
query_org_urls = {'device': '/organizations/{org_id}/inventory'}
query_device_urls = {'device': '/networks/{net_id}/devices/'}
claim_device_urls = {'device': '/networks/{net_id}/devices/claim'}
bind_org_urls = {'device': '/organizations/{org_id}/claim'}
update_device_urls = {'device': '/networks/{net_id}/devices/'}
delete_device_urls = {'device': '/networks/{net_id}/devices/'}
meraki.url_catalog['get_all'].update(query_urls)
meraki.url_catalog['get_all_org'] = query_org_urls
meraki.url_catalog['get_device'] = query_device_urls
meraki.url_catalog['create'] = claim_device_urls
meraki.url_catalog['bind_org'] = bind_org_urls
meraki.url_catalog['update'] = update_device_urls
meraki.url_catalog['delete'] = delete_device_urls
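    # e.g. meraki.construct_path('get_device', net_id='N_1234') fills the
    # template above to '/networks/N_1234/devices/' (net id is hypothetical);
    # callers below append the device serial to that path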
payload = None
# if the user is working with this module in only check mode we do not
# want to make any changes to the environment, just return the current
# state with no modifications
# FIXME: Work with Meraki so they can implement a check mode
if module.check_mode:
meraki.exit_json(**meraki.result)
# execute checks for argument completeness
# manipulate or modify the state as needed (this is going to be the
# part where your module will do what it needs to do)
org_id = meraki.params['org_id']
if org_id is None:
org_id = meraki.get_org_id(meraki.params['org_name'])
nets = meraki.get_nets(org_id=org_id)
net_id = None
if meraki.params['net_id'] or meraki.params['net_name']:
net_id = meraki.params['net_id']
if net_id is None:
net_id = meraki.get_net_id(net_name=meraki.params['net_name'], data=nets)
if meraki.params['state'] == 'query':
if meraki.params['net_name'] or meraki.params['net_id']:
device = []
if meraki.params['serial']:
path = meraki.construct_path('get_device', net_id=net_id) + meraki.params['serial']
request = meraki.request(path, method='GET')
device.append(request)
meraki.result['data'] = device
elif meraki.params['serial_uplink']:
path = meraki.construct_path('get_device', net_id=net_id) + meraki.params['serial_uplink'] + '/uplink'
meraki.result['data'] = (meraki.request(path, method='GET'))
elif meraki.params['serial_lldp_cdp']:
if meraki.params['lldp_cdp_timespan'] > 2592000:
meraki.fail_json(msg='LLDP/CDP timespan must be less than a month (2592000 seconds)')
path = meraki.construct_path('get_device', net_id=net_id) + meraki.params['serial_lldp_cdp'] + '/lldp_cdp'
path = path + '?timespan=' + str(meraki.params['lldp_cdp_timespan'])
device.append(meraki.request(path, method='GET'))
meraki.result['data'] = device
elif meraki.params['hostname']:
path = meraki.construct_path('get_all', net_id=net_id)
devices = meraki.request(path, method='GET')
for unit in devices:
if unit['name'] == meraki.params['hostname']:
device.append(unit)
meraki.result['data'] = device
elif meraki.params['model']:
path = meraki.construct_path('get_all', net_id=net_id)
devices = meraki.request(path, method='GET')
device_match = []
for device in devices:
if device['model'] == meraki.params['model']:
device_match.append(device)
meraki.result['data'] = device_match
else:
path = meraki.construct_path('get_all', net_id=net_id)
request = meraki.request(path, method='GET')
meraki.result['data'] = request
else:
path = meraki.construct_path('get_all_org', org_id=org_id)
devices = meraki.request(path, method='GET')
if meraki.params['serial']:
for device in devices:
if device['serial'] == meraki.params['serial']:
meraki.result['data'] = device
else:
meraki.result['data'] = devices
elif meraki.params['state'] == 'present':
device = []
if meraki.params['hostname']:
query_path = meraki.construct_path('get_all', net_id=net_id)
device_list = meraki.request(query_path, method='GET')
if is_device_valid(meraki, meraki.params['serial'], device_list):
payload = {'name': meraki.params['hostname'],
'tags': format_tags(meraki.params['tags']),
'lat': meraki.params['lat'],
'lng': meraki.params['lng'],
'address': meraki.params['address'],
'moveMapMarker': meraki.params['move_map_marker'],
}
query_path = meraki.construct_path('get_device', net_id=net_id) + meraki.params['serial']
device_data = meraki.request(query_path, method='GET')
ignore_keys = ['lanIp', 'serial', 'mac', 'model', 'networkId', 'moveMapMarker', 'wan1Ip', 'wan2Ip']
if meraki.is_update_required(device_data, payload, optional_ignore=ignore_keys):
path = meraki.construct_path('update', net_id=net_id) + meraki.params['serial']
updated_device = []
updated_device.append(meraki.request(path, method='PUT', payload=json.dumps(payload)))
meraki.result['data'] = updated_device
meraki.result['changed'] = True
else:
if net_id is None:
device_list = get_org_devices(meraki, org_id)
if is_device_valid(meraki, meraki.params['serial'], device_list) is False:
payload = {'serial': meraki.params['serial']}
path = meraki.construct_path('bind_org', org_id=org_id)
created_device = []
created_device.append(meraki.request(path, method='POST', payload=json.dumps(payload)))
meraki.result['data'] = created_device
meraki.result['changed'] = True
else:
query_path = meraki.construct_path('get_all', net_id=net_id)
device_list = meraki.request(query_path, method='GET')
if is_device_valid(meraki, meraki.params['serial'], device_list) is False:
if net_id:
payload = {'serial': meraki.params['serial']}
path = meraki.construct_path('create', net_id=net_id)
created_device = []
created_device.append(meraki.request(path, method='POST', payload=json.dumps(payload)))
meraki.result['data'] = created_device
meraki.result['changed'] = True
elif meraki.params['state'] == 'absent':
device = []
query_path = meraki.construct_path('get_all', net_id=net_id)
device_list = meraki.request(query_path, method='GET')
if is_device_valid(meraki, meraki.params['serial'], device_list) is True:
path = meraki.construct_path('delete', net_id=net_id)
path = path + meraki.params['serial'] + '/remove'
request = meraki.request(path, method='POST')
meraki.result['changed'] = True
    # in the event of a successful module execution, you will want to
    # simply call AnsibleModule.exit_json(), passing the key/value results
meraki.exit_json(**meraki.result)
if __name__ == '__main__':
main()
| mit |
TheWardoctor/Wardoctors-repo | script.module.liveresolver/lib/liveresolver/__init__.py | 10 | 43372 | # -*- coding: utf-8 -*-
import re
from modules import client,webutils,cloudflare,decryptionUtils,cache,liveresolver_utils,convert
from modules.constants import resolver_dict
from modules.log_utils import log
from modules.liveresolver_utils import *
import urlparse,urllib,base64
from BeautifulSoup import BeautifulSoup as bs
limit = 0
from modules import constants
FLASH = constants.flash_ver()
'''
Pass any url containing video to this function.
It will try to find the embedded video and resolve it, returning the resolved
and playable video link.
cache_timeout (in hours) - how long to cache the found stream link for the given page.
html - pass html content to resolver and it will search for embedded links from it, instead
of requesting the given url and searching from there.
'''
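# Example usage (hypothetical page URL; on failure the original url is returned):
#     link = resolve('http://example.com/live/channel-1', cache_timeout=3, title='Channel 1')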
def resolve(url, cache_timeout=3, html=None, title='Video',icon='x'):
try:
log("Resolver called with url: " + url)
resolved=None
if html==None:
resolved=resolve_it(url,title=title)
if resolved==None:
if html==None and cache_timeout!=0:
#semi-cached resolving
url=cache.get(find_link,cache_timeout,url)
else:
url = find_link(url,html=html)
resolved=url
url=resolve_it(url,title=title,icon=icon)
if url!=None:
resolved=url
log("Resolved url: " + resolved)
return resolved
except:
log("Failed to find link.")
return url
'''
Check if your video link is resolvable through the liveresolver module.
'''
def isValid(url):
return prepare(urlparse.urlparse(url).netloc) in resolver_dict.keys()
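# e.g. isValid('http://somehost.tv/embed.php?c=demo') is True only when the
# prepared netloc ('somehost.tv' here is a hypothetical example) has an entry
# in resolver_dict.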
'''
Flush the liveresolver cache.
'''
def delete_cache():
cache.clear()
'''
Not intended for external use.
This method is used internally for resolving the found link.
'''
def resolve_it(url, title='Video',icon='x'):
if '.m3u8' in url or 'rtmp:' in url or '.flv' in url or '.mp4' in url or '.ts' in url or url.startswith('plugin://'):
if '.m3u8' in url and '|' not in url:
url += '|%s' % urllib.urlencode({'User-Agent': client.agent()})
if '.ts' in url:
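        # .ts streams are wrapped in a plugin:// URL for the f4mTester addon's
        # TSDOWNLOADER mode, which Kodi routes to the installed addon.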
url = 'plugin://plugin.video.f4mTester/?name=%s&iconImage=%s&streamtype=TSDOWNLOADER&url='%(urllib.quote(title),urllib.quote(icon)) + urllib.quote(url)
return url
if '.f4m' in url:
from resolvers import f4m
resolved = f4m.resolve(url)
return resolved
if url.startswith('acestream://') or url.startswith('sop://') or '.acelive' in url:
from resolvers import sop_ace
resolved = sop_ace.resolve(url, title)
return resolved
netloc = prepare(urlparse.urlparse(url).netloc)
if netloc in resolver_dict.keys():
resolver = resolver_dict[netloc]
log("Calling resolver: " + resolver)
exec "from resolvers import %s"%resolver
resolved = eval(resolver+".resolve(url)")
return resolved
else:
return
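# The exec/eval dispatch above could equally use importlib, avoiding string-built
# code (a sketch; behaviour is assumed identical):
#
#   import importlib
#   module = importlib.import_module('resolvers.%s' % resolver)
#   resolved = module.resolve(url)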
def find_link(url, html=''):
log('Finding in : %s'%url)
try: referer = urlparse.parse_qs(urlparse.urlparse(url).query)['referer'][0]
except: referer = 'http://' + urlparse.urlparse(url).netloc
url = manual_url_fix(url)
host = urlparse.urlparse(url).netloc
headers = {'Referer':referer, 'Host':host, 'User-Agent' : client.agent(), 'Accept' : 'text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8', 'Accept-Language' : 'en-US,en;q=0.5'}
if html=='' or html is None:
html = client.request(url, headers=headers)
ws = ['livetvcdn','shadow','blog']
if any(w in url for w in ws) and 'goto/' not in url :
import requests
s = requests.Session()
s.headers = headers
r = s.get(url)
html = r.text
ref=url
fs=list(globals().copy())
for f in fs:
if 'finder' in f:
resolved = eval (f+"(html,ref)")
if resolved:
log('Resolved with %s: %s'%(f,resolved))
return resolved
return
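# find_link() discovers finders by scanning globals() for names containing
# 'finder', so supporting a new host only needs another module-level function of
# the same shape. Minimal sketch for a hypothetical host:
#
#   def finder200(html, ref):
#       try:
#           return re.findall('(http://example-cdn.tv/embed[^"\']+)', html)[0]
#       except:
#           return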
#embedded iframes
def finder1(html,url):
html = html.replace('/adplus/adplus.html?id=','')
try:html = urllib.unquote(html)
except:pass
global limit
limit+=1
ref=url
try:
urls = re.findall('<i?frame\s*.+?src=(?:\'|\")(.+?)(?:\'|\")',html,flags=re.IGNORECASE)
urly = client.parseDOM(html, "iframe", ret="src")
urlc = re.findall('top.location.href\s*=\s*[\'\"](.+?axe-tv[^\'\"]+)[\'\"]',html)
for url in urlc:
if 'sky-sports-1' not in url and 'fox1ushd' not in url:
urls.append(url)
urls += urly
try:
urls.append(re.findall("playStream\('iframe', '(.+?)'\)",html)[0])
except: pass
urls += re.findall('<a.+?href=[\'\"](/live-.+?stream.+?)[\'\"]',html)
urls += re.findall('(http://www.hdmyt.info/(?:channel|player).php\?file=[^"\']+)["\']',html)
from random import shuffle
for url in urls:
url = url.replace('https','http')
if 'c4.zedo' in url or 'ProtectFile.File' in url or 'adServe' in url or 'facebook' in url or 'banner' in url:
continue
elif "micast" in url or 'turbocast' in url:
return finder47(html,ref)
elif 'lshstream' in url:
return finder2(url,url)
rr = resolve_it(url)
if rr:
return rr
uri = manual_fix(url,ref)
if limit>=25:
log("Exiting - iframe visit limit reached")
return
resolved = find_link(uri)
if resolved:
break
        if not resolved:
            return
        headers = {'User-Agent': client.agent(), 'Referer': ref}
        if '.m3u8' in resolved and '|' not in resolved:
            headers.update({'X-Requested-With':constants.get_shockwave(), 'Host':urlparse.urlparse(resolved).netloc, 'Connection':'keep-alive'})
            resolved += '|%s' % urllib.urlencode(headers)
        return resolved
except:
return
#lsh stream
def finder2(html,url):
try:
reg = re.compile('(http://(?:www.)?lshstream.com[^\"\']+)')
url = re.findall(reg,html)[0]
return url
except:
try:
reg = re.compile('fid=[\"\'](.+?)[\"\'].+?lshstream.+?.com/embed.js')
fid = re.findall(reg,html)[0]
url = 'http://www.lshstreams.com/embed.php?u=%s&vw=720&vh=420&live.realstreamunited.com=%s'%(fid,url)
return url
except:
return
#castalba
def finder3(html,url):
try:
reg=re.compile('id=[\"\']([^\"\']+)[\"\'];.+?castalba.tv/.+?.js')
id=re.findall(reg,html)[0]
url = 'http://castalba.tv/embed.php?cid=%s&wh=600&ht=380&referer=%s'%(id,url)
return url
except:
return
#jw_config
def finder4(html,url):
ref = url
try:
links = re.compile('file\s*:\s*[\"\']([^\"\']+)[\"\']').findall(html)
for link in links:
if '.png' in link or link == '.flv':
continue
if '.f4m' in link:
link = link+'?referer=%s'%url
if '.m3u8' in link and '|' not in link:
link += '|%s' % urllib.urlencode({'User-Agent': client.agent(), 'Referer': ref, 'X-Requested-With':constants.get_shockwave(), 'Host':urlparse.urlparse(link).netloc, 'Connection':'keep-alive','Accept':'*/*'})
return link
except:
return
#vlc_config
def finder5(html,url):
try:
soup=bs(html)
try:
link=soup.find('embed',{'id':'vlc'})
link=link['target']
except:
link=soup.find('embed',{'name':'vlc'})
link=link['target']
return link
except:
return
#sawlive
def finder6(html,url):
try:
uri = re.compile("[\"']([^\"\']*sawlive.tv\/embed\/[^\"'\/]+)\"").findall(html)[0]
page = re.compile('//.+?/(?:embed|v)/([0-9a-zA-Z-_]+)').findall(uri)[0]
host = urlparse.urlparse(uri).netloc
uri = 'http://sawlive.tv/embed/%s?referer=%s&host=%s' % (page,url,host)
return uri
except:
try:
uri = re.compile("src=(?:\'|\")(http:\/\/(?:www\.)?sawlive.tv\/embed\/.+?)(?:\'|\")").findall(html)[0]
page = re.compile('//.+?/(?:embed|v)/([0-9a-zA-Z-_]+)').findall(uri)[0]
host = urlparse.urlparse(uri).netloc
uri = 'http://sawlive.tv/embed/%s?referer=%s&host=%s' % (page,url,host)
return uri
except:
return
#yocast
def finder7(html,url):
try:
reg=re.compile('<script>fid\s*=\s*(?:\'|\")(.+?)(?:\'|\");.+?src=(?:\'|\")http://www.yocast.tv/.+?.js(?:\'|\")')
id = re.findall(reg,html)[0]
url='http://www.yocast.tv/embed.php?live=%s&vw=600&vh=450'%id
return url
except:
return
#miplayer
def finder8(html,url):
try:
reg = re.compile("(http://(?:www\.)?miplayer.net/embed[^'\"]+)")
url = re.findall(reg,html)[0]
return url
except:
return
#castamp
def finder9(html,url):
try:
reg = re.compile("(http://(?:www.)?castamp.com/embed.php\?c=[^\"&]+)")
url = re.findall(reg,html)[0]
return url
except:
return
#04 stream
def finder10(html,url):
try:
reg = re.compile('04stream.com/\w+\.js\?stream=([^ "\'&]+)')
url = re.findall(reg,html)[0]
url = 'http://www.04stream.com/weed.js?stream=%s&width=600&height=460&str=is&link=1&cat=3'%url
return url
except:
return
#leton
def finder11(html,url):
try:
html = urllib.unquote(html)
reg = re.compile('leton.tv/player.php\?streampage=([^&]+)&')
url = re.findall(reg,html)[0]
url = 'http://leton.tv/player.php?streampage=%s&width=600&height=450'%url
return url
except:
return
#yotv.co
def finder12(html,url):
try:
ref=url
reg = re.compile("<script type='text/javascript'>\s*fid=(?:\'|\")(.+?)(?:\'|\");\s*v_width=.+?;\s*v_height=.+?;</script><script type='text/javascript' src='http://www.yotv.co/player.js'></script>")
url = re.findall(reg,html)[0]
url = 'http://www.yotv.co/embed.php?live=%s&vw=620&vh=490&referer=%s'%(url,ref)
return url
except:
return
#hdcast
def finder13(html,url):
try:
url = re.compile('src="(http://(?:www\.)?hdcast.me/embed[^\'"]+)').findall(html)[0]
return url
except:
pass
#zerocast
def finder14(html,url):
try:
ref=url
url = re.compile('zerocast\.(?:tv|in)/(?:channel|embed)?\.php\?a=(\d+)').findall(html)[0]
url = 'http://zerocast.tv/channel.php?a=%s&width=640&height=480&autostart=true'%url
return url
except:
pass
#castup
def finder15(html,url):
try:
ref = url
reg = '<script type="text/javascript">\s*fid=(?:\'|\")(.+?)(?:\'|\");.+?src="http://www.castup.tv/js/.+?.js">'
url = re.findall(reg,html)[0]
url = 'http://www.castup.tv/embed_2.php?channel=%s&vw=650&vh=410&referer=%s'%(url,ref)
return url
except:
return
#mybeststream
def finder16(html,url):
try:
ref=url
try:
id = re.findall('id=(?:\'|\")(\d+)(?:\'|\");width=.*?pt987.googlecode.com',html)[0]
except:
id = re.findall('id=[\"\']([^\"\']+)[\"\'];.+?mybeststream.xyz',html)[0]
url = 'http://mybeststream.xyz/gen_s.php?id=%s&width=640&height=385&referer=%s'%(id,ref)
return url
except:
pass
#sunhd
def finder17(html,url):
try:
ref=url
url = re.findall('src="(http://www.sunhd.info/channel.+?.php\?file=.+?)"',html)[0]
return url+'&referer=%s'%ref
except:
pass
#youtube
def finder18(html,url):
try:
url = re.findall('src="?(https?://(?:www.|)youtube(?:-nocookie)?.com.+?[^\'\"]+)',html)[0]
return url.replace('amp;','').replace('-nocookie','')
except:
return
#livestream
def finder19(html,url):
try:
url = re.findall('(http://(?:new\.)?livestream.com[^"]+)',html)[0]
if 'player' in url:
return url
except:
return
#privatestream
def finder20(html,url):
try:
try:
id = re.findall('privatestream.tv/player\?streamname=([^&]+)&', html)[0]
except:
id = re.findall('privatestream.tv/((?!player)[^\.&\?\=]+)',html)[0]
if id != 'js/jquery-1':
url = 'http://privatestream.tv/player?streamname=%s&width=640&height=490'%id
return url
else:
return
except:
return
#airq.tv
def finder21(html,url):
try:
id = re.findall('(?:SRC|src)="http://airq.tv/(\w+)',html)[0]
url = 'http://airq.tv/%s/'%id
return url
except:
return
#aliez
def finder22(html,url):
try:
ref = url
try:
id = re.findall('emb.aliez[\w\.]+?/player/live.php\?id=([^&"]+)',html)[0]
return 'http://emb.aliez.me/player/live.php?id=%s&w=728&h=480&referer=%s'%(id,ref)
except:
try:
id = re.findall('(?:94.242.255.35|195.154.44.194|aliez\.\w+)/player/(?:live|embed).php\?id=(\d+)',html)[0]
except:
id = re.findall('http://aliez.(?:me|tv)/live/(.+?)(?:/|"|\')',html)[0]
return 'http://emb.aliez.me/player/live.php?id=%s&w=728&h=480&referer=%s'%(id,ref)
except:
return
#p3g
def finder23(html,url):
try:
id = re.findall("channel='(.+?)',\s*g='.+?';</script><script type='text/javascript' src='http://p3g.tv/resources/scripts/p3g.js'",html)[0]
url = 'http://www.p3g.tv/embedplayer/%s/2/600/420'%id
return url
except:
return
#dinozap (not implemented)
def finder24(html,url):
try:
url = re.findall('(http://(?:www\.)?dinozap.info/redirect/channel.php\?id=[^"\']+)',html)[0]
return url
except:
return
#liveflashplayer
def finder25(html,url):
try:
id = re.findall("channel='(.+?)', g='.+?';</script><script type='text/javascript' src='http://www.liveflashplayer.net/resources/scripts/liveFlashEmbed.js'>",html)[0]
url = 'http://www.liveflashplayer.net/membedplayer/%s/1/620/430'%id
return url
except:
return
#laola1
def finder26(html,url):
try:
url = re.findall('(http://www.laola1.tv[^"]+)', html)[0]
return url
except:
pass
#ehftv
def finder27(html,url):
try:
url = re.findall('src=(?:\'|\")(http:\/\/(?:www\.)?ehftv.com(?:/|//)player\.php[^\'\"]+)',html)[0]
return url
except:
return
#zoomtv
def finder28(html,url):
try:
ref=url
try:
fid = re.findall('fid="(.+?)".+?zome.zoomtv.me/.+?.js',html)[0]
except:
f = re.findall('fid=([^;]+)',html)[0]
fid = re.findall('%s\s*=\s*[\"\']([^\"\']+)'%f,html)[0]
pid = re.findall('pid\s*=\s*(.+?);',html)[0]
url = 'http://www.zoomtv.me/embed.php?v=' + fid + '&vw=660&vh=450&referer=%s&pid=%s'%(ref,pid)
return url
except:
return
#streamlive
def finder29(html,url):
try:
ref = url
url = re.findall('src="(http://(?:www.)?streamlive.to/embed/[^"]+)"',html)[0]
url = url + '&referer=%s'%ref
return url
except:
return
#roja redirect links
def finder30(html,url):
try:
html = client.request(url, referer=urlparse.urlparse(url).netloc)
url = re.findall('href="(.+?)">click here...',html)[0]
resolved = find_link(url+'&referer=http://rojedirecta.me')
return resolved
except:
return
#iguide
def finder31(html,url):
try:
ref=url
url = re.findall('(http://(?:www.)?iguide.to/embed/[^"\']+)"',html)[0]
return url+'&referer='+ref
except:
return
#letgo
def finder32(html,url):
try:
id = re.findall('fid="(.+?)"; v_width=.+?; v_height=.+?;</script><script type="text/javascript" src="http://www.letgo.tv/js/embed.js"',html)[0]
url = 'http://www.letgo.tv/embed.php?channel=%s&vw=630&vh=450'%id
return url
except:
return
#streamup
def finder33(html,url):
ref = url
try:
id = re.findall("streamup.com/rooms/([^/\'\"?&\s]+)",html)[0]
url = 'http://streamup.com/%s'%id
return url
except:
try:
id = re.findall('streamup.com/([^/\'\"?&\s]+)/embed',html)[0]
url = 'http://streamup.com/%s'%(id)
return url
except:
return
#p2pcast
def finder34(html,url):
try:
ref = url
try:
id = re.findall('http://p2pcast.tv/(?:p2pembed|stream).php\?id=([^&]+)',html)[0]
except:
id = re.findall("id=[\"\'](.+?)[\"\'];.+?src=[\"\']http://js.p2pcast.+?.js",html)[0]
url = 'http://p2pcast.tv/stream.php?id=%s&referer=%s'%(id,ref)
return url
except:
return
def finder35(html,url):
try:
try:
id = re.findall('cast3d.tv/embed.php\?(?:u|channel)=([^&]+)&',html)[0]
except:
id = re.findall('fid\s*=\s*(?:\'|\")(.+?)(?:\'|\");.*\s*.+?src=(?:\'|\")http://www.cast3d.tv/js/.+?.js',html)[0]
url = 'http://www.cast3d.tv/embed.php?channel=%s&vw=600&vh=400'%id
return url
except:
return
#xvtr
def finder36(html,url):
try:
ref = url
id = re.findall("fid=\"(.+?)\".+?</script><script type='text/javascript' src='http://www.xvtr.pw/embed.js'></script>",html)[0]
url = 'http://www.xvtr.pw/channel/%s.htm?referer=%s'%(id,ref)
return url
except:
return
#acestream
def finder37(html,url):
try:
try:
ace = re.findall('this.load(?:Player|Torrent)\((?:\'|\")(.+?)(?:\'|\")',html)[0]
except:
ace = re.findall('"http://torrentstream.net/p/(.+?)"',html)[0]
url = 'plugin://program.plexus/?mode=1&url=%s&name=Video'%(ace)
return url
except:
return
#sopcast
def finder38(html,url):
try:
sop = re.findall("(sop://[^\"\']+)['\"]",html)[0]
url = 'plugin://program.plexus/?mode=2&url=%s&name=Video'%(sop)
return url
except:
return
#turbocast
def finder39(html,url):
try:
url = re.findall('(http://www.turbocast.tv[^\'\"]+)',html)[0]
return url
except:
try:
url = re.findall('(.+?turbocast.tv.+?)',url)[0]
return url
except:
return
#directstream
def finder40(html,url):
try:
ref=url
fid = re.findall('fid=(?:\'|\")(.+?)(?:\'|\").+?</script><script type="text/javascript" src="http://direct-stream.org/embedStream.js"',html)[0]
url = 'http://direct-stream.org/e.php?id=%s&vw=740&vh=490&referer=%s'%(fid,ref)
return url
except:
return
#pxstream
def finder42(html,url):
try:
ref=url
id = re.findall("file=(?:\'|\")(.+?)(?:\'|\");.+?src='http://pxstream.tv/.+?.js",html)[0]
url = 'http://pxstream.tv/embedrouter.php?file=%s&width=730&height=430&jwplayer=flash&referer=%s'%(id,ref)
return url
except:
return
#pushpublish
def finder43(html,url):
try:
ref=url
id = re.findall('fid="(.+?)";.+?</script><script type="text/javascript" src="http://www.pushpublish.tv/js/embed.js"',html)[0]
loc = (urlparse.urlparse(url).netloc).replace('www.','')
url ='http://www.pushpublish.tv/player.php?channel=%s&vw=650&vh=400&domain=%s&referer=%s'%(id,loc,ref)
return url
except:
return
#ucaster
def finder44(html,url):
try:
ref=url
id = re.findall('channel=[\'"]([^\'"]+)[\'"].*?ucaster.(?:eu|com)', html)[0]
url = 'http://www.embeducaster.com/membedplayer/%s/1/595/500?referer=%s'%(id,ref)
return url
except:
return
#rocktv
def finder45(html,url):
try:
ref=url
id = re.findall("fid=[\'\"]([^\'\"]+)[\'\"];.+?src=[\'\"]http://www.rocktv.co/player.+?.js",html)[0]
url = 'http://rocktv.co/embed.php?live=%s&vw=620&vh=490&referer=%s'%(id,ref)
return url
except:
return
#ezcast
def finder46(html,url):
try:
ref=url
id = re.findall("channel=(?:\'|\")(.+?)(?:\'|\").+?src=(?:\'|\")http://www.ezcast.tv/static/scripts/ezcast.js(?:\'|\")>",html)[0]
url = 'http://www.embedezcast.com/embedplayer/%s/1/790/420?referer=%s'%(id,ref)
return url
except:
return
#micast
def finder47(html,url):
try:
ref=url
try:
id = re.findall('micast.tv/.*?\.php\?ch=([^"\']+)',html)[0]
except:
try:
id = re.findall('turbocast.tv/.*?\.php\?ch=([^"]+)',html)[0]
except:
id = re.findall('(?:ca|ch)=(?:\'|\")(.+?)(?:\'|\").+?micast.tv/embed.js(?:\'|\")',html)[0]
url = 'http://micast.tv/iframe.php?ch=%s&referer=%s'%(id,ref)
return url
except:
return
#openlive
def finder48(html,url):
try:
ref=url
id = re.findall("file=(?:\'|\")(.+?)(?:\'|\").+?src=(?:\'|\")http://openlive.org/live.js(?:\'|\")>",html)[0]
url = 'http://openlive.org/embed.php?file=%s&width=640&height=380&referer=%s'%(id,ref)
return url
except:
return
#helper
def finder49(html,url):
try:
ch = re.findall('fid=(?:\'|\")(.+?)(?:\'|\");.+?src=(?:\'|\")http://www.webspor.pw/HD/TV/info/channel.js(?:\'|\")>',html)[0]
url = 'http://worldsport.me/%s'%ch
return find_link(url)
except:
return
#sostart
def finder50(html,url):
try:
ref=url
id = re.findall("id=(?:\'|\")(.+?)(?:\'|\");.+?src=(?:\'|\")http://.+?sostart.([^/]+)/.+?.js(?:\'|\")>",html)[0]
url = 'http://sostart.%s/stream.php?id=%s&width=630&height=450&referer=%s'%(id[1],id[0],ref)
return url
except:
return
#lsh
def finder52(html,url):
try:
ref=url
        id = re.findall('fid=(?:\'|\")(.+?)(?:\'|\");.+?src=(?:\'|\")http://cdn.lshstream.com/embed.js(?:\'|\")>', html)[0]
        url = 'http://cdn.lshstream.com/embed.php?u=%s&referer=%s' % (id, ref)
return url
except:
return
#hqstream
def finder53(html,url):
try:
ref=url
id = re.findall('http://hqstream.tv/.+?\?streampage=([^&/ ]+)',html)[0]
url = 'http://hqstream.tv/player.php?streampage=%s&height=480&width=700&referer=%s'%(id,ref)
return url
except:
return
#jw rtmp
def finder54(html,url):
try:
rtmp = re.findall('jwplayer.+?file.?\s*:\s*[\"\']((?:rtmp|http)?://[^\"\']+)[\"\']',html)[0]
return rtmp
except:
return
#tutele
def finder55(html,url):
try:
ref = url
id = re.findall("channel=(?:\'|\")(.+?)(?:\'|\").+?src='http://tutelehd.com/embedPlayer.js'>",html)[0]
url = 'http://tutelehd.com/embed/embed.php?channel=%s&referer=%s'%(id,ref)
return url
except:
return
#janjua
def finder56(html,url):
try:
ref = url
id = re.findall("channel=(?:\'|\")(.+?)(?:\'|\").+?src=(?:\'|\")http://www.janjua.tv/resources/scripts/janjua.js(?:\'|\")>",html)[0]
url = 'http://www.janjua.tv/embedplayer/%s/1/500/400?referer=%s'%(id,ref)
return url
except:
return
#abcast
def finder57(html,url):
try:
ref = url
id = re.findall("file=(?:\'|\")(.+?)(?:\'|\").+?src=(?:\'|\")http://abcast.net/simple.js(?:\'|\")",html)[0]
url = 'http://abcast.net/embed.php?file=%s&referer=%s'%(id,ref)
return url
except:
return
#castfree
def finder58(html,url):
try:
ref = url
id = re.findall('castfree.me/channel.php\?a=(\d+)',html)[0]
url = 'http://www.castfree.me/embed.php?a=%s&id=&width=640&height=460&autostart=true&referer=%s'%(id,ref)
return url
except:
return
#dinozap
def finder59(html,url):
try:
ref = url
url = re.findall('[\"\'](http://(?:www.)?player(?:hd|app)\d+.pw/channel(?:fr)?.php\?file=[^"\']+)',html)[0]
return url + '&referer=' + ref
except:
return
#dinozap
def finder60(html,url):
try:
ref = url
id = re.findall('(?:www\.)?sitenow.me/channel.php\?file=([^"\']+)',html)[0]
        return 'http://www.sitenow.me/channel.php?file=%s&width=670&height=470&autostart=true&referer=%s' % (id, ref)
except:
return
#streamcasttv
def finder61(html,url):
try:
id = re.findall("file=(?:\'|\")(.+?)(?:\'|\");.+?src=(?:\'|\")http://streamcasttv.biz/.+?.js",html)[0]
url ='http://streamcasttv.biz/embed.php?file=%s&referer=%s'%(id,url)
return url
except:
return
#rtmp
def finder63(html,url):
try:
swf = re.findall('src=(?:\'|\")(.+?.swf)',html)[0]
file, rtmp = re.findall('flashvars=(?:\'|\")file=(.+?)&.+?streamer=(.+?)&',html)[0]
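        # Assemble a librtmp-style option string (playpath, swfUrl, pageUrl, ...)
        # that Kodi's RTMP input understands; live=true marks the feed as
        # non-seekable live content.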
url = rtmp + ' playpath=' + file +' swfUrl=' + swf + ' flashver=WIN\\2019,0,0,226 live=true timeout=15 swfVfy=true pageUrl=' + url
return url
except:
return
def finder64(html,url):
try:
url = re.findall('(http://vaughnlive.tv/embed/video/[^/\'"?&\s]+)',html)[0]
return url
except:
return
def finder65(html,url):
try:
referer = url
url = re.findall('src=(?:\'|\")(.+?)(?:\'|\").+?type="video/mp4"',html)[0]
if len(url)<10:
raise
url += '|%s' % urllib.urlencode({'User-Agent': client.agent(), 'Referer': referer})
return url
except:
return
#hdcast.org
def finder66(html,url):
try:
ref = url
id,id2 = re.findall('fid="(.+?)";.+?src="http://hdcast.org/(.+?).js">',html)[0]
url = 'http://www.hdcast.org/%s.php?u=%s&vw=854&vh=480&domain=%s&referer=%s'%(id2,id,urlparse.urlparse(ref).netloc,ref)
return url
except:
return
#serbiaplus
def finder67(html,url):
try:
if 'serbiaplus' not in url:
return
id = re.findall('fid="(.+?)";.+?src="/live.js"',html)[0]
url = 'http://serbiaplus.com/' + id
resolved = find_link(url)
return resolved
except:
pass
#streamking
def finder68(html,url):
try:
ref = url
url = re.findall('(http://streamking.cc/[^"\']+)(?:\'|\")',html)[0]
return url+'&referer=%s'%ref
except:
return
#beba
def finder69(html,url):
try:
url = re.findall('http://beba.ucoz.com/playerlive.html\?id=(.+?)$',url)[0]
return find_link(url)
except:
return
#stream-sports
def finder70(html,url):
try:
ref = url
url = re.findall('http://www.stream\-sports.eu/uploads/video.html\?id=(.+?)$',url)[0]
return url+'&referer=%s'%ref
except:
return
#ustream
def finder71(html,url):
try:
ref=url
url=re.findall('(https?://(?:www.)?ustream.tv/embed/.+?[^\'\"]+)',html)[0]
url+='&referer='+ref
return url
except:
return
#config finder
def finder72(html,ref):
try:
url = re.findall('src\s*:\s*\'(.+?(?:.m3u8)?)\'',html)[0]
if 'images/' in url:
return
url += '|%s' % urllib.urlencode({'User-Agent': client.agent(), 'Referer': ref})
return url
except:
pass
#config finder
def finder73(html,url):
try:
ref = url
url = re.findall('Player\(\{\n\s*source\:\s*[\'\"](.+?)[\'\"]\,',html)[0]
url += '|%s' % urllib.urlencode({'User-Agent': client.agent(), 'Referer': ref})
if 'ace/manifest' in url:
url = finder102(html,url)
return url
except:
return
#cast4u
def finder74(html,url):
try:
ref = url
id = re.findall('id=[\'\"](.+?)[\'\"].+?src=[\'\"]http://www.cast4u.tv/.+?.js',html)[0]
url = 'http://www.cast4u.tv/embed.php?live=%s&vw=620&vh=490&referer=%s'%(id,ref)
return url
except:
return
#m3u8 config finder
def finder75(html,url):
try:
ref = url
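        # The stream URL is hidden base64-encoded inside window.atob(...); decode
        # it, then append the request headers Kodi should replay.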
url = re.findall('file: window.atob\(\'(.+?)\'\),', html)[0]
file = base64.b64decode(url)
file += '|%s' % urllib.urlencode({'User-Agent': client.agent(), 'Referer': ref, 'X-Requested-With':constants.get_shockwave(), 'Host':urlparse.urlparse(file).netloc, 'Connection':'keep-alive','Accept':'*/*'})
return file
except:
return
#direct stream 2nd finder
def finder76(html,url):
ref = url
try:
id = re.findall('fid=[\"\'](.+?)[\"\'];.+?data-rocketsrc="http://direct-stream.org/.+?.js',html)[0]
url ="http://direct-stream.org/e.php?id=%s&vw=700&vh=400&referer=%s"%(id,ref)
return url
except:
return
#zona priority
def finder77(html,url):
try:
html = urllib.unquote(html)
url = finder4(html,url)
if client.request(url) != None:
return url
return
except:
return
#weplayer
def finder78(html,url):
try:
id = re.findall("id=['\"](.+?)['\"];.+?src=['\"]http://weplayer.([^/]+)/.+?.js([^\s]+)",html)[0]
url = 'http://weplayer.%s/stream.php?id=%s&width=640&height=480&referer=%s'%(id[1],id[0],url)
if '-->' in id[2]:
return
return find_link(url)
except:
return
def finder79(html,url):
try:
ref = url
url = re.findall("playStream\('hls', '(.+?)'",html)[0]
url += '|%s' % urllib.urlencode({'User-Agent': client.agent(), 'Referer': ref, 'X-Requested-With':constants.get_shockwave(), 'Host':urlparse.urlparse(url).netloc, 'Connection':'keep-alive','Accept':'*/*'})
return url
except:
return
#tvope
def finder80(html,ref):
try:
id = re.findall('c="(.+?)";.+?</script>\s*<script.+?src="http://i.tvope.com/js/.+?.js',html)[0]
url = 'http://tvope.com/emb/player.php?c=%s&w=700&h=480&referer=%s&d=www.popofthestreams.xyz'%(id,ref)
return url
except:
return
#dinozap
def finder81(html,url):
try:
ref = url
url = re.findall('[\"\'](https?://(?:www\.)?dinozap.info/redirect/channel.php\?id=[^"\']+)',html)[0]
return url + '&referer=' + ref
except:
return
#dinozap
def finder82(html,url):
try:
ref = url
url = re.findall('[\"\'](https?://(?:www\.)?tv.verdirectotv.org/channel.php\?file=[^"\']+)',html)[0]
return url + '&referer=' + ref
except:
return
#dinozap
def finder83(html,url):
try:
ref = url
url = re.findall('[\"\'](https?://(?:www\.)?dinostream.pw/channel.php\?file=[^"\']+)',html)[0]
return url + '&referer=' + ref
except:
return
#dinozap
def finder84(html,url):
try:
ref = url
url = re.findall('[\"\'](https?://(?:www\.)?(?:serverhd.eu|cast3d.me)/channel\w*\.php\?file=[^"\']+)',html)[0]
return url + '&referer=' + ref
except:
return
#dinozap
def finder85(html,url):
try:
ref = url
url = re.findall('[\"\'](https?://(?:www\.)?sstream.pw/channel.php\?file=[^"\']+)',html)[0]
return url + '&referer=' + ref
except:
return
#dinozap
def finder86(html,url):
try:
ref = url
url = re.findall('[\"\'](https?://(?:www\.)?ponlatv.com/channel.php\?file=[^"\']+)',html)[0]
return url + '&referer=' + ref
except:
return
#acestream
def finder90(html,ref):
try:
url = re.findall('(acestream://[^"\']+)["\']',html)[0]
return url
except:
return
#sopcast
def finder91(html,ref):
try:
url = re.findall('(sop://[^"\']+)["\']',html)[0]
return url
except:
return
#shadownet
def finder92(html,ref):
try:
url = re.findall('src=[\"\']([^\"\']+)[\"\'].+?mpeg',html)[0]
if 'rtmp' in url:
url+=' swfUrl=http://www.shadow-net.biz/javascript/videojs/flashls/video-js.swf flashver=%s live=true timeout=18 swfVfy=1 pageUrl=http://www.shadow-net.biz/'%FLASH
elif 'm3u8' in url:
url += '|%s' % urllib.urlencode({'User-Agent': client.agent(), 'Referer': ref, 'X-Requested-With':constants.get_shockwave(), 'Host':urlparse.urlparse(url).netloc, 'Connection':'keep-alive','Accept':'*/*', 'Origin':'http://shadow.go.ro'})
return url
except:
return
#filmon
def finder93(html,ref):
try:
id = re.findall('filmon.(?:com|tv)/tv/channel/export\?channel_id=(\d+)',html)[0]
url = 'http://www.filmon.com/channel/' + id
return url
except:
return
#castto
def finder94(html,ref):
try:
id = re.findall('fid=["\'](.+?)["\'];.+?src=["\'](http://static.castto.me/js/.+?.js)', html)[0]
url = id[1]+'?id=%s&referer=%s'%(id[0],ref)
return url
except:
return
#redirect
def finder95(html,url):
try:
url = re.findall('<meta http-equiv="refresh".+?; url=(.+?)"',html)[0]
return find_link(url)
except:
return
#acelive
def finder96(html,url):
try:
url = re.findall('[\"\'](.+?.acelive.+?)[\"\']',html)[0]
return url
except:
return
#castasap
def finder97(html,url):
try:
ref = url
import requests
html = requests.get(url).text
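        # The page obfuscates its markup as numeric HTML entities (&#NN;); decode
        # each entity back to its character before searching for the embed URL.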
chars = re.findall('&#(\d+)',html)
for c in chars:
html = html.replace('&#%s'%c, chr(int(c)))
html = html.replace(';','')
url = re.findall('src=[\"\'](http://www.(?:castasap|castflash|flashlive|fastflash).pw/embed.+?)[\"\']',html)[0]
url = add_args(url,{'referer':ref})
return url
except:
return
#deltatv
def finder98(html,ref):
try:
x,y = re.findall('id=[\'\"](.+?)[\'\"].+?src=[\'\"]http://deltatv.([^/]+)/.+?.js',html)[0]
url = 'http://deltatv.%s/stream.php?id=%s&width=640&height=480&referer=%s'%(y,x,ref)
return url
except:
return
#hdcast.info
def finder99(html,ref):
try:
id,rr = re.findall('fid=[\'\"](.+?)[\'\"].+?src=[\'\"]http://(?:www.)?hdcast.info/([^\.]+).js',html)[0]
url = 'http://www.hdcast.info/%s.php?live=%s&vw=620&vh=490&referer=%s'%(rr,id,ref)
return url
except:
return
#deltatv
def finder100(html,ref):
try:
url = re.findall('(http://deltatv.(?:pw|xyz)?/stream.php\?.+?[^"\']+)',html)[0]
url = url + '&referer=' + ref
return url
except:
return
#mybest
def finder103(html,ref):
try:
url = re.findall('(http://mybeststream.xyz.+?[^"\']+)',html)[0]
url = url + '&referer=' + ref
return url
except:
return
#blowfish decrypt
def finder104(html,ref):
try:
if 'Blowfish' not in html:
return
key = re.findall('new Blowfish\([\"\'](.+?)[\"\']\)',html)[0]
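        # Blowfish accepts keys of at most 448 bits (56 bytes), hence the truncation.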
if len(key)>56:
key=key[:56]
crypted = re.findall('.decrypt\([\"\'](.+?)[\"\']\)',html)[0].decode("hex")
from modules import blowfish
cipher = blowfish.Blowfish(key)
decrypted = cipher.decrypt(crypted)
return find_link(ref,html=decrypted)
except:
return
#theactionlive
def finder101(html,ref):
try:
id = re.findall('id=[\"\'](.+?)[\"\'];.+?src=[\"\']http://theactionlive.com.+?.js',html)[0]
url = 'http://theactionlive.com?id=%s&referer=%s'%(id,ref)
return url
except:
return
#acestream
def finder102(html,ref):
try:
        url = 'acestream://' + re.findall('ace/manifest.m3u8\?id\=([^\'\"]+)[\'\"]', html)[0]
return url
except:
return
#kolstg
def finder105(html,ref):
try:
id = re.findall('fid=["\'](.+?)["\'];.+?src=["\']http://(?:www.)?kolstg.pw/.+?.js', html)[0]
url = 'http://www.hornos.moy.su/channel/'+ id+'.htm?referer=' + ref
return url
except:
return
#mips
def finder106(html,ref):
try:
try:
ch,e = re.findall('channel=[\'\"](.+?)[\'\"]\s*,\s*e=[\'\"](.+?)[\'\"].+?src=[\'\"]http://(?:www.)?mipsplayer.com/.+?.js',html)[0]
except:
e,ch = re.findall('[,\s]e=[\'\"](.+?)[\'\"]\s*,\s*channel=[\'\"](.+?)[\'\"].+?src=[\'\"]http://(?:www.)?mipsplayer.com/.+?.js',html)[0]
url = 'http://www.mipsplayer.com/membedplayer/'+ch+'/'+e+'/675/400?referer=' + ref
return url
except:
return
#m3u8
def finder107(html,ref):
try:
m3u8 = re.findall('playlist_url:\s*[\"\']([^\"\']+)',html)[0]
host = re.findall('cdn_host:\s*[\"\']([^\"\']+)',html)[0]
url = 'http://' + host + m3u8
url+='|%s' % urllib.urlencode({'Referer':ref, 'User-agent':client.agent()})
return url
except:
return
#streamsus
def finder108(html,ref):
try:
url = re.findall('Watch Live\s*<a href=[\"\'](.+?)[\"\']>Here',html)[0]
return find_link(url)
except:
return
#f4m
def finder109(html,ref):
try:
f4m = re.findall('name=[\"\']flashvars[\"\'].+?value=[\"\']src=([^&]+)&',html)[0]
url = urllib.unquote(f4m)
return url
except:
return
#zona4vip
def finder110(html,ref):
try:
if 'zona4vip' not in ref:
return
fid = re.findall('fid=[\"\'](.+?)[\"\'].+?src=[\"\']/live.js',html)[0]
url = 'http://www.zona4vip.com/'+ fid
return find_link(url)
except:
return
#veetle livetvcdn
def finder111(html,ref):
try:
id = re.findall('veetle&c=([^&]+)',ref)[0]
url = 'http://veetle.com/v/' + id
return url
except:
return
#vlc new
def finder112(html,ref):
try:
url = re.findall('version=[\"\']VideoLAN.VLCPlugin.2[\"\'].+?target=[\"\']([^\"\']+)',html)[0]
return url
except:
return
#lsh stream embed
def finder113(html,ref):
try:
fid = re.findall('fid=[\"\'](.+?)[\"\'].+?src=[\"\'].+?lshstream.com/embed.js',html)[0]
loc = urlparse.urlparse(ref).netloc
url = 'http://www.lshstream.com/embed.php?u=%s&vw=640&vh=360&domain=%s'%(fid,loc)
return find_link(url)
except:
return
#castamp
def finder114(html,ref):
try:
fid = re.findall('channel=[\"\'](.+?)[\"\'].+?src=[\"\'].+?castamp.com/embed.js',html)[0]
url = 'http://castamp.com/embed.php?c=%s&vwidth=640&vheight=380&referer=%s'%(fid,ref)
return url
except:
return
#bro.adca.st
def finder115(html,ref):
try:
id = re.findall('id=[\"\'](.+?)[\"\'].+?src=[\"\'].+?bro.adca.st/.+?.js',html)[0]
url = 'http://bro.adca.st/stream.php?id='+id+'&width=640&height=460&referer=' + ref + '&stretching=uniform'
return url
except:
try:
url = re.findall('(http://bro.adca.st/stream.php[^\"\']+)',html)[0]
url = url + '&referer=' + ref
return url
except:
return
#akamai rtmpe
def finder116(html,ref):
if 'akamai' in ref:
html = decryptionUtils.doDemystify(html)
swf,streamer,file,token = re.findall('flashplayer:[\"\']([^\"\']+)[\"\'],streamer:[\"\']([^\"\']+)[\"\'],file:[\"\']([^\"\']+)[\"\'],token:[\"\']([^\"\']+)[\"\']',html)[0]
swf = 'http://akamaistreaming.com/' + swf
url = '%s playpath=%s token=%s swfUrl=%s pageUrl=%s flashver=%s'%(streamer,file,token,swf,ref,constants.flash_ver())
return url
#zunox stream
def finder117(html,ref):
if 'zunox' in ref:
url = 'http://zunox.hk/players/' + re.findall('(proxy.php\?id=[^\"\']+)',html)[0]
h2 = client.request(url)
import json
j = json.loads(h2)
host = urlparse.urlparse(j['url']).netloc.split(':')[0].replace(':80','')
        url = j['url'].replace(':80','') +'.flv' + '|%s' % urllib.urlencode({'User-agent':client.agent(),'X-Requested-With':constants.get_shockwave(),'Referer':ref, 'Host':host, 'Connection':'keep-alive','Accept-Encoding':'gzip, deflate, lzma, sdch'})
return url
#sportstream365
def finder118(html,ref):
try:
try:
id = re.findall('"sportstream365.com.+?game=(\d+)',html)[0]
except:
id = re.findall('"sportstream365.com.+?game=(\d+)',ref)[0]
return 'http://sportstream365.com/?game=%s&referer=%s'%(id,ref)
except:
return
#cndhls
def finder119(html,ref):
try:
id = re.findall('id=[\"\'](.+?)[\"\'].+?src=[\"\'].+?cndhls.+?.js',html)[0]
d = (urlparse.urlparse(ref).netloc).replace('www.','')
url = 'http://www.cndhlsstream.pw/embed.php?channel='+id+'&vw=640&vh=385&domain=' + d + '&referer=' + ref
return url
except:
return
#superplayer
def finder120(html,ref):
try:
id = re.findall("id=['\"](.+?)['\"];.+?src=['\"].+?superplayer.+?.js",html)[0]
url = 'http://nowlive.xyz/embed.php?id=%s&width=640&height=480&referer=%s'%(id,ref)
return find_link(url)
except:
return
#scity
def finder121(html,url):
try:
ref=url
id = re.findall("id=(?:\'|\")(.+?)(?:\'|\");.+?src.+?scity.tv.+?.js",html)[0]
url = 'http://scity.tv/stream.php?id=%s&width=630&height=450&referer=%s'%(id,ref)
return url
except:
return
def finder123(html,ref):
try:
url = re.findall('mpegurl.+?src=[\"\']([^\"\']+)[\"\']',html)[0]
return url + '|%s' % urllib.urlencode({'Referer':ref,'X-Requested-With':constants.get_shockwave(),'User-agent':client.agent()})
except:
return
#streamify
def finder124(html,url):
try:
ref=url
id = re.findall("channel=[\"\']([^\"\']+)[\"\'].+?src.+?streamifyplayer.com.+?.js",html)[0]
url = 'http://www.streamifyplayer.com/embedplayer/%s/1/620/430?referer=%s'%(id,ref)
return url
except:
return
#youtube live
def finder125(html,url):
try:
if 'youtube-live' in html:
url = re.findall("(https?://(?:www.)?youtube.com/[^\"\']+)",html)[0]
return url
except:
return
#streamp2p
def finder126(html,url):
try:
url = re.findall('(http://(?:www.)?streamp2p.com[^\"\']+)[\"\']',html)[0]
return url
except:
return
def finder127(html,url):
try:
try:
html = urllib.unquote(html)
except:
pass
url = re.findall('src=(http.+?m3.+?[^&]+)&',html)[0]
if 'amis' in url:
url = url.strip() +'|User-Agent=Mozilla/5.0'
return url.strip()
except:
return
#akamaistreaming
def finder128(html,ref):
try:
id = re.findall("id=['\"](.+?)['\"].+?src=['\"].+?akamaistreaming.+?.js",html)[0]
url = 'http://akamaistreaming.com/zn.php?id=%s&width=640&height=385&referer=%s'%(id,ref)
return url
except:
return
def finder129(html,ref):
try:
id = re.findall("id=['\"](.+?)['\"].+?src=['\"].+?akamaistreaming.+?.js",html)[0]
url = 'http://akamaistreaming.com/zn.php?id=%s&width=640&height=385&referer=%s'%(id,ref)
return url
except:
return
| apache-2.0 |
jordanaluft/gnome-music | gnomemusic/widgets.py | 1 | 34979 | # Copyright (c) 2013 Vadim Rutkovsky <[email protected]>
# Copyright (c) 2013 Shivani Poddar <[email protected]>
# Copyright (c) 2013 Arnel A. Borja <[email protected]>
# Copyright (c) 2013 Seif Lotfy <[email protected]>
# Copyright (c) 2013 Sai Suman Prayaga <[email protected]>
# Copyright (c) 2013 Jackson Isaac <[email protected]>
# Copyright (c) 2013 Felipe Borges <[email protected]>
# Copyright (c) 2013 Giovanni Campagna <[email protected]>
# Copyright (c) 2013 Guillaume Quintard <[email protected]>
#
# GNOME Music is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# GNOME Music is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with GNOME Music; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# The GNOME Music authors hereby grant permission for non-GPL compatible
# GStreamer plugins to be used and distributed together with GStreamer
# and GNOME Music. This permission is above and beyond the permissions
# granted by the GPL license by which GNOME Music is covered. If you
# modify this code, you may extend this exception to your version of the
# code, but you are not obligated to do so. If you do not wish to do so,
# delete this exception statement from your version.
from gi.repository import Gtk, Gdk, Gd, GLib, GObject, Pango, Gio
from gi.repository import GdkPixbuf, Grl
from gettext import gettext as _, ngettext
from gnomemusic.grilo import grilo
from gnomemusic.albumArtCache import AlbumArtCache
from gnomemusic.player import DiscoveryStatus
from gnomemusic.playlists import Playlists, StaticPlaylists
from gnomemusic import log
import logging
logger = logging.getLogger(__name__)
ALBUM_ART_CACHE = AlbumArtCache.get_default()
NOW_PLAYING_ICON_NAME = 'media-playback-start-symbolic'
ERROR_ICON_NAME = 'dialog-error-symbolic'
try:
settings = Gio.Settings.new('org.gnome.Music')
MAX_TITLE_WIDTH = settings.get_int('max-width-chars')
except Exception as e:
MAX_TITLE_WIDTH = 20
logger.error("Error on setting widget max-width-chars: %s", str(e))
playlists = Playlists.get_default()
class StarHandler():
def __repr__(self):
return '<StarHandler>'
@log
def __init__(self, parent, star_index):
self.star_index = star_index
self.star_renderer_click = False
self.parent = parent
@log
def _add_star_renderers(self, list_widget, cols, hidden=False):
star_renderer = CellRendererClickablePixbuf(self.parent.view, hidden=hidden)
star_renderer.connect("clicked", self._on_star_toggled)
list_widget.add_renderer(star_renderer, lambda *args: None, None)
cols[0].clear_attributes(star_renderer)
cols[0].add_attribute(star_renderer, 'show_star', self.star_index)
@log
def _on_star_toggled(self, widget, path):
try:
_iter = self.parent.model.get_iter(path)
except TypeError:
return
try:
if self.parent.model.get_value(_iter, 9) == 2:
return
except AttributeError:
return
new_value = not self.parent.model.get_value(_iter, self.star_index)
self.parent.model.set_value(_iter, self.star_index, new_value)
song_item = self.parent.model.get_value(_iter, 5)
grilo.toggle_favorite(song_item) # toggle favorite status in database
playlists.update_static_playlist(StaticPlaylists.Favorites)
# Use this flag to ignore the upcoming _on_item_activated call
self.star_renderer_click = True
class AlbumWidget(Gtk.EventBox):
tracks = []
duration = 0
loadingIcon = ALBUM_ART_CACHE.get_default_icon(256, 256, True)
noArtworkIcon = ALBUM_ART_CACHE.get_default_icon(256, 256, False)
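    # 256x256 placeholders: loadingIcon is shown while album art is fetched
    # asynchronously; noArtworkIcon is used when the lookup returns nothing.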
def __repr__(self):
return '<AlbumWidget>'
@log
def __init__(self, player, parentview):
Gtk.EventBox.__init__(self)
self.player = player
self.iterToClean = None
self.parentview = parentview
self.ui = Gtk.Builder()
self.ui.add_from_resource('/org/gnome/Music/AlbumWidget.ui')
self._create_model()
self.view = Gd.MainView(
shadow_type=Gtk.ShadowType.NONE
)
self.view.set_view_type(Gd.MainViewType.LIST)
self.album = None
self.header_bar = None
self.view.connect('item-activated', self._on_item_activated)
view_box = self.ui.get_object('view')
self.ui.get_object('scrolledWindow').set_placement(Gtk.CornerType.
TOP_LEFT)
self.view.connect('selection-mode-request', self._on_selection_mode_request)
child_view = self.view.get_children()[0]
child_view.set_margin_top(64)
child_view.set_margin_bottom(64)
child_view.set_margin_end(32)
self.view.remove(child_view)
view_box.add(child_view)
self.add(self.ui.get_object('AlbumWidget'))
self.star_handler = StarHandler(self, 9)
self._add_list_renderers()
self.get_style_context().add_class('view')
self.get_style_context().add_class('content-view')
self.view.get_generic_view().get_style_context().remove_class('view')
self.show_all()
@log
def _on_selection_mode_request(self, *args):
self.header_bar._select_button.clicked()
@log
def _on_item_activated(self, widget, id, path):
if self.star_handler.star_renderer_click:
self.star_handler.star_renderer_click = False
return
_iter = self.model.get_iter(path)
if self.model.get_value(_iter, 10) != DiscoveryStatus.FAILED:
if (self.iterToClean and self.player.playlistId == self.album):
item = self.model.get_value(self.iterToClean, 5)
title = AlbumArtCache.get_media_title(item)
self.model.set_value(self.iterToClean, 0, title)
# Hide now playing icon
self.model.set_value(self.iterToClean, 6, False)
self.player.set_playlist('Album', self.album, self.model, _iter, 5, 11)
self.player.set_playing(True)
@log
def _add_list_renderers(self):
list_widget = self.view.get_generic_view()
cols = list_widget.get_columns()
cols[0].set_min_width(100)
cols[0].set_max_width(200)
cells = cols[0].get_cells()
cells[2].set_visible(False)
cells[1].set_visible(False)
now_playing_symbol_renderer = Gtk.CellRendererPixbuf(xpad=0,
xalign=0.5,
yalign=0.5)
column_now_playing = Gtk.TreeViewColumn()
column_now_playing.set_fixed_width(48)
column_now_playing.pack_start(now_playing_symbol_renderer, False)
column_now_playing.set_cell_data_func(now_playing_symbol_renderer,
self._on_list_widget_icon_render, None)
list_widget.insert_column(column_now_playing, 0)
type_renderer = Gd.StyledTextRenderer(
xpad=16,
ellipsize=Pango.EllipsizeMode.END,
xalign=0.0
)
list_widget.add_renderer(type_renderer, lambda *args: None, None)
cols[0].clear_attributes(type_renderer)
cols[0].add_attribute(type_renderer, 'markup', 0)
durationRenderer = Gd.StyledTextRenderer(
xpad=16,
ellipsize=Pango.EllipsizeMode.END,
xalign=1.0
)
durationRenderer.add_class('dim-label')
list_widget.add_renderer(durationRenderer, lambda *args: None, None)
cols[0].clear_attributes(durationRenderer)
cols[0].add_attribute(durationRenderer, 'markup', 1)
self.star_handler._add_star_renderers(list_widget, cols)
def _on_list_widget_icon_render(self, col, cell, model, _iter, data):
if not self.player.currentTrackUri:
cell.set_visible(False)
return
if model.get_value(_iter, 10) == DiscoveryStatus.FAILED:
cell.set_property('icon-name', ERROR_ICON_NAME)
cell.set_visible(True)
elif model.get_value(_iter, 5).get_url() == self.player.currentTrackUri:
cell.set_property('icon-name', NOW_PLAYING_ICON_NAME)
cell.set_visible(True)
else:
cell.set_visible(False)
@log
def _create_model(self):
self.model = Gtk.ListStore(
GObject.TYPE_STRING, # title
GObject.TYPE_STRING,
GObject.TYPE_STRING,
GObject.TYPE_STRING,
GdkPixbuf.Pixbuf, # icon
GObject.TYPE_OBJECT, # song object
GObject.TYPE_BOOLEAN, # item selected
GObject.TYPE_STRING,
GObject.TYPE_BOOLEAN,
GObject.TYPE_INT, # icon shown
GObject.TYPE_BOOLEAN,
GObject.TYPE_INT
)
@log
def update(self, artist, album, item, header_bar, selection_toolbar):
self.selection_toolbar = selection_toolbar
self.header_bar = header_bar
self.album = album
real_artist = item.get_string(Grl.METADATA_KEY_ARTIST)\
or item.get_author()\
or _("Unknown Artist")
self.ui.get_object('cover').set_from_pixbuf(self.loadingIcon)
ALBUM_ART_CACHE.lookup(item, 256, 256, self._on_look_up, None, real_artist, album)
self.duration = 0
self._create_model()
GLib.idle_add(grilo.populate_album_songs, item, self.add_item)
header_bar._select_button.connect(
'toggled', self._on_header_select_button_toggled)
header_bar._cancel_button.connect(
'clicked', self._on_header_cancel_button_clicked)
self.view.connect('view-selection-changed',
self._on_view_selection_changed)
self.view.set_model(self.model)
escaped_artist = GLib.markup_escape_text(artist)
escaped_album = GLib.markup_escape_text(album)
self.ui.get_object('artist_label').set_markup(escaped_artist)
self.ui.get_object('title_label').set_markup(escaped_album)
if (item.get_creation_date()):
self.ui.get_object('released_label_info').set_text(
str(item.get_creation_date().get_year()))
else:
self.ui.get_object('released_label_info').set_text('----')
self.player.connect('playlist-item-changed', self.update_model)
@log
def _on_view_selection_changed(self, widget):
items = self.view.get_selection()
self.selection_toolbar\
._add_to_playlist_button.set_sensitive(len(items) > 0)
if len(items) > 0:
self.header_bar._selection_menu_label.set_text(
ngettext("Selected %d item", "Selected %d items", len(items)) % len(items))
else:
self.header_bar._selection_menu_label.set_text(_("Click on items to select them"))
@log
def _on_header_cancel_button_clicked(self, button):
self.view.set_selection_mode(False)
self.header_bar.set_selection_mode(False)
self.header_bar.header_bar.title = self.album
@log
def _on_header_select_button_toggled(self, button):
if button.get_active():
self.view.set_selection_mode(True)
self.header_bar.set_selection_mode(True)
self.player.actionbar.set_visible(False)
self.selection_toolbar.actionbar.set_visible(True)
self.selection_toolbar._add_to_playlist_button.set_sensitive(False)
self.header_bar.header_bar.set_custom_title(self.header_bar._selection_menu_button)
else:
self.view.set_selection_mode(False)
self.header_bar.set_selection_mode(False)
self.header_bar.title = self.album
self.selection_toolbar.actionbar.set_visible(False)
if(self.player.get_playback_status() != 2):
self.player.actionbar.set_visible(True)
@log
def add_item(self, source, prefs, track, remaining, data=None):
if track:
self.tracks.append(track)
self.duration = self.duration + track.get_duration()
_iter = self.model.append()
escapedTitle = AlbumArtCache.get_media_title(track, True)
self.model.set(_iter,
[0, 1, 2, 3, 4, 5, 9],
[escapedTitle,
self.player.seconds_to_string(
track.get_duration()),
'', '', None, track, bool(track.get_lyrics())])
self.ui.get_object('running_length_label_info').set_text(
_("%d min") % (int(self.duration / 60) + 1))
@log
def _on_look_up(self, pixbuf, path, data=None):
_iter = self.iterToClean
if not pixbuf:
pixbuf = self.noArtworkIcon
self.ui.get_object('cover').set_from_pixbuf(pixbuf)
if _iter:
self.model.set(_iter, [4], [pixbuf])
@log
def update_model(self, player, playlist, currentIter):
# self is not our playlist, return
if (playlist != self.model):
return False
currentSong = playlist.get_value(currentIter, 5)
song_passed = False
_iter = playlist.get_iter_first()
self.duration = 0
while _iter:
song = playlist.get_value(_iter, 5)
self.duration += song.get_duration()
escapedTitle = AlbumArtCache.get_media_title(song, True)
if (song == currentSong):
title = '<b>%s</b>' % escapedTitle
song_passed = True
elif (song_passed):
title = '<span>%s</span>' % escapedTitle
else:
title = '<span color=\'grey\'>%s</span>' % escapedTitle
playlist.set_value(_iter, 0, title)
_iter = playlist.iter_next(_iter)
self.ui.get_object('running_length_label_info').set_text(
_("%d min") % (int(self.duration / 60) + 1))
return False
# @log
# def _on_star_toggled(self, widget, path):
# try:
# _iter = self.model.get_iter(path)
# except TypeError:
# return
# new_value = not self.model.get_value(_iter, 10)
# self.model.set_value(_iter, 10, new_value)
# song_item = self.model.get_value(_iter, 5) # er, will this definitely return MediaAudio obj.?
# grilo.toggle_favorite(song_item) # toggle favorite status in database
# playlists.update_static_playlist(StaticPlaylists.Favorites)
# # Use this flag to ignore the upcoming _on_item_activated call
# self.star_renderer_click = True
class ArtistAlbums(Gtk.Box):
def __repr__(self):
return '<ArtistAlbums>'
@log
def __init__(self, artist, albums, player,
header_bar, selection_toolbar, window, selectionModeAllowed=False):
Gtk.Box.__init__(self, orientation=Gtk.Orientation.VERTICAL)
self.player = player
self.artist = artist
self.albums = albums
self.window = window
self.selectionMode = False
self.selectionModeAllowed = selectionModeAllowed
self.selection_toolbar = selection_toolbar
self.header_bar = header_bar
self.ui = Gtk.Builder()
self.ui.add_from_resource('/org/gnome/Music/ArtistAlbumsWidget.ui')
self.set_border_width(0)
self.ui.get_object('artist').set_label(self.artist)
self.widgets = []
self.model = Gtk.ListStore(GObject.TYPE_STRING, # title
GObject.TYPE_STRING,
Gtk.Image,
GObject.TYPE_BOOLEAN, # icon shown
GObject.TYPE_STRING, # icon
GObject.TYPE_OBJECT, # song object
GObject.TYPE_BOOLEAN,
GObject.TYPE_INT
)
self.row_changed_source_id = None
self._hbox = Gtk.Box(orientation=Gtk.Orientation.VERTICAL)
self._albumBox = Gtk.Box(orientation=Gtk.Orientation.VERTICAL,
spacing=48)
self._scrolledWindow = Gtk.ScrolledWindow()
self._scrolledWindow.set_policy(
Gtk.PolicyType.NEVER,
Gtk.PolicyType.AUTOMATIC)
self._scrolledWindow.add(self._hbox)
self._hbox.pack_start(self.ui.get_object('ArtistAlbumsWidget'),
False, False, 0)
self._hbox.pack_start(self._albumBox, False, False, 16)
self._coverSizeGroup = Gtk.SizeGroup.new(Gtk.SizeGroupMode.HORIZONTAL)
self._songsGridSizeGroup = Gtk.SizeGroup.new(Gtk.SizeGroupMode.HORIZONTAL)
self.pack_start(self._scrolledWindow, True, True, 0)
self.hide()
self.window._init_loading_notification()
for album in albums:
is_last_album = False
if album == albums[-1]:
is_last_album = True
self.add_album(album, is_last_album)
self.player.connect('playlist-item-changed', self.update_model)
def _on_last_album_displayed(self, data=None):
self.window.notification.dismiss()
self.show_all()
@log
def add_album(self, album, is_last_album=False):
self.window.notification.set_timeout(0)
widget = ArtistAlbumWidget(
self.artist, album, self.player, self.model,
self.header_bar, self.selectionModeAllowed
)
self._coverSizeGroup.add_widget(widget.cover)
self._songsGridSizeGroup.add_widget(widget.songsGrid)
self._albumBox.pack_start(widget, False, False, 0)
self.widgets.append(widget)
if is_last_album:
widget.connect('tracks-loaded', self._on_last_album_displayed)
@log
def update_model(self, player, playlist, currentIter):
# this is not our playlist, return
if playlist != self.model:
# TODO, only clean once, but that can wait util we have clean
# the code a bit, and until the playlist refactoring.
# the overhead is acceptable for now
self.clean_model()
return False
currentSong = playlist.get_value(currentIter, 5)
song_passed = False
itr = playlist.get_iter_first()
while itr:
song = playlist.get_value(itr, 5)
song_widget = song.song_widget
if not song_widget.can_be_played:
itr = playlist.iter_next(itr)
continue
escapedTitle = AlbumArtCache.get_media_title(song, True)
if (song == currentSong):
song_widget.now_playing_sign.show()
song_widget.title.set_markup('<b>%s</b>' % escapedTitle)
song_passed = True
elif (song_passed):
song_widget.now_playing_sign.hide()
song_widget.title.set_markup('<span>%s</span>' % escapedTitle)
else:
song_widget.now_playing_sign.hide()
song_widget.title\
.set_markup('<span color=\'grey\'>%s</span>' % escapedTitle)
itr = playlist.iter_next(itr)
return False
@log
def clean_model(self):
itr = self.model.get_iter_first()
while itr:
song = self.model.get_value(itr, 5)
song_widget = song.song_widget
escapedTitle = AlbumArtCache.get_media_title(song, True)
if song_widget.can_be_played:
song_widget.now_playing_sign.hide()
song_widget.title.set_markup('<span>%s</span>' % escapedTitle)
itr = self.model.iter_next(itr)
return False
@log
def set_selection_mode(self, selectionMode):
if self.selectionMode == selectionMode:
return
self.selectionMode = selectionMode
try:
if self.row_changed_source_id:
self.model.disconnect(self.row_changed_source_id)
self.row_changed_source_id = self.model.connect('row-changed', self._model_row_changed)
except Exception as e:
logger.warning("Exception while tracking row-changed: %s", e)
for widget in self.widgets:
widget.set_selection_mode(selectionMode)
@log
def _model_row_changed(self, model, path, _iter):
if not self.selectionMode:
return
selected_items = 0
for row in model:
if row[6]:
selected_items += 1
self.selection_toolbar\
._add_to_playlist_button.set_sensitive(selected_items > 0)
if selected_items > 0:
self.header_bar._selection_menu_label.set_text(
ngettext("Selected %d item", "Selected %d items", selected_items) % selected_items)
else:
self.header_bar._selection_menu_label.set_text(_("Click on items to select them"))
class AllArtistsAlbums(ArtistAlbums):
def __repr__(self):
return '<AllArtistsAlbums>'
@log
    def __init__(self, player, header_bar, selection_toolbar, window, selectionModeAllowed=False):
        ArtistAlbums.__init__(self, _("All Artists"), [], player,
                              header_bar, selection_toolbar, window, selectionModeAllowed)
self._offset = 0
self._populate()
@log
def _populate(self, data=None):
if grilo.tracker:
GLib.idle_add(grilo.populate_albums,
self._offset, self.add_item)
@log
def add_item(self, source, param, item, remaining=0, data=None):
if remaining == 0:
self._on_last_album_displayed()
if item:
self._offset += 1
self.add_album(item)
class ArtistAlbumWidget(Gtk.Box):
__gsignals__ = {
'tracks-loaded': (GObject.SignalFlags.RUN_FIRST, None, ()),
}
loadingIcon = AlbumArtCache.get_default().get_default_icon(128, 128, True)
noArtworkIcon = ALBUM_ART_CACHE.get_default_icon(128, 128, False)
def __repr__(self):
return '<ArtistAlbumWidget>'
@log
def __init__(self, artist, album, player, model, header_bar, selectionModeAllowed):
Gtk.Box.__init__(self, orientation=Gtk.Orientation.HORIZONTAL)
self.player = player
self.album = album
self.artist = artist
self.model = model
self.model.connect('row-changed', self._model_row_changed)
self.header_bar = header_bar
self.selectionMode = False
self.selectionModeAllowed = selectionModeAllowed
self.songs = []
self.ui = Gtk.Builder()
self.ui.add_from_resource('/org/gnome/Music/ArtistAlbumWidget.ui')
GLib.idle_add(self._update_album_art)
self.cover = self.ui.get_object('cover')
self.cover.set_from_pixbuf(self.loadingIcon)
self.songsGrid = self.ui.get_object('grid1')
self.ui.get_object('title').set_label(album.get_title())
if album.get_creation_date():
self.ui.get_object('year').set_markup(
'<span color=\'grey\'>(%s)</span>' %
str(album.get_creation_date().get_year())
)
self.tracks = []
grilo.populate_album_songs(album, self.add_item)
self.pack_start(self.ui.get_object('ArtistAlbumWidget'), True, True, 0)
@log
def add_item(self, source, prefs, track, remaining, data=None):
if remaining == 0:
self.songsGrid.show_all()
self.emit("tracks-loaded")
if track:
self.tracks.append(track)
else:
for i, track in enumerate(self.tracks):
ui = Gtk.Builder()
ui.add_from_resource('/org/gnome/Music/TrackWidget.ui')
song_widget = ui.get_object('eventbox1')
self.songs.append(song_widget)
ui.get_object('num')\
.set_markup('<span color=\'grey\'>%d</span>'
% len(self.songs))
title = AlbumArtCache.get_media_title(track)
ui.get_object('title').set_text(title)
ui.get_object('title').set_alignment(0.0, 0.5)
ui.get_object('title').set_max_width_chars(MAX_TITLE_WIDTH)
self.songsGrid.attach(
song_widget,
int(i / (len(self.tracks) / 2)),
int(i % (len(self.tracks) / 2)), 1, 1
)
track.song_widget = song_widget
itr = self.model.append(None)
song_widget._iter = itr
song_widget.model = self.model
song_widget.title = ui.get_object('title')
song_widget.checkButton = ui.get_object('select')
song_widget.checkButton.set_visible(self.selectionMode)
song_widget.checkButton.connect(
'toggled', self._check_button_toggled, song_widget
)
self.model.set(itr,
[0, 1, 2, 3, 5],
[title, self.artist, self.cover, False, track])
song_widget.now_playing_sign = ui.get_object('image1')
song_widget.now_playing_sign.set_from_icon_name(
NOW_PLAYING_ICON_NAME,
Gtk.IconSize.SMALL_TOOLBAR)
                song_widget.now_playing_sign.set_no_show_all(True)
song_widget.now_playing_sign.set_alignment(1, 0.6)
song_widget.can_be_played = True
song_widget.connect('button-release-event',
self.track_selected)
@log
def _update_album_art(self):
real_artist = self.album.get_string(Grl.METADATA_KEY_ARTIST)\
or self.album.get_author()\
or _("Unknown Artist")
ALBUM_ART_CACHE.lookup(
self.album, 128, 128, self._get_album_cover, None,
real_artist, self.album.get_title())
@log
def _get_album_cover(self, pixbuf, path, data=None):
if not pixbuf:
pixbuf = self.noArtworkIcon
self.cover.set_from_pixbuf(pixbuf)
@log
def track_selected(self, widget, event):
if not widget.can_be_played:
return
if not self.selectionMode and \
(event.button == Gdk.BUTTON_SECONDARY or
(event.button == 1 and event.state & Gdk.ModifierType.CONTROL_MASK)):
if self.selectionModeAllowed:
self.header_bar._select_button.set_active(True)
else:
return
if self.selectionMode:
self.model[widget._iter][6] = not self.model[widget._iter][6]
return
self.player.stop()
self.player.set_playlist('Artist', self.artist,
widget.model, widget._iter, 5, 6)
self.player.set_playing(True)
@log
def set_selection_mode(self, selectionMode):
if self.selectionMode == selectionMode:
return
self.selectionMode = selectionMode
for songWidget in self.songs:
songWidget.checkButton.set_visible(selectionMode)
if not selectionMode:
songWidget.model[songWidget._iter][6] = False
@log
def _check_button_toggled(self, button, songWidget):
if songWidget.model[songWidget._iter][6] != button.get_active():
songWidget.model[songWidget._iter][6] = button.get_active()
@log
def _model_row_changed(self, model, path, _iter):
if not self.selectionMode:
return
if not model[_iter][5]:
return
songWidget = model[_iter][5].song_widget
selected = model[_iter][6]
if model[_iter][11] == DiscoveryStatus.FAILED:
songWidget.now_playing_sign.set_from_icon_name(
ERROR_ICON_NAME,
Gtk.IconSize.SMALL_TOOLBAR)
songWidget.now_playing_sign.show()
songWidget.can_be_played = False
if selected != songWidget.checkButton.get_active():
songWidget.checkButton.set_active(selected)
class PlaylistDialog():
def __repr__(self):
return '<PlaylistDialog>'
@log
def __init__(self, parent):
self.ui = Gtk.Builder()
self.ui.add_from_resource('/org/gnome/Music/PlaylistDialog.ui')
self.dialog_box = self.ui.get_object('dialog1')
self.dialog_box.set_transient_for(parent)
self.view = self.ui.get_object('treeview1')
self.view.set_activate_on_single_click(False)
self.selection = self.ui.get_object('treeview-selection1')
self.selection.connect('changed', self._on_selection_changed)
self._add_list_renderers()
self.view.connect('row-activated', self._on_item_activated)
self.model = self.ui.get_object('liststore1')
self.populate()
self.title_bar = self.ui.get_object('headerbar1')
self.dialog_box.set_titlebar(self.title_bar)
self._cancel_button = self.ui.get_object('cancel-button')
self._select_button = self.ui.get_object('select-button')
self._select_button.set_sensitive(False)
self._cancel_button.connect('clicked', self._on_cancel_button_clicked)
self._select_button.connect('clicked', self._on_selection)
self.playlist = Playlists.get_default()
self.playlist.connect('playlist-created', self._on_playlist_created)
@log
def get_selected(self):
_iter = self.selection.get_selected()[1]
if not _iter or self.model[_iter][1]:
return None
return self.model[_iter][2]
@log
def _add_list_renderers(self):
cols = Gtk.TreeViewColumn()
type_renderer = Gd.StyledTextRenderer(
xpad=8,
ypad=8,
ellipsize=Pango.EllipsizeMode.END,
xalign=0.0
)
type_renderer.connect('editing-started', self._on_editing_started, None)
cols.pack_start(type_renderer, True)
cols.add_attribute(type_renderer, "text", 0)
cols.add_attribute(type_renderer, "editable", 1)
cols.set_cell_data_func(type_renderer, self._on_list_text_render)
self.view.append_column(cols)
@log
def populate(self):
self.add_playlist_iter = self.model.append()
self.model.set(self.add_playlist_iter, [0, 1], [_("New Playlist"), True])
if grilo.tracker:
GLib.idle_add(grilo.populate_playlists, 0, self._add_item)
@log
def _add_item(self, source, param, item, remaining=0, data=None):
if item:
self._add_item_to_model(item)
@log
def _add_item_to_model(self, item):
new_iter = self.model.insert_before(self.add_playlist_iter)
self.model.set(
new_iter,
[0, 1, 2],
[AlbumArtCache.get_media_title(item), False, item]
)
return new_iter
@log
def _on_list_text_render(self, col, cell, model, _iter, data):
editable = model.get_value(_iter, 1)
if editable:
cell.add_class("dim-label")
else:
cell.remove_class("dim-label")
@log
def _on_selection(self, select_button):
self.dialog_box.response(Gtk.ResponseType.ACCEPT)
@log
def _on_cancel_button_clicked(self, cancel_button):
self.dialog_box.response(Gtk.ResponseType.REJECT)
@log
def _on_item_activated(self, view, path, column):
_iter = self.model.get_iter(path)
if self.model.get_value(_iter, 1):
self.view.set_cursor(path, column, True)
else:
self.dialog_box.response(Gtk.ResponseType.ACCEPT)
@log
def _on_selection_changed(self, selection):
model, _iter = self.selection.get_selected()
        if _iter is None or self.model.get_value(_iter, 1):
self._select_button.set_sensitive(False)
else:
self._select_button.set_sensitive(True)
@log
def _on_editing_started(self, renderer, editable, path, data=None):
editable.set_text('')
editable.connect('editing-done', self._on_editing_done, None)
@log
def _on_editing_done(self, editable, data=None):
if editable.get_text() != '':
self.playlist.create_playlist(editable.get_text())
@log
def _on_playlist_created(self, playlists, item):
new_iter = self._add_item_to_model(item)
if self.view.get_columns():
self.view.set_cursor(self.model.get_path(new_iter),
self.view.get_columns()[0], False)
self.view.row_activated(self.model.get_path(new_iter),
self.view.get_columns()[0])
class CellRendererClickablePixbuf(Gtk.CellRendererPixbuf):
__gsignals__ = {'clicked': (GObject.SignalFlags.RUN_LAST, GObject.TYPE_NONE,
(GObject.TYPE_STRING,))}
__gproperties__ = {
        'show_star': (GObject.TYPE_INT, 'Show star', 'show star', 0, 2, 1, GObject.ParamFlags.READWRITE)}
starIcon = 'starred-symbolic'
nonStarIcon = 'non-starred-symbolic'
def __repr__(self):
return '<CellRendererClickablePixbuf>'
def __init__(self, view, hidden=False, *args, **kwargs):
Gtk.CellRendererPixbuf.__init__(self, *args, **kwargs)
self.set_property('mode', Gtk.CellRendererMode.ACTIVATABLE)
self.set_property('xpad', 32)
self.set_property('icon_name', '')
self.view = view
self.hidden = hidden
self.show_star = 0
def do_activate(self, event, widget, path, background_area, cell_area, flags):
self.show_star = 0
self.emit('clicked', path)
def do_get_property(self, property):
if property.name == 'show-star':
return self.show_star
def do_set_property(self, property, value):
if property.name == 'show-star':
            if value == 1:
                self.set_property('icon_name', self.starIcon)
            elif value == 0:
                self.set_property('icon_name', self.nonStarIcon)
else:
self.set_property('icon_name', '')
self.show_star = value
| gpl-2.0 |
Richard-Mathie/cassandra_benchmark | vendor/github.com/datastax/python-driver/tests/integration/cqlengine/columns/test_value_io.py | 6 | 7015 | # Copyright 2013-2016 DataStax, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
try:
import unittest2 as unittest
except ImportError:
import unittest # noqa
from datetime import datetime, timedelta, time
from decimal import Decimal
from uuid import uuid1, uuid4, UUID
import six
from cassandra.cqlengine import columns
from cassandra.cqlengine.management import sync_table
from cassandra.cqlengine.management import drop_table
from cassandra.cqlengine.models import Model
from cassandra.util import Date, Time
from tests.integration import PROTOCOL_VERSION
from tests.integration.cqlengine.base import BaseCassEngTestCase
class BaseColumnIOTest(BaseCassEngTestCase):
"""
    Tests that values come out of Cassandra in the format we expect.
    To test a column type, subclass this test and define the column and the
    primary key and data values you want to test.
"""
# The generated test model is assigned here
_generated_model = None
# the column we want to test
column = None
# the values we want to test against, you can
# use a single value, or multiple comma separated values
pkey_val = None
data_val = None
@classmethod
def setUpClass(cls):
super(BaseColumnIOTest, cls).setUpClass()
# if the test column hasn't been defined, bail out
if not cls.column:
return
# create a table with the given column
class IOTestModel(Model):
pkey = cls.column(primary_key=True)
data = cls.column()
cls._generated_model = IOTestModel
sync_table(cls._generated_model)
# tupleify the tested values
if not isinstance(cls.pkey_val, tuple):
cls.pkey_val = cls.pkey_val,
if not isinstance(cls.data_val, tuple):
cls.data_val = cls.data_val,
@classmethod
def tearDownClass(cls):
super(BaseColumnIOTest, cls).tearDownClass()
if not cls.column:
return
drop_table(cls._generated_model)
def comparator_converter(self, val):
""" If you want to convert the original value used to compare the model vales """
return val
def test_column_io(self):
""" Tests the given models class creates and retrieves values as expected """
if not self.column:
return
for pkey, data in zip(self.pkey_val, self.data_val):
# create
m1 = self._generated_model.create(pkey=pkey, data=data)
# get
m2 = self._generated_model.get(pkey=pkey)
assert m1.pkey == m2.pkey == self.comparator_converter(pkey), self.column
assert m1.data == m2.data == self.comparator_converter(data), self.column
# delete
self._generated_model.filter(pkey=pkey).delete()
class TestBlobIO(BaseColumnIOTest):
column = columns.Blob
pkey_val = six.b('blake'), uuid4().bytes
data_val = six.b('eggleston'), uuid4().bytes
class TestBlobIO2(BaseColumnIOTest):
column = columns.Blob
pkey_val = bytearray(six.b('blake')), uuid4().bytes
data_val = bytearray(six.b('eggleston')), uuid4().bytes
class TestTextIO(BaseColumnIOTest):
column = columns.Text
pkey_val = 'bacon'
data_val = 'monkey'
class TestNonBinaryTextIO(BaseColumnIOTest):
column = columns.Text
pkey_val = 'bacon'
data_val = '0xmonkey'
class TestInteger(BaseColumnIOTest):
column = columns.Integer
pkey_val = 5
data_val = 6
class TestBigInt(BaseColumnIOTest):
column = columns.BigInt
pkey_val = 6
data_val = pow(2, 63) - 1
class TestDateTime(BaseColumnIOTest):
column = columns.DateTime
now = datetime(*datetime.now().timetuple()[:6])
pkey_val = now
data_val = now + timedelta(days=1)
class TestUUID(BaseColumnIOTest):
column = columns.UUID
pkey_val = str(uuid4()), uuid4()
data_val = str(uuid4()), uuid4()
def comparator_converter(self, val):
return val if isinstance(val, UUID) else UUID(val)
class TestTimeUUID(BaseColumnIOTest):
column = columns.TimeUUID
pkey_val = str(uuid1()), uuid1()
data_val = str(uuid1()), uuid1()
def comparator_converter(self, val):
return val if isinstance(val, UUID) else UUID(val)
class TestFloatIO(BaseColumnIOTest):
column = columns.Float
pkey_val = 4.75
data_val = -1.5
class TestDoubleIO(BaseColumnIOTest):
column = columns.Double
pkey_val = 3.14
data_val = -1982.11
class TestDecimalIO(BaseColumnIOTest):
column = columns.Decimal
pkey_val = Decimal('1.35'), 5, '2.4'
data_val = Decimal('0.005'), 3.5, '8'
def comparator_converter(self, val):
return Decimal(repr(val) if isinstance(val, float) else val)
class ProtocolV4Test(BaseColumnIOTest):
@classmethod
def setUpClass(cls):
if PROTOCOL_VERSION >= 4:
super(ProtocolV4Test, cls).setUpClass()
@classmethod
def tearDownClass(cls):
if PROTOCOL_VERSION >= 4:
super(ProtocolV4Test, cls).tearDownClass()
class TestDate(ProtocolV4Test):
def setUp(self):
if PROTOCOL_VERSION < 4:
raise unittest.SkipTest("Protocol v4 datatypes require native protocol 4+, currently using: {0}".format(PROTOCOL_VERSION))
super(TestDate, self).setUp()
column = columns.Date
now = Date(datetime.now().date())
pkey_val = now
data_val = Date(now.days_from_epoch + 1)
class TestTime(ProtocolV4Test):
def setUp(self):
if PROTOCOL_VERSION < 4:
raise unittest.SkipTest("Protocol v4 datatypes require native protocol 4+, currently using: {0}".format(PROTOCOL_VERSION))
super(TestTime, self).setUp()
column = columns.Time
pkey_val = Time(time(2, 12, 7, 48))
data_val = Time(time(16, 47, 25, 7))
class TestSmallInt(ProtocolV4Test):
def setUp(self):
if PROTOCOL_VERSION < 4:
raise unittest.SkipTest("Protocol v4 datatypes require native protocol 4+, currently using: {0}".format(PROTOCOL_VERSION))
super(TestSmallInt, self).setUp()
column = columns.SmallInt
pkey_val = 16768
data_val = 32523
class TestTinyInt(ProtocolV4Test):
def setUp(self):
if PROTOCOL_VERSION < 4:
raise unittest.SkipTest("Protocol v4 datatypes require native protocol 4+, currently using: {0}".format(PROTOCOL_VERSION))
super(TestTinyInt, self).setUp()
column = columns.TinyInt
pkey_val = 1
data_val = 123
| apache-2.0 |
jerbob92/CouchPotatoServer | libs/guessit/transfo/guess_properties.py | 150 | 1273 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# GuessIt - A library for guessing information from filenames
# Copyright (c) 2012 Nicolas Wack <[email protected]>
#
# GuessIt is free software; you can redistribute it and/or modify it under
# the terms of the Lesser GNU General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# GuessIt is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# Lesser GNU General Public License for more details.
#
# You should have received a copy of the Lesser GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
from __future__ import unicode_literals
from guessit.transfo import SingleNodeGuesser
from guessit.patterns import find_properties
import logging
log = logging.getLogger(__name__)
def guess_properties(string):
try:
prop, value, pos, end = find_properties(string)[0]
return { prop: value }, (pos, end)
except IndexError:
return None, None
def process(mtree):
SingleNodeGuesser(guess_properties, 1.0, log).process(mtree)
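# Editor's sketch (hypothetical filename, not part of the original module):
# when a known property is found in the string, its guess and the matched
# span are returned, otherwise (None, None). Roughly:
#
#   guess_properties('Movie.2012.x264.mkv')
#   -> something like ({'videoCodec': 'h264'}, (start, end)) for 'x264'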
| gpl-3.0 |
spaceone/pyjs | examples/infohierarchy/public/services/jsonrpc/http.py | 24 | 1236 | """
Copyright (c) 2006 Jan-Klaas Kollhof
This file is part of jsonrpc.
jsonrpc is free software; you can redistribute it and/or modify
it under the terms of the GNU Lesser General Public License as published by
the Free Software Foundation; either version 2.1 of the License, or
(at your option) any later version.
This software is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Lesser General Public License for more details.
You should have received a copy of the GNU Lesser General Public License
along with this software; if not, write to the Free Software
Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
"""
from jsonrpc import SimpleServiceHandler
import urllib2
class HTTPClientConnectionHandler(SimpleServiceHandler):
def __init__(self, url, service,messageDelimiter=""):
self.url = url
SimpleServiceHandler.__init__(self, service,messageDelimiter=messageDelimiter)
def send(self, data):
req = urllib2.Request(self.url, data)
resp = urllib2.urlopen(req)
self.handlePartialData(resp.read())
| apache-2.0 |
msultan/mdtraj | mdtraj/utils/test.py | 9 | 6569 | ##############################################################################
# MDTraj: A Python Library for Loading, Saving, and Manipulating
# Molecular Dynamics Trajectories.
# Copyright 2012-2013 Stanford University and the Authors
#
# Authors: Robert McGibbon
# Contributors:
#
# MDTraj is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as
# published by the Free Software Foundation, either version 2.1
# of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with MDTraj. If not, see <http://www.gnu.org/licenses/>.
##############################################################################
"""Tests for some of the utilities
"""
##############################################################################
# imports
##############################################################################
from __future__ import print_function, division
import numpy as np
from mdtraj.utils import ensure_type
from mdtraj.utils.validation import TypeCastPerformanceWarning
from mdtraj.utils.unit import in_units_of, _str_to_unit
from mdtraj.utils import (import_, lengths_and_angles_to_box_vectors,
box_vectors_to_lengths_and_angles)
from mdtraj.testing import raises, eq
import warnings
from itertools import combinations
##############################################################################
# globals
##############################################################################
a = np.ones(10, dtype=np.float32)
b = np.ones((10,10), dtype=np.float64, order='F')
random = np.random.RandomState(0)
##############################################################################
# tests
##############################################################################
def test_unitcell_0():
result = lengths_and_angles_to_box_vectors(1, 1, 1, 90.0, 90.0, 90.0)
expected = (np.array([1, 0, 0]), np.array([ 0., 1., 0.]), np.array([ 0., 0., 1.]))
for (a, b) in zip(result, expected):
np.testing.assert_array_almost_equal(a, b)
def test_unitcell_1():
# try round-tripping some random lengths and angles through
# lengths_and_angles_to_box_vectors and box_vectors_to_lengths_and_angles,
# and make sure we get back to where we started
for _ in range(10):
arg = np.hstack((random.rand(3), random.uniform(70, 110, size=3)))
vectors = lengths_and_angles_to_box_vectors(*arg)
out = box_vectors_to_lengths_and_angles(*vectors)
np.testing.assert_array_almost_equal(arg, out)
def test_ensure_type_1():
ensure_type(a, np.float32, 1, '', length=10)
def test_ensure_type_2():
with warnings.catch_warnings(record=True) as w:
warnings.simplefilter("always")
val = ensure_type(a, np.float64, 1, '', length=10)
assert val.dtype == np.float64
assert a.dtype == np.float32 # a should not be changed
assert len(w) == 1
assert issubclass(w[-1].category, TypeCastPerformanceWarning)
def test_ensure_type_25():
with warnings.catch_warnings(record=True) as w:
warnings.simplefilter("always")
val = ensure_type(a, np.float64, 1, '', length=10, warn_on_cast=False)
assert val.dtype == np.float64
assert a.dtype == np.float32 # a should not be changed
assert len(w) == 0 # no warning since we set warn_on_cast to False
@raises(ValueError)
def test_ensure_type_3():
ensure_type(a, np.float32, 1, '', length=11)
def test_ensure_type_4():
ensure_type(None, np.float64, 1, '', length=11, can_be_none=True)
@raises(ValueError)
def test_ensure_type_5():
ensure_type(a, np.float32, 1, '', length=11, can_be_none=True)
def test_ensure_type_6():
val = ensure_type(b, np.float64, 2, '', shape=(10,10))
assert val.flags.c_contiguous is True
def test_ensure_type_7():
c = ensure_type(a, np.float32, ndim=2, name='', add_newaxis_on_deficient_ndim=True)
assert c.shape == (1, len(a))
def test_ensure_type_8():
c = ensure_type(np.zeros((5,10)), np.float32, ndim=2, name='', shape=(None, 10))
assert c.shape == (5, 10)
@raises(ValueError)
def test_ensure_type_9():
c = ensure_type(np.zeros((5,11)), np.float32, ndim=2, name='', shape=(None, 10))
@raises(ValueError)
def test_ensure_type_10():
c = ensure_type([0,1], np.float32, ndim=2, name='')
def test_ensure_type_11():
c = ensure_type(0, np.float32, ndim=1, name='', add_newaxis_on_deficient_ndim=True)
assert c.shape == (1,)
@raises(TypeError)
def test_ensure_type_12():
ensure_type(np.zeros((2,2)), np.float32, ndim=3)
@raises(ValueError)
def test_ensure_type_13():
ensure_type(np.zeros((2,2)), np.float32, ndim=2, name='', shape=(None, None, None))
def test_ensure_type_14():
# test that the generators work
value = ensure_type(combinations(range(10), 2), int, ndim=2, name='')
assert isinstance(value, np.ndarray)
ref = np.array(list(combinations(range(10), 2)))
eq(value, ref)
def test_ensure_type_15():
# test that lists
x = [1, 2, 3]
value = ensure_type(x, int, ndim=1, name='')
ref = np.array(x)
eq(value, ref)
@raises(ImportError)
def test_delay_import_fail_1():
import_('sdfsdfsfsfdsdf')
def test_delay_import():
import_('scipy.sparse')
def test_unit_0():
a = np.array([1.0])
b = in_units_of(a, 'nanometers', 'angstroms', inplace=False)
c = in_units_of(a, 'angstroms', 'nanometers', inplace=False)
eq(b, np.array([10.0]))
eq(c, np.array([0.1]))
assert a.ctypes.data != b.ctypes.data
assert a.ctypes.data != c.ctypes.data
def test_unit_1():
a = np.array([1.0])
b = in_units_of(a, 'nanometers', 'angstroms', inplace=True)
eq(a, np.array([10.0]))
eq(b, np.array([10.0]))
# a and b point to the same memory
assert a.ctypes.data == b.ctypes.data
def test_unit_2():
a = np.array([1.0])
a.flags['WRITEABLE'] = False
b = in_units_of(a, 'nanometers', 'angstroms', inplace=True)
eq(b, np.array([10.0]))
# a and b do not point to the same memory, since a isn't writeable
assert a.ctypes.data != b.ctypes.data
def test_unit_3():
eq(1000000.0, in_units_of(1, 'meter**2/second', 'nanometers**2/picosecond'))
| lgpl-2.1 |
jmathai/elodie | elodie/tests/media/media_test.py | 1 | 5268 | # Project imports
import os
import sys
import hashlib
import random
import re
import shutil
import string
import tempfile
import time
sys.path.insert(0, os.path.abspath(os.path.dirname(os.path.dirname(os.path.dirname(os.path.dirname(os.path.realpath(__file__)))))))
sys.path.insert(0, os.path.abspath(os.path.dirname(os.path.dirname(os.path.realpath(__file__)))))
import helper
from elodie.media.audio import Audio
from elodie.media.media import Media
from elodie.media.photo import Photo
from elodie.media.video import Video
os.environ['TZ'] = 'GMT'
setup_module = helper.setup_module
teardown_module = helper.teardown_module
def test_get_file_path():
media = Media(helper.get_file('plain.jpg'))
path = media.get_file_path()
assert 'plain.jpg' in path, path
def test_get_class_by_file_photo():
media = Media.get_class_by_file(helper.get_file('plain.jpg'), [Photo, Video])
assert media.__name__ == 'Photo'
def test_get_class_by_file_video():
media = Media.get_class_by_file(helper.get_file('video.mov'), [Photo, Video])
assert media.__name__ == 'Video'
def test_get_class_by_file_unsupported():
media = Media.get_class_by_file(helper.get_file('text.txt'), [Photo, Video])
assert media is None
def test_get_class_by_file_ds_store():
media = Media.get_class_by_file(helper.get_file('.DS_Store'),
[Photo, Video, Audio])
assert media is None
def test_get_class_by_file_invalid_type():
media = Media.get_class_by_file(None,
[Photo, Video, Audio])
assert media is None
media = Media.get_class_by_file(False,
[Photo, Video, Audio])
assert media is None
media = Media.get_class_by_file(True,
[Photo, Video, Audio])
assert media is None
def test_get_original_name():
temporary_folder, folder = helper.create_working_folder()
origin = '%s/%s' % (folder, 'with-original-name.jpg')
file = helper.get_file('with-original-name.jpg')
shutil.copyfile(file, origin)
media = Media.get_class_by_file(origin, [Photo])
original_name = media.get_original_name()
assert original_name == 'originalfilename.jpg', original_name
def test_get_original_name_invalid_file():
temporary_folder, folder = helper.create_working_folder()
origin = '%s/%s' % (folder, 'invalid.jpg')
file = helper.get_file('invalid.jpg')
shutil.copyfile(file, origin)
media = Media.get_class_by_file(origin, [Photo])
original_name = media.get_original_name()
assert original_name is None, original_name
def test_set_original_name_when_exists():
temporary_folder, folder = helper.create_working_folder()
origin = '%s/%s' % (folder, 'with-original-name.jpg')
file = helper.get_file('with-original-name.jpg')
shutil.copyfile(file, origin)
media = Media.get_class_by_file(origin, [Photo])
result = media.set_original_name()
assert result is None, result
def test_set_original_name_when_does_not_exist():
temporary_folder, folder = helper.create_working_folder()
origin = '%s/%s' % (folder, 'plain.jpg')
file = helper.get_file('plain.jpg')
shutil.copyfile(file, origin)
media = Media.get_class_by_file(origin, [Photo])
metadata_before = media.get_metadata()
result = media.set_original_name()
metadata_after = media.get_metadata()
assert metadata_before['original_name'] is None, metadata_before
assert metadata_after['original_name'] == 'plain.jpg', metadata_after
assert result is True, result
def test_set_original_name_with_arg():
temporary_folder, folder = helper.create_working_folder()
origin = '%s/%s' % (folder, 'plain.jpg')
file = helper.get_file('plain.jpg')
shutil.copyfile(file, origin)
new_name = helper.random_string(15)
media = Media.get_class_by_file(origin, [Photo])
metadata_before = media.get_metadata()
result = media.set_original_name(new_name)
metadata_after = media.get_metadata()
assert metadata_before['original_name'] is None, metadata_before
assert metadata_after['original_name'] == new_name, metadata_after
assert result is True, result
def test_set_original_name():
files = ['plain.jpg', 'audio.m4a', 'photo.nef', 'video.mov']
for file in files:
ext = os.path.splitext(file)[1]
temporary_folder, folder = helper.create_working_folder()
random_file_name = '%s%s' % (helper.random_string(10), ext)
origin = '%s/%s' % (folder, random_file_name)
file_path = helper.get_file(file)
if file_path is False:
file_path = helper.download_file(file, folder)
shutil.copyfile(file_path, origin)
media = Media.get_class_by_file(origin, [Audio, Media, Photo, Video])
metadata = media.get_metadata()
media.set_original_name()
metadata_updated = media.get_metadata()
shutil.rmtree(folder)
assert metadata['original_name'] is None, metadata['original_name']
assert metadata_updated['original_name'] == random_file_name, metadata_updated['original_name']
def is_valid():
media = Media()
assert not media.is_valid()
| apache-2.0 |
valkjsaaa/sl4a | python/src/Lib/encodings/mac_centeuro.py | 593 | 14358 | """ Python Character Mapping Codec mac_centeuro generated from 'MAPPINGS/VENDORS/APPLE/CENTEURO.TXT' with gencodec.py.
"""#"
import codecs
### Codec APIs
class Codec(codecs.Codec):
def encode(self,input,errors='strict'):
return codecs.charmap_encode(input,errors,encoding_table)
def decode(self,input,errors='strict'):
return codecs.charmap_decode(input,errors,decoding_table)
class IncrementalEncoder(codecs.IncrementalEncoder):
def encode(self, input, final=False):
return codecs.charmap_encode(input,self.errors,encoding_table)[0]
class IncrementalDecoder(codecs.IncrementalDecoder):
def decode(self, input, final=False):
return codecs.charmap_decode(input,self.errors,decoding_table)[0]
class StreamWriter(Codec,codecs.StreamWriter):
pass
class StreamReader(Codec,codecs.StreamReader):
pass
### encodings module API
def getregentry():
return codecs.CodecInfo(
name='mac-centeuro',
encode=Codec().encode,
decode=Codec().decode,
incrementalencoder=IncrementalEncoder,
incrementaldecoder=IncrementalDecoder,
streamreader=StreamReader,
streamwriter=StreamWriter,
)
### Decoding Table
decoding_table = (
u'\x00' # 0x00 -> CONTROL CHARACTER
u'\x01' # 0x01 -> CONTROL CHARACTER
u'\x02' # 0x02 -> CONTROL CHARACTER
u'\x03' # 0x03 -> CONTROL CHARACTER
u'\x04' # 0x04 -> CONTROL CHARACTER
u'\x05' # 0x05 -> CONTROL CHARACTER
u'\x06' # 0x06 -> CONTROL CHARACTER
u'\x07' # 0x07 -> CONTROL CHARACTER
u'\x08' # 0x08 -> CONTROL CHARACTER
u'\t' # 0x09 -> CONTROL CHARACTER
u'\n' # 0x0A -> CONTROL CHARACTER
u'\x0b' # 0x0B -> CONTROL CHARACTER
u'\x0c' # 0x0C -> CONTROL CHARACTER
u'\r' # 0x0D -> CONTROL CHARACTER
u'\x0e' # 0x0E -> CONTROL CHARACTER
u'\x0f' # 0x0F -> CONTROL CHARACTER
u'\x10' # 0x10 -> CONTROL CHARACTER
u'\x11' # 0x11 -> CONTROL CHARACTER
u'\x12' # 0x12 -> CONTROL CHARACTER
u'\x13' # 0x13 -> CONTROL CHARACTER
u'\x14' # 0x14 -> CONTROL CHARACTER
u'\x15' # 0x15 -> CONTROL CHARACTER
u'\x16' # 0x16 -> CONTROL CHARACTER
u'\x17' # 0x17 -> CONTROL CHARACTER
u'\x18' # 0x18 -> CONTROL CHARACTER
u'\x19' # 0x19 -> CONTROL CHARACTER
u'\x1a' # 0x1A -> CONTROL CHARACTER
u'\x1b' # 0x1B -> CONTROL CHARACTER
u'\x1c' # 0x1C -> CONTROL CHARACTER
u'\x1d' # 0x1D -> CONTROL CHARACTER
u'\x1e' # 0x1E -> CONTROL CHARACTER
u'\x1f' # 0x1F -> CONTROL CHARACTER
u' ' # 0x20 -> SPACE
u'!' # 0x21 -> EXCLAMATION MARK
u'"' # 0x22 -> QUOTATION MARK
u'#' # 0x23 -> NUMBER SIGN
u'$' # 0x24 -> DOLLAR SIGN
u'%' # 0x25 -> PERCENT SIGN
u'&' # 0x26 -> AMPERSAND
u"'" # 0x27 -> APOSTROPHE
u'(' # 0x28 -> LEFT PARENTHESIS
u')' # 0x29 -> RIGHT PARENTHESIS
u'*' # 0x2A -> ASTERISK
u'+' # 0x2B -> PLUS SIGN
u',' # 0x2C -> COMMA
u'-' # 0x2D -> HYPHEN-MINUS
u'.' # 0x2E -> FULL STOP
u'/' # 0x2F -> SOLIDUS
u'0' # 0x30 -> DIGIT ZERO
u'1' # 0x31 -> DIGIT ONE
u'2' # 0x32 -> DIGIT TWO
u'3' # 0x33 -> DIGIT THREE
u'4' # 0x34 -> DIGIT FOUR
u'5' # 0x35 -> DIGIT FIVE
u'6' # 0x36 -> DIGIT SIX
u'7' # 0x37 -> DIGIT SEVEN
u'8' # 0x38 -> DIGIT EIGHT
u'9' # 0x39 -> DIGIT NINE
u':' # 0x3A -> COLON
u';' # 0x3B -> SEMICOLON
u'<' # 0x3C -> LESS-THAN SIGN
u'=' # 0x3D -> EQUALS SIGN
u'>' # 0x3E -> GREATER-THAN SIGN
u'?' # 0x3F -> QUESTION MARK
u'@' # 0x40 -> COMMERCIAL AT
u'A' # 0x41 -> LATIN CAPITAL LETTER A
u'B' # 0x42 -> LATIN CAPITAL LETTER B
u'C' # 0x43 -> LATIN CAPITAL LETTER C
u'D' # 0x44 -> LATIN CAPITAL LETTER D
u'E' # 0x45 -> LATIN CAPITAL LETTER E
u'F' # 0x46 -> LATIN CAPITAL LETTER F
u'G' # 0x47 -> LATIN CAPITAL LETTER G
u'H' # 0x48 -> LATIN CAPITAL LETTER H
u'I' # 0x49 -> LATIN CAPITAL LETTER I
u'J' # 0x4A -> LATIN CAPITAL LETTER J
u'K' # 0x4B -> LATIN CAPITAL LETTER K
u'L' # 0x4C -> LATIN CAPITAL LETTER L
u'M' # 0x4D -> LATIN CAPITAL LETTER M
u'N' # 0x4E -> LATIN CAPITAL LETTER N
u'O' # 0x4F -> LATIN CAPITAL LETTER O
u'P' # 0x50 -> LATIN CAPITAL LETTER P
u'Q' # 0x51 -> LATIN CAPITAL LETTER Q
u'R' # 0x52 -> LATIN CAPITAL LETTER R
u'S' # 0x53 -> LATIN CAPITAL LETTER S
u'T' # 0x54 -> LATIN CAPITAL LETTER T
u'U' # 0x55 -> LATIN CAPITAL LETTER U
u'V' # 0x56 -> LATIN CAPITAL LETTER V
u'W' # 0x57 -> LATIN CAPITAL LETTER W
u'X' # 0x58 -> LATIN CAPITAL LETTER X
u'Y' # 0x59 -> LATIN CAPITAL LETTER Y
u'Z' # 0x5A -> LATIN CAPITAL LETTER Z
u'[' # 0x5B -> LEFT SQUARE BRACKET
u'\\' # 0x5C -> REVERSE SOLIDUS
u']' # 0x5D -> RIGHT SQUARE BRACKET
u'^' # 0x5E -> CIRCUMFLEX ACCENT
u'_' # 0x5F -> LOW LINE
u'`' # 0x60 -> GRAVE ACCENT
u'a' # 0x61 -> LATIN SMALL LETTER A
u'b' # 0x62 -> LATIN SMALL LETTER B
u'c' # 0x63 -> LATIN SMALL LETTER C
u'd' # 0x64 -> LATIN SMALL LETTER D
u'e' # 0x65 -> LATIN SMALL LETTER E
u'f' # 0x66 -> LATIN SMALL LETTER F
u'g' # 0x67 -> LATIN SMALL LETTER G
u'h' # 0x68 -> LATIN SMALL LETTER H
u'i' # 0x69 -> LATIN SMALL LETTER I
u'j' # 0x6A -> LATIN SMALL LETTER J
u'k' # 0x6B -> LATIN SMALL LETTER K
u'l' # 0x6C -> LATIN SMALL LETTER L
u'm' # 0x6D -> LATIN SMALL LETTER M
u'n' # 0x6E -> LATIN SMALL LETTER N
u'o' # 0x6F -> LATIN SMALL LETTER O
u'p' # 0x70 -> LATIN SMALL LETTER P
u'q' # 0x71 -> LATIN SMALL LETTER Q
u'r' # 0x72 -> LATIN SMALL LETTER R
u's' # 0x73 -> LATIN SMALL LETTER S
u't' # 0x74 -> LATIN SMALL LETTER T
u'u' # 0x75 -> LATIN SMALL LETTER U
u'v' # 0x76 -> LATIN SMALL LETTER V
u'w' # 0x77 -> LATIN SMALL LETTER W
u'x' # 0x78 -> LATIN SMALL LETTER X
u'y' # 0x79 -> LATIN SMALL LETTER Y
u'z' # 0x7A -> LATIN SMALL LETTER Z
u'{' # 0x7B -> LEFT CURLY BRACKET
u'|' # 0x7C -> VERTICAL LINE
u'}' # 0x7D -> RIGHT CURLY BRACKET
u'~' # 0x7E -> TILDE
u'\x7f' # 0x7F -> CONTROL CHARACTER
u'\xc4' # 0x80 -> LATIN CAPITAL LETTER A WITH DIAERESIS
u'\u0100' # 0x81 -> LATIN CAPITAL LETTER A WITH MACRON
u'\u0101' # 0x82 -> LATIN SMALL LETTER A WITH MACRON
u'\xc9' # 0x83 -> LATIN CAPITAL LETTER E WITH ACUTE
u'\u0104' # 0x84 -> LATIN CAPITAL LETTER A WITH OGONEK
u'\xd6' # 0x85 -> LATIN CAPITAL LETTER O WITH DIAERESIS
u'\xdc' # 0x86 -> LATIN CAPITAL LETTER U WITH DIAERESIS
u'\xe1' # 0x87 -> LATIN SMALL LETTER A WITH ACUTE
u'\u0105' # 0x88 -> LATIN SMALL LETTER A WITH OGONEK
u'\u010c' # 0x89 -> LATIN CAPITAL LETTER C WITH CARON
u'\xe4' # 0x8A -> LATIN SMALL LETTER A WITH DIAERESIS
u'\u010d' # 0x8B -> LATIN SMALL LETTER C WITH CARON
u'\u0106' # 0x8C -> LATIN CAPITAL LETTER C WITH ACUTE
u'\u0107' # 0x8D -> LATIN SMALL LETTER C WITH ACUTE
u'\xe9' # 0x8E -> LATIN SMALL LETTER E WITH ACUTE
u'\u0179' # 0x8F -> LATIN CAPITAL LETTER Z WITH ACUTE
u'\u017a' # 0x90 -> LATIN SMALL LETTER Z WITH ACUTE
u'\u010e' # 0x91 -> LATIN CAPITAL LETTER D WITH CARON
u'\xed' # 0x92 -> LATIN SMALL LETTER I WITH ACUTE
u'\u010f' # 0x93 -> LATIN SMALL LETTER D WITH CARON
u'\u0112' # 0x94 -> LATIN CAPITAL LETTER E WITH MACRON
u'\u0113' # 0x95 -> LATIN SMALL LETTER E WITH MACRON
u'\u0116' # 0x96 -> LATIN CAPITAL LETTER E WITH DOT ABOVE
u'\xf3' # 0x97 -> LATIN SMALL LETTER O WITH ACUTE
u'\u0117' # 0x98 -> LATIN SMALL LETTER E WITH DOT ABOVE
u'\xf4' # 0x99 -> LATIN SMALL LETTER O WITH CIRCUMFLEX
u'\xf6' # 0x9A -> LATIN SMALL LETTER O WITH DIAERESIS
u'\xf5' # 0x9B -> LATIN SMALL LETTER O WITH TILDE
u'\xfa' # 0x9C -> LATIN SMALL LETTER U WITH ACUTE
u'\u011a' # 0x9D -> LATIN CAPITAL LETTER E WITH CARON
u'\u011b' # 0x9E -> LATIN SMALL LETTER E WITH CARON
u'\xfc' # 0x9F -> LATIN SMALL LETTER U WITH DIAERESIS
u'\u2020' # 0xA0 -> DAGGER
u'\xb0' # 0xA1 -> DEGREE SIGN
u'\u0118' # 0xA2 -> LATIN CAPITAL LETTER E WITH OGONEK
u'\xa3' # 0xA3 -> POUND SIGN
u'\xa7' # 0xA4 -> SECTION SIGN
u'\u2022' # 0xA5 -> BULLET
u'\xb6' # 0xA6 -> PILCROW SIGN
u'\xdf' # 0xA7 -> LATIN SMALL LETTER SHARP S
u'\xae' # 0xA8 -> REGISTERED SIGN
u'\xa9' # 0xA9 -> COPYRIGHT SIGN
u'\u2122' # 0xAA -> TRADE MARK SIGN
u'\u0119' # 0xAB -> LATIN SMALL LETTER E WITH OGONEK
u'\xa8' # 0xAC -> DIAERESIS
u'\u2260' # 0xAD -> NOT EQUAL TO
u'\u0123' # 0xAE -> LATIN SMALL LETTER G WITH CEDILLA
u'\u012e' # 0xAF -> LATIN CAPITAL LETTER I WITH OGONEK
u'\u012f' # 0xB0 -> LATIN SMALL LETTER I WITH OGONEK
u'\u012a' # 0xB1 -> LATIN CAPITAL LETTER I WITH MACRON
u'\u2264' # 0xB2 -> LESS-THAN OR EQUAL TO
u'\u2265' # 0xB3 -> GREATER-THAN OR EQUAL TO
u'\u012b' # 0xB4 -> LATIN SMALL LETTER I WITH MACRON
u'\u0136' # 0xB5 -> LATIN CAPITAL LETTER K WITH CEDILLA
u'\u2202' # 0xB6 -> PARTIAL DIFFERENTIAL
u'\u2211' # 0xB7 -> N-ARY SUMMATION
u'\u0142' # 0xB8 -> LATIN SMALL LETTER L WITH STROKE
u'\u013b' # 0xB9 -> LATIN CAPITAL LETTER L WITH CEDILLA
u'\u013c' # 0xBA -> LATIN SMALL LETTER L WITH CEDILLA
u'\u013d' # 0xBB -> LATIN CAPITAL LETTER L WITH CARON
u'\u013e' # 0xBC -> LATIN SMALL LETTER L WITH CARON
u'\u0139' # 0xBD -> LATIN CAPITAL LETTER L WITH ACUTE
u'\u013a' # 0xBE -> LATIN SMALL LETTER L WITH ACUTE
u'\u0145' # 0xBF -> LATIN CAPITAL LETTER N WITH CEDILLA
u'\u0146' # 0xC0 -> LATIN SMALL LETTER N WITH CEDILLA
u'\u0143' # 0xC1 -> LATIN CAPITAL LETTER N WITH ACUTE
u'\xac' # 0xC2 -> NOT SIGN
u'\u221a' # 0xC3 -> SQUARE ROOT
u'\u0144' # 0xC4 -> LATIN SMALL LETTER N WITH ACUTE
u'\u0147' # 0xC5 -> LATIN CAPITAL LETTER N WITH CARON
u'\u2206' # 0xC6 -> INCREMENT
u'\xab' # 0xC7 -> LEFT-POINTING DOUBLE ANGLE QUOTATION MARK
u'\xbb' # 0xC8 -> RIGHT-POINTING DOUBLE ANGLE QUOTATION MARK
u'\u2026' # 0xC9 -> HORIZONTAL ELLIPSIS
u'\xa0' # 0xCA -> NO-BREAK SPACE
u'\u0148' # 0xCB -> LATIN SMALL LETTER N WITH CARON
u'\u0150' # 0xCC -> LATIN CAPITAL LETTER O WITH DOUBLE ACUTE
u'\xd5' # 0xCD -> LATIN CAPITAL LETTER O WITH TILDE
u'\u0151' # 0xCE -> LATIN SMALL LETTER O WITH DOUBLE ACUTE
u'\u014c' # 0xCF -> LATIN CAPITAL LETTER O WITH MACRON
u'\u2013' # 0xD0 -> EN DASH
u'\u2014' # 0xD1 -> EM DASH
u'\u201c' # 0xD2 -> LEFT DOUBLE QUOTATION MARK
u'\u201d' # 0xD3 -> RIGHT DOUBLE QUOTATION MARK
u'\u2018' # 0xD4 -> LEFT SINGLE QUOTATION MARK
u'\u2019' # 0xD5 -> RIGHT SINGLE QUOTATION MARK
u'\xf7' # 0xD6 -> DIVISION SIGN
u'\u25ca' # 0xD7 -> LOZENGE
u'\u014d' # 0xD8 -> LATIN SMALL LETTER O WITH MACRON
u'\u0154' # 0xD9 -> LATIN CAPITAL LETTER R WITH ACUTE
u'\u0155' # 0xDA -> LATIN SMALL LETTER R WITH ACUTE
u'\u0158' # 0xDB -> LATIN CAPITAL LETTER R WITH CARON
u'\u2039' # 0xDC -> SINGLE LEFT-POINTING ANGLE QUOTATION MARK
u'\u203a' # 0xDD -> SINGLE RIGHT-POINTING ANGLE QUOTATION MARK
u'\u0159' # 0xDE -> LATIN SMALL LETTER R WITH CARON
u'\u0156' # 0xDF -> LATIN CAPITAL LETTER R WITH CEDILLA
u'\u0157' # 0xE0 -> LATIN SMALL LETTER R WITH CEDILLA
u'\u0160' # 0xE1 -> LATIN CAPITAL LETTER S WITH CARON
u'\u201a' # 0xE2 -> SINGLE LOW-9 QUOTATION MARK
u'\u201e' # 0xE3 -> DOUBLE LOW-9 QUOTATION MARK
u'\u0161' # 0xE4 -> LATIN SMALL LETTER S WITH CARON
u'\u015a' # 0xE5 -> LATIN CAPITAL LETTER S WITH ACUTE
u'\u015b' # 0xE6 -> LATIN SMALL LETTER S WITH ACUTE
u'\xc1' # 0xE7 -> LATIN CAPITAL LETTER A WITH ACUTE
u'\u0164' # 0xE8 -> LATIN CAPITAL LETTER T WITH CARON
u'\u0165' # 0xE9 -> LATIN SMALL LETTER T WITH CARON
u'\xcd' # 0xEA -> LATIN CAPITAL LETTER I WITH ACUTE
u'\u017d' # 0xEB -> LATIN CAPITAL LETTER Z WITH CARON
u'\u017e' # 0xEC -> LATIN SMALL LETTER Z WITH CARON
u'\u016a' # 0xED -> LATIN CAPITAL LETTER U WITH MACRON
u'\xd3' # 0xEE -> LATIN CAPITAL LETTER O WITH ACUTE
u'\xd4' # 0xEF -> LATIN CAPITAL LETTER O WITH CIRCUMFLEX
u'\u016b' # 0xF0 -> LATIN SMALL LETTER U WITH MACRON
u'\u016e' # 0xF1 -> LATIN CAPITAL LETTER U WITH RING ABOVE
u'\xda' # 0xF2 -> LATIN CAPITAL LETTER U WITH ACUTE
u'\u016f' # 0xF3 -> LATIN SMALL LETTER U WITH RING ABOVE
u'\u0170' # 0xF4 -> LATIN CAPITAL LETTER U WITH DOUBLE ACUTE
u'\u0171' # 0xF5 -> LATIN SMALL LETTER U WITH DOUBLE ACUTE
u'\u0172' # 0xF6 -> LATIN CAPITAL LETTER U WITH OGONEK
u'\u0173' # 0xF7 -> LATIN SMALL LETTER U WITH OGONEK
u'\xdd' # 0xF8 -> LATIN CAPITAL LETTER Y WITH ACUTE
u'\xfd' # 0xF9 -> LATIN SMALL LETTER Y WITH ACUTE
u'\u0137' # 0xFA -> LATIN SMALL LETTER K WITH CEDILLA
u'\u017b' # 0xFB -> LATIN CAPITAL LETTER Z WITH DOT ABOVE
u'\u0141' # 0xFC -> LATIN CAPITAL LETTER L WITH STROKE
u'\u017c' # 0xFD -> LATIN SMALL LETTER Z WITH DOT ABOVE
u'\u0122' # 0xFE -> LATIN CAPITAL LETTER G WITH CEDILLA
u'\u02c7' # 0xFF -> CARON
)
### Encoding table
encoding_table=codecs.charmap_build(decoding_table)
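# Editor's round-trip sketch (not part of the generated codec): per the
# tables above, 0x84 maps to U+0104 LATIN CAPITAL LETTER A WITH OGONEK.
if __name__ == '__main__':
    codec = getregentry()
    assert codec.decode('\x84')[0] == u'\u0104'
    assert codec.encode(u'\u0104')[0] == '\x84'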
| apache-2.0 |
adamrp/qiime | qiime/nmds.py | 15 | 1417 | #!/usr/bin/env python
from __future__ import division
import numpy
import os.path
import cogent.cluster.nmds as nmds_module
from qiime.format import format_nmds_coords
from qiime.parse import parse_distmat
__author__ = "Justin Kuzynski"
__copyright__ = "Copyright 2011, The QIIME Project"
__credits__ = ["Justin Kuczynski"]
__license__ = "GPL"
__version__ = "1.9.1-dev"
__maintainer__ = "Justin Kuczynski"
__email__ = "[email protected]"
def nmds(file, dimensions=2):
samples, distmtx = parse_distmat(file)
nmds_res = nmds_module.NMDS(distmtx, verbosity=0, dimension=dimensions)
pts = nmds_res.getPoints()
stress = nmds_res.getStress()
return format_nmds_coords(samples, pts, stress)
def multiple_file_nmds(input_dir, output_dir, dimensions=2):
"""perform PCoAs on all distance matrices in the input_dir
"""
if not os.path.exists(output_dir):
os.makedirs(output_dir)
file_names = os.listdir(input_dir)
file_names = [fname for fname in file_names if not fname.startswith('.')]
for fname in file_names:
base_fname, ext = os.path.splitext(fname)
infile = os.path.join(input_dir, fname)
lines = open(infile, 'U')
nmds_res_string = nmds(lines, dimensions)
outfile = os.path.join(output_dir, 'nmds_' + base_fname + '.txt')
outfile = open(outfile, 'w')
outfile.write(nmds_res_string)
outfile.close()
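# Editor's usage sketch (paths are placeholders): run two-dimensional NMDS on
# a single distance matrix, or on every matrix in a directory.
#
#   print nmds(open('unweighted_unifrac_dm.txt', 'U'), dimensions=2)
#   multiple_file_nmds('beta_div/', 'nmds_out/', dimensions=2)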
| gpl-2.0 |
andrebellafronte/stoq | stoqlib/gui/test/test_qualitytesteditor.py | 3 | 1183 | # -*- coding: utf-8 -*-
# vi:si:et:sw=4:sts=4:ts=4
##
## Copyright (C) 2012 Async Open Source <http://www.async.com.br>
## All rights reserved
##
## This program is free software; you can redistribute it and/or modify
## it under the terms of the GNU General Public License as published by
## the Free Software Foundation; either version 2 of the License, or
## (at your option) any later version.
##
## This program is distributed in the hope that it will be useful,
## but WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
## GNU General Public License for more details.
##
## You should have received a copy of the GNU General Public License
## along with this program; if not, write to the Free Software
## Foundation, Inc., or visit: http://www.gnu.org/.
##
## Author(s): Stoq Team <[email protected]>
##
from stoqlib.gui.editors.producteditor import QualityTestEditor
from stoqlib.gui.test.uitestutils import GUITest
class TestQualityTestEditor(GUITest):
def test_create(self):
editor = QualityTestEditor(self.store)
self.check_editor(editor, 'editor-qualitytest-create')
| gpl-2.0 |
leodavesne/leodavesne.net | leodavesne/settings.py | 1 | 3452 | """
Django settings for leodavesne project.
Generated by 'django-admin startproject' using Django 3.0.1.
For more information on this file, see
https://docs.djangoproject.com/en/3.0/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/3.0/ref/settings/
"""
import os

import django_heroku  # needed by the django_heroku.settings() call below
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/3.0/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = os.environ.get('SECRET_KEY')
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = bool(int(os.environ.get('DEBUG', 0)))
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'leodavesne.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'leodavesne.wsgi.application'
# Database
# https://docs.djangoproject.com/en/3.0/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# Password validation
# https://docs.djangoproject.com/en/3.0/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/3.0/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/3.0/howto/static-files/
STATIC_URL = '/static/'
LOGGING = {
'version': 1,
'disable_existing_loggers': False,
'handlers': {
'console': {
'level': 'INFO',
'class': 'logging.StreamHandler',
},
},
'root': {
'level': 'WARNING',
'handlers': ['console'],
},
}
django_heroku.settings(
locals(), logging=False
)
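# Editor's note - example environment for local development (values are
# placeholders):
#
#   export SECRET_KEY='dev-only-secret'
#   export DEBUG=1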
| mit |
MobinRanjbar/hue | desktop/core/ext-py/ndg_httpsclient-0.4.0/ndg/httpsclient/subj_alt_name.py | 79 | 6131 | """NDG HTTPS Client package
Use pyasn1 to provide support for parsing ASN.1 formatted subjectAltName
content for SSL peer verification. Code based on:
http://stackoverflow.com/questions/5519958/how-do-i-parse-subjectaltname-extension-data-using-pyasn1
"""
__author__ = "P J Kershaw"
__date__ = "01/02/12"
__copyright__ = "(C) 2012 Science and Technology Facilities Council"
__license__ = "BSD - see LICENSE file in top-level directory"
__contact__ = "[email protected]"
__revision__ = '$Id$'
try:
from pyasn1.type import univ, constraint, char, namedtype, tag
except ImportError as e:
import_error_msg = ('Error importing pyasn1, subjectAltName check for SSL '
'peer verification will be disabled. Import error '
'is: %s' % e)
import warnings
warnings.warn(import_error_msg)
class Pyasn1ImportError(ImportError):
"Raise for pyasn1 import error"
raise Pyasn1ImportError(import_error_msg)
MAX = 64
class DirectoryString(univ.Choice):
"""ASN.1 Directory string class"""
componentType = namedtype.NamedTypes(
namedtype.NamedType(
'teletexString', char.TeletexString().subtype(
subtypeSpec=constraint.ValueSizeConstraint(1, MAX))),
namedtype.NamedType(
'printableString', char.PrintableString().subtype(
subtypeSpec=constraint.ValueSizeConstraint(1, MAX))),
namedtype.NamedType(
'universalString', char.UniversalString().subtype(
subtypeSpec=constraint.ValueSizeConstraint(1, MAX))),
namedtype.NamedType(
'utf8String', char.UTF8String().subtype(
subtypeSpec=constraint.ValueSizeConstraint(1, MAX))),
namedtype.NamedType(
'bmpString', char.BMPString().subtype(
subtypeSpec=constraint.ValueSizeConstraint(1, MAX))),
namedtype.NamedType(
'ia5String', char.IA5String().subtype(
subtypeSpec=constraint.ValueSizeConstraint(1, MAX))),
)
class AttributeValue(DirectoryString):
"""ASN.1 Attribute value"""
class AttributeType(univ.ObjectIdentifier):
"""ASN.1 Attribute type"""
class AttributeTypeAndValue(univ.Sequence):
"""ASN.1 Attribute type and value class"""
componentType = namedtype.NamedTypes(
namedtype.NamedType('type', AttributeType()),
namedtype.NamedType('value', AttributeValue()),
)
class RelativeDistinguishedName(univ.SetOf):
'''ASN.1 Realtive distinguished name'''
componentType = AttributeTypeAndValue()
class RDNSequence(univ.SequenceOf):
'''ASN.1 RDN sequence class'''
componentType = RelativeDistinguishedName()
class Name(univ.Choice):
'''ASN.1 name class'''
componentType = namedtype.NamedTypes(
namedtype.NamedType('', RDNSequence()),
)
class Extension(univ.Sequence):
'''ASN.1 extension class'''
componentType = namedtype.NamedTypes(
namedtype.NamedType('extnID', univ.ObjectIdentifier()),
namedtype.DefaultedNamedType('critical', univ.Boolean('False')),
namedtype.NamedType('extnValue', univ.OctetString()),
)
class Extensions(univ.SequenceOf):
'''ASN.1 extensions class'''
componentType = Extension()
sizeSpec = univ.SequenceOf.sizeSpec + constraint.ValueSizeConstraint(1, MAX)
class AnotherName(univ.Sequence):
componentType = namedtype.NamedTypes(
namedtype.NamedType('type-id', univ.ObjectIdentifier()),
namedtype.NamedType('value', univ.Any().subtype(
explicitTag=tag.Tag(tag.tagClassContext,
tag.tagFormatSimple, 0)))
)
class GeneralName(univ.Choice):
'''ASN.1 configuration for X.509 certificate subjectAltNames fields'''
componentType = namedtype.NamedTypes(
namedtype.NamedType('otherName', AnotherName().subtype(
implicitTag=tag.Tag(tag.tagClassContext,
tag.tagFormatSimple, 0))),
namedtype.NamedType('rfc822Name', char.IA5String().subtype(
implicitTag=tag.Tag(tag.tagClassContext,
tag.tagFormatSimple, 1))),
namedtype.NamedType('dNSName', char.IA5String().subtype(
implicitTag=tag.Tag(tag.tagClassContext,
tag.tagFormatSimple, 2))),
# namedtype.NamedType('x400Address', ORAddress().subtype(
# implicitTag=tag.Tag(tag.tagClassContext,
# tag.tagFormatSimple, 3))),
namedtype.NamedType('directoryName', Name().subtype(
implicitTag=tag.Tag(tag.tagClassContext,
tag.tagFormatSimple, 4))),
# namedtype.NamedType('ediPartyName', EDIPartyName().subtype(
# implicitTag=tag.Tag(tag.tagClassContext,
# tag.tagFormatSimple, 5))),
namedtype.NamedType('uniformResourceIdentifier', char.IA5String().subtype(
implicitTag=tag.Tag(tag.tagClassContext,
tag.tagFormatSimple, 6))),
namedtype.NamedType('iPAddress', univ.OctetString().subtype(
implicitTag=tag.Tag(tag.tagClassContext,
tag.tagFormatSimple, 7))),
namedtype.NamedType('registeredID', univ.ObjectIdentifier().subtype(
implicitTag=tag.Tag(tag.tagClassContext,
tag.tagFormatSimple, 8))),
)
class GeneralNames(univ.SequenceOf):
'''Sequence of names for ASN.1 subjectAltNames settings'''
componentType = GeneralName()
sizeSpec = univ.SequenceOf.sizeSpec + constraint.ValueSizeConstraint(1, MAX)
class SubjectAltName(GeneralNames):
'''ASN.1 implementation for subjectAltNames support'''
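# Editor's sketch (not part of the original module): decoding a DER-encoded
# subjectAltName extension value with pyasn1; `ext_der` is a hypothetical
# byte string taken from a certificate extension.
#
#   from pyasn1.codec.der import decoder
#   general_names = decoder.decode(ext_der, asn1Spec=SubjectAltName())[0]
#   for name in general_names:
#       print(name.getName(), name.getComponent())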
| apache-2.0 |
stefan-jonasson/home-assistant | homeassistant/components/device_tracker/keenetic_ndms2.py | 6 | 3728 | """
Support for Zyxel Keenetic NDMS2 based routers.
For more details about this platform, please refer to the documentation at
https://home-assistant.io/components/device_tracker.keenetic_ndms2/
"""
import logging
from collections import namedtuple
import requests
import voluptuous as vol
import homeassistant.helpers.config_validation as cv
from homeassistant.components.device_tracker import (
DOMAIN, PLATFORM_SCHEMA, DeviceScanner)
from homeassistant.const import (
CONF_HOST, CONF_PASSWORD, CONF_USERNAME
)
_LOGGER = logging.getLogger(__name__)
# Interface name to track devices for. Most likely one will not need to
# change it from the default 'Home'. This setting avoids tracking guest
# Wi-Fi clients and the router itself.
CONF_INTERFACE = 'interface'
DEFAULT_INTERFACE = 'Home'
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({
vol.Required(CONF_HOST): cv.string,
vol.Required(CONF_USERNAME): cv.string,
vol.Required(CONF_PASSWORD): cv.string,
vol.Required(CONF_INTERFACE, default=DEFAULT_INTERFACE): cv.string,
})
def get_scanner(_hass, config):
"""Validate the configuration and return a Nmap scanner."""
scanner = KeeneticNDMS2DeviceScanner(config[DOMAIN])
return scanner if scanner.success_init else None
Device = namedtuple('Device', ['mac', 'name'])
class KeeneticNDMS2DeviceScanner(DeviceScanner):
"""This class scans for devices using keenetic NDMS2 web interface."""
def __init__(self, config):
"""Initialize the scanner."""
self.last_results = []
self._url = 'http://%s/rci/show/ip/arp' % config[CONF_HOST]
self._interface = config[CONF_INTERFACE]
self._username = config.get(CONF_USERNAME)
self._password = config.get(CONF_PASSWORD)
self.success_init = self._update_info()
_LOGGER.info("Scanner initialized")
def scan_devices(self):
"""Scan for new devices and return a list with found device IDs."""
self._update_info()
return [device.mac for device in self.last_results]
def get_device_name(self, mac):
"""Return the name of the given device or None if we don't know."""
filter_named = [device.name for device in self.last_results
if device.mac == mac]
if filter_named:
return filter_named[0]
return None
def _update_info(self):
"""Get ARP from keenetic router."""
_LOGGER.info("Fetching...")
last_results = []
# doing a request
try:
from requests.auth import HTTPDigestAuth
res = requests.get(self._url, timeout=10, auth=HTTPDigestAuth(
self._username, self._password
))
except requests.exceptions.Timeout:
_LOGGER.error(
"Connection to the router timed out at URL %s", self._url)
return False
if res.status_code != 200:
_LOGGER.error(
"Connection failed with http code %s", res.status_code)
return False
try:
result = res.json()
except ValueError:
# If json decoder could not parse the response
_LOGGER.error("Failed to parse response from router")
return False
# parsing response
for info in result:
if info.get('interface') != self._interface:
continue
mac = info.get('mac')
name = info.get('name')
# No address = no item :)
if mac is None:
continue
last_results.append(Device(mac.upper(), name))
self.last_results = last_results
_LOGGER.info("Request successful")
return True
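# Editor's note - illustrative configuration.yaml snippet for this platform
# (host and credentials are placeholders):
#
#   device_tracker:
#     - platform: keenetic_ndms2
#       host: 192.168.1.1
#       username: admin
#       password: YOUR_PASSWORD
#       interface: Home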
| mit |
ICromwell/OCaSimTest | 10 - Dentist Screening Appointment.py | 1 | 1935 | # -*- coding: utf-8 -*-
"""
A screening process wherein a person returns for regular checkups at a dentist
"""
import random

import simpy

# NOTE: `entity` is provided by the surrounding simulation framework; this
# fragment only attaches attributes and processes to it.

# Time interval between appointments
appInt = 6*30
# Create a counter for the number of dental appointments, if one doesn't already exist
if getattr(entity, "count_DentAppt", 0) == 0:
entity.count_DentAppt = 0
def devOPL(entity, env):
while True:
t_OPL = random.normalvariate(500, 100)
yield env.timeout(t_OPL)
#entity.OPL.append(env.now)
print(env.now, 'Developed OPL')
entity.OPLStatus = 1
entity.time_OPL = env.now
env.exit()
def appointment_process(entity, env):
while True:
if entity.OPLStatus ==0:
yield env.timeout(appInt)
print(env.now, 'Everything looks fine, see you in %2.0f days'%appInt)
elif entity.OPLStatus == 1:
print(env.now, 'Found something at %2.0f'%env.now)
entity.time_detectOPL = env.now # The time at which an OPL is detected
entity.allTime = entity.allTime + entity.time_detectOPL # Update total simulation runtime
entity.currentState = "Detected OPL, undergoing evaluation" # Update state
entity.stateNum = 1.1
env.exit()
entity.count_DentAppt = entity.count_DentAppt +1 # Add running count to the number of dental appointments
# Run simulation
env = simpy.Environment()
env.process(devOPL(entity, env))
env.process(appointment_process(entity, env))
env.run()
# VARIABLES CREATED IN THIS STEP:
# count_DentAppt - a counter for how many dentist's appointments an entity has had
# time_OPL - the time that the entity develops an OPL
# time_detectOPL - the time that an OPL is detected by a dentist
# OPLStatus - a flag for whether or not an entity has an OPL
| gpl-3.0 |
hwu25/AppPkg | Applications/Python/Python-2.7.2/Lib/hmac.py | 66 | 4664 | """HMAC (Keyed-Hashing for Message Authentication) Python module.
Implements the HMAC algorithm as described by RFC 2104.
"""
import warnings as _warnings
trans_5C = "".join ([chr (x ^ 0x5C) for x in xrange(256)])
trans_36 = "".join ([chr (x ^ 0x36) for x in xrange(256)])
# The size of the digests returned by HMAC depends on the underlying
# hashing module used. Use digest_size from the instance of HMAC instead.
digest_size = None
# A unique object passed by HMAC.copy() to the HMAC constructor, in order
# that the latter return very quickly. HMAC("") in contrast is quite
# expensive.
_secret_backdoor_key = []
class HMAC:
"""RFC 2104 HMAC class. Also complies with RFC 4231.
This supports the API for Cryptographic Hash Functions (PEP 247).
"""
blocksize = 64 # 512-bit HMAC; can be changed in subclasses.
def __init__(self, key, msg = None, digestmod = None):
"""Create a new HMAC object.
key: key for the keyed hash object.
msg: Initial input for the hash, if provided.
digestmod: A module supporting PEP 247. *OR*
A hashlib constructor returning a new hash object.
Defaults to hashlib.md5.
"""
if key is _secret_backdoor_key: # cheap
return
if digestmod is None:
import hashlib
digestmod = hashlib.md5
if hasattr(digestmod, '__call__'):
self.digest_cons = digestmod
else:
self.digest_cons = lambda d='': digestmod.new(d)
self.outer = self.digest_cons()
self.inner = self.digest_cons()
self.digest_size = self.inner.digest_size
if hasattr(self.inner, 'block_size'):
blocksize = self.inner.block_size
if blocksize < 16:
# Very low blocksize, most likely a legacy value like
# Lib/sha.py and Lib/md5.py have.
_warnings.warn('block_size of %d seems too small; using our '
'default of %d.' % (blocksize, self.blocksize),
RuntimeWarning, 2)
blocksize = self.blocksize
else:
_warnings.warn('No block_size attribute on given digest object; '
'Assuming %d.' % (self.blocksize),
RuntimeWarning, 2)
blocksize = self.blocksize
if len(key) > blocksize:
key = self.digest_cons(key).digest()
key = key + chr(0) * (blocksize - len(key))
self.outer.update(key.translate(trans_5C))
self.inner.update(key.translate(trans_36))
if msg is not None:
self.update(msg)
## def clear(self):
## raise NotImplementedError, "clear() method not available in HMAC."
def update(self, msg):
"""Update this hashing object with the string msg.
"""
self.inner.update(msg)
def copy(self):
"""Return a separate copy of this hashing object.
An update to this copy won't affect the original object.
"""
other = self.__class__(_secret_backdoor_key)
other.digest_cons = self.digest_cons
other.digest_size = self.digest_size
other.inner = self.inner.copy()
other.outer = self.outer.copy()
return other
def _current(self):
"""Return a hash object for the current state.
To be used only internally with digest() and hexdigest().
"""
h = self.outer.copy()
h.update(self.inner.digest())
return h
def digest(self):
"""Return the hash value of this hashing object.
This returns a string containing 8-bit data. The object is
not altered in any way by this function; you can continue
updating the object after calling this function.
"""
h = self._current()
return h.digest()
def hexdigest(self):
"""Like digest(), but returns a string of hexadecimal digits instead.
"""
h = self._current()
return h.hexdigest()
def new(key, msg = None, digestmod = None):
"""Create a new hashing object and return it.
key: The starting key for the hash.
msg: if available, will immediately be hashed into the object's starting
state.
You can now feed arbitrary strings into the object using its update()
method, and can ask for the hash value at any time by calling its digest()
method.
"""
return HMAC(key, msg, digestmod)
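# Editor's usage sketch: a keyed SHA-1 digest built incrementally (key and
# message are arbitrary examples).
if __name__ == '__main__':
    import hashlib
    h = new("key", "message", hashlib.sha1)
    h.update(" continued")
    print h.hexdigest()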
| bsd-2-clause |
fxsjy/pybrain | examples/rl/environments/cartpole/cart_all.py | 30 | 2089 | #!/usr/bin/env python
#########################################################################
# Reinforcement Learning with several optimization algorithms
# on the CartPoleEnvironment
#
# Requirements: pylab (for plotting only). If not available, comment the
# last 3 lines out
#########################################################################
__author__ = "Thomas Rueckstiess, Frank Sehnke"
from pybrain.tools.example_tools import ExTools
from pybrain.tools.shortcuts import buildNetwork
from pybrain.rl.environments.cartpole import CartPoleEnvironment, BalanceTask
from pybrain.rl.agents import OptimizationAgent
from pybrain.optimization import PGPE #@UnusedImport
from pybrain.optimization import ExactNES #@UnusedImport
from pybrain.optimization import FEM #@UnusedImport
from pybrain.optimization import CMAES #@UnusedImport
from pybrain.rl.experiments import EpisodicExperiment
batch=2 #number of samples per learning step
prnts=100 #number of learning steps after results are printed
epis=4000/batch/prnts #number of roleouts
numbExp=40 #number of experiments
et = ExTools(batch, prnts) #tool for printing and plotting
expList = ["PGPE(storeAllEvaluations = True)", "ExactNES(storeAllEvaluations = True)", "FEM(storeAllEvaluations = True)", "CMAES(storeAllEvaluations = True)"]
for e in expList:
for runs in range(numbExp):
# create environment
env = CartPoleEnvironment()
# create task
task = BalanceTask(env, 200, desiredValue=None)
# create controller network
net = buildNetwork(4, 1, bias=False)
# create agent with controller and learner (and its options)
agent = OptimizationAgent(net, eval(e))
et.agent = agent
# create the experiment
experiment = EpisodicExperiment(task, agent)
#Do the experiment
for updates in range(epis):
for i in range(prnts):
experiment.doEpisodes(batch)
et.printResults((agent.learner._allEvaluations)[-50:-1], runs, updates)
et.addExps()
et.nextExps()
et.showExps()
| bsd-3-clause |
videetssinghai/Blog-Rest-Api | lib/python2.7/site-packages/pip/_vendor/distlib/util.py | 327 | 52991 | #
# Copyright (C) 2012-2016 The Python Software Foundation.
# See LICENSE.txt and CONTRIBUTORS.txt.
#
import codecs
from collections import deque
import contextlib
import csv
from glob import iglob as std_iglob
import io
import json
import logging
import os
import py_compile
import re
import shutil
import socket
try:
import ssl
except ImportError: # pragma: no cover
ssl = None
import subprocess
import sys
import tarfile
import tempfile
import textwrap
try:
import threading
except ImportError: # pragma: no cover
import dummy_threading as threading
import time
from . import DistlibException
from .compat import (string_types, text_type, shutil, raw_input, StringIO,
cache_from_source, urlopen, urljoin, httplib, xmlrpclib,
splittype, HTTPHandler, BaseConfigurator, valid_ident,
Container, configparser, URLError, ZipFile, fsdecode,
unquote)
logger = logging.getLogger(__name__)
#
# Requirement parsing code for name + optional constraints + optional extras
#
# e.g. 'foo >= 1.2, < 2.0 [bar, baz]'
#
# The regex can seem a bit hairy, so we build it up out of smaller pieces
# which are manageable.
#
COMMA = r'\s*,\s*'
COMMA_RE = re.compile(COMMA)
IDENT = r'(\w|[.-])+'
EXTRA_IDENT = r'(\*|:(\*|\w+):|' + IDENT + ')'
VERSPEC = IDENT + r'\*?'
RELOP = '([<>=!~]=)|[<>]'
#
# The first relop is optional - if absent, will be taken as '~='
#
BARE_CONSTRAINTS = ('(' + RELOP + r')?\s*(' + VERSPEC + ')(' + COMMA + '(' +
RELOP + r')\s*(' + VERSPEC + '))*')
DIRECT_REF = r'(from\s+(?P<diref>.*))'
#
# Either the bare constraints or the bare constraints in parentheses
#
CONSTRAINTS = (r'\(\s*(?P<c1>' + BARE_CONSTRAINTS + '|' + DIRECT_REF +
               r')\s*\)|(?P<c2>' + BARE_CONSTRAINTS + r'\s*)')
EXTRA_LIST = EXTRA_IDENT + '(' + COMMA + EXTRA_IDENT + ')*'
EXTRAS = r'\[\s*(?P<ex>' + EXTRA_LIST + r')?\s*\]'
REQUIREMENT = ('(?P<dn>' + IDENT + r')\s*(' + EXTRAS + r'\s*)?(\s*' +
CONSTRAINTS + ')?$')
REQUIREMENT_RE = re.compile(REQUIREMENT)
#
# Used to scan through the constraints
#
RELOP_IDENT = '(?P<op>' + RELOP + r')\s*(?P<vn>' + VERSPEC + ')'
RELOP_IDENT_RE = re.compile(RELOP_IDENT)
def parse_requirement(s):
def get_constraint(m):
d = m.groupdict()
return d['op'], d['vn']
result = None
m = REQUIREMENT_RE.match(s)
if m:
d = m.groupdict()
name = d['dn']
cons = d['c1'] or d['c2']
if not d['diref']:
url = None
else:
# direct reference
cons = None
url = d['diref'].strip()
if not cons:
cons = None
constr = ''
rs = d['dn']
else:
if cons[0] not in '<>!=':
cons = '~=' + cons
iterator = RELOP_IDENT_RE.finditer(cons)
cons = [get_constraint(m) for m in iterator]
rs = '%s (%s)' % (name, ', '.join(['%s %s' % con for con in cons]))
if not d['ex']:
extras = None
else:
extras = COMMA_RE.split(d['ex'])
result = Container(name=name, constraints=cons, extras=extras,
requirement=rs, source=s, url=url)
return result
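# Illustrative sketch (the _demo_* helper is not part of distlib): in the
# grammar compiled above, extras precede the parenthesised constraints, so a
# requirement of the shape 'name [extras] (constraints)' parses cleanly.
def _demo_parse_requirement():
    r = parse_requirement('foo [bar, baz] (>= 1.2, < 2.0)')
    assert r.name == 'foo'
    assert r.constraints == [('>=', '1.2'), ('<', '2.0')]
    assert r.extras == ['bar', 'baz']
    return r.requirement    # 'foo (>= 1.2, < 2.0)'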
def get_resources_dests(resources_root, rules):
"""Find destinations for resources files"""
def get_rel_path(base, path):
# normalizes and returns a lstripped-/-separated path
base = base.replace(os.path.sep, '/')
path = path.replace(os.path.sep, '/')
assert path.startswith(base)
return path[len(base):].lstrip('/')
destinations = {}
for base, suffix, dest in rules:
prefix = os.path.join(resources_root, base)
for abs_base in iglob(prefix):
abs_glob = os.path.join(abs_base, suffix)
for abs_path in iglob(abs_glob):
resource_file = get_rel_path(resources_root, abs_path)
if dest is None: # remove the entry if it was here
destinations.pop(resource_file, None)
else:
rel_path = get_rel_path(abs_base, abs_path)
rel_dest = dest.replace(os.path.sep, '/').rstrip('/')
destinations[resource_file] = rel_dest + '/' + rel_path
return destinations
def in_venv():
if hasattr(sys, 'real_prefix'):
# virtualenv venvs
result = True
else:
# PEP 405 venvs
result = sys.prefix != getattr(sys, 'base_prefix', sys.prefix)
return result
def get_executable():
# The __PYVENV_LAUNCHER__ dance is apparently no longer needed, as
# changes to the stub launcher mean that sys.executable always points
# to the stub on macOS
# if sys.platform == 'darwin' and ('__PYVENV_LAUNCHER__'
# in os.environ):
# result = os.environ['__PYVENV_LAUNCHER__']
# else:
# result = sys.executable
# return result
result = os.path.normcase(sys.executable)
if not isinstance(result, text_type):
result = fsdecode(result)
return result
def proceed(prompt, allowed_chars, error_prompt=None, default=None):
p = prompt
while True:
s = raw_input(p)
p = prompt
if not s and default:
s = default
if s:
c = s[0].lower()
if c in allowed_chars:
break
if error_prompt:
p = '%c: %s\n%s' % (c, error_prompt, prompt)
return c
def extract_by_key(d, keys):
if isinstance(keys, string_types):
keys = keys.split()
result = {}
for key in keys:
if key in d:
result[key] = d[key]
return result
def read_exports(stream):
if sys.version_info[0] >= 3:
# needs to be a text stream
stream = codecs.getreader('utf-8')(stream)
# Try to load as JSON, falling back on legacy format
data = stream.read()
stream = StringIO(data)
try:
jdata = json.load(stream)
result = jdata['extensions']['python.exports']['exports']
for group, entries in result.items():
for k, v in entries.items():
s = '%s = %s' % (k, v)
entry = get_export_entry(s)
assert entry is not None
entries[k] = entry
return result
except Exception:
stream.seek(0, 0)
def read_stream(cp, stream):
if hasattr(cp, 'read_file'):
cp.read_file(stream)
else:
cp.readfp(stream)
cp = configparser.ConfigParser()
try:
read_stream(cp, stream)
except configparser.MissingSectionHeaderError:
stream.close()
data = textwrap.dedent(data)
stream = StringIO(data)
read_stream(cp, stream)
result = {}
for key in cp.sections():
result[key] = entries = {}
for name, value in cp.items(key):
s = '%s = %s' % (name, value)
entry = get_export_entry(s)
assert entry is not None
#entry.dist = self
entries[name] = entry
return result
def write_exports(exports, stream):
if sys.version_info[0] >= 3:
# needs to be a text stream
stream = codecs.getwriter('utf-8')(stream)
cp = configparser.ConfigParser()
for k, v in exports.items():
# TODO check k, v for valid values
cp.add_section(k)
for entry in v.values():
if entry.suffix is None:
s = entry.prefix
else:
s = '%s:%s' % (entry.prefix, entry.suffix)
if entry.flags:
s = '%s [%s]' % (s, ', '.join(entry.flags))
cp.set(k, entry.name, s)
cp.write(stream)
@contextlib.contextmanager
def tempdir():
td = tempfile.mkdtemp()
try:
yield td
finally:
shutil.rmtree(td)
@contextlib.contextmanager
def chdir(d):
cwd = os.getcwd()
try:
os.chdir(d)
yield
finally:
os.chdir(cwd)
@contextlib.contextmanager
def socket_timeout(seconds=15):
cto = socket.getdefaulttimeout()
try:
socket.setdefaulttimeout(seconds)
yield
finally:
socket.setdefaulttimeout(cto)
class cached_property(object):
def __init__(self, func):
self.func = func
#for attr in ('__name__', '__module__', '__doc__'):
# setattr(self, attr, getattr(func, attr, None))
def __get__(self, obj, cls=None):
if obj is None:
return self
value = self.func(obj)
object.__setattr__(obj, self.func.__name__, value)
#obj.__dict__[self.func.__name__] = value = self.func(obj)
return value
def convert_path(pathname):
"""Return 'pathname' as a name that will work on the native filesystem.
The path is split on '/' and put back together again using the current
directory separator. Needed because filenames in the setup script are
always supplied in Unix style, and have to be converted to the local
convention before we can actually use them in the filesystem. Raises
ValueError on non-Unix-ish systems if 'pathname' either starts or
ends with a slash.
"""
if os.sep == '/':
return pathname
if not pathname:
return pathname
if pathname[0] == '/':
raise ValueError("path '%s' cannot be absolute" % pathname)
if pathname[-1] == '/':
raise ValueError("path '%s' cannot end with '/'" % pathname)
paths = pathname.split('/')
while os.curdir in paths:
paths.remove(os.curdir)
if not paths:
return os.curdir
return os.path.join(*paths)
class FileOperator(object):
def __init__(self, dry_run=False):
self.dry_run = dry_run
self.ensured = set()
self._init_record()
def _init_record(self):
self.record = False
self.files_written = set()
self.dirs_created = set()
def record_as_written(self, path):
if self.record:
self.files_written.add(path)
def newer(self, source, target):
"""Tell if the target is newer than the source.
Returns true if 'source' exists and is more recently modified than
'target', or if 'source' exists and 'target' doesn't.
Returns false if both exist and 'target' is the same age or younger
than 'source'. Raise PackagingFileError if 'source' does not exist.
Note that this test is not very accurate: files created in the same
second will have the same "age".
"""
if not os.path.exists(source):
raise DistlibException("file '%r' does not exist" %
os.path.abspath(source))
if not os.path.exists(target):
return True
return os.stat(source).st_mtime > os.stat(target).st_mtime
def copy_file(self, infile, outfile, check=True):
"""Copy a file respecting dry-run and force flags.
"""
self.ensure_dir(os.path.dirname(outfile))
logger.info('Copying %s to %s', infile, outfile)
if not self.dry_run:
msg = None
if check:
if os.path.islink(outfile):
msg = '%s is a symlink' % outfile
elif os.path.exists(outfile) and not os.path.isfile(outfile):
msg = '%s is a non-regular file' % outfile
if msg:
raise ValueError(msg + ' which would be overwritten')
shutil.copyfile(infile, outfile)
self.record_as_written(outfile)
def copy_stream(self, instream, outfile, encoding=None):
assert not os.path.isdir(outfile)
self.ensure_dir(os.path.dirname(outfile))
logger.info('Copying stream %s to %s', instream, outfile)
if not self.dry_run:
if encoding is None:
outstream = open(outfile, 'wb')
else:
outstream = codecs.open(outfile, 'w', encoding=encoding)
try:
shutil.copyfileobj(instream, outstream)
finally:
outstream.close()
self.record_as_written(outfile)
def write_binary_file(self, path, data):
self.ensure_dir(os.path.dirname(path))
if not self.dry_run:
with open(path, 'wb') as f:
f.write(data)
self.record_as_written(path)
def write_text_file(self, path, data, encoding):
self.ensure_dir(os.path.dirname(path))
if not self.dry_run:
with open(path, 'wb') as f:
f.write(data.encode(encoding))
self.record_as_written(path)
def set_mode(self, bits, mask, files):
if os.name == 'posix' or (os.name == 'java' and os._name == 'posix'):
# Set the executable bits (owner, group, and world) on
# all the files specified.
for f in files:
if self.dry_run:
logger.info("changing mode of %s", f)
else:
mode = (os.stat(f).st_mode | bits) & mask
logger.info("changing mode of %s to %o", f, mode)
os.chmod(f, mode)
set_executable_mode = lambda s, f: s.set_mode(0o555, 0o7777, f)
def ensure_dir(self, path):
path = os.path.abspath(path)
if path not in self.ensured and not os.path.exists(path):
self.ensured.add(path)
d, f = os.path.split(path)
self.ensure_dir(d)
logger.info('Creating %s' % path)
if not self.dry_run:
os.mkdir(path)
if self.record:
self.dirs_created.add(path)
def byte_compile(self, path, optimize=False, force=False, prefix=None):
dpath = cache_from_source(path, not optimize)
logger.info('Byte-compiling %s to %s', path, dpath)
if not self.dry_run:
if force or self.newer(path, dpath):
if not prefix:
diagpath = None
else:
assert path.startswith(prefix)
diagpath = path[len(prefix):]
py_compile.compile(path, dpath, diagpath, True) # raise error
self.record_as_written(dpath)
return dpath
def ensure_removed(self, path):
if os.path.exists(path):
if os.path.isdir(path) and not os.path.islink(path):
logger.debug('Removing directory tree at %s', path)
if not self.dry_run:
shutil.rmtree(path)
if self.record:
if path in self.dirs_created:
self.dirs_created.remove(path)
else:
if os.path.islink(path):
s = 'link'
else:
s = 'file'
logger.debug('Removing %s %s', s, path)
if not self.dry_run:
os.remove(path)
if self.record:
if path in self.files_written:
self.files_written.remove(path)
def is_writable(self, path):
result = False
while not result:
if os.path.exists(path):
result = os.access(path, os.W_OK)
break
parent = os.path.dirname(path)
if parent == path:
break
path = parent
return result
def commit(self):
"""
Commit recorded changes, turn off recording, return
changes.
"""
assert self.record
result = self.files_written, self.dirs_created
self._init_record()
return result
def rollback(self):
if not self.dry_run:
for f in list(self.files_written):
if os.path.exists(f):
os.remove(f)
# dirs should all be empty now, except perhaps for
# __pycache__ subdirs
# reverse so that subdirs appear before their parents
dirs = sorted(self.dirs_created, reverse=True)
for d in dirs:
flist = os.listdir(d)
if flist:
assert flist == ['__pycache__']
sd = os.path.join(d, flist[0])
os.rmdir(sd)
os.rmdir(d) # should fail if non-empty
self._init_record()
def resolve(module_name, dotted_path):
if module_name in sys.modules:
mod = sys.modules[module_name]
else:
mod = __import__(module_name)
if dotted_path is None:
result = mod
else:
parts = dotted_path.split('.')
result = getattr(mod, parts.pop(0))
for p in parts:
result = getattr(result, p)
return result
class ExportEntry(object):
def __init__(self, name, prefix, suffix, flags):
self.name = name
self.prefix = prefix
self.suffix = suffix
self.flags = flags
@cached_property
def value(self):
return resolve(self.prefix, self.suffix)
def __repr__(self): # pragma: no cover
return '<ExportEntry %s = %s:%s %s>' % (self.name, self.prefix,
self.suffix, self.flags)
def __eq__(self, other):
if not isinstance(other, ExportEntry):
result = False
else:
result = (self.name == other.name and
self.prefix == other.prefix and
self.suffix == other.suffix and
self.flags == other.flags)
return result
__hash__ = object.__hash__
ENTRY_RE = re.compile(r'''(?P<name>(\w|[-.+])+)
\s*=\s*(?P<callable>(\w+)([:\.]\w+)*)
\s*(\[\s*(?P<flags>\w+(=\w+)?(,\s*\w+(=\w+)?)*)\s*\])?
''', re.VERBOSE)
def get_export_entry(specification):
m = ENTRY_RE.search(specification)
if not m:
result = None
if '[' in specification or ']' in specification:
raise DistlibException("Invalid specification "
"'%s'" % specification)
else:
d = m.groupdict()
name = d['name']
path = d['callable']
colons = path.count(':')
if colons == 0:
prefix, suffix = path, None
else:
if colons != 1:
raise DistlibException("Invalid specification "
"'%s'" % specification)
prefix, suffix = path.split(':')
flags = d['flags']
if flags is None:
if '[' in specification or ']' in specification:
raise DistlibException("Invalid specification "
"'%s'" % specification)
flags = []
else:
flags = [f.strip() for f in flags.split(',')]
result = ExportEntry(name, prefix, suffix, flags)
return result
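# Illustrative sketch (the helper name is not part of distlib): the accepted
# shape is 'name = package.module:attribute [flag, ...]'.
def _demo_get_export_entry():
    e = get_export_entry('hello = pkg.mod:greet [extra1]')
    assert (e.name, e.prefix, e.suffix) == ('hello', 'pkg.mod', 'greet')
    assert e.flags == ['extra1']
    return e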
def get_cache_base(suffix=None):
"""
Return the default base location for distlib caches. If the directory does
not exist, it is created. Use the suffix provided for the base directory,
and default to '.distlib' if it isn't provided.
On Windows, if LOCALAPPDATA is defined in the environment, then it is
assumed to be a directory, and will be the parent directory of the result.
On POSIX, and on Windows if LOCALAPPDATA is not defined, the user's home
directory - using os.expanduser('~') - will be the parent directory of
the result.
The result is just the directory '.distlib' in the parent directory as
determined above, or with the name specified with ``suffix``.
"""
if suffix is None:
suffix = '.distlib'
if os.name == 'nt' and 'LOCALAPPDATA' in os.environ:
result = os.path.expandvars('$localappdata')
else:
# Assume posix, or old Windows
result = os.path.expanduser('~')
# we use 'isdir' instead of 'exists', because we want to
# fail if there's a file with that name
if os.path.isdir(result):
usable = os.access(result, os.W_OK)
if not usable:
logger.warning('Directory exists but is not writable: %s', result)
else:
try:
os.makedirs(result)
usable = True
except OSError:
logger.warning('Unable to create %s', result, exc_info=True)
usable = False
if not usable:
result = tempfile.mkdtemp()
logger.warning('Default location unusable, using %s', result)
return os.path.join(result, suffix)
def path_to_cache_dir(path):
"""
Convert an absolute path to a directory name for use in a cache.
The algorithm used is:
#. On Windows, any ``':'`` in the drive is replaced with ``'---'``.
#. Any occurrence of ``os.sep`` is replaced with ``'--'``.
#. ``'.cache'`` is appended.
"""
d, p = os.path.splitdrive(os.path.abspath(path))
if d:
d = d.replace(':', '---')
p = p.replace(os.sep, '--')
return d + p + '.cache'
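# Illustrative sketch (helper not part of distlib): on POSIX this maps
# '/srv/data' to '--srv--data.cache'; on Windows a drive prefix such as
# 'C:' is rewritten to 'C---' first.
def _demo_path_to_cache_dir():
    return path_to_cache_dir('/srv/data')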
def ensure_slash(s):
if not s.endswith('/'):
return s + '/'
return s
def parse_credentials(netloc):
username = password = None
if '@' in netloc:
prefix, netloc = netloc.split('@', 1)
if ':' not in prefix:
username = prefix
else:
username, password = prefix.split(':', 1)
return username, password, netloc
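# Illustrative sketch (helper and credentials are made up, not distlib's):
# a 'user:password@host' netloc splits into its three parts, and the
# password stays None when only a username is present.
def _demo_parse_credentials():
    assert parse_credentials('bob:[email protected]') == ('bob', 's3cret', 'pypi.org')
    return parse_credentials('pypi.org')    # (None, None, 'pypi.org')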
def get_process_umask():
result = os.umask(0o22)
os.umask(result)
return result
def is_string_sequence(seq):
result = True
i = None
for i, s in enumerate(seq):
if not isinstance(s, string_types):
result = False
break
assert i is not None
return result
PROJECT_NAME_AND_VERSION = re.compile('([a-z0-9_]+([.-][a-z_][a-z0-9_]*)*)-'
'([a-z0-9_.+-]+)', re.I)
PYTHON_VERSION = re.compile(r'-py(\d\.?\d?)')
def split_filename(filename, project_name=None):
"""
Extract name, version, python version from a filename (no extension)
Return name, version, pyver or None
"""
result = None
pyver = None
filename = unquote(filename).replace(' ', '-')
m = PYTHON_VERSION.search(filename)
if m:
pyver = m.group(1)
filename = filename[:m.start()]
if project_name and len(filename) > len(project_name) + 1:
m = re.match(re.escape(project_name) + r'\b', filename)
if m:
n = m.end()
result = filename[:n], filename[n + 1:], pyver
if result is None:
m = PROJECT_NAME_AND_VERSION.match(filename)
if m:
result = m.group(1), m.group(3), pyver
return result
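# Illustrative sketch (helper not part of distlib): the '-pyX.Y' tag is
# peeled off first, then the stem splits into name and version.
def _demo_split_filename():
    return split_filename('distlib-0.2.4-py2.7')  # ('distlib', '0.2.4', '2.7')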
# Allow spaces in name because of legacy dists like "Twisted Core"
NAME_VERSION_RE = re.compile(r'(?P<name>[\w .-]+)\s*'
r'\(\s*(?P<ver>[^\s)]+)\)$')
def parse_name_and_version(p):
"""
A utility method used to get name and version from a string.
From e.g. a Provides-Dist value.
:param p: A value in a form 'foo (1.0)'
:return: The name and version as a tuple.
"""
m = NAME_VERSION_RE.match(p)
if not m:
raise DistlibException('Ill-formed name/version string: \'%s\'' % p)
d = m.groupdict()
return d['name'].strip().lower(), d['ver']
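# Illustrative sketch (helper not part of distlib), using the legacy
# space-in-name form the regex above allows:
def _demo_parse_name_and_version():
    return parse_name_and_version('Twisted Core (12.0)')  # ('twisted core', '12.0')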
def get_extras(requested, available):
result = set()
requested = set(requested or [])
available = set(available or [])
if '*' in requested:
requested.remove('*')
result |= available
for r in requested:
if r == '-':
result.add(r)
elif r.startswith('-'):
unwanted = r[1:]
if unwanted not in available:
logger.warning('undeclared extra: %s' % unwanted)
if unwanted in result:
result.remove(unwanted)
else:
if r not in available:
logger.warning('undeclared extra: %s' % r)
result.add(r)
return result
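# Illustrative sketch (helper not part of distlib): '*' requests every
# available extra and a '-' prefix subtracts one again.
def _demo_get_extras():
    return get_extras(['*', '-test'], ['doc', 'test'])  # {'doc'}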
#
# Extended metadata functionality
#
def _get_external_data(url):
result = {}
try:
# urlopen might fail if it runs into redirections,
# because of Python issue #13696. Fixed in locators
# using a custom redirect handler.
resp = urlopen(url)
headers = resp.info()
ct = headers.get('Content-Type')
if not ct.startswith('application/json'):
logger.debug('Unexpected response for JSON request: %s', ct)
else:
reader = codecs.getreader('utf-8')(resp)
#data = reader.read().decode('utf-8')
#result = json.loads(data)
result = json.load(reader)
except Exception as e:
logger.exception('Failed to get external data for %s: %s', url, e)
return result
_external_data_base_url = 'https://www.red-dove.com/pypi/projects/'
def get_project_data(name):
url = '%s/%s/project.json' % (name[0].upper(), name)
url = urljoin(_external_data_base_url, url)
result = _get_external_data(url)
return result
def get_package_data(name, version):
url = '%s/%s/package-%s.json' % (name[0].upper(), name, version)
url = urljoin(_external_data_base_url, url)
return _get_external_data(url)
class Cache(object):
"""
A class implementing a cache for resources that need to live in the file system
e.g. shared libraries. This class was moved from resources to here because it
could be used by other modules, e.g. the wheel module.
"""
def __init__(self, base):
"""
Initialise an instance.
:param base: The base directory where the cache should be located.
"""
# we use 'isdir' instead of 'exists', because we want to
# fail if there's a file with that name
if not os.path.isdir(base): # pragma: no cover
os.makedirs(base)
if (os.stat(base).st_mode & 0o77) != 0:
logger.warning('Directory \'%s\' is not private', base)
self.base = os.path.abspath(os.path.normpath(base))
def prefix_to_dir(self, prefix):
"""
Converts a resource prefix to a directory name in the cache.
"""
return path_to_cache_dir(prefix)
def clear(self):
"""
Clear the cache.
"""
not_removed = []
for fn in os.listdir(self.base):
fn = os.path.join(self.base, fn)
try:
if os.path.islink(fn) or os.path.isfile(fn):
os.remove(fn)
elif os.path.isdir(fn):
shutil.rmtree(fn)
except Exception:
not_removed.append(fn)
return not_removed
class EventMixin(object):
"""
A very simple publish/subscribe system.
"""
def __init__(self):
self._subscribers = {}
def add(self, event, subscriber, append=True):
"""
Add a subscriber for an event.
:param event: The name of an event.
:param subscriber: The subscriber to be added (and called when the
event is published).
:param append: Whether to append or prepend the subscriber to an
existing subscriber list for the event.
"""
subs = self._subscribers
if event not in subs:
subs[event] = deque([subscriber])
else:
sq = subs[event]
if append:
sq.append(subscriber)
else:
sq.appendleft(subscriber)
def remove(self, event, subscriber):
"""
Remove a subscriber for an event.
:param event: The name of an event.
:param subscriber: The subscriber to be removed.
"""
subs = self._subscribers
if event not in subs:
raise ValueError('No subscribers: %r' % event)
subs[event].remove(subscriber)
def get_subscribers(self, event):
"""
Return an iterator for the subscribers for an event.
:param event: The event to return subscribers for.
"""
return iter(self._subscribers.get(event, ()))
def publish(self, event, *args, **kwargs):
"""
Publish a event and return a list of values returned by its
subscribers.
:param event: The event to publish.
:param args: The positional arguments to pass to the event's
subscribers.
:param kwargs: The keyword arguments to pass to the event's
subscribers.
"""
result = []
for subscriber in self.get_subscribers(event):
try:
value = subscriber(event, *args, **kwargs)
except Exception:
logger.exception('Exception during event publication')
value = None
result.append(value)
logger.debug('publish %s: args = %s, kwargs = %s, result = %s',
event, args, kwargs, result)
return result
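# Illustrative sketch (helper not part of distlib): subscribers are plain
# callables receiving (event, *args, **kwargs), and publish() collects
# their return values in subscription order.
def _demo_event_mixin():
    pub = EventMixin()
    pub.add('ping', lambda event, payload: payload.upper())
    return pub.publish('ping', payload='hello')     # ['HELLO']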
#
# Simple sequencing
#
class Sequencer(object):
def __init__(self):
self._preds = {}
self._succs = {}
self._nodes = set() # nodes with no preds/succs
def add_node(self, node):
self._nodes.add(node)
def remove_node(self, node, edges=False):
if node in self._nodes:
self._nodes.remove(node)
if edges:
for p in set(self._preds.get(node, ())):
self.remove(p, node)
for s in set(self._succs.get(node, ())):
self.remove(node, s)
# Remove empties
for k, v in list(self._preds.items()):
if not v:
del self._preds[k]
for k, v in list(self._succs.items()):
if not v:
del self._succs[k]
def add(self, pred, succ):
assert pred != succ
self._preds.setdefault(succ, set()).add(pred)
self._succs.setdefault(pred, set()).add(succ)
def remove(self, pred, succ):
assert pred != succ
try:
preds = self._preds[succ]
succs = self._succs[pred]
except KeyError: # pragma: no cover
raise ValueError('%r not a successor of anything' % succ)
try:
preds.remove(pred)
succs.remove(succ)
except KeyError: # pragma: no cover
raise ValueError('%r not a successor of %r' % (succ, pred))
def is_step(self, step):
return (step in self._preds or step in self._succs or
step in self._nodes)
def get_steps(self, final):
if not self.is_step(final):
raise ValueError('Unknown: %r' % final)
result = []
todo = []
seen = set()
todo.append(final)
while todo:
step = todo.pop(0)
if step in seen:
# if a step was already seen,
# move it to the end (so it will appear earlier
# when reversed on return) ... but not for the
# final step, as that would be confusing for
# users
if step != final:
result.remove(step)
result.append(step)
else:
seen.add(step)
result.append(step)
preds = self._preds.get(step, ())
todo.extend(preds)
return reversed(result)
@property
def strong_connections(self):
#http://en.wikipedia.org/wiki/Tarjan%27s_strongly_connected_components_algorithm
index_counter = [0]
stack = []
lowlinks = {}
index = {}
result = []
graph = self._succs
def strongconnect(node):
# set the depth index for this node to the smallest unused index
index[node] = index_counter[0]
lowlinks[node] = index_counter[0]
index_counter[0] += 1
stack.append(node)
# Consider successors
try:
successors = graph[node]
except Exception:
successors = []
for successor in successors:
if successor not in lowlinks:
# Successor has not yet been visited
strongconnect(successor)
lowlinks[node] = min(lowlinks[node],lowlinks[successor])
elif successor in stack:
# the successor is in the stack and hence in the current
# strongly connected component (SCC)
lowlinks[node] = min(lowlinks[node],index[successor])
# If `node` is a root node, pop the stack and generate an SCC
if lowlinks[node] == index[node]:
connected_component = []
while True:
successor = stack.pop()
connected_component.append(successor)
if successor == node: break
component = tuple(connected_component)
# storing the result
result.append(component)
for node in graph:
if node not in lowlinks:
strongconnect(node)
return result
@property
def dot(self):
result = ['digraph G {']
for succ in self._preds:
preds = self._preds[succ]
for pred in preds:
result.append(' %s -> %s;' % (pred, succ))
for node in self._nodes:
result.append(' %s;' % node)
result.append('}')
return '\n'.join(result)
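# Illustrative sketch (helper not part of distlib): add(pred, succ) records
# an ordering edge, and get_steps() walks the predecessor graph back from
# the final step, so prerequisites come out first.
def _demo_sequencer():
    seq = Sequencer()
    seq.add('build', 'test')
    seq.add('test', 'release')
    return list(seq.get_steps('release'))   # ['build', 'test', 'release']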
#
# Unarchiving functionality for zip, tar, tgz, tbz, whl
#
ARCHIVE_EXTENSIONS = ('.tar.gz', '.tar.bz2', '.tar', '.zip',
'.tgz', '.tbz', '.whl')
def unarchive(archive_filename, dest_dir, format=None, check=True):
def check_path(path):
if not isinstance(path, text_type):
path = path.decode('utf-8')
p = os.path.abspath(os.path.join(dest_dir, path))
if not p.startswith(dest_dir) or p[plen] != os.sep:
raise ValueError('path outside destination: %r' % p)
dest_dir = os.path.abspath(dest_dir)
plen = len(dest_dir)
archive = None
if format is None:
if archive_filename.endswith(('.zip', '.whl')):
format = 'zip'
elif archive_filename.endswith(('.tar.gz', '.tgz')):
format = 'tgz'
mode = 'r:gz'
elif archive_filename.endswith(('.tar.bz2', '.tbz')):
format = 'tbz'
mode = 'r:bz2'
elif archive_filename.endswith('.tar'):
format = 'tar'
mode = 'r'
else: # pragma: no cover
raise ValueError('Unknown format for %r' % archive_filename)
try:
if format == 'zip':
archive = ZipFile(archive_filename, 'r')
if check:
names = archive.namelist()
for name in names:
check_path(name)
else:
archive = tarfile.open(archive_filename, mode)
if check:
names = archive.getnames()
for name in names:
check_path(name)
if format != 'zip' and sys.version_info[0] < 3:
# See Python issue 17153. If the dest path contains Unicode,
# tarfile extraction fails on Python 2.x if a member path name
# contains non-ASCII characters - it leads to an implicit
# bytes -> unicode conversion using ASCII to decode.
for tarinfo in archive.getmembers():
if not isinstance(tarinfo.name, text_type):
tarinfo.name = tarinfo.name.decode('utf-8')
archive.extractall(dest_dir)
finally:
if archive:
archive.close()
def zip_dir(directory):
"""zip a directory tree into a BytesIO object"""
result = io.BytesIO()
dlen = len(directory)
with ZipFile(result, "w") as zf:
for root, dirs, files in os.walk(directory):
for name in files:
full = os.path.join(root, name)
rel = root[dlen:]
dest = os.path.join(rel, name)
zf.write(full, dest)
return result
#
# Simple progress bar
#
UNITS = ('', 'K', 'M', 'G', 'T', 'P')
class Progress(object):
unknown = 'UNKNOWN'
def __init__(self, minval=0, maxval=100):
assert maxval is None or maxval >= minval
self.min = self.cur = minval
self.max = maxval
self.started = None
self.elapsed = 0
self.done = False
def update(self, curval):
assert self.min <= curval
assert self.max is None or curval <= self.max
self.cur = curval
now = time.time()
if self.started is None:
self.started = now
else:
self.elapsed = now - self.started
def increment(self, incr):
assert incr >= 0
self.update(self.cur + incr)
def start(self):
self.update(self.min)
return self
def stop(self):
if self.max is not None:
self.update(self.max)
self.done = True
@property
def maximum(self):
return self.unknown if self.max is None else self.max
@property
def percentage(self):
if self.done:
result = '100 %'
elif self.max is None:
result = ' ?? %'
else:
v = 100.0 * (self.cur - self.min) / (self.max - self.min)
result = '%3d %%' % v
return result
def format_duration(self, duration):
if (duration <= 0) and self.max is None or self.cur == self.min:
result = '??:??:??'
#elif duration < 1:
# result = '--:--:--'
else:
result = time.strftime('%H:%M:%S', time.gmtime(duration))
return result
@property
def ETA(self):
if self.done:
prefix = 'Done'
t = self.elapsed
#import pdb; pdb.set_trace()
else:
prefix = 'ETA '
if self.max is None:
t = -1
elif self.elapsed == 0 or (self.cur == self.min):
t = 0
else:
#import pdb; pdb.set_trace()
t = float(self.max - self.min)
t /= self.cur - self.min
t = (t - 1) * self.elapsed
return '%s: %s' % (prefix, self.format_duration(t))
@property
def speed(self):
if self.elapsed == 0:
result = 0.0
else:
result = (self.cur - self.min) / self.elapsed
for unit in UNITS:
if result < 1000:
break
result /= 1000.0
return '%d %sB/s' % (result, unit)
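# Illustrative sketch (helper not part of distlib): drive the bar through
# update()/increment(); percentage, ETA and speed are all derived from
# min/cur/max plus elapsed wall-clock time.
def _demo_progress():
    p = Progress(maxval=200).start()
    p.increment(50)
    return p.percentage     # ' 25 %'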
#
# Glob functionality
#
RICH_GLOB = re.compile(r'\{([^}]*)\}')
_CHECK_RECURSIVE_GLOB = re.compile(r'[^/\\,{]\*\*|\*\*[^/\\,}]')
_CHECK_MISMATCH_SET = re.compile(r'^[^{]*\}|\{[^}]*$')
def iglob(path_glob):
"""Extended globbing function that supports ** and {opt1,opt2,opt3}."""
if _CHECK_RECURSIVE_GLOB.search(path_glob):
msg = """invalid glob %r: recursive glob "**" must be used alone"""
raise ValueError(msg % path_glob)
if _CHECK_MISMATCH_SET.search(path_glob):
msg = """invalid glob %r: mismatching set marker '{' or '}'"""
raise ValueError(msg % path_glob)
return _iglob(path_glob)
def _iglob(path_glob):
rich_path_glob = RICH_GLOB.split(path_glob, 1)
if len(rich_path_glob) > 1:
assert len(rich_path_glob) == 3, rich_path_glob
prefix, set, suffix = rich_path_glob
for item in set.split(','):
for path in _iglob(''.join((prefix, item, suffix))):
yield path
else:
if '**' not in path_glob:
for item in std_iglob(path_glob):
yield item
else:
prefix, radical = path_glob.split('**', 1)
if prefix == '':
prefix = '.'
if radical == '':
radical = '*'
else:
# we support both
radical = radical.lstrip('/')
radical = radical.lstrip('\\')
for path, dir, files in os.walk(prefix):
path = os.path.normpath(path)
for fn in _iglob(os.path.join(path, radical)):
yield fn
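# Illustrative sketch (helper not part of distlib): braces expand like
# shell alternation and a bare '**' recurses into subdirectories.
def _demo_iglob():
    return sorted(iglob('./**/*.{py,txt}'))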
if ssl:
from .compat import (HTTPSHandler as BaseHTTPSHandler, match_hostname,
CertificateError)
#
# HTTPSConnection which verifies certificates/matches domains
#
class HTTPSConnection(httplib.HTTPSConnection):
ca_certs = None # set this to the path to the certs file (.pem)
check_domain = True # only used if ca_certs is not None
# noinspection PyPropertyAccess
def connect(self):
sock = socket.create_connection((self.host, self.port), self.timeout)
if getattr(self, '_tunnel_host', False):
self.sock = sock
self._tunnel()
if not hasattr(ssl, 'SSLContext'):
# For 2.x
if self.ca_certs:
cert_reqs = ssl.CERT_REQUIRED
else:
cert_reqs = ssl.CERT_NONE
self.sock = ssl.wrap_socket(sock, self.key_file, self.cert_file,
cert_reqs=cert_reqs,
ssl_version=ssl.PROTOCOL_SSLv23,
ca_certs=self.ca_certs)
else: # pragma: no cover
context = ssl.SSLContext(ssl.PROTOCOL_SSLv23)
context.options |= ssl.OP_NO_SSLv2
if self.cert_file:
context.load_cert_chain(self.cert_file, self.key_file)
kwargs = {}
if self.ca_certs:
context.verify_mode = ssl.CERT_REQUIRED
context.load_verify_locations(cafile=self.ca_certs)
if getattr(ssl, 'HAS_SNI', False):
kwargs['server_hostname'] = self.host
self.sock = context.wrap_socket(sock, **kwargs)
if self.ca_certs and self.check_domain:
try:
match_hostname(self.sock.getpeercert(), self.host)
logger.debug('Host verified: %s', self.host)
except CertificateError: # pragma: no cover
self.sock.shutdown(socket.SHUT_RDWR)
self.sock.close()
raise
class HTTPSHandler(BaseHTTPSHandler):
def __init__(self, ca_certs, check_domain=True):
BaseHTTPSHandler.__init__(self)
self.ca_certs = ca_certs
self.check_domain = check_domain
def _conn_maker(self, *args, **kwargs):
"""
This is called to create a connection instance. Normally you'd
pass a connection class to do_open, but it doesn't actually check for
a class, and just expects a callable. As long as we behave just as a
constructor would have, we should be OK. If it ever changes so that
we *must* pass a class, we'll create an UnsafeHTTPSConnection class
which just sets check_domain to False in the class definition, and
choose which one to pass to do_open.
"""
result = HTTPSConnection(*args, **kwargs)
if self.ca_certs:
result.ca_certs = self.ca_certs
result.check_domain = self.check_domain
return result
def https_open(self, req):
try:
return self.do_open(self._conn_maker, req)
except URLError as e:
if 'certificate verify failed' in str(e.reason):
raise CertificateError('Unable to verify server certificate '
'for %s' % req.host)
else:
raise
#
# To guard against mixing HTTP traffic with HTTPS (examples: A Man-In-The-
# Middle proxy using HTTP listens on port 443, or an index mistakenly serves
# HTML containing a http://xyz link when it should be https://xyz),
# you can use the following handler class, which does not allow HTTP traffic.
#
# It works by inheriting from HTTPHandler - so build_opener won't add a
# handler for HTTP itself.
#
class HTTPSOnlyHandler(HTTPSHandler, HTTPHandler):
def http_open(self, req):
raise URLError('Unexpected HTTP request on what should be a secure '
'connection: %s' % req)
#
# XML-RPC with timeouts
#
_ver_info = sys.version_info[:2]
if _ver_info == (2, 6):
class HTTP(httplib.HTTP):
def __init__(self, host='', port=None, **kwargs):
if port == 0: # 0 means use port 0, not the default port
port = None
self._setup(self._connection_class(host, port, **kwargs))
if ssl:
class HTTPS(httplib.HTTPS):
def __init__(self, host='', port=None, **kwargs):
if port == 0: # 0 means use port 0, not the default port
port = None
self._setup(self._connection_class(host, port, **kwargs))
class Transport(xmlrpclib.Transport):
def __init__(self, timeout, use_datetime=0):
self.timeout = timeout
xmlrpclib.Transport.__init__(self, use_datetime)
def make_connection(self, host):
h, eh, x509 = self.get_host_info(host)
if _ver_info == (2, 6):
result = HTTP(h, timeout=self.timeout)
else:
if not self._connection or host != self._connection[0]:
self._extra_headers = eh
self._connection = host, httplib.HTTPConnection(h)
result = self._connection[1]
return result
if ssl:
class SafeTransport(xmlrpclib.SafeTransport):
def __init__(self, timeout, use_datetime=0):
self.timeout = timeout
xmlrpclib.SafeTransport.__init__(self, use_datetime)
def make_connection(self, host):
h, eh, kwargs = self.get_host_info(host)
if not kwargs:
kwargs = {}
kwargs['timeout'] = self.timeout
if _ver_info == (2, 6):
result = HTTPS(host, None, **kwargs)
else:
if not self._connection or host != self._connection[0]:
self._extra_headers = eh
self._connection = host, httplib.HTTPSConnection(h, None,
**kwargs)
result = self._connection[1]
return result
class ServerProxy(xmlrpclib.ServerProxy):
def __init__(self, uri, **kwargs):
self.timeout = timeout = kwargs.pop('timeout', None)
# The above classes only come into play if a timeout
# is specified
if timeout is not None:
scheme, _ = splittype(uri)
use_datetime = kwargs.get('use_datetime', 0)
if scheme == 'https':
tcls = SafeTransport
else:
tcls = Transport
kwargs['transport'] = t = tcls(timeout, use_datetime=use_datetime)
self.transport = t
xmlrpclib.ServerProxy.__init__(self, uri, **kwargs)
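# Illustrative sketch (helper and endpoint URL are made up): passing a
# timeout routes construction through the Transport classes above, which
# apply it to every connection they make.
def _demo_server_proxy():
    return ServerProxy('http://localhost:8000/RPC2', timeout=10)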
#
# CSV functionality. This is provided because on 2.x, the csv module can't
# handle Unicode. However, we need to deal with Unicode in e.g. RECORD files.
#
def _csv_open(fn, mode, **kwargs):
if sys.version_info[0] < 3:
mode += 'b'
else:
kwargs['newline'] = ''
return open(fn, mode, **kwargs)
class CSVBase(object):
defaults = {
'delimiter': str(','), # The strs are used because we need native
'quotechar': str('"'), # str in the csv API (2.x won't take
'lineterminator': str('\n') # Unicode)
}
def __enter__(self):
return self
def __exit__(self, *exc_info):
self.stream.close()
class CSVReader(CSVBase):
def __init__(self, **kwargs):
if 'stream' in kwargs:
stream = kwargs['stream']
if sys.version_info[0] >= 3:
# needs to be a text stream
stream = codecs.getreader('utf-8')(stream)
self.stream = stream
else:
self.stream = _csv_open(kwargs['path'], 'r')
self.reader = csv.reader(self.stream, **self.defaults)
def __iter__(self):
return self
def next(self):
result = next(self.reader)
if sys.version_info[0] < 3:
for i, item in enumerate(result):
if not isinstance(item, text_type):
result[i] = item.decode('utf-8')
return result
__next__ = next
class CSVWriter(CSVBase):
def __init__(self, fn, **kwargs):
self.stream = _csv_open(fn, 'w')
self.writer = csv.writer(self.stream, **self.defaults)
def writerow(self, row):
if sys.version_info[0] < 3:
r = []
for item in row:
if isinstance(item, text_type):
item = item.encode('utf-8')
r.append(item)
row = r
self.writer.writerow(row)
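# Illustrative round-trip sketch (helper and 'demo.csv' are made up): the
# writer encodes rows on 2.x and the reader decodes them again, so callers
# deal in text on either interpreter line.
def _demo_csv_roundtrip():
    with CSVWriter('demo.csv') as w:
        w.writerow(['name', 'version'])
        w.writerow(['distlib', '0.2.4'])
    with CSVReader(path='demo.csv') as r:
        return list(r)  # [['name', 'version'], ['distlib', '0.2.4']]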
#
# Configurator functionality
#
class Configurator(BaseConfigurator):
value_converters = dict(BaseConfigurator.value_converters)
value_converters['inc'] = 'inc_convert'
def __init__(self, config, base=None):
super(Configurator, self).__init__(config)
self.base = base or os.getcwd()
def configure_custom(self, config):
def convert(o):
if isinstance(o, (list, tuple)):
result = type(o)([convert(i) for i in o])
elif isinstance(o, dict):
if '()' in o:
result = self.configure_custom(o)
else:
result = {}
for k in o:
result[k] = convert(o[k])
else:
result = self.convert(o)
return result
c = config.pop('()')
if not callable(c):
c = self.resolve(c)
props = config.pop('.', None)
# Check for valid identifiers
args = config.pop('[]', ())
if args:
args = tuple([convert(o) for o in args])
items = [(k, convert(config[k])) for k in config if valid_ident(k)]
kwargs = dict(items)
result = c(*args, **kwargs)
if props:
for n, v in props.items():
setattr(result, n, convert(v))
return result
def __getitem__(self, key):
result = self.config[key]
if isinstance(result, dict) and '()' in result:
self.config[key] = result = self.configure_custom(result)
return result
def inc_convert(self, value):
"""Default converter for the inc:// protocol."""
if not os.path.isabs(value):
value = os.path.join(self.base, value)
with codecs.open(value, 'r', encoding='utf-8') as f:
result = json.load(f)
return result
#
# Mixin for running subprocesses and capturing their output
#
class SubprocessMixin(object):
def __init__(self, verbose=False, progress=None):
self.verbose = verbose
self.progress = progress
def reader(self, stream, context):
"""
Read lines from a subprocess' output stream and either pass to a progress
callable (if specified) or write progress information to sys.stderr.
"""
progress = self.progress
verbose = self.verbose
while True:
s = stream.readline()
if not s:
break
if progress is not None:
progress(s, context)
else:
if not verbose:
sys.stderr.write('.')
else:
sys.stderr.write(s.decode('utf-8'))
sys.stderr.flush()
stream.close()
def run_command(self, cmd, **kwargs):
p = subprocess.Popen(cmd, stdout=subprocess.PIPE,
stderr=subprocess.PIPE, **kwargs)
t1 = threading.Thread(target=self.reader, args=(p.stdout, 'stdout'))
t1.start()
t2 = threading.Thread(target=self.reader, args=(p.stderr, 'stderr'))
t2.start()
p.wait()
t1.join()
t2.join()
if self.progress is not None:
self.progress('done.', 'main')
elif self.verbose:
sys.stderr.write('done.\n')
return p
def normalize_name(name):
"""Normalize a python package name a la PEP 503"""
# https://www.python.org/dev/peps/pep-0503/#normalized-names
return re.sub('[-_.]+', '-', name).lower()
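# Illustrative sketch (helper not part of distlib): runs of '-', '_' and
# '.' collapse to a single '-' and the result is lowercased.
def _demo_normalize_name():
    assert normalize_name('Friendly._.Bard') == 'friendly-bard'
    return normalize_name('FRIENDLY-bard')  # also 'friendly-bard'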
| mit |
drbild/boto | tests/unit/cloudsearch/test_search.py | 114 | 13726 | #!/usr/bin/env python
from tests.compat import mock, unittest
from httpretty import HTTPretty
import json
import requests
from boto.cloudsearch.search import SearchConnection, SearchServiceException
from boto.compat import six, map
HOSTNAME = "search-demo-userdomain.us-east-1.cloudsearch.amazonaws.com"
FULL_URL = 'http://%s/2011-02-01/search' % HOSTNAME
class CloudSearchSearchBaseTest(unittest.TestCase):
hits = [
{
'id': '12341',
'title': 'Document 1',
},
{
'id': '12342',
'title': 'Document 2',
},
{
'id': '12343',
'title': 'Document 3',
},
{
'id': '12344',
'title': 'Document 4',
},
{
'id': '12345',
'title': 'Document 5',
},
{
'id': '12346',
'title': 'Document 6',
},
{
'id': '12347',
'title': 'Document 7',
},
]
content_type = "text/xml"
response_status = 200
def get_args(self, requestline):
(_, request, _) = requestline.split(b" ")
(_, request) = request.split(b"?", 1)
args = six.moves.urllib.parse.parse_qs(request)
return args
def setUp(self):
HTTPretty.enable()
body = self.response
if not isinstance(body, bytes):
body = json.dumps(body).encode('utf-8')
HTTPretty.register_uri(HTTPretty.GET, FULL_URL,
body=body,
content_type=self.content_type,
status=self.response_status)
def tearDown(self):
HTTPretty.disable()
class CloudSearchSearchTest(CloudSearchSearchBaseTest):
response = {
'rank': '-text_relevance',
'match-expr': "Test",
'hits': {
'found': 30,
'start': 0,
'hit': CloudSearchSearchBaseTest.hits
},
'info': {
'rid': 'b7c167f6c2da6d93531b9a7b314ad030b3a74803b4b7797edb905ba5a6a08',
'time-ms': 2,
'cpu-time-ms': 0
}
}
def test_cloudsearch_qsearch(self):
search = SearchConnection(endpoint=HOSTNAME)
search.search(q='Test')
args = self.get_args(HTTPretty.last_request.raw_requestline)
self.assertEqual(args[b'q'], [b"Test"])
self.assertEqual(args[b'start'], [b"0"])
self.assertEqual(args[b'size'], [b"10"])
def test_cloudsearch_bqsearch(self):
search = SearchConnection(endpoint=HOSTNAME)
search.search(bq="'Test'")
args = self.get_args(HTTPretty.last_request.raw_requestline)
self.assertEqual(args[b'bq'], [b"'Test'"])
def test_cloudsearch_search_details(self):
search = SearchConnection(endpoint=HOSTNAME)
search.search(q='Test', size=50, start=20)
args = self.get_args(HTTPretty.last_request.raw_requestline)
self.assertEqual(args[b'q'], [b"Test"])
self.assertEqual(args[b'size'], [b"50"])
self.assertEqual(args[b'start'], [b"20"])
def test_cloudsearch_facet_single(self):
search = SearchConnection(endpoint=HOSTNAME)
search.search(q='Test', facet=["Author"])
args = self.get_args(HTTPretty.last_request.raw_requestline)
self.assertEqual(args[b'facet'], [b"Author"])
def test_cloudsearch_facet_multiple(self):
search = SearchConnection(endpoint=HOSTNAME)
search.search(q='Test', facet=["author", "cat"])
args = self.get_args(HTTPretty.last_request.raw_requestline)
self.assertEqual(args[b'facet'], [b"author,cat"])
def test_cloudsearch_facet_constraint_single(self):
search = SearchConnection(endpoint=HOSTNAME)
search.search(
q='Test',
facet_constraints={'author': "'John Smith','Mark Smith'"})
args = self.get_args(HTTPretty.last_request.raw_requestline)
self.assertEqual(args[b'facet-author-constraints'],
[b"'John Smith','Mark Smith'"])
def test_cloudsearch_facet_constraint_multiple(self):
search = SearchConnection(endpoint=HOSTNAME)
search.search(
q='Test',
facet_constraints={'author': "'John Smith','Mark Smith'",
'category': "'News','Reviews'"})
args = self.get_args(HTTPretty.last_request.raw_requestline)
self.assertEqual(args[b'facet-author-constraints'],
[b"'John Smith','Mark Smith'"])
self.assertEqual(args[b'facet-category-constraints'],
[b"'News','Reviews'"])
def test_cloudsearch_facet_sort_single(self):
search = SearchConnection(endpoint=HOSTNAME)
search.search(q='Test', facet_sort={'author': 'alpha'})
args = self.get_args(HTTPretty.last_request.raw_requestline)
self.assertEqual(args[b'facet-author-sort'], [b'alpha'])
def test_cloudsearch_facet_sort_multiple(self):
search = SearchConnection(endpoint=HOSTNAME)
search.search(q='Test', facet_sort={'author': 'alpha',
'cat': 'count'})
args = self.get_args(HTTPretty.last_request.raw_requestline)
self.assertEqual(args[b'facet-author-sort'], [b'alpha'])
self.assertEqual(args[b'facet-cat-sort'], [b'count'])
def test_cloudsearch_top_n_single(self):
search = SearchConnection(endpoint=HOSTNAME)
search.search(q='Test', facet_top_n={'author': 5})
args = self.get_args(HTTPretty.last_request.raw_requestline)
self.assertEqual(args[b'facet-author-top-n'], [b'5'])
def test_cloudsearch_top_n_multiple(self):
search = SearchConnection(endpoint=HOSTNAME)
search.search(q='Test', facet_top_n={'author': 5, 'cat': 10})
args = self.get_args(HTTPretty.last_request.raw_requestline)
self.assertEqual(args[b'facet-author-top-n'], [b'5'])
self.assertEqual(args[b'facet-cat-top-n'], [b'10'])
def test_cloudsearch_rank_single(self):
search = SearchConnection(endpoint=HOSTNAME)
search.search(q='Test', rank=["date"])
args = self.get_args(HTTPretty.last_request.raw_requestline)
self.assertEqual(args[b'rank'], [b'date'])
def test_cloudsearch_rank_multiple(self):
search = SearchConnection(endpoint=HOSTNAME)
search.search(q='Test', rank=["date", "score"])
args = self.get_args(HTTPretty.last_request.raw_requestline)
self.assertEqual(args[b'rank'], [b'date,score'])
def test_cloudsearch_result_fields_single(self):
search = SearchConnection(endpoint=HOSTNAME)
search.search(q='Test', return_fields=['author'])
args = self.get_args(HTTPretty.last_request.raw_requestline)
self.assertEqual(args[b'return-fields'], [b'author'])
def test_cloudsearch_result_fields_multiple(self):
search = SearchConnection(endpoint=HOSTNAME)
search.search(q='Test', return_fields=['author', 'title'])
args = self.get_args(HTTPretty.last_request.raw_requestline)
self.assertEqual(args[b'return-fields'], [b'author,title'])
def test_cloudsearch_t_field_single(self):
search = SearchConnection(endpoint=HOSTNAME)
search.search(q='Test', t={'year': '2001..2007'})
args = self.get_args(HTTPretty.last_request.raw_requestline)
self.assertEqual(args[b't-year'], [b'2001..2007'])
def test_cloudsearch_t_field_multiple(self):
search = SearchConnection(endpoint=HOSTNAME)
search.search(q='Test', t={'year': '2001..2007', 'score': '10..50'})
args = self.get_args(HTTPretty.last_request.raw_requestline)
self.assertEqual(args[b't-year'], [b'2001..2007'])
self.assertEqual(args[b't-score'], [b'10..50'])
def test_cloudsearch_results_meta(self):
"""Check returned metadata is parsed correctly"""
search = SearchConnection(endpoint=HOSTNAME)
results = search.search(q='Test')
# These rely on the default response which is fed into HTTPretty
self.assertEqual(results.rank, "-text_relevance")
self.assertEqual(results.match_expression, "Test")
def test_cloudsearch_results_info(self):
"""Check num_pages_needed is calculated correctly"""
search = SearchConnection(endpoint=HOSTNAME)
results = search.search(q='Test')
# This relies on the default response which is fed into HTTPretty
self.assertEqual(results.num_pages_needed, 3.0)
def test_cloudsearch_results_matched(self):
"""
Check that information objects are passed back through the API
correctly.
"""
search = SearchConnection(endpoint=HOSTNAME)
query = search.build_query(q='Test')
results = search(query)
self.assertEqual(results.search_service, search)
self.assertEqual(results.query, query)
def test_cloudsearch_results_hits(self):
"""Check that documents are parsed properly from AWS"""
search = SearchConnection(endpoint=HOSTNAME)
results = search.search(q='Test')
hits = list(map(lambda x: x['id'], results.docs))
# This relies on the default response which is fed into HTTPretty
self.assertEqual(
hits, ["12341", "12342", "12343", "12344",
"12345", "12346", "12347"])
def test_cloudsearch_results_iterator(self):
"""Check the results iterator"""
search = SearchConnection(endpoint=HOSTNAME)
results = search.search(q='Test')
results_correct = iter(["12341", "12342", "12343", "12344",
"12345", "12346", "12347"])
for x in results:
self.assertEqual(x['id'], next(results_correct))
    def test_cloudsearch_results_internal_consistency(self):
"""Check the documents length matches the iterator details"""
search = SearchConnection(endpoint=HOSTNAME)
results = search.search(q='Test')
self.assertEqual(len(results), len(results.docs))
def test_cloudsearch_search_nextpage(self):
"""Check next page query is correct"""
search = SearchConnection(endpoint=HOSTNAME)
query1 = search.build_query(q='Test')
query2 = search.build_query(q='Test')
results = search(query2)
self.assertEqual(results.next_page().query.start,
query1.start + query1.size)
self.assertEqual(query1.q, query2.q)
class CloudSearchSearchFacetTest(CloudSearchSearchBaseTest):
response = {
'rank': '-text_relevance',
'match-expr': "Test",
'hits': {
'found': 30,
'start': 0,
'hit': CloudSearchSearchBaseTest.hits
},
'info': {
'rid': 'b7c167f6c2da6d93531b9a7b314ad030b3a74803b4b7797edb905ba5a6a08',
'time-ms': 2,
'cpu-time-ms': 0
},
'facets': {
'tags': {},
'animals': {'constraints': [{'count': '2', 'value': 'fish'}, {'count': '1', 'value': 'lions'}]},
}
}
def test_cloudsearch_search_facets(self):
#self.response['facets'] = {'tags': {}}
search = SearchConnection(endpoint=HOSTNAME)
results = search.search(q='Test', facet=['tags'])
self.assertTrue('tags' not in results.facets)
self.assertEqual(results.facets['animals'], {u'lions': u'1', u'fish': u'2'})
class CloudSearchNonJsonTest(CloudSearchSearchBaseTest):
response = b'<html><body><h1>500 Internal Server Error</h1></body></html>'
response_status = 500
content_type = 'text/xml'
def test_response(self):
search = SearchConnection(endpoint=HOSTNAME)
with self.assertRaises(SearchServiceException):
search.search(q='Test')
class CloudSearchUnauthorizedTest(CloudSearchSearchBaseTest):
response = b'<html><body><h1>403 Forbidden</h1>foo bar baz</body></html>'
response_status = 403
content_type = 'text/html'
def test_response(self):
search = SearchConnection(endpoint=HOSTNAME)
with self.assertRaisesRegexp(SearchServiceException, 'foo bar baz'):
search.search(q='Test')
class FakeResponse(object):
status_code = 405
content = b''
class CloudSearchConnectionTest(unittest.TestCase):
cloudsearch = True
def setUp(self):
super(CloudSearchConnectionTest, self).setUp()
self.conn = SearchConnection(
endpoint='test-domain.cloudsearch.amazonaws.com'
)
def test_expose_additional_error_info(self):
mpo = mock.patch.object
fake = FakeResponse()
fake.content = b'Nopenopenope'
# First, in the case of a non-JSON, non-403 error.
with mpo(requests, 'get', return_value=fake) as mock_request:
with self.assertRaises(SearchServiceException) as cm:
self.conn.search(q='not_gonna_happen')
self.assertTrue('non-json response' in str(cm.exception))
self.assertTrue('Nopenopenope' in str(cm.exception))
# Then with JSON & an 'error' key within.
fake.content = json.dumps({
'error': "Something went wrong. Oops."
}).encode('utf-8')
with mpo(requests, 'get', return_value=fake) as mock_request:
with self.assertRaises(SearchServiceException) as cm:
self.conn.search(q='no_luck_here')
self.assertTrue('Unknown error' in str(cm.exception))
self.assertTrue('went wrong. Oops' in str(cm.exception))
| mit |