repo_name (stringlengths 5-100) | path (stringlengths 4-375) | copies (stringclasses, 991 values) | size (stringlengths 4-7) | content (stringlengths 666-1M) | license (stringclasses, 15 values) |
---|---|---|---|---|---|
odooindia/odoo | addons/base_action_rule/base_action_rule.py | 12 | 15745 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from datetime import datetime, timedelta
from dateutil.relativedelta import relativedelta
import time
import logging
import openerp
from openerp import SUPERUSER_ID
from openerp.osv import fields, osv
from openerp.tools import DEFAULT_SERVER_DATETIME_FORMAT
_logger = logging.getLogger(__name__)
DATE_RANGE_FUNCTION = {
'minutes': lambda interval: timedelta(minutes=interval),
'hour': lambda interval: timedelta(hours=interval),
'day': lambda interval: timedelta(days=interval),
'month': lambda interval: relativedelta(months=interval),  # timedelta() has no 'months' argument
False: lambda interval: timedelta(0),
}
def get_datetime(date_str):
'''Return a datetime from a date string or a datetime string'''
# complete date time if date_str contains only a date
if ' ' not in date_str:
date_str = date_str + " 00:00:00"
return datetime.strptime(date_str, DEFAULT_SERVER_DATETIME_FORMAT)
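# Illustrative note (not part of the original module): DEFAULT_SERVER_DATETIME_FORMAT
# is "%Y-%m-%d %H:%M:%S", so get_datetime("2014-01-15") is first padded to
# "2014-01-15 00:00:00" and then parsed into a naive datetime.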
class base_action_rule(osv.osv):
""" Base Action Rules """
_name = 'base.action.rule'
_description = 'Action Rules'
_order = 'sequence'
_columns = {
'name': fields.char('Rule Name', required=True),
'model_id': fields.many2one('ir.model', 'Related Document Model',
required=True, domain=[('osv_memory', '=', False)]),
'model': fields.related('model_id', 'model', type="char", string='Model'),
'create_date': fields.datetime('Create Date', readonly=1),
'active': fields.boolean('Active',
help="When unchecked, the rule is hidden and will not be executed."),
'sequence': fields.integer('Sequence',
help="Gives the sequence order when displaying a list of rules."),
'kind': fields.selection(
[('on_create', 'On Creation'),
('on_write', 'On Update'),
('on_create_or_write', 'On Creation & Update'),
('on_time', 'Based on Timed Condition')],
string='When to Run'),
'trg_date_id': fields.many2one('ir.model.fields', string='Trigger Date',
help="When should the condition be triggered. If present, will be checked by the scheduler. If empty, will be checked at creation and update.",
domain="[('model_id', '=', model_id), ('ttype', 'in', ('date', 'datetime'))]"),
'trg_date_range': fields.integer('Delay after trigger date',
help="Delay after the trigger date." \
"You can put a negative number if you need a delay before the" \
"trigger date, like sending a reminder 15 minutes before a meeting."),
'trg_date_range_type': fields.selection([('minutes', 'Minutes'), ('hour', 'Hours'),
('day', 'Days'), ('month', 'Months')], 'Delay type'),
'trg_date_calendar_id': fields.many2one(
'resource.calendar', 'Use Calendar',
help='When calculating a day-based timed condition, it is possible to use a calendar to compute the date based on working days.',
ondelete='set null',
),
'act_user_id': fields.many2one('res.users', 'Set Responsible'),
'act_followers': fields.many2many("res.partner", string="Add Followers"),
'server_action_ids': fields.many2many('ir.actions.server', string='Server Actions',
domain="[('model_id', '=', model_id)]",
help="Examples: email reminders, call object service, etc."),
'filter_pre_id': fields.many2one('ir.filters', string='Before Update Filter',
ondelete='restrict',
domain="[('model_id', '=', model_id.model)]",
help="If present, this condition must be satisfied before the update of the record."),
'filter_id': fields.many2one('ir.filters', string='Filter',
ondelete='restrict',
domain="[('model_id', '=', model_id.model)]",
help="If present, this condition must be satisfied before executing the action rule."),
'last_run': fields.datetime('Last Run', readonly=1, copy=False),
}
_defaults = {
'active': True,
'trg_date_range_type': 'day',
}
def onchange_kind(self, cr, uid, ids, kind, context=None):
clear_fields = []
if kind in ['on_create', 'on_create_or_write']:
clear_fields = ['filter_pre_id', 'trg_date_id', 'trg_date_range', 'trg_date_range_type']
elif kind in ['on_write', 'on_create_or_write']:
clear_fields = ['trg_date_id', 'trg_date_range', 'trg_date_range_type']
elif kind == 'on_time':
clear_fields = ['filter_pre_id']
return {'value': dict.fromkeys(clear_fields, False)}
def _filter(self, cr, uid, action, action_filter, record_ids, context=None):
""" filter the list record_ids that satisfy the action filter """
if record_ids and action_filter:
assert action.model == action_filter.model_id, "Filter model different from action rule model"
model = self.pool[action_filter.model_id]
domain = [('id', 'in', record_ids)] + eval(action_filter.domain)
ctx = dict(context or {})
ctx.update(eval(action_filter.context))
record_ids = model.search(cr, uid, domain, context=ctx)
return record_ids
def _process(self, cr, uid, action, record_ids, context=None):
""" process the given action on the records """
model = self.pool[action.model_id.model]
# modify records
values = {}
if 'date_action_last' in model._all_columns:
values['date_action_last'] = time.strftime(DEFAULT_SERVER_DATETIME_FORMAT)
if action.act_user_id and 'user_id' in model._all_columns:
values['user_id'] = action.act_user_id.id
if values:
model.write(cr, uid, record_ids, values, context=context)
if action.act_followers and hasattr(model, 'message_subscribe'):
follower_ids = map(int, action.act_followers)
model.message_subscribe(cr, uid, record_ids, follower_ids, context=context)
# execute server actions
if action.server_action_ids:
server_action_ids = map(int, action.server_action_ids)
for record in model.browse(cr, uid, record_ids, context):
action_server_obj = self.pool.get('ir.actions.server')
ctx = dict(context, active_model=model._name, active_ids=[record.id], active_id=record.id)
action_server_obj.run(cr, uid, server_action_ids, context=ctx)
return True
def _register_hook(self, cr, ids=None):
""" Wrap the methods `create` and `write` of the models specified by
the rules given by `ids` (or all existing rules if `ids` is `None`.)
"""
updated = False
if ids is None:
ids = self.search(cr, SUPERUSER_ID, [])
for action_rule in self.browse(cr, SUPERUSER_ID, ids):
model = action_rule.model_id.model
model_obj = self.pool[model]
if not hasattr(model_obj, 'base_action_ruled'):
# monkey-patch methods create and write
def create(self, cr, uid, vals, context=None, **kwargs):
# avoid loops or cascading actions
if context and context.get('action'):
return create.origin(self, cr, uid, vals, context=context)
# call original method with a modified context
context = dict(context or {}, action=True)
new_id = create.origin(self, cr, uid, vals, context=context, **kwargs)
# as it is a new record, we do not consider the actions that have a prefilter
action_model = self.pool.get('base.action.rule')
action_dom = [('model', '=', self._name),
('kind', 'in', ['on_create', 'on_create_or_write'])]
action_ids = action_model.search(cr, uid, action_dom, context=context)
# check postconditions, and execute actions on the records that satisfy them
for action in action_model.browse(cr, uid, action_ids, context=context):
if action_model._filter(cr, uid, action, action.filter_id, [new_id], context=context):
action_model._process(cr, uid, action, [new_id], context=context)
return new_id
def write(self, cr, uid, ids, vals, context=None, **kwargs):
# avoid loops or cascading actions
if context and context.get('action'):
return write.origin(self, cr, uid, ids, vals, context=context)
# modify context
context = dict(context or {}, action=True)
ids = [ids] if isinstance(ids, (int, long, str)) else ids
# retrieve the action rules to possibly execute
action_model = self.pool.get('base.action.rule')
action_dom = [('model', '=', self._name),
('kind', 'in', ['on_write', 'on_create_or_write'])]
action_ids = action_model.search(cr, uid, action_dom, context=context)
actions = action_model.browse(cr, uid, action_ids, context=context)
# check preconditions
pre_ids = {}
for action in actions:
pre_ids[action] = action_model._filter(cr, uid, action, action.filter_pre_id, ids, context=context)
# call original method
write.origin(self, cr, uid, ids, vals, context=context, **kwargs)
# check postconditions, and execute actions on the records that satisfy them
for action in actions:
post_ids = action_model._filter(cr, uid, action, action.filter_id, pre_ids[action], context=context)
if post_ids:
action_model._process(cr, uid, action, post_ids, context=context)
return True
model_obj._patch_method('create', create)
model_obj._patch_method('write', write)
model_obj.base_action_ruled = True
updated = True
return updated
def create(self, cr, uid, vals, context=None):
res_id = super(base_action_rule, self).create(cr, uid, vals, context=context)
if self._register_hook(cr, [res_id]):
openerp.modules.registry.RegistryManager.signal_registry_change(cr.dbname)
return res_id
def write(self, cr, uid, ids, vals, context=None):
if isinstance(ids, (int, long)):
ids = [ids]
super(base_action_rule, self).write(cr, uid, ids, vals, context=context)
if self._register_hook(cr, ids):
openerp.modules.registry.RegistryManager.signal_registry_change(cr.dbname)
return True
def onchange_model_id(self, cr, uid, ids, model_id, context=None):
data = {'model': False, 'filter_pre_id': False, 'filter_id': False}
if model_id:
model = self.pool.get('ir.model').browse(cr, uid, model_id, context=context)
data.update({'model': model.model})
return {'value': data}
def _check_delay(self, cr, uid, action, record, record_dt, context=None):
if action.trg_date_calendar_id and action.trg_date_range_type == 'day':
start_dt = get_datetime(record_dt)
action_dt = self.pool['resource.calendar'].schedule_days_get_date(
cr, uid, action.trg_date_calendar_id.id, action.trg_date_range,
day_date=start_dt, compute_leaves=True, context=context
)
else:
delay = DATE_RANGE_FUNCTION[action.trg_date_range_type](action.trg_date_range)
action_dt = get_datetime(record_dt) + delay
return action_dt
def _check(self, cr, uid, automatic=False, use_new_cursor=False, context=None):
""" This Function is called by scheduler. """
context = context or {}
# retrieve all the action rules to run based on a timed condition
action_dom = [('kind', '=', 'on_time')]
action_ids = self.search(cr, uid, action_dom, context=context)
for action in self.browse(cr, uid, action_ids, context=context):
now = datetime.now()
if action.last_run:
last_run = get_datetime(action.last_run)
else:
last_run = datetime.utcfromtimestamp(0)
# retrieve all the records that satisfy the action's condition
model = self.pool[action.model_id.model]
domain = []
ctx = dict(context)
if action.filter_id:
domain = eval(action.filter_id.domain)
ctx.update(eval(action.filter_id.context))
if 'lang' not in ctx:
# Filters might be language-sensitive, attempt to reuse creator lang
# as we are usually running this as super-user in background
[filter_meta] = action.filter_id.get_metadata()
user_id = filter_meta['write_uid'] and filter_meta['write_uid'][0] or \
filter_meta['create_uid'][0]
ctx['lang'] = self.pool['res.users'].browse(cr, uid, user_id).lang
record_ids = model.search(cr, uid, domain, context=ctx)
# determine when action should occur for the records
date_field = action.trg_date_id.name
if date_field == 'date_action_last' and 'create_date' in model._all_columns:
get_record_dt = lambda record: record[date_field] or record.create_date
else:
get_record_dt = lambda record: record[date_field]
# process action on the records that should be executed
for record in model.browse(cr, uid, record_ids, context=context):
record_dt = get_record_dt(record)
if not record_dt:
continue
action_dt = self._check_delay(cr, uid, action, record, record_dt, context=context)
if last_run <= action_dt < now:
try:
context = dict(context or {}, action=True)
self._process(cr, uid, action, [record.id], context=context)
except Exception:
import traceback
_logger.error(traceback.format_exc())
action.write({'last_run': now.strftime(DEFAULT_SERVER_DATETIME_FORMAT)})
if automatic:
# auto-commit for batch processing
cr.commit()
| agpl-3.0 |
Yannig/ansible | lib/ansible/modules/web_infrastructure/ansible_tower/tower_inventory.py | 15 | 5168 | #!/usr/bin/python
# coding: utf-8 -*-
# (c) 2017, Wayne Witzel III <[email protected]>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: tower_inventory
version_added: "2.3"
author: "Wayne Witzel III (@wwitzel3)"
short_description: create, update, or destroy Ansible Tower inventory.
description:
- Create, update, or destroy Ansible Tower inventories. See
U(https://www.ansible.com/tower) for an overview.
options:
name:
description:
- The name to use for the inventory.
required: True
description:
description:
- The description to use for the inventory.
required: False
default: null
organization:
description:
- Organization the inventory belongs to.
required: True
variables:
description:
- Inventory variables. Use '@' to get from file.
required: False
default: null
state:
description:
- Desired state of the resource.
required: False
default: "present"
choices: ["present", "absent"]
tower_host:
description:
- URL to your Tower instance.
required: False
default: null
tower_username:
description:
- Username for your Tower instance.
required: False
default: null
tower_password:
description:
- Password for your Tower instance.
required: False
default: null
tower_verify_ssl:
description:
- Dis/allow insecure connections to Tower. If C(no), SSL certificates will not be validated.
This should only be used on personally controlled sites using self-signed certificates.
required: False
default: True
tower_config_file:
description:
- Path to the Tower config file. See notes.
required: False
default: null
requirements:
- "python >= 2.6"
- "ansible-tower-cli >= 3.0.3"
notes:
- If no I(config_file) is provided we will attempt to use the tower-cli library
defaults to find your Tower host information.
- I(config_file) should contain Tower configuration in the following format
host=hostname
username=username
password=password
'''
EXAMPLES = '''
- name: Add tower inventory
tower_inventory:
name: "Foo Inventory"
description: "Our Foo Cloud Servers"
organization: "Bar Org"
state: present
tower_config_file: "~/tower_cli.cfg"
'''
try:
import tower_cli
import tower_cli.utils.exceptions as exc
from tower_cli.conf import settings
from ansible.module_utils.ansible_tower import tower_auth_config, tower_check_mode
HAS_TOWER_CLI = True
except ImportError:
HAS_TOWER_CLI = False
def main():
module = AnsibleModule(
argument_spec=dict(
name=dict(required=True),
description=dict(),
organization=dict(required=True),
variables=dict(),
tower_host=dict(),
tower_username=dict(),
tower_password=dict(no_log=True),
tower_verify_ssl=dict(type='bool', default=True),
tower_config_file=dict(type='path'),
state=dict(choices=['present', 'absent'], default='present'),
),
supports_check_mode=True
)
if not HAS_TOWER_CLI:
module.fail_json(msg='ansible-tower-cli required for this module')
name = module.params.get('name')
description = module.params.get('description')
organization = module.params.get('organization')
variables = module.params.get('variables')
state = module.params.get('state')
json_output = {'inventory': name, 'state': state}
tower_auth = tower_auth_config(module)
with settings.runtime_values(**tower_auth):
tower_check_mode(module)
inventory = tower_cli.get_resource('inventory')
try:
org_res = tower_cli.get_resource('organization')
org = org_res.get(name=organization)
if state == 'present':
result = inventory.modify(name=name, organization=org['id'], variables=variables,
description=description, create_on_missing=True)
json_output['id'] = result['id']
elif state == 'absent':
result = inventory.delete(name=name, organization=org['id'])
except (exc.NotFound) as excinfo:
module.fail_json(msg='Failed to update inventory, organization not found: {0}'.format(excinfo), changed=False)
except (exc.ConnectionError, exc.BadRequest) as excinfo:
module.fail_json(msg='Failed to update inventory: {0}'.format(excinfo), changed=False)
json_output['changed'] = result['changed']
module.exit_json(**json_output)
from ansible.module_utils.basic import AnsibleModule
if __name__ == '__main__':
main()
| gpl-3.0 |
nicebug/SuckerTest | web/wsgiserver/ssl_pyopenssl.py | 177 | 9605 | """A library for integrating pyOpenSSL with CherryPy.
The OpenSSL module must be importable for SSL functionality.
You can obtain it from http://pyopenssl.sourceforge.net/
To use this module, set CherryPyWSGIServer.ssl_adapter to an instance of
SSLAdapter. There are two ways to use SSL:
Method One
----------
* ``ssl_adapter.context``: an instance of SSL.Context.
If this is not None, it is assumed to be an SSL.Context instance,
and will be passed to SSL.Connection on bind(). The developer is
responsible for forming a valid Context object. This approach is
to be preferred for more flexibility, e.g. if the cert and key are
streams instead of files, or need decryption, or SSL.SSLv3_METHOD
is desired instead of the default SSL.SSLv23_METHOD, etc. Consult
the pyOpenSSL documentation for complete options.
Method Two (shortcut)
---------------------
* ``ssl_adapter.certificate``: the filename of the server SSL certificate.
* ``ssl_adapter.private_key``: the filename of the server's private key file.
Both are None by default. If ssl_adapter.context is None, but .private_key
and .certificate are both given and valid, they will be read, and the
context will be automatically created from them.
"""
import socket
import threading
import time
from cherrypy import wsgiserver
try:
from OpenSSL import SSL
from OpenSSL import crypto
except ImportError:
SSL = None
class SSL_fileobject(wsgiserver.CP_fileobject):
"""SSL file object attached to a socket object."""
ssl_timeout = 3
ssl_retry = .01
def _safe_call(self, is_reader, call, *args, **kwargs):
"""Wrap the given call with SSL error-trapping.
is_reader: if False EOF errors will be raised. If True, EOF errors
will return "" (to emulate normal sockets).
"""
start = time.time()
while True:
try:
return call(*args, **kwargs)
except SSL.WantReadError:
# Sleep and try again. This is dangerous, because it means
# the rest of the stack has no way of differentiating
# between a "new handshake" error and "client dropped".
# Note this isn't an endless loop: there's a timeout below.
time.sleep(self.ssl_retry)
except SSL.WantWriteError:
time.sleep(self.ssl_retry)
except SSL.SysCallError, e:
if is_reader and e.args == (-1, 'Unexpected EOF'):
return ""
errnum = e.args[0]
if is_reader and errnum in wsgiserver.socket_errors_to_ignore:
return ""
raise socket.error(errnum)
except SSL.Error, e:
if is_reader and e.args == (-1, 'Unexpected EOF'):
return ""
thirdarg = None
try:
thirdarg = e.args[0][0][2]
except IndexError:
pass
if thirdarg == 'http request':
# The client is talking HTTP to an HTTPS server.
raise wsgiserver.NoSSLError()
raise wsgiserver.FatalSSLAlert(*e.args)
except:
raise
if time.time() - start > self.ssl_timeout:
raise socket.timeout("timed out")
def recv(self, *args, **kwargs):
buf = []
r = super(SSL_fileobject, self).recv
while True:
data = self._safe_call(True, r, *args, **kwargs)
buf.append(data)
p = self._sock.pending()
if not p:
return "".join(buf)
def sendall(self, *args, **kwargs):
return self._safe_call(False, super(SSL_fileobject, self).sendall,
*args, **kwargs)
def send(self, *args, **kwargs):
return self._safe_call(False, super(SSL_fileobject, self).send,
*args, **kwargs)
class SSLConnection:
"""A thread-safe wrapper for an SSL.Connection.
``*args``: the arguments to create the wrapped ``SSL.Connection(*args)``.
"""
def __init__(self, *args):
self._ssl_conn = SSL.Connection(*args)
self._lock = threading.RLock()
for f in ('get_context', 'pending', 'send', 'write', 'recv', 'read',
'renegotiate', 'bind', 'listen', 'connect', 'accept',
'setblocking', 'fileno', 'close', 'get_cipher_list',
'getpeername', 'getsockname', 'getsockopt', 'setsockopt',
'makefile', 'get_app_data', 'set_app_data', 'state_string',
'sock_shutdown', 'get_peer_certificate', 'want_read',
'want_write', 'set_connect_state', 'set_accept_state',
'connect_ex', 'sendall', 'settimeout', 'gettimeout'):
exec("""def %s(self, *args):
self._lock.acquire()
try:
return self._ssl_conn.%s(*args)
finally:
self._lock.release()
""" % (f, f))
def shutdown(self, *args):
self._lock.acquire()
try:
# pyOpenSSL.socket.shutdown takes no args
return self._ssl_conn.shutdown()
finally:
self._lock.release()
class pyOpenSSLAdapter(wsgiserver.SSLAdapter):
"""A wrapper for integrating pyOpenSSL with CherryPy."""
context = None
"""An instance of SSL.Context."""
certificate = None
"""The filename of the server SSL certificate."""
private_key = None
"""The filename of the server's private key file."""
certificate_chain = None
"""Optional. The filename of CA's intermediate certificate bundle.
This is needed for cheaper "chained root" SSL certificates, and should be
left as None if not required."""
def __init__(self, certificate, private_key, certificate_chain=None):
if SSL is None:
raise ImportError("You must install pyOpenSSL to use HTTPS.")
self.context = None
self.certificate = certificate
self.private_key = private_key
self.certificate_chain = certificate_chain
self._environ = None
def bind(self, sock):
"""Wrap and return the given socket."""
if self.context is None:
self.context = self.get_context()
conn = SSLConnection(self.context, sock)
self._environ = self.get_environ()
return conn
def wrap(self, sock):
"""Wrap and return the given socket, plus WSGI environ entries."""
return sock, self._environ.copy()
def get_context(self):
"""Return an SSL.Context from self attributes."""
# See http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/442473
c = SSL.Context(SSL.SSLv23_METHOD)
c.use_privatekey_file(self.private_key)
if self.certificate_chain:
c.load_verify_locations(self.certificate_chain)
c.use_certificate_file(self.certificate)
return c
def get_environ(self):
"""Return WSGI environ entries to be merged into each request."""
ssl_environ = {
"HTTPS": "on",
# pyOpenSSL doesn't provide access to any of these AFAICT
## 'SSL_PROTOCOL': 'SSLv2',
## SSL_CIPHER string The cipher specification name
## SSL_VERSION_INTERFACE string The mod_ssl program version
## SSL_VERSION_LIBRARY string The OpenSSL program version
}
if self.certificate:
# Server certificate attributes
cert = open(self.certificate, 'rb').read()
cert = crypto.load_certificate(crypto.FILETYPE_PEM, cert)
ssl_environ.update({
'SSL_SERVER_M_VERSION': cert.get_version(),
'SSL_SERVER_M_SERIAL': cert.get_serial_number(),
## 'SSL_SERVER_V_START': Validity of server's certificate (start time),
## 'SSL_SERVER_V_END': Validity of server's certificate (end time),
})
for prefix, dn in [("I", cert.get_issuer()),
("S", cert.get_subject())]:
# X509Name objects don't seem to have a way to get the
# complete DN string. Use str() and slice it instead,
# because str(dn) == "<X509Name object '/C=US/ST=...'>"
dnstr = str(dn)[18:-2]
wsgikey = 'SSL_SERVER_%s_DN' % prefix
ssl_environ[wsgikey] = dnstr
# The DN should be of the form: /k1=v1/k2=v2, but we must allow
# for any value to contain slashes itself (in a URL).
while dnstr:
pos = dnstr.rfind("=")
dnstr, value = dnstr[:pos], dnstr[pos + 1:]
pos = dnstr.rfind("/")
dnstr, key = dnstr[:pos], dnstr[pos + 1:]
if key and value:
wsgikey = 'SSL_SERVER_%s_DN_%s' % (prefix, key)
ssl_environ[wsgikey] = value
return ssl_environ
def makefile(self, sock, mode='r', bufsize=-1):
if SSL and isinstance(sock, SSL.ConnectionType):
timeout = sock.gettimeout()
f = SSL_fileobject(sock, mode, bufsize)
f.ssl_timeout = timeout
return f
else:
return wsgiserver.CP_fileobject(sock, mode, bufsize)
| mit |
scorphus/passpie | passpie/database.py | 2 | 4638 | from datetime import datetime
import logging
import os
import shutil
from tinydb import TinyDB, Storage, where, Query
import yaml
from .utils import mkdir_open
from .history import Repository
from .credential import split_fullname, make_fullname
class PasspieStorage(Storage):
extension = ".pass"
def __init__(self, path):
super(PasspieStorage, self).__init__()
self.path = path
def make_credpath(self, name, login):
dirname, filename = name, login + self.extension
credpath = os.path.join(self.path, dirname, filename)
return credpath
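# Illustrative only: with path "~/.passpie" and the default ".pass" extension,
# make_credpath("github", "alice") resolves to "~/.passpie/github/alice.pass".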
def delete(self, credentials):
for cred in credentials:
credpath = self.make_credpath(cred["name"], cred["login"])
os.remove(credpath)
if not os.listdir(os.path.dirname(credpath)):
shutil.rmtree(os.path.dirname(credpath))
def read(self):
elements = []
for rootdir, dirs, files in os.walk(self.path):
filenames = [f for f in files if f.endswith(self.extension)]
for filename in filenames:
docpath = os.path.join(rootdir, filename)
with open(docpath) as f:
elements.append(yaml.load(f.read()))
return {"_default":
{idx: elem for idx, elem in enumerate(elements, start=1)}}
def write(self, data):
deleted = [c for c in self.read()["_default"].values()
if c not in data["_default"].values()]
self.delete(deleted)
for eid, cred in data["_default"].items():
credpath = self.make_credpath(cred["name"], cred["login"])
with mkdir_open(credpath, "w") as f:
f.write(yaml.safe_dump(dict(cred), default_flow_style=False))
class Database(TinyDB):
def __init__(self, config, storage=PasspieStorage):
self.config = config
self.path = config['path']
self.repo = Repository(self.path,
autopull=config.get('autopull'),
autopush=config.get('autopush'))
PasspieStorage.extension = config['extension']
super(Database, self).__init__(self.path, storage=storage)
def has_keys(self):
return os.path.exists(os.path.join(self.path, '.keys'))
def filename(self, fullname):
login, name = split_fullname(fullname)
return self._storage.make_credpath(name=name, login=login)
def credential(self, fullname):
login, name = split_fullname(fullname)
Credential = Query()
if login is None:
creds = self.get(Credential.name == name)
else:
creds = self.get((Credential.login == login) & (Credential.name == name))
return creds
def add(self, fullname, password, comment):
login, name = split_fullname(fullname)
if login is None:
logging.error('Cannot add credential with empty login. use "@<name>" syntax')
return None
credential = dict(fullname=fullname,
name=name,
login=login,
password=password,
comment=comment,
modified=datetime.now())
self.insert(credential)
return credential
def update(self, fullname, values):
login, name = split_fullname(fullname)
values['fullname'] = make_fullname(values["login"], values["name"])
values['modified'] = datetime.now()
Credential = Query()
if login is None:
query = (Credential.name == name)
else:
query = ((Credential.login == login) & (Credential.name == name))
self.table().update(values, query)
def credentials(self, fullname=None):
if fullname:
login, name = split_fullname(fullname)
Credential = Query()
if login is None:
creds = self.search(Credential.name == name)
else:
creds = self.search((Credential.login == login) & (Credential.name == name))
else:
creds = self.all()
return sorted(creds, key=lambda x: x["name"] + x["login"])
def remove(self, fullname):
self.table().remove(where('fullname') == fullname)
def matches(self, regex):
Credential = Query()
credentials = self.search(
Credential.name.matches(regex) |
Credential.login.matches(regex) |
Credential.comment.matches(regex)
)
return sorted(credentials, key=lambda x: x["name"] + x["login"])
| mit |
bharatsingh430/py-R-FCN-multiGPU | lib/datasets/voc_eval.py | 12 | 6938 | # --------------------------------------------------------
# Fast/er R-CNN
# Licensed under The MIT License [see LICENSE for details]
# Written by Bharath Hariharan
# --------------------------------------------------------
import xml.etree.ElementTree as ET
import os
import cPickle
import numpy as np
def parse_rec(filename):
""" Parse a PASCAL VOC xml file """
tree = ET.parse(filename)
objects = []
for obj in tree.findall('object'):
obj_struct = {}
obj_struct['name'] = obj.find('name').text
obj_struct['pose'] = obj.find('pose').text
obj_struct['truncated'] = int(obj.find('truncated').text)
obj_struct['difficult'] = int(obj.find('difficult').text)
bbox = obj.find('bndbox')
obj_struct['bbox'] = [int(bbox.find('xmin').text),
int(bbox.find('ymin').text),
int(bbox.find('xmax').text),
int(bbox.find('ymax').text)]
objects.append(obj_struct)
return objects
def voc_ap(rec, prec, use_07_metric=False):
""" ap = voc_ap(rec, prec, [use_07_metric])
Compute VOC AP given precision and recall.
If use_07_metric is true, uses the
VOC 07 11 point method (default:False).
"""
if use_07_metric:
# 11 point metric
ap = 0.
for t in np.arange(0., 1.1, 0.1):
if np.sum(rec >= t) == 0:
p = 0
else:
p = np.max(prec[rec >= t])
ap = ap + p / 11.
else:
# correct AP calculation
# first append sentinel values at the end
mrec = np.concatenate(([0.], rec, [1.]))
mpre = np.concatenate(([0.], prec, [0.]))
# compute the precision envelope
for i in range(mpre.size - 1, 0, -1):
mpre[i - 1] = np.maximum(mpre[i - 1], mpre[i])
# to calculate area under PR curve, look for points
# where X axis (recall) changes value
i = np.where(mrec[1:] != mrec[:-1])[0]
# and sum (\Delta recall) * prec
ap = np.sum((mrec[i + 1] - mrec[i]) * mpre[i + 1])
return ap
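# Hedged usage sketch (the recall/precision values are made up):
#   rec = np.array([0.1, 0.4, 0.7])
#   prec = np.array([1.0, 0.8, 0.6])
#   ap = voc_ap(rec, prec, use_07_metric=False)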
def voc_eval(detpath,
annopath,
imagesetfile,
classname,
cachedir,
ovthresh=0.5,
use_07_metric=False):
"""rec, prec, ap = voc_eval(detpath,
annopath,
imagesetfile,
classname,
[ovthresh],
[use_07_metric])
Top level function that does the PASCAL VOC evaluation.
detpath: Path to detections
detpath.format(classname) should produce the detection results file.
annopath: Path to annotations
annopath.format(imagename) should be the xml annotations file.
imagesetfile: Text file containing the list of images, one image per line.
classname: Category name (duh)
cachedir: Directory for caching the annotations
[ovthresh]: Overlap threshold (default = 0.5)
[use_07_metric]: Whether to use VOC07's 11 point AP computation
(default False)
"""
# assumes detections are in detpath.format(classname)
# assumes annotations are in annopath.format(imagename)
# assumes imagesetfile is a text file with each line an image name
# cachedir caches the annotations in a pickle file
# first load gt
if not os.path.isdir(cachedir):
os.mkdir(cachedir)
cachefile = os.path.join(cachedir, 'annots.pkl')
# read list of images
with open(imagesetfile, 'r') as f:
lines = f.readlines()
imagenames = [x.strip() for x in lines]
if not os.path.isfile(cachefile):
# load annots
recs = {}
for i, imagename in enumerate(imagenames):
recs[imagename] = parse_rec(annopath.format(imagename))
if i % 100 == 0:
print 'Reading annotation for {:d}/{:d}'.format(
i + 1, len(imagenames))
# save
print 'Saving cached annotations to {:s}'.format(cachefile)
with open(cachefile, 'w') as f:
cPickle.dump(recs, f)
else:
# load
with open(cachefile, 'r') as f:
recs = cPickle.load(f)
# extract gt objects for this class
class_recs = {}
npos = 0
for imagename in imagenames:
R = [obj for obj in recs[imagename] if obj['name'] == classname]
bbox = np.array([x['bbox'] for x in R])
difficult = np.array([x['difficult'] for x in R]).astype(np.bool)
det = [False] * len(R)
npos = npos + sum(~difficult)
class_recs[imagename] = {'bbox': bbox,
'difficult': difficult,
'det': det}
# read dets
detfile = detpath.format(classname)
with open(detfile, 'r') as f:
lines = f.readlines()
splitlines = [x.strip().split(' ') for x in lines]
image_ids = [x[0] for x in splitlines]
confidence = np.array([float(x[1]) for x in splitlines])
BB = np.array([[float(z) for z in x[2:]] for x in splitlines])
# sort by confidence
sorted_ind = np.argsort(-confidence)
sorted_scores = np.sort(-confidence)
BB = BB[sorted_ind, :]
image_ids = [image_ids[x] for x in sorted_ind]
# go down dets and mark TPs and FPs
nd = len(image_ids)
tp = np.zeros(nd)
fp = np.zeros(nd)
for d in range(nd):
R = class_recs[image_ids[d]]
bb = BB[d, :].astype(float)
ovmax = -np.inf
BBGT = R['bbox'].astype(float)
if BBGT.size > 0:
# compute overlaps
# intersection
ixmin = np.maximum(BBGT[:, 0], bb[0])
iymin = np.maximum(BBGT[:, 1], bb[1])
ixmax = np.minimum(BBGT[:, 2], bb[2])
iymax = np.minimum(BBGT[:, 3], bb[3])
iw = np.maximum(ixmax - ixmin + 1., 0.)
ih = np.maximum(iymax - iymin + 1., 0.)
inters = iw * ih
# union
uni = ((bb[2] - bb[0] + 1.) * (bb[3] - bb[1] + 1.) +
(BBGT[:, 2] - BBGT[:, 0] + 1.) *
(BBGT[:, 3] - BBGT[:, 1] + 1.) - inters)
overlaps = inters / uni
ovmax = np.max(overlaps)
jmax = np.argmax(overlaps)
if ovmax > ovthresh:
if not R['difficult'][jmax]:
if not R['det'][jmax]:
tp[d] = 1.
R['det'][jmax] = 1
else:
fp[d] = 1.
else:
fp[d] = 1.
# compute precision recall
fp = np.cumsum(fp)
tp = np.cumsum(tp)
rec = tp / float(npos)
# avoid divide by zero in case the first detection matches a difficult
# ground truth
prec = tp / np.maximum(tp + fp, np.finfo(np.float64).eps)
ap = voc_ap(rec, prec, use_07_metric)
return rec, prec, ap
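# Hedged usage sketch (every path below is a placeholder, not a path from this
# repo):
#   rec, prec, ap = voc_eval('results/det_test_{:s}.txt',
#                            'Annotations/{:s}.xml',
#                            'ImageSets/Main/test.txt',
#                            'car',
#                            'annotations_cache',
#                            ovthresh=0.5,
#                            use_07_metric=True)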
| mit |
SnappleCap/oh-mainline | vendor/packages/gdata/src/gdata/alt/appengine.py | 77 | 11666 | #!/usr/bin/python
#
# Copyright (C) 2008 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Provides HTTP functions for gdata.service to use on Google App Engine
AppEngineHttpClient: Provides an HTTP request method which uses App Engine's
urlfetch API. Set the http_client member of a GDataService object to an
instance of an AppEngineHttpClient to allow the gdata library to run on
Google App Engine.
run_on_appengine: Function which will modify an existing GDataService object
to allow it to run on App Engine. It works by creating a new instance of
the AppEngineHttpClient and replacing the GDataService object's
http_client.
"""
__author__ = 'api.jscudder (Jeff Scudder)'
import StringIO
import pickle
import atom.http_interface
import atom.token_store
from google.appengine.api import urlfetch
from google.appengine.ext import db
from google.appengine.api import users
from google.appengine.api import memcache
def run_on_appengine(gdata_service, store_tokens=True,
single_user_mode=False, deadline=None):
"""Modifies a GDataService object to allow it to run on App Engine.
Args:
gdata_service: An instance of AtomService, GDataService, or any
of their subclasses which has an http_client member and a
token_store member.
store_tokens: Boolean, defaults to True. If True, the gdata_service
will attempt to add each token to it's token_store when
SetClientLoginToken or SetAuthSubToken is called. If False
the tokens will not automatically be added to the
token_store.
single_user_mode: Boolean, defaults to False. If True, the current_token
member of gdata_service will be set when
SetClientLoginToken or SetAuthTubToken is called. If set
to True, the current_token is set in the gdata_service
and anyone who accesses the object will use the same
token.
Note: If store_tokens is set to False and
single_user_mode is set to False, all tokens will be
ignored, since the library assumes that the tokens should not
be stored in the datastore and they should not be stored
in the gdata_service object. This will make it
impossible to make requests which require authorization.
deadline: int (optional) The number of seconds to wait for a response
before timing out on the HTTP request. If no deadline is
specified, the default deadline for HTTP requests from App
Engine is used. The maximum is currently 10 (for 10 seconds).
The default deadline for App Engine is 5 seconds.
"""
gdata_service.http_client = AppEngineHttpClient(deadline=deadline)
gdata_service.token_store = AppEngineTokenStore()
gdata_service.auto_store_tokens = store_tokens
gdata_service.auto_set_current_token = single_user_mode
return gdata_service
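# Hedged usage sketch (CalendarService is only an example; any AtomService or
# GDataService subclass with http_client and token_store members should work,
# per the docstring above):
#   import gdata.calendar.service
#   client = gdata.calendar.service.CalendarService()
#   run_on_appengine(client, store_tokens=True, single_user_mode=False,
#                    deadline=10)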
class AppEngineHttpClient(atom.http_interface.GenericHttpClient):
def __init__(self, headers=None, deadline=None):
self.debug = False
self.headers = headers or {}
self.deadline = deadline
def request(self, operation, url, data=None, headers=None):
"""Performs an HTTP call to the server, supports GET, POST, PUT, and
DELETE.
Usage example, perform an HTTP GET on http://www.google.com/:
import atom.http
client = atom.http.HttpClient()
http_response = client.request('GET', 'http://www.google.com/')
Args:
operation: str The HTTP operation to be performed. This is usually one
of 'GET', 'POST', 'PUT', or 'DELETE'
data: filestream, list of parts, or other object which can be converted
to a string. Should be set to None when performing a GET or DELETE.
If data is a file-like object which can be read, this method will
read a chunk of 100K bytes at a time and send them.
If the data is a list of parts to be sent, each part will be
evaluated and sent.
url: The full URL to which the request should be sent. Can be a string
or atom.url.Url.
headers: dict of strings. HTTP headers which should be sent
in the request.
"""
all_headers = self.headers.copy()
if headers:
all_headers.update(headers)
# Construct the full payload.
# Assume that data is None or a string.
data_str = data
if data:
if isinstance(data, list):
# If data is a list of different objects, convert them all to strings
# and join them together.
converted_parts = [_convert_data_part(x) for x in data]
data_str = ''.join(converted_parts)
else:
data_str = _convert_data_part(data)
# If the list of headers does not include a Content-Length, attempt to
# calculate it based on the data object.
if data and 'Content-Length' not in all_headers:
all_headers['Content-Length'] = str(len(data_str))
# Set the content type to the default value if none was set.
if 'Content-Type' not in all_headers:
all_headers['Content-Type'] = 'application/atom+xml'
# Lookup the urlfetch operation which corresponds to the desired HTTP verb.
if operation == 'GET':
method = urlfetch.GET
elif operation == 'POST':
method = urlfetch.POST
elif operation == 'PUT':
method = urlfetch.PUT
elif operation == 'DELETE':
method = urlfetch.DELETE
else:
method = None
if self.deadline is None:
return HttpResponse(urlfetch.Fetch(url=str(url), payload=data_str,
method=method, headers=all_headers, follow_redirects=False))
return HttpResponse(urlfetch.Fetch(url=str(url), payload=data_str,
method=method, headers=all_headers, follow_redirects=False,
deadline=self.deadline))
def _convert_data_part(data):
if not data or isinstance(data, str):
return data
elif hasattr(data, 'read'):
# data is a file like object, so read it completely.
return data.read()
# The data object was not a file.
# Try to convert to a string and send the data.
return str(data)
class HttpResponse(object):
"""Translates a urlfetch resoinse to look like an hhtplib resoinse.
Used to allow the resoinse from HttpRequest to be usable by gdata.service
methods.
"""
def __init__(self, urlfetch_response):
self.body = StringIO.StringIO(urlfetch_response.content)
self.headers = urlfetch_response.headers
self.status = urlfetch_response.status_code
self.reason = ''
def read(self, length=None):
if not length:
return self.body.read()
else:
return self.body.read(length)
def getheader(self, name):
if not self.headers.has_key(name):
return self.headers[name.lower()]
return self.headers[name]
class TokenCollection(db.Model):
"""Datastore Model which associates auth tokens with the current user."""
user = db.UserProperty()
pickled_tokens = db.BlobProperty()
class AppEngineTokenStore(atom.token_store.TokenStore):
"""Stores the user's auth tokens in the App Engine datastore.
Tokens are only written to the datastore if a user is signed in (if
users.get_current_user() returns a user object).
"""
def __init__(self):
self.user = None
def add_token(self, token):
"""Associates the token with the current user and stores it.
If there is no current user, the token will not be stored.
Returns:
False if the token was not stored.
"""
tokens = load_auth_tokens(self.user)
if not hasattr(token, 'scopes') or not token.scopes:
return False
for scope in token.scopes:
tokens[str(scope)] = token
key = save_auth_tokens(tokens, self.user)
if key:
return True
return False
def find_token(self, url):
"""Searches the current user's collection of token for a token which can
be used for a request to the url.
Returns:
The stored token which belongs to the current user and is valid for the
desired URL. If there is no current user, or there is no valid user
token in the datastore, a atom.http_interface.GenericToken is returned.
"""
if url is None:
return None
if isinstance(url, (str, unicode)):
url = atom.url.parse_url(url)
tokens = load_auth_tokens(self.user)
if url in tokens:
token = tokens[url]
if token.valid_for_scope(url):
return token
else:
del tokens[url]
save_auth_tokens(tokens, self.user)
for scope, token in tokens.iteritems():
if token.valid_for_scope(url):
return token
return atom.http_interface.GenericToken()
def remove_token(self, token):
"""Removes the token from the current user's collection in the datastore.
Returns:
False if the token was not removed, this could be because the token was
not in the datastore, or because there is no current user.
"""
token_found = False
scopes_to_delete = []
tokens = load_auth_tokens(self.user)
for scope, stored_token in tokens.iteritems():
if stored_token == token:
scopes_to_delete.append(scope)
token_found = True
for scope in scopes_to_delete:
del tokens[scope]
if token_found:
save_auth_tokens(tokens, self.user)
return token_found
def remove_all_tokens(self):
"""Removes all of the current user's tokens from the datastore."""
save_auth_tokens({}, self.user)
def save_auth_tokens(token_dict, user=None):
"""Associates the tokens with the current user and writes to the datastore.
If there is no current user, the tokens are not written and this function
returns None.
Returns:
The key of the datastore entity containing the user's tokens, or None if
there was no current user.
"""
if user is None:
user = users.get_current_user()
if user is None:
return None
memcache.set('gdata_pickled_tokens:%s' % user, pickle.dumps(token_dict))
user_tokens = TokenCollection.all().filter('user =', user).get()
if user_tokens:
user_tokens.pickled_tokens = pickle.dumps(token_dict)
return user_tokens.put()
else:
user_tokens = TokenCollection(
user=user,
pickled_tokens=pickle.dumps(token_dict))
return user_tokens.put()
def load_auth_tokens(user=None):
"""Reads a dictionary of the current user's tokens from the datastore.
If there is no current user (a user is not signed in to the app) or the user
does not have any tokens, an empty dictionary is returned.
"""
if user is None:
user = users.get_current_user()
if user is None:
return {}
pickled_tokens = memcache.get('gdata_pickled_tokens:%s' % user)
if pickled_tokens:
return pickle.loads(pickled_tokens)
user_tokens = TokenCollection.all().filter('user =', user).get()
if user_tokens:
memcache.set('gdata_pickled_tokens:%s' % user, user_tokens.pickled_tokens)
return pickle.loads(user_tokens.pickled_tokens)
return {}
| agpl-3.0 |
JKatzwinkel/mps-youtube | mps_youtube/c.py | 1 | 1141 | """ Module for holding colour code values. """
import os
import re
import sys
try:
# pylint: disable=F0401
from colorama import Fore, Style
has_colorama = True
except ImportError:
has_colorama = False
mswin = os.name == "nt"
if mswin and has_colorama:
white = Style.RESET_ALL
ul = Style.DIM + Fore.YELLOW
red, green, yellow = Fore.RED, Fore.GREEN, Fore.YELLOW
blue, pink = Fore.CYAN, Fore.MAGENTA
elif mswin:
ul = red = green = yellow = blue = pink = white = ""
elif sys.stdout.isatty():
white = "\x1b[%sm" % 0
ul = "\x1b[%sm" * 3 % (2, 4, 33)
cols = ["\x1b[%sm" % n for n in range(91, 96)]
red, green, yellow, blue, pink = cols
else:
ul = red = green = yellow = blue = pink = white = ""
r, g, y, b, p, w = red, green, yellow, blue, pink, white
ansirx = re.compile(r'\x1b\[\d*m', re.UNICODE)
def c(colour, text):
""" Return coloured text. """
colours = {'r': r, 'g': g, 'y': y, 'b':b, 'p':p}
return colours[colour] + text + w
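# Illustrative only: c('y', "warning") wraps the text in the yellow escape code
# and appends the reset code; when colour support is unavailable both codes are
# empty strings, so the text passes through unchanged.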
def charcount(s):
""" Return number of characters in string, with ANSI color codes excluded. """
return len(ansirx.sub('', s))
| gpl-3.0 |
b0ttl3z/SickRage | lib/twilio/rest/resources/recordings.py | 23 | 1755 | from .util import normalize_dates
from .transcriptions import Transcriptions
from .base import InstanceResource, ListResource
class Recording(InstanceResource):
subresources = [Transcriptions]
def __init__(self, *args, **kwargs):
super(Recording, self).__init__(*args, **kwargs)
self.formats = {
"mp3": self.uri + ".mp3",
"wav": self.uri + ".wav",
}
def delete(self):
"""
Delete this recording
"""
return self.delete_instance()
class Recordings(ListResource):
name = "Recordings"
instance = Recording
@normalize_dates
def list(self, before=None, after=None, **kwargs):
"""
Returns a page of :class:`Recording` resources as a list.
For paging information see :class:`ListResource`.
:param date after: Only list recordings logged after this datetime
:param date before: Only list recordings logger before this datetime
:param call_sid: Only list recordings from this :class:`Call`
"""
kwargs["DateCreated<"] = before
kwargs["DateCreated>"] = after
return self.get_instances(kwargs)
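# Hedged usage sketch (the client object and call SID are placeholders):
#   from datetime import date
#   recordings = client.recordings.list(after=date(2015, 1, 1),
#                                        call_sid=some_call_sid)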
@normalize_dates
def iter(self, before=None, after=None, **kwargs):
"""
Returns an iterator of :class:`Recording` resources.
:param date after: Only list recordings logged after this datetime
:param date before: Only list recordings logger before this datetime
"""
kwargs["DateCreated<"] = before
kwargs["DateCreated>"] = after
return super(Recordings, self).iter(**kwargs)
def delete(self, sid):
"""
Delete the given recording
"""
return self.delete_instance(sid)
| gpl-3.0 |
mhbu50/erpnext | erpnext/crm/report/lead_owner_efficiency/lead_owner_efficiency.py | 3 | 1463 | # Copyright (c) 2013, Frappe Technologies Pvt. Ltd. and contributors
# For license information, please see license.txt
from __future__ import unicode_literals
import frappe
from frappe import _
from erpnext.crm.report.campaign_efficiency.campaign_efficiency import get_lead_data
def execute(filters=None):
columns, data = [], []
columns=get_columns()
data=get_lead_data(filters, "Lead Owner")
return columns, data
def get_columns():
return [
{
"fieldname": "lead_owner",
"label": _("Lead Owner"),
"fieldtype": "Link",
"options": "User",
"width": "130"
},
{
"fieldname": "lead_count",
"label": _("Lead Count"),
"fieldtype": "Int",
"width": "80"
},
{
"fieldname": "opp_count",
"label": _("Opp Count"),
"fieldtype": "Int",
"width": "80"
},
{
"fieldname": "quot_count",
"label": _("Quot Count"),
"fieldtype": "Int",
"width": "80"
},
{
"fieldname": "order_count",
"label": _("Order Count"),
"fieldtype": "Int",
"width": "100"
},
{
"fieldname": "order_value",
"label": _("Order Value"),
"fieldtype": "Float",
"width": "100"
},
{
"fieldname": "opp_lead",
"label": _("Opp/Lead %"),
"fieldtype": "Float",
"width": "100"
},
{
"fieldname": "quot_lead",
"label": _("Quot/Lead %"),
"fieldtype": "Float",
"width": "100"
},
{
"fieldname": "order_quot",
"label": _("Order/Quot %"),
"fieldtype": "Float",
"width": "100"
}
]
| gpl-3.0 |
craynot/django | tests/custom_pk/models.py | 282 | 1272 | # -*- coding: utf-8 -*-
"""
Using a custom primary key
By default, Django adds an ``"id"`` field to each model. But you can override
this behavior by explicitly adding ``primary_key=True`` to a field.
"""
from __future__ import unicode_literals
from django.db import models
from django.utils.encoding import python_2_unicode_compatible
from .fields import MyAutoField
@python_2_unicode_compatible
class Employee(models.Model):
employee_code = models.IntegerField(primary_key=True, db_column='code')
first_name = models.CharField(max_length=20)
last_name = models.CharField(max_length=20)
class Meta:
ordering = ('last_name', 'first_name')
def __str__(self):
return "%s %s" % (self.first_name, self.last_name)
@python_2_unicode_compatible
class Business(models.Model):
name = models.CharField(max_length=20, primary_key=True)
employees = models.ManyToManyField(Employee)
class Meta:
verbose_name_plural = 'businesses'
def __str__(self):
return self.name
@python_2_unicode_compatible
class Bar(models.Model):
id = MyAutoField(primary_key=True, db_index=True)
def __str__(self):
return repr(self.pk)
class Foo(models.Model):
bar = models.ForeignKey(Bar, models.CASCADE)
| bsd-3-clause |
boshnivolo/TIY-Assignments | node_modules/node-sass/node_modules/node-gyp/gyp/pylib/gyp/input_test.py | 1841 | 3207 | #!/usr/bin/env python
# Copyright 2013 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Unit tests for the input.py file."""
import gyp.input
import unittest
import sys
class TestFindCycles(unittest.TestCase):
def setUp(self):
self.nodes = {}
for x in ('a', 'b', 'c', 'd', 'e'):
self.nodes[x] = gyp.input.DependencyGraphNode(x)
def _create_dependency(self, dependent, dependency):
dependent.dependencies.append(dependency)
dependency.dependents.append(dependent)
def test_no_cycle_empty_graph(self):
for label, node in self.nodes.iteritems():
self.assertEquals([], node.FindCycles())
def test_no_cycle_line(self):
self._create_dependency(self.nodes['a'], self.nodes['b'])
self._create_dependency(self.nodes['b'], self.nodes['c'])
self._create_dependency(self.nodes['c'], self.nodes['d'])
for label, node in self.nodes.iteritems():
self.assertEquals([], node.FindCycles())
def test_no_cycle_dag(self):
self._create_dependency(self.nodes['a'], self.nodes['b'])
self._create_dependency(self.nodes['a'], self.nodes['c'])
self._create_dependency(self.nodes['b'], self.nodes['c'])
for label, node in self.nodes.iteritems():
self.assertEquals([], node.FindCycles())
def test_cycle_self_reference(self):
self._create_dependency(self.nodes['a'], self.nodes['a'])
self.assertEquals([[self.nodes['a'], self.nodes['a']]],
self.nodes['a'].FindCycles())
def test_cycle_two_nodes(self):
self._create_dependency(self.nodes['a'], self.nodes['b'])
self._create_dependency(self.nodes['b'], self.nodes['a'])
self.assertEquals([[self.nodes['a'], self.nodes['b'], self.nodes['a']]],
self.nodes['a'].FindCycles())
self.assertEquals([[self.nodes['b'], self.nodes['a'], self.nodes['b']]],
self.nodes['b'].FindCycles())
def test_two_cycles(self):
self._create_dependency(self.nodes['a'], self.nodes['b'])
self._create_dependency(self.nodes['b'], self.nodes['a'])
self._create_dependency(self.nodes['b'], self.nodes['c'])
self._create_dependency(self.nodes['c'], self.nodes['b'])
cycles = self.nodes['a'].FindCycles()
self.assertTrue(
[self.nodes['a'], self.nodes['b'], self.nodes['a']] in cycles)
self.assertTrue(
[self.nodes['b'], self.nodes['c'], self.nodes['b']] in cycles)
self.assertEquals(2, len(cycles))
def test_big_cycle(self):
self._create_dependency(self.nodes['a'], self.nodes['b'])
self._create_dependency(self.nodes['b'], self.nodes['c'])
self._create_dependency(self.nodes['c'], self.nodes['d'])
self._create_dependency(self.nodes['d'], self.nodes['e'])
self._create_dependency(self.nodes['e'], self.nodes['a'])
self.assertEquals([[self.nodes['a'],
self.nodes['b'],
self.nodes['c'],
self.nodes['d'],
self.nodes['e'],
self.nodes['a']]],
self.nodes['a'].FindCycles())
if __name__ == '__main__':
unittest.main()
| cc0-1.0 |
nirmeshk/oh-mainline | vendor/packages/gdata/src/gdata/tlslite/TLSConnection.py | 278 | 70347 | """
MAIN CLASS FOR TLS LITE (START HERE!).
"""
from __future__ import generators
import socket
from utils.compat import formatExceptionTrace
from TLSRecordLayer import TLSRecordLayer
from Session import Session
from constants import *
from utils.cryptomath import getRandomBytes
from errors import *
from messages import *
from mathtls import *
from HandshakeSettings import HandshakeSettings
class TLSConnection(TLSRecordLayer):
"""
This class wraps a socket and provides TLS handshaking and data
transfer.
To use this class, create a new instance, passing a connected
socket into the constructor. Then call some handshake function.
If the handshake completes without raising an exception, then a TLS
connection has been negotiated. You can transfer data over this
connection as if it were a socket.
This class provides both synchronous and asynchronous versions of
its key functions. The synchronous versions should be used when
writing single-or multi-threaded code using blocking sockets. The
asynchronous versions should be used when performing asynchronous,
event-based I/O with non-blocking sockets.
Asynchronous I/O is a complicated subject; typically, you should
not use the asynchronous functions directly, but should use some
framework like asyncore or Twisted which TLS Lite integrates with
(see
L{tlslite.integration.TLSAsyncDispatcherMixIn.TLSAsyncDispatcherMixIn} or
L{tlslite.integration.TLSTwistedProtocolWrapper.TLSTwistedProtocolWrapper}).
"""
def __init__(self, sock):
"""Create a new TLSConnection instance.
@param sock: The socket data will be transmitted on. The
socket should already be connected. It may be in blocking or
non-blocking mode.
@type sock: L{socket.socket}
"""
TLSRecordLayer.__init__(self, sock)
def handshakeClientSRP(self, username, password, session=None,
settings=None, checker=None, async=False):
"""Perform an SRP handshake in the role of client.
This function performs a TLS/SRP handshake. SRP mutually
authenticates both parties to each other using only a
username and password. This function may also perform a
combined SRP and server-certificate handshake, if the server
chooses to authenticate itself with a certificate chain in
addition to doing SRP.
TLS/SRP is non-standard. Most TLS implementations don't
support it. See
U{http://www.ietf.org/html.charters/tls-charter.html} or
U{http://trevp.net/tlssrp/} for the latest information on
TLS/SRP.
Like any handshake function, this can be called on a closed
TLS connection, or on a TLS connection that is already open.
If called on an open connection it performs a re-handshake.
If the function completes without raising an exception, the
TLS connection will be open and available for data transfer.
If an exception is raised, the connection will have been
automatically closed (if it was ever open).
@type username: str
@param username: The SRP username.
@type password: str
@param password: The SRP password.
@type session: L{tlslite.Session.Session}
@param session: A TLS session to attempt to resume. This
session must be an SRP session performed with the same username
and password as were passed in. If the resumption does not
succeed, a full SRP handshake will be performed.
@type settings: L{tlslite.HandshakeSettings.HandshakeSettings}
@param settings: Various settings which can be used to control
the ciphersuites, certificate types, and SSL/TLS versions
offered by the client.
@type checker: L{tlslite.Checker.Checker}
@param checker: A Checker instance. This instance will be
invoked to examine the other party's authentication
credentials, if the handshake completes succesfully.
@type async: bool
@param async: If False, this function will block until the
handshake is completed. If True, this function will return a
generator. Successive invocations of the generator will
return 0 if it is waiting to read from the socket, 1 if it is
waiting to write to the socket, or will raise StopIteration if
the handshake operation is completed.
@rtype: None or an iterable
@return: If 'async' is True, a generator object will be
returned.
@raise socket.error: If a socket error occurs.
@raise tlslite.errors.TLSAbruptCloseError: If the socket is closed
without a preceding alert.
@raise tlslite.errors.TLSAlert: If a TLS alert is signalled.
@raise tlslite.errors.TLSAuthenticationError: If the checker
doesn't like the other party's authentication credentials.
"""
handshaker = self._handshakeClientAsync(srpParams=(username, password),
session=session, settings=settings, checker=checker)
if async:
return handshaker
for result in handshaker:
pass
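    # Hedged example (illustrative only): driving the SRP handshake
    # asynchronously on a non-blocking socket. The select-based loop below is
    # a sketch, not the integration layer shipped with TLS Lite.
    #
    #   import select
    #   for status in conn.handshakeClientSRP(username, password, async=True):
    #       if status == 0:
    #           select.select([sock], [], [])    # wait until readable
    #       elif status == 1:
    #           select.select([], [sock], [])    # wait until writable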
def handshakeClientCert(self, certChain=None, privateKey=None,
session=None, settings=None, checker=None,
async=False):
"""Perform a certificate-based handshake in the role of client.
This function performs an SSL or TLS handshake. The server
will authenticate itself using an X.509 or cryptoID certificate
chain. If the handshake succeeds, the server's certificate
chain will be stored in the session's serverCertChain attribute.
Unless a checker object is passed in, this function does no
validation or checking of the server's certificate chain.
If the server requests client authentication, the
client will send the passed-in certificate chain, and use the
passed-in private key to authenticate itself. If no
certificate chain and private key were passed in, the client
will attempt to proceed without client authentication. The
server may or may not allow this.
Like any handshake function, this can be called on a closed
TLS connection, or on a TLS connection that is already open.
If called on an open connection it performs a re-handshake.
If the function completes without raising an exception, the
TLS connection will be open and available for data transfer.
If an exception is raised, the connection will have been
automatically closed (if it was ever open).
@type certChain: L{tlslite.X509CertChain.X509CertChain} or
L{cryptoIDlib.CertChain.CertChain}
@param certChain: The certificate chain to be used if the
server requests client authentication.
@type privateKey: L{tlslite.utils.RSAKey.RSAKey}
@param privateKey: The private key to be used if the server
requests client authentication.
@type session: L{tlslite.Session.Session}
@param session: A TLS session to attempt to resume. If the
resumption does not succeed, a full handshake will be
performed.
@type settings: L{tlslite.HandshakeSettings.HandshakeSettings}
@param settings: Various settings which can be used to control
the ciphersuites, certificate types, and SSL/TLS versions
offered by the client.
@type checker: L{tlslite.Checker.Checker}
@param checker: A Checker instance. This instance will be
invoked to examine the other party's authentication
        credentials, if the handshake completes successfully.
@type async: bool
@param async: If False, this function will block until the
handshake is completed. If True, this function will return a
generator. Successive invocations of the generator will
return 0 if it is waiting to read from the socket, 1 if it is
waiting to write to the socket, or will raise StopIteration if
the handshake operation is completed.
@rtype: None or an iterable
@return: If 'async' is True, a generator object will be
returned.
@raise socket.error: If a socket error occurs.
@raise tlslite.errors.TLSAbruptCloseError: If the socket is closed
without a preceding alert.
@raise tlslite.errors.TLSAlert: If a TLS alert is signalled.
@raise tlslite.errors.TLSAuthenticationError: If the checker
doesn't like the other party's authentication credentials.
"""
handshaker = self._handshakeClientAsync(certParams=(certChain,
privateKey), session=session, settings=settings,
checker=checker)
if async:
return handshaker
for result in handshaker:
pass
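    # Hedged example (illustrative; the file names and the X509/X509CertChain/
    # parsePEMKey helpers are assumed to be those exported by tlslite.api):
    #
    #   x509 = X509()
    #   x509.parse(open("clientcert.pem").read())
    #   certChain = X509CertChain([x509])
    #   privateKey = parsePEMKey(open("clientkey.pem").read(), private=True)
    #   conn.handshakeClientCert(certChain, privateKey)
    #   print conn.session.serverCertChain    # server's chain, if any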
def handshakeClientUnknown(self, srpCallback=None, certCallback=None,
session=None, settings=None, checker=None,
async=False):
"""Perform a to-be-determined type of handshake in the role of client.
This function performs an SSL or TLS handshake. If the server
requests client certificate authentication, the
certCallback will be invoked and should return a (certChain,
privateKey) pair. If the callback returns None, the library
will attempt to proceed without client authentication. The
server may or may not allow this.
If the server requests SRP authentication, the srpCallback
will be invoked and should return a (username, password) pair.
If the callback returns None, the local implementation will
signal a user_canceled error alert.
After the handshake completes, the client can inspect the
connection's session attribute to determine what type of
authentication was performed.
Like any handshake function, this can be called on a closed
TLS connection, or on a TLS connection that is already open.
If called on an open connection it performs a re-handshake.
If the function completes without raising an exception, the
TLS connection will be open and available for data transfer.
If an exception is raised, the connection will have been
automatically closed (if it was ever open).
@type srpCallback: callable
@param srpCallback: The callback to be used if the server
requests SRP authentication. If None, the client will not
offer support for SRP ciphersuites.
@type certCallback: callable
@param certCallback: The callback to be used if the server
requests client certificate authentication.
@type session: L{tlslite.Session.Session}
@param session: A TLS session to attempt to resume. If the
resumption does not succeed, a full handshake will be
performed.
@type settings: L{tlslite.HandshakeSettings.HandshakeSettings}
@param settings: Various settings which can be used to control
the ciphersuites, certificate types, and SSL/TLS versions
offered by the client.
@type checker: L{tlslite.Checker.Checker}
@param checker: A Checker instance. This instance will be
invoked to examine the other party's authentication
        credentials, if the handshake completes successfully.
@type async: bool
@param async: If False, this function will block until the
handshake is completed. If True, this function will return a
generator. Successive invocations of the generator will
return 0 if it is waiting to read from the socket, 1 if it is
waiting to write to the socket, or will raise StopIteration if
the handshake operation is completed.
@rtype: None or an iterable
@return: If 'async' is True, a generator object will be
returned.
@raise socket.error: If a socket error occurs.
@raise tlslite.errors.TLSAbruptCloseError: If the socket is closed
without a preceding alert.
@raise tlslite.errors.TLSAlert: If a TLS alert is signalled.
@raise tlslite.errors.TLSAuthenticationError: If the checker
doesn't like the other party's authentication credentials.
"""
handshaker = self._handshakeClientAsync(unknownParams=(srpCallback,
certCallback), session=session, settings=settings,
checker=checker)
if async:
return handshaker
for result in handshaker:
pass
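    # Hedged example (illustrative): letting the server decide between SRP and
    # certificate authentication. Both callbacks below are hypothetical.
    #
    #   def srpCallback():
    #       return (username, password)    # or None to cancel
    #   def certCallback():
    #       return None                    # proceed without a client cert
    #   conn.handshakeClientUnknown(srpCallback=srpCallback,
    #                               certCallback=certCallback)
    #   print conn.session.srpUsername     # inspect what was negotiated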
def handshakeClientSharedKey(self, username, sharedKey, settings=None,
checker=None, async=False):
"""Perform a shared-key handshake in the role of client.
This function performs a shared-key handshake. Using shared
symmetric keys of high entropy (128 bits or greater) mutually
authenticates both parties to each other.
TLS with shared-keys is non-standard. Most TLS
implementations don't support it. See
U{http://www.ietf.org/html.charters/tls-charter.html} for the
latest information on TLS with shared-keys. If the shared-keys
        Internet-Draft changes or is superseded, TLS Lite will track
those changes, so the shared-key support in later versions of
TLS Lite may become incompatible with this version.
Like any handshake function, this can be called on a closed
TLS connection, or on a TLS connection that is already open.
If called on an open connection it performs a re-handshake.
If the function completes without raising an exception, the
TLS connection will be open and available for data transfer.
If an exception is raised, the connection will have been
automatically closed (if it was ever open).
@type username: str
@param username: The shared-key username.
@type sharedKey: str
@param sharedKey: The shared key.
@type settings: L{tlslite.HandshakeSettings.HandshakeSettings}
@param settings: Various settings which can be used to control
the ciphersuites, certificate types, and SSL/TLS versions
offered by the client.
@type checker: L{tlslite.Checker.Checker}
@param checker: A Checker instance. This instance will be
invoked to examine the other party's authentication
        credentials, if the handshake completes successfully.
@type async: bool
@param async: If False, this function will block until the
handshake is completed. If True, this function will return a
generator. Successive invocations of the generator will
return 0 if it is waiting to read from the socket, 1 if it is
waiting to write to the socket, or will raise StopIteration if
the handshake operation is completed.
@rtype: None or an iterable
@return: If 'async' is True, a generator object will be
returned.
@raise socket.error: If a socket error occurs.
@raise tlslite.errors.TLSAbruptCloseError: If the socket is closed
without a preceding alert.
@raise tlslite.errors.TLSAlert: If a TLS alert is signalled.
@raise tlslite.errors.TLSAuthenticationError: If the checker
doesn't like the other party's authentication credentials.
"""
handshaker = self._handshakeClientAsync(sharedKeyParams=(username,
sharedKey), settings=settings, checker=checker)
if async:
return handshaker
for result in handshaker:
pass
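    # Hedged example (illustrative; the key material shown is a placeholder,
    # not a recommendation):
    #
    #   conn.handshakeClientSharedKey("alice", "0123456789abcdef")
    #
    # The shared key should be a high-entropy secret (128 bits or more)
    # agreed on out of band, as described in the docstring above.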
def _handshakeClientAsync(self, srpParams=(), certParams=(),
unknownParams=(), sharedKeyParams=(),
session=None, settings=None, checker=None,
recursive=False):
handshaker = self._handshakeClientAsyncHelper(srpParams=srpParams,
certParams=certParams, unknownParams=unknownParams,
sharedKeyParams=sharedKeyParams, session=session,
settings=settings, recursive=recursive)
for result in self._handshakeWrapperAsync(handshaker, checker):
yield result
def _handshakeClientAsyncHelper(self, srpParams, certParams, unknownParams,
sharedKeyParams, session, settings, recursive):
if not recursive:
self._handshakeStart(client=True)
#Unpack parameters
srpUsername = None # srpParams
password = None # srpParams
clientCertChain = None # certParams
privateKey = None # certParams
srpCallback = None # unknownParams
certCallback = None # unknownParams
#session # sharedKeyParams (or session)
#settings # settings
if srpParams:
srpUsername, password = srpParams
elif certParams:
clientCertChain, privateKey = certParams
elif unknownParams:
srpCallback, certCallback = unknownParams
elif sharedKeyParams:
session = Session()._createSharedKey(*sharedKeyParams)
if not settings:
settings = HandshakeSettings()
settings = settings._filter()
#Validate parameters
if srpUsername and not password:
raise ValueError("Caller passed a username but no password")
if password and not srpUsername:
raise ValueError("Caller passed a password but no username")
if clientCertChain and not privateKey:
raise ValueError("Caller passed a certChain but no privateKey")
if privateKey and not clientCertChain:
raise ValueError("Caller passed a privateKey but no certChain")
if clientCertChain:
foundType = False
try:
import cryptoIDlib.CertChain
if isinstance(clientCertChain, cryptoIDlib.CertChain.CertChain):
if "cryptoID" not in settings.certificateTypes:
raise ValueError("Client certificate doesn't "\
"match Handshake Settings")
settings.certificateTypes = ["cryptoID"]
foundType = True
except ImportError:
pass
if not foundType and isinstance(clientCertChain,
X509CertChain):
if "x509" not in settings.certificateTypes:
raise ValueError("Client certificate doesn't match "\
"Handshake Settings")
settings.certificateTypes = ["x509"]
foundType = True
if not foundType:
raise ValueError("Unrecognized certificate type")
if session:
if not session.valid():
session = None #ignore non-resumable sessions...
elif session.resumable and \
(session.srpUsername != srpUsername):
raise ValueError("Session username doesn't match")
#Add Faults to parameters
if srpUsername and self.fault == Fault.badUsername:
srpUsername += "GARBAGE"
if password and self.fault == Fault.badPassword:
password += "GARBAGE"
if sharedKeyParams:
identifier = sharedKeyParams[0]
sharedKey = sharedKeyParams[1]
if self.fault == Fault.badIdentifier:
identifier += "GARBAGE"
session = Session()._createSharedKey(identifier, sharedKey)
elif self.fault == Fault.badSharedKey:
sharedKey += "GARBAGE"
session = Session()._createSharedKey(identifier, sharedKey)
#Initialize locals
serverCertChain = None
cipherSuite = 0
certificateType = CertificateType.x509
premasterSecret = None
#Get client nonce
clientRandom = getRandomBytes(32)
#Initialize acceptable ciphersuites
cipherSuites = []
if srpParams:
cipherSuites += CipherSuite.getSrpRsaSuites(settings.cipherNames)
cipherSuites += CipherSuite.getSrpSuites(settings.cipherNames)
elif certParams:
cipherSuites += CipherSuite.getRsaSuites(settings.cipherNames)
elif unknownParams:
if srpCallback:
cipherSuites += \
CipherSuite.getSrpRsaSuites(settings.cipherNames)
cipherSuites += \
CipherSuite.getSrpSuites(settings.cipherNames)
cipherSuites += CipherSuite.getRsaSuites(settings.cipherNames)
elif sharedKeyParams:
cipherSuites += CipherSuite.getRsaSuites(settings.cipherNames)
else:
cipherSuites += CipherSuite.getRsaSuites(settings.cipherNames)
#Initialize acceptable certificate types
certificateTypes = settings._getCertificateTypes()
#Tentatively set the version to the client's minimum version.
#We'll use this for the ClientHello, and if an error occurs
#parsing the Server Hello, we'll use this version for the response
self.version = settings.maxVersion
#Either send ClientHello (with a resumable session)...
if session:
#If it's a resumable (i.e. not a shared-key session), then its
#ciphersuite must be one of the acceptable ciphersuites
if (not sharedKeyParams) and \
session.cipherSuite not in cipherSuites:
raise ValueError("Session's cipher suite not consistent "\
"with parameters")
else:
clientHello = ClientHello()
clientHello.create(settings.maxVersion, clientRandom,
session.sessionID, cipherSuites,
certificateTypes, session.srpUsername)
#Or send ClientHello (without)
else:
clientHello = ClientHello()
clientHello.create(settings.maxVersion, clientRandom,
createByteArraySequence([]), cipherSuites,
certificateTypes, srpUsername)
for result in self._sendMsg(clientHello):
yield result
#Get ServerHello (or missing_srp_username)
for result in self._getMsg((ContentType.handshake,
ContentType.alert),
HandshakeType.server_hello):
if result in (0,1):
yield result
else:
break
msg = result
if isinstance(msg, ServerHello):
serverHello = msg
elif isinstance(msg, Alert):
alert = msg
#If it's not a missing_srp_username, re-raise
if alert.description != AlertDescription.missing_srp_username:
self._shutdown(False)
raise TLSRemoteAlert(alert)
#If we're not in SRP callback mode, we won't have offered SRP
#without a username, so we shouldn't get this alert
if not srpCallback:
for result in self._sendError(\
AlertDescription.unexpected_message):
yield result
srpParams = srpCallback()
#If the callback returns None, cancel the handshake
if srpParams == None:
for result in self._sendError(AlertDescription.user_canceled):
yield result
#Recursively perform handshake
for result in self._handshakeClientAsyncHelper(srpParams,
None, None, None, None, settings, True):
yield result
return
#Get the server version. Do this before anything else, so any
#error alerts will use the server's version
self.version = serverHello.server_version
#Future responses from server must use this version
self._versionCheck = True
#Check ServerHello
if serverHello.server_version < settings.minVersion:
for result in self._sendError(\
AlertDescription.protocol_version,
"Too old version: %s" % str(serverHello.server_version)):
yield result
if serverHello.server_version > settings.maxVersion:
for result in self._sendError(\
AlertDescription.protocol_version,
"Too new version: %s" % str(serverHello.server_version)):
yield result
if serverHello.cipher_suite not in cipherSuites:
for result in self._sendError(\
AlertDescription.illegal_parameter,
"Server responded with incorrect ciphersuite"):
yield result
if serverHello.certificate_type not in certificateTypes:
for result in self._sendError(\
AlertDescription.illegal_parameter,
"Server responded with incorrect certificate type"):
yield result
if serverHello.compression_method != 0:
for result in self._sendError(\
AlertDescription.illegal_parameter,
"Server responded with incorrect compression method"):
yield result
#Get the server nonce
serverRandom = serverHello.random
#If the server agrees to resume
if session and session.sessionID and \
serverHello.session_id == session.sessionID:
#If a shared-key, we're flexible about suites; otherwise the
#server-chosen suite has to match the session's suite
if sharedKeyParams:
session.cipherSuite = serverHello.cipher_suite
elif serverHello.cipher_suite != session.cipherSuite:
for result in self._sendError(\
AlertDescription.illegal_parameter,\
"Server's ciphersuite doesn't match session"):
yield result
#Set the session for this connection
self.session = session
#Calculate pending connection states
self._calcPendingStates(clientRandom, serverRandom,
settings.cipherImplementations)
#Exchange ChangeCipherSpec and Finished messages
for result in self._getFinished():
yield result
for result in self._sendFinished():
yield result
#Mark the connection as open
self._handshakeDone(resumed=True)
#If server DOES NOT agree to resume
else:
if sharedKeyParams:
for result in self._sendError(\
AlertDescription.user_canceled,
"Was expecting a shared-key resumption"):
yield result
#We've already validated these
cipherSuite = serverHello.cipher_suite
certificateType = serverHello.certificate_type
#If the server chose an SRP suite...
if cipherSuite in CipherSuite.srpSuites:
#Get ServerKeyExchange, ServerHelloDone
for result in self._getMsg(ContentType.handshake,
HandshakeType.server_key_exchange, cipherSuite):
if result in (0,1):
yield result
else:
break
serverKeyExchange = result
for result in self._getMsg(ContentType.handshake,
HandshakeType.server_hello_done):
if result in (0,1):
yield result
else:
break
serverHelloDone = result
#If the server chose an SRP+RSA suite...
elif cipherSuite in CipherSuite.srpRsaSuites:
#Get Certificate, ServerKeyExchange, ServerHelloDone
for result in self._getMsg(ContentType.handshake,
HandshakeType.certificate, certificateType):
if result in (0,1):
yield result
else:
break
serverCertificate = result
for result in self._getMsg(ContentType.handshake,
HandshakeType.server_key_exchange, cipherSuite):
if result in (0,1):
yield result
else:
break
serverKeyExchange = result
for result in self._getMsg(ContentType.handshake,
HandshakeType.server_hello_done):
if result in (0,1):
yield result
else:
break
serverHelloDone = result
#If the server chose an RSA suite...
elif cipherSuite in CipherSuite.rsaSuites:
#Get Certificate[, CertificateRequest], ServerHelloDone
for result in self._getMsg(ContentType.handshake,
HandshakeType.certificate, certificateType):
if result in (0,1):
yield result
else:
break
serverCertificate = result
for result in self._getMsg(ContentType.handshake,
(HandshakeType.server_hello_done,
HandshakeType.certificate_request)):
if result in (0,1):
yield result
else:
break
msg = result
certificateRequest = None
if isinstance(msg, CertificateRequest):
certificateRequest = msg
for result in self._getMsg(ContentType.handshake,
HandshakeType.server_hello_done):
if result in (0,1):
yield result
else:
break
serverHelloDone = result
elif isinstance(msg, ServerHelloDone):
serverHelloDone = msg
else:
raise AssertionError()
#Calculate SRP premaster secret, if server chose an SRP or
#SRP+RSA suite
if cipherSuite in CipherSuite.srpSuites + \
CipherSuite.srpRsaSuites:
#Get and check the server's group parameters and B value
N = serverKeyExchange.srp_N
g = serverKeyExchange.srp_g
s = serverKeyExchange.srp_s
B = serverKeyExchange.srp_B
if (g,N) not in goodGroupParameters:
for result in self._sendError(\
AlertDescription.untrusted_srp_parameters,
"Unknown group parameters"):
yield result
if numBits(N) < settings.minKeySize:
for result in self._sendError(\
AlertDescription.untrusted_srp_parameters,
"N value is too small: %d" % numBits(N)):
yield result
if numBits(N) > settings.maxKeySize:
for result in self._sendError(\
AlertDescription.untrusted_srp_parameters,
"N value is too large: %d" % numBits(N)):
yield result
if B % N == 0:
for result in self._sendError(\
AlertDescription.illegal_parameter,
"Suspicious B value"):
yield result
#Check the server's signature, if server chose an
#SRP+RSA suite
if cipherSuite in CipherSuite.srpRsaSuites:
#Hash ServerKeyExchange/ServerSRPParams
hashBytes = serverKeyExchange.hash(clientRandom,
serverRandom)
#Extract signature bytes from ServerKeyExchange
sigBytes = serverKeyExchange.signature
if len(sigBytes) == 0:
for result in self._sendError(\
AlertDescription.illegal_parameter,
"Server sent an SRP ServerKeyExchange "\
"message without a signature"):
yield result
#Get server's public key from the Certificate message
for result in self._getKeyFromChain(serverCertificate,
settings):
if result in (0,1):
yield result
else:
break
publicKey, serverCertChain = result
#Verify signature
if not publicKey.verify(sigBytes, hashBytes):
for result in self._sendError(\
AlertDescription.decrypt_error,
"Signature failed to verify"):
yield result
#Calculate client's ephemeral DH values (a, A)
a = bytesToNumber(getRandomBytes(32))
A = powMod(g, a, N)
#Calculate client's static DH values (x, v)
x = makeX(bytesToString(s), srpUsername, password)
v = powMod(g, x, N)
#Calculate u
u = makeU(N, A, B)
#Calculate premaster secret
k = makeK(N, g)
S = powMod((B - (k*v)) % N, a+(u*x), N)
if self.fault == Fault.badA:
A = N
S = 0
premasterSecret = numberToBytes(S)
#Send ClientKeyExchange
for result in self._sendMsg(\
ClientKeyExchange(cipherSuite).createSRP(A)):
yield result
#Calculate RSA premaster secret, if server chose an RSA suite
elif cipherSuite in CipherSuite.rsaSuites:
#Handle the presence of a CertificateRequest
if certificateRequest:
if unknownParams and certCallback:
certParamsNew = certCallback()
if certParamsNew:
clientCertChain, privateKey = certParamsNew
#Get server's public key from the Certificate message
for result in self._getKeyFromChain(serverCertificate,
settings):
if result in (0,1):
yield result
else:
break
publicKey, serverCertChain = result
#Calculate premaster secret
premasterSecret = getRandomBytes(48)
premasterSecret[0] = settings.maxVersion[0]
premasterSecret[1] = settings.maxVersion[1]
if self.fault == Fault.badPremasterPadding:
premasterSecret[0] = 5
if self.fault == Fault.shortPremasterSecret:
premasterSecret = premasterSecret[:-1]
#Encrypt premaster secret to server's public key
encryptedPreMasterSecret = publicKey.encrypt(premasterSecret)
#If client authentication was requested, send Certificate
#message, either with certificates or empty
if certificateRequest:
clientCertificate = Certificate(certificateType)
if clientCertChain:
#Check to make sure we have the same type of
#certificates the server requested
wrongType = False
if certificateType == CertificateType.x509:
if not isinstance(clientCertChain, X509CertChain):
wrongType = True
elif certificateType == CertificateType.cryptoID:
if not isinstance(clientCertChain,
cryptoIDlib.CertChain.CertChain):
wrongType = True
if wrongType:
for result in self._sendError(\
AlertDescription.handshake_failure,
"Client certificate is of wrong type"):
yield result
clientCertificate.create(clientCertChain)
for result in self._sendMsg(clientCertificate):
yield result
else:
#The server didn't request client auth, so we
#zeroize these so the clientCertChain won't be
#stored in the session.
privateKey = None
clientCertChain = None
#Send ClientKeyExchange
clientKeyExchange = ClientKeyExchange(cipherSuite,
self.version)
clientKeyExchange.createRSA(encryptedPreMasterSecret)
for result in self._sendMsg(clientKeyExchange):
yield result
#If client authentication was requested and we have a
#private key, send CertificateVerify
if certificateRequest and privateKey:
if self.version == (3,0):
#Create a temporary session object, just for the
#purpose of creating the CertificateVerify
session = Session()
session._calcMasterSecret(self.version,
premasterSecret,
clientRandom,
serverRandom)
verifyBytes = self._calcSSLHandshakeHash(\
session.masterSecret, "")
elif self.version in ((3,1), (3,2)):
verifyBytes = stringToBytes(\
self._handshake_md5.digest() + \
self._handshake_sha.digest())
if self.fault == Fault.badVerifyMessage:
verifyBytes[0] = ((verifyBytes[0]+1) % 256)
signedBytes = privateKey.sign(verifyBytes)
certificateVerify = CertificateVerify()
certificateVerify.create(signedBytes)
for result in self._sendMsg(certificateVerify):
yield result
#Create the session object
self.session = Session()
self.session._calcMasterSecret(self.version, premasterSecret,
clientRandom, serverRandom)
self.session.sessionID = serverHello.session_id
self.session.cipherSuite = cipherSuite
self.session.srpUsername = srpUsername
self.session.clientCertChain = clientCertChain
self.session.serverCertChain = serverCertChain
#Calculate pending connection states
self._calcPendingStates(clientRandom, serverRandom,
settings.cipherImplementations)
#Exchange ChangeCipherSpec and Finished messages
for result in self._sendFinished():
yield result
for result in self._getFinished():
yield result
#Mark the connection as open
self.session._setResumable(True)
self._handshakeDone(resumed=False)
def handshakeServer(self, sharedKeyDB=None, verifierDB=None,
certChain=None, privateKey=None, reqCert=False,
sessionCache=None, settings=None, checker=None):
"""Perform a handshake in the role of server.
This function performs an SSL or TLS handshake. Depending on
the arguments and the behavior of the client, this function can
perform a shared-key, SRP, or certificate-based handshake. It
can also perform a combined SRP and server-certificate
handshake.
Like any handshake function, this can be called on a closed
TLS connection, or on a TLS connection that is already open.
If called on an open connection it performs a re-handshake.
This function does not send a Hello Request message before
performing the handshake, so if re-handshaking is required,
the server must signal the client to begin the re-handshake
through some other means.
If the function completes without raising an exception, the
TLS connection will be open and available for data transfer.
If an exception is raised, the connection will have been
automatically closed (if it was ever open).
@type sharedKeyDB: L{tlslite.SharedKeyDB.SharedKeyDB}
@param sharedKeyDB: A database of shared symmetric keys
associated with usernames. If the client performs a
shared-key handshake, the session's sharedKeyUsername
attribute will be set.
@type verifierDB: L{tlslite.VerifierDB.VerifierDB}
@param verifierDB: A database of SRP password verifiers
associated with usernames. If the client performs an SRP
handshake, the session's srpUsername attribute will be set.
@type certChain: L{tlslite.X509CertChain.X509CertChain} or
L{cryptoIDlib.CertChain.CertChain}
@param certChain: The certificate chain to be used if the
client requests server certificate authentication.
@type privateKey: L{tlslite.utils.RSAKey.RSAKey}
@param privateKey: The private key to be used if the client
requests server certificate authentication.
@type reqCert: bool
@param reqCert: Whether to request client certificate
authentication. This only applies if the client chooses server
certificate authentication; if the client chooses SRP or
shared-key authentication, this will be ignored. If the client
        performs client certificate authentication, the session's
clientCertChain attribute will be set.
@type sessionCache: L{tlslite.SessionCache.SessionCache}
@param sessionCache: An in-memory cache of resumable sessions.
The client can resume sessions from this cache. Alternatively,
if the client performs a full handshake, a new session will be
added to the cache.
@type settings: L{tlslite.HandshakeSettings.HandshakeSettings}
@param settings: Various settings which can be used to control
the ciphersuites and SSL/TLS version chosen by the server.
@type checker: L{tlslite.Checker.Checker}
@param checker: A Checker instance. This instance will be
invoked to examine the other party's authentication
        credentials, if the handshake completes successfully.
@raise socket.error: If a socket error occurs.
@raise tlslite.errors.TLSAbruptCloseError: If the socket is closed
without a preceding alert.
@raise tlslite.errors.TLSAlert: If a TLS alert is signalled.
@raise tlslite.errors.TLSAuthenticationError: If the checker
doesn't like the other party's authentication credentials.
"""
for result in self.handshakeServerAsync(sharedKeyDB, verifierDB,
certChain, privateKey, reqCert, sessionCache, settings,
checker):
pass
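    # Hedged server-side sketch (illustrative only; verifierDB, certChain and
    # privateKey are assumed to have been prepared elsewhere, e.g. from a
    # tlslite VerifierDB and a PEM certificate/key pair):
    #
    #   listener = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    #   listener.bind(("", 4443))
    #   listener.listen(5)
    #   newsock, addr = listener.accept()
    #   conn = TLSConnection(newsock)
    #   conn.handshakeServer(verifierDB=verifierDB,
    #                        certChain=certChain, privateKey=privateKey)
    #   data = conn.read(max=1024)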
def handshakeServerAsync(self, sharedKeyDB=None, verifierDB=None,
certChain=None, privateKey=None, reqCert=False,
sessionCache=None, settings=None, checker=None):
"""Start a server handshake operation on the TLS connection.
This function returns a generator which behaves similarly to
handshakeServer(). Successive invocations of the generator
will return 0 if it is waiting to read from the socket, 1 if it is
waiting to write to the socket, or it will raise StopIteration
if the handshake operation is complete.
@rtype: iterable
@return: A generator; see above for details.
"""
handshaker = self._handshakeServerAsyncHelper(\
sharedKeyDB=sharedKeyDB,
verifierDB=verifierDB, certChain=certChain,
privateKey=privateKey, reqCert=reqCert,
sessionCache=sessionCache, settings=settings)
for result in self._handshakeWrapperAsync(handshaker, checker):
yield result
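    # Hedged example (illustrative): the asynchronous server handshake is
    # driven the same way as the client-side generators, e.g.
    #
    #   for status in conn.handshakeServerAsync(verifierDB=verifierDB):
    #       if status == 0:
    #           wait_until_readable(newsock)    # hypothetical helper
    #       elif status == 1:
    #           wait_until_writable(newsock)    # hypothetical helper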
def _handshakeServerAsyncHelper(self, sharedKeyDB, verifierDB,
certChain, privateKey, reqCert, sessionCache,
settings):
self._handshakeStart(client=False)
if (not sharedKeyDB) and (not verifierDB) and (not certChain):
raise ValueError("Caller passed no authentication credentials")
if certChain and not privateKey:
raise ValueError("Caller passed a certChain but no privateKey")
if privateKey and not certChain:
raise ValueError("Caller passed a privateKey but no certChain")
if not settings:
settings = HandshakeSettings()
settings = settings._filter()
#Initialize acceptable cipher suites
cipherSuites = []
if verifierDB:
if certChain:
cipherSuites += \
CipherSuite.getSrpRsaSuites(settings.cipherNames)
cipherSuites += CipherSuite.getSrpSuites(settings.cipherNames)
if sharedKeyDB or certChain:
cipherSuites += CipherSuite.getRsaSuites(settings.cipherNames)
#Initialize acceptable certificate type
certificateType = None
if certChain:
try:
import cryptoIDlib.CertChain
if isinstance(certChain, cryptoIDlib.CertChain.CertChain):
certificateType = CertificateType.cryptoID
except ImportError:
pass
if isinstance(certChain, X509CertChain):
certificateType = CertificateType.x509
if certificateType == None:
raise ValueError("Unrecognized certificate type")
#Initialize locals
clientCertChain = None
serverCertChain = None #We may set certChain to this later
postFinishedError = None
#Tentatively set version to most-desirable version, so if an error
#occurs parsing the ClientHello, this is what we'll use for the
#error alert
self.version = settings.maxVersion
#Get ClientHello
for result in self._getMsg(ContentType.handshake,
HandshakeType.client_hello):
if result in (0,1):
yield result
else:
break
clientHello = result
#If client's version is too low, reject it
if clientHello.client_version < settings.minVersion:
self.version = settings.minVersion
for result in self._sendError(\
AlertDescription.protocol_version,
"Too old version: %s" % str(clientHello.client_version)):
yield result
#If client's version is too high, propose my highest version
elif clientHello.client_version > settings.maxVersion:
self.version = settings.maxVersion
else:
#Set the version to the client's version
self.version = clientHello.client_version
#Get the client nonce; create server nonce
clientRandom = clientHello.random
serverRandom = getRandomBytes(32)
#Calculate the first cipher suite intersection.
#This is the 'privileged' ciphersuite. We'll use it if we're
#doing a shared-key resumption or a new negotiation. In fact,
#the only time we won't use it is if we're resuming a non-sharedkey
#session, in which case we use the ciphersuite from the session.
#
#Given the current ciphersuite ordering, this means we prefer SRP
#over non-SRP.
for cipherSuite in cipherSuites:
if cipherSuite in clientHello.cipher_suites:
break
else:
for result in self._sendError(\
AlertDescription.handshake_failure):
yield result
#If resumption was requested...
if clientHello.session_id and (sharedKeyDB or sessionCache):
session = None
#Check in the sharedKeys container
if sharedKeyDB and len(clientHello.session_id)==16:
try:
#Trim off zero padding, if any
for x in range(16):
if clientHello.session_id[x]==0:
break
self.allegedSharedKeyUsername = bytesToString(\
clientHello.session_id[:x])
session = sharedKeyDB[self.allegedSharedKeyUsername]
if not session.sharedKey:
raise AssertionError()
#use privileged ciphersuite
session.cipherSuite = cipherSuite
except KeyError:
pass
#Then check in the session cache
if sessionCache and not session:
try:
session = sessionCache[bytesToString(\
clientHello.session_id)]
if session.sharedKey:
raise AssertionError()
if not session.resumable:
raise AssertionError()
#Check for consistency with ClientHello
if session.cipherSuite not in cipherSuites:
for result in self._sendError(\
AlertDescription.handshake_failure):
yield result
if session.cipherSuite not in clientHello.cipher_suites:
for result in self._sendError(\
AlertDescription.handshake_failure):
yield result
if clientHello.srp_username:
if clientHello.srp_username != session.srpUsername:
for result in self._sendError(\
AlertDescription.handshake_failure):
yield result
except KeyError:
pass
#If a session is found..
if session:
#Set the session
self.session = session
#Send ServerHello
serverHello = ServerHello()
serverHello.create(self.version, serverRandom,
session.sessionID, session.cipherSuite,
certificateType)
for result in self._sendMsg(serverHello):
yield result
#From here on, the client's messages must have the right version
self._versionCheck = True
#Calculate pending connection states
self._calcPendingStates(clientRandom, serverRandom,
settings.cipherImplementations)
#Exchange ChangeCipherSpec and Finished messages
for result in self._sendFinished():
yield result
for result in self._getFinished():
yield result
#Mark the connection as open
self._handshakeDone(resumed=True)
return
#If not a resumption...
#TRICKY: we might have chosen an RSA suite that was only deemed
#acceptable because of the shared-key resumption. If the shared-
#key resumption failed, because the identifier wasn't recognized,
#we might fall through to here, where we have an RSA suite
#chosen, but no certificate.
if cipherSuite in CipherSuite.rsaSuites and not certChain:
for result in self._sendError(\
AlertDescription.handshake_failure):
yield result
#If an RSA suite is chosen, check for certificate type intersection
#(We do this check down here because if the mismatch occurs but the
# client is using a shared-key session, it's okay)
if cipherSuite in CipherSuite.rsaSuites + \
CipherSuite.srpRsaSuites:
if certificateType not in clientHello.certificate_types:
for result in self._sendError(\
AlertDescription.handshake_failure,
"the client doesn't support my certificate type"):
yield result
#Move certChain -> serverCertChain, now that we're using it
serverCertChain = certChain
#Create sessionID
if sessionCache:
sessionID = getRandomBytes(32)
else:
sessionID = createByteArraySequence([])
#If we've selected an SRP suite, exchange keys and calculate
#premaster secret:
if cipherSuite in CipherSuite.srpSuites + CipherSuite.srpRsaSuites:
#If there's no SRP username...
if not clientHello.srp_username:
#Ask the client to re-send ClientHello with one
for result in self._sendMsg(Alert().create(\
AlertDescription.missing_srp_username,
AlertLevel.warning)):
yield result
#Get ClientHello
for result in self._getMsg(ContentType.handshake,
HandshakeType.client_hello):
if result in (0,1):
yield result
else:
break
clientHello = result
#Check ClientHello
#If client's version is too low, reject it (COPIED CODE; BAD!)
if clientHello.client_version < settings.minVersion:
self.version = settings.minVersion
for result in self._sendError(\
AlertDescription.protocol_version,
"Too old version: %s" % str(clientHello.client_version)):
yield result
#If client's version is too high, propose my highest version
elif clientHello.client_version > settings.maxVersion:
self.version = settings.maxVersion
else:
#Set the version to the client's version
self.version = clientHello.client_version
#Recalculate the privileged cipher suite, making sure to
#pick an SRP suite
cipherSuites = [c for c in cipherSuites if c in \
CipherSuite.srpSuites + \
CipherSuite.srpRsaSuites]
for cipherSuite in cipherSuites:
if cipherSuite in clientHello.cipher_suites:
break
else:
for result in self._sendError(\
AlertDescription.handshake_failure):
yield result
#Get the client nonce; create server nonce
clientRandom = clientHello.random
serverRandom = getRandomBytes(32)
#The username better be there, this time
if not clientHello.srp_username:
for result in self._sendError(\
AlertDescription.illegal_parameter,
"Client resent a hello, but without the SRP"\
" username"):
yield result
#Get username
self.allegedSrpUsername = clientHello.srp_username
#Get parameters from username
try:
entry = verifierDB[self.allegedSrpUsername]
except KeyError:
for result in self._sendError(\
AlertDescription.unknown_srp_username):
yield result
(N, g, s, v) = entry
#Calculate server's ephemeral DH values (b, B)
b = bytesToNumber(getRandomBytes(32))
k = makeK(N, g)
B = (powMod(g, b, N) + (k*v)) % N
#Create ServerKeyExchange, signing it if necessary
serverKeyExchange = ServerKeyExchange(cipherSuite)
serverKeyExchange.createSRP(N, g, stringToBytes(s), B)
if cipherSuite in CipherSuite.srpRsaSuites:
hashBytes = serverKeyExchange.hash(clientRandom,
serverRandom)
serverKeyExchange.signature = privateKey.sign(hashBytes)
#Send ServerHello[, Certificate], ServerKeyExchange,
#ServerHelloDone
msgs = []
serverHello = ServerHello()
serverHello.create(self.version, serverRandom, sessionID,
cipherSuite, certificateType)
msgs.append(serverHello)
if cipherSuite in CipherSuite.srpRsaSuites:
certificateMsg = Certificate(certificateType)
certificateMsg.create(serverCertChain)
msgs.append(certificateMsg)
msgs.append(serverKeyExchange)
msgs.append(ServerHelloDone())
for result in self._sendMsgs(msgs):
yield result
#From here on, the client's messages must have the right version
self._versionCheck = True
#Get and check ClientKeyExchange
for result in self._getMsg(ContentType.handshake,
HandshakeType.client_key_exchange,
cipherSuite):
if result in (0,1):
yield result
else:
break
clientKeyExchange = result
A = clientKeyExchange.srp_A
if A % N == 0:
postFinishedError = (AlertDescription.illegal_parameter,
"Suspicious A value")
#Calculate u
u = makeU(N, A, B)
#Calculate premaster secret
S = powMod((A * powMod(v,u,N)) % N, b, N)
premasterSecret = numberToBytes(S)
#If we've selected an RSA suite, exchange keys and calculate
#premaster secret:
elif cipherSuite in CipherSuite.rsaSuites:
#Send ServerHello, Certificate[, CertificateRequest],
#ServerHelloDone
msgs = []
msgs.append(ServerHello().create(self.version, serverRandom,
sessionID, cipherSuite, certificateType))
msgs.append(Certificate(certificateType).create(serverCertChain))
if reqCert:
msgs.append(CertificateRequest())
msgs.append(ServerHelloDone())
for result in self._sendMsgs(msgs):
yield result
#From here on, the client's messages must have the right version
self._versionCheck = True
#Get [Certificate,] (if was requested)
if reqCert:
if self.version == (3,0):
for result in self._getMsg((ContentType.handshake,
ContentType.alert),
HandshakeType.certificate,
certificateType):
if result in (0,1):
yield result
else:
break
msg = result
if isinstance(msg, Alert):
#If it's not a no_certificate alert, re-raise
alert = msg
if alert.description != \
AlertDescription.no_certificate:
self._shutdown(False)
raise TLSRemoteAlert(alert)
elif isinstance(msg, Certificate):
clientCertificate = msg
if clientCertificate.certChain and \
clientCertificate.certChain.getNumCerts()!=0:
clientCertChain = clientCertificate.certChain
else:
raise AssertionError()
elif self.version in ((3,1), (3,2)):
for result in self._getMsg(ContentType.handshake,
HandshakeType.certificate,
certificateType):
if result in (0,1):
yield result
else:
break
clientCertificate = result
if clientCertificate.certChain and \
clientCertificate.certChain.getNumCerts()!=0:
clientCertChain = clientCertificate.certChain
else:
raise AssertionError()
#Get ClientKeyExchange
for result in self._getMsg(ContentType.handshake,
HandshakeType.client_key_exchange,
cipherSuite):
if result in (0,1):
yield result
else:
break
clientKeyExchange = result
#Decrypt ClientKeyExchange
premasterSecret = privateKey.decrypt(\
clientKeyExchange.encryptedPreMasterSecret)
randomPreMasterSecret = getRandomBytes(48)
versionCheck = (premasterSecret[0], premasterSecret[1])
if not premasterSecret:
premasterSecret = randomPreMasterSecret
elif len(premasterSecret)!=48:
premasterSecret = randomPreMasterSecret
elif versionCheck != clientHello.client_version:
if versionCheck != self.version: #Tolerate buggy IE clients
premasterSecret = randomPreMasterSecret
#Get and check CertificateVerify, if relevant
if clientCertChain:
if self.version == (3,0):
#Create a temporary session object, just for the purpose
#of checking the CertificateVerify
session = Session()
session._calcMasterSecret(self.version, premasterSecret,
clientRandom, serverRandom)
verifyBytes = self._calcSSLHandshakeHash(\
session.masterSecret, "")
elif self.version in ((3,1), (3,2)):
verifyBytes = stringToBytes(self._handshake_md5.digest() +\
self._handshake_sha.digest())
for result in self._getMsg(ContentType.handshake,
HandshakeType.certificate_verify):
if result in (0,1):
yield result
else:
break
certificateVerify = result
publicKey = clientCertChain.getEndEntityPublicKey()
if len(publicKey) < settings.minKeySize:
postFinishedError = (AlertDescription.handshake_failure,
"Client's public key too small: %d" % len(publicKey))
if len(publicKey) > settings.maxKeySize:
postFinishedError = (AlertDescription.handshake_failure,
"Client's public key too large: %d" % len(publicKey))
if not publicKey.verify(certificateVerify.signature,
verifyBytes):
postFinishedError = (AlertDescription.decrypt_error,
"Signature failed to verify")
#Create the session object
self.session = Session()
self.session._calcMasterSecret(self.version, premasterSecret,
clientRandom, serverRandom)
self.session.sessionID = sessionID
self.session.cipherSuite = cipherSuite
self.session.srpUsername = self.allegedSrpUsername
self.session.clientCertChain = clientCertChain
self.session.serverCertChain = serverCertChain
#Calculate pending connection states
self._calcPendingStates(clientRandom, serverRandom,
settings.cipherImplementations)
#Exchange ChangeCipherSpec and Finished messages
for result in self._getFinished():
yield result
#If we were holding a post-finished error until receiving the client
#finished message, send it now. We delay the call until this point
#because calling sendError() throws an exception, and our caller might
#shut down the socket upon receiving the exception. If he did, and the
#client was still sending its ChangeCipherSpec or Finished messages, it
#would cause a socket error on the client side. This is a lot of
#consideration to show to misbehaving clients, but this would also
#cause problems with fault-testing.
if postFinishedError:
for result in self._sendError(*postFinishedError):
yield result
for result in self._sendFinished():
yield result
#Add the session object to the session cache
if sessionCache and sessionID:
sessionCache[bytesToString(sessionID)] = self.session
#Mark the connection as open
self.session._setResumable(True)
self._handshakeDone(resumed=False)
def _handshakeWrapperAsync(self, handshaker, checker):
if not self.fault:
try:
for result in handshaker:
yield result
if checker:
try:
checker(self)
except TLSAuthenticationError:
alert = Alert().create(AlertDescription.close_notify,
AlertLevel.fatal)
for result in self._sendMsg(alert):
yield result
raise
except:
self._shutdown(False)
raise
else:
try:
for result in handshaker:
yield result
if checker:
try:
checker(self)
except TLSAuthenticationError:
alert = Alert().create(AlertDescription.close_notify,
AlertLevel.fatal)
for result in self._sendMsg(alert):
yield result
raise
except socket.error, e:
raise TLSFaultError("socket error!")
except TLSAbruptCloseError, e:
raise TLSFaultError("abrupt close error!")
except TLSAlert, alert:
if alert.description not in Fault.faultAlerts[self.fault]:
raise TLSFaultError(str(alert))
else:
pass
except:
self._shutdown(False)
raise
else:
raise TLSFaultError("No error!")
def _getKeyFromChain(self, certificate, settings):
#Get and check cert chain from the Certificate message
certChain = certificate.certChain
if not certChain or certChain.getNumCerts() == 0:
for result in self._sendError(AlertDescription.illegal_parameter,
"Other party sent a Certificate message without "\
"certificates"):
yield result
#Get and check public key from the cert chain
publicKey = certChain.getEndEntityPublicKey()
if len(publicKey) < settings.minKeySize:
for result in self._sendError(AlertDescription.handshake_failure,
"Other party's public key too small: %d" % len(publicKey)):
yield result
if len(publicKey) > settings.maxKeySize:
for result in self._sendError(AlertDescription.handshake_failure,
"Other party's public key too large: %d" % len(publicKey)):
yield result
yield publicKey, certChain
| agpl-3.0 |
dgellis90/nipype | nipype/interfaces/semtools/registration/tests/test_auto_BRAINSDemonWarp.py | 24 | 3971 | # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT
from .....testing import assert_equal
from ..specialized import BRAINSDemonWarp
def test_BRAINSDemonWarp_inputs():
input_map = dict(args=dict(argstr='%s',
),
arrayOfPyramidLevelIterations=dict(argstr='--arrayOfPyramidLevelIterations %s',
sep=',',
),
backgroundFillValue=dict(argstr='--backgroundFillValue %d',
),
checkerboardPatternSubdivisions=dict(argstr='--checkerboardPatternSubdivisions %s',
sep=',',
),
environ=dict(nohash=True,
usedefault=True,
),
fixedBinaryVolume=dict(argstr='--fixedBinaryVolume %s',
),
fixedVolume=dict(argstr='--fixedVolume %s',
),
gradient_type=dict(argstr='--gradient_type %s',
),
gui=dict(argstr='--gui ',
),
histogramMatch=dict(argstr='--histogramMatch ',
),
ignore_exception=dict(nohash=True,
usedefault=True,
),
initializeWithDisplacementField=dict(argstr='--initializeWithDisplacementField %s',
),
initializeWithTransform=dict(argstr='--initializeWithTransform %s',
),
inputPixelType=dict(argstr='--inputPixelType %s',
),
interpolationMode=dict(argstr='--interpolationMode %s',
),
lowerThresholdForBOBF=dict(argstr='--lowerThresholdForBOBF %d',
),
maskProcessingMode=dict(argstr='--maskProcessingMode %s',
),
max_step_length=dict(argstr='--max_step_length %f',
),
medianFilterSize=dict(argstr='--medianFilterSize %s',
sep=',',
),
minimumFixedPyramid=dict(argstr='--minimumFixedPyramid %s',
sep=',',
),
minimumMovingPyramid=dict(argstr='--minimumMovingPyramid %s',
sep=',',
),
movingBinaryVolume=dict(argstr='--movingBinaryVolume %s',
),
movingVolume=dict(argstr='--movingVolume %s',
),
neighborhoodForBOBF=dict(argstr='--neighborhoodForBOBF %s',
sep=',',
),
numberOfBCHApproximationTerms=dict(argstr='--numberOfBCHApproximationTerms %d',
),
numberOfHistogramBins=dict(argstr='--numberOfHistogramBins %d',
),
numberOfMatchPoints=dict(argstr='--numberOfMatchPoints %d',
),
numberOfPyramidLevels=dict(argstr='--numberOfPyramidLevels %d',
),
numberOfThreads=dict(argstr='--numberOfThreads %d',
),
outputCheckerboardVolume=dict(argstr='--outputCheckerboardVolume %s',
hash_files=False,
),
outputDebug=dict(argstr='--outputDebug ',
),
outputDisplacementFieldPrefix=dict(argstr='--outputDisplacementFieldPrefix %s',
),
outputDisplacementFieldVolume=dict(argstr='--outputDisplacementFieldVolume %s',
hash_files=False,
),
outputNormalized=dict(argstr='--outputNormalized ',
),
outputPixelType=dict(argstr='--outputPixelType %s',
),
outputVolume=dict(argstr='--outputVolume %s',
hash_files=False,
),
promptUser=dict(argstr='--promptUser ',
),
registrationFilterType=dict(argstr='--registrationFilterType %s',
),
seedForBOBF=dict(argstr='--seedForBOBF %s',
sep=',',
),
smoothDisplacementFieldSigma=dict(argstr='--smoothDisplacementFieldSigma %f',
),
terminal_output=dict(nohash=True,
),
upFieldSmoothing=dict(argstr='--upFieldSmoothing %f',
),
upperThresholdForBOBF=dict(argstr='--upperThresholdForBOBF %d',
),
use_vanilla_dem=dict(argstr='--use_vanilla_dem ',
),
)
inputs = BRAINSDemonWarp.input_spec()
for key, metadata in list(input_map.items()):
for metakey, value in list(metadata.items()):
yield assert_equal, getattr(inputs.traits()[key], metakey), value
def test_BRAINSDemonWarp_outputs():
output_map = dict(outputCheckerboardVolume=dict(),
outputDisplacementFieldVolume=dict(),
outputVolume=dict(),
)
outputs = BRAINSDemonWarp.output_spec()
for key, metadata in list(output_map.items()):
for metakey, value in list(metadata.items()):
yield assert_equal, getattr(outputs.traits()[key], metakey), value
| bsd-3-clause |
azumimuo/family-xbmc-addon | plugin.video.exodus/resources/lib/sources/tunemovie.py | 7 | 6553 | # -*- coding: utf-8 -*-
'''
Exodus Add-on
Copyright (C) 2016 Exodus
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
'''
import re,urllib,urlparse,json,base64
from resources.lib.modules import cleantitle
from resources.lib.modules import client
from resources.lib.modules import directstream
class source:
def __init__(self):
self.priority = 1
self.language = ['en']
self.domains = ['tunemovies.to', 'tunemovie.tv']
self.base_link = 'https://tunemovies.to'
self.search_link = '/search/%s.html'
def movie(self, imdb, title, localtitle, aliases, year):
try:
query = urlparse.urljoin(self.base_link, self.search_link)
query = query % urllib.quote_plus(title)
t = cleantitle.get(title)
r = client.request(query)
r = client.parseDOM(r, 'div', attrs = {'class': 'thumb'})
r = [(client.parseDOM(i, 'a', ret='href'), client.parseDOM(i, 'a', ret='title'), re.findall('(\d{4})', i)) for i in r]
r = [(i[0][0], i[1][0], i[2][0]) for i in r if len(i[0]) > 0 and len(i[1]) > 0 and len(i[2]) > 0]
url = [i[0] for i in r if t in cleantitle.get(i[1]) and year == i[2]][0]
return url
except:
return
def tvshow(self, imdb, tvdb, tvshowtitle, localtvshowtitle, aliases, year):
try:
url = {'imdb': imdb, 'tvdb': tvdb, 'tvshowtitle': tvshowtitle, 'year': year}
url = urllib.urlencode(url)
return url
except:
return
def episode(self, url, imdb, tvdb, title, premiered, season, episode):
try:
data = urlparse.parse_qs(url)
data = dict([(i, data[i][0]) if data[i] else (i, '') for i in data])
query = urlparse.urljoin(self.base_link, self.search_link)
query = query % urllib.quote_plus(data['tvshowtitle'])
t = cleantitle.get(data['tvshowtitle'])
r = client.request(query)
r = client.parseDOM(r, 'div', attrs = {'class': 'thumb'})
r = [(client.parseDOM(i, 'a', ret='href'), client.parseDOM(i, 'a', ret='title'), re.findall('(\d{4})', i)) for i in r]
r = [(i[0][0], i[1][0], i[2][0]) for i in r if len(i[0]) > 0 and len(i[1]) > 0 and len(i[2]) > 0]
url = [i[0] for i in r if t in cleantitle.get(i[1]) and ('Season %s' % season) in i[1]][0]
url += '?episode=%01d' % int(episode)
return url
except:
return
def sources(self, url, hostDict, hostprDict):
try:
sources = []
if url == None: return sources
url = urlparse.urljoin(self.base_link, url)
try:
url, episode = re.findall('(.+?)\?episode=(\d*)$', url)[0]
except:
episode = None
ref = url
for i in range(3):
result = client.request(url)
if not result == None: break
if not episode == None:
result = client.parseDOM(result, 'div', attrs = {'id': 'ip_episode'})[0]
ep_url = client.parseDOM(result, 'a', attrs = {'data-name': str(episode)}, ret='href')[0]
for i in range(3):
result = client.request(ep_url)
if not result == None: break
r = client.parseDOM(result, 'div', attrs = {'class': '[^"]*server_line[^"]*'})
for u in r:
try:
url = urlparse.urljoin(self.base_link, '/ip.file/swf/plugins/ipplugins.php')
p1 = client.parseDOM(u, 'a', ret='data-film')[0]
p2 = client.parseDOM(u, 'a', ret='data-server')[0]
p3 = client.parseDOM(u, 'a', ret='data-name')[0]
post = {'ipplugins': 1, 'ip_film': p1, 'ip_server': p2, 'ip_name': p3}
post = urllib.urlencode(post)
for i in range(3):
result = client.request(url, post=post, XHR=True, referer=ref, timeout='10')
if not result == None: break
result = json.loads(result)
u = result['s']
s = result['v']
url = urlparse.urljoin(self.base_link, '/ip.file/swf/ipplayer/ipplayer.php')
for n in range(3):
try:
post = {'u': u, 'w': '100%', 'h': '420', 's': s, 'n': n}
post = urllib.urlencode(post)
result = client.request(url, post=post, XHR=True, referer=ref)
src = json.loads(result)['data']
if type(src) is list:
src = [i['files'] for i in src]
for i in src:
try:
sources.append({'source': 'gvideo', 'quality': directstream.googletag(i)[0]['quality'], 'language': 'en', 'url': i, 'direct': True, 'debridonly': False})
except:
pass
else:
src = client.request(src)
src = client.parseDOM(src, 'source', ret='src', attrs = {'type': 'video.+?'})[0]
src += '|%s' % urllib.urlencode({'User-agent': client.randomagent()})
sources.append({'source': 'cdn', 'quality': 'HD', 'language': 'en', 'url': src, 'direct': False, 'debridonly': False})
except:
pass
except:
pass
return sources
except:
return sources
def resolve(self, url):
return directstream.googlepass(url)
| gpl-2.0 |
rwatson/chromium-capsicum | third_party/scons/scons-local/SCons/Tool/lex.py | 3 | 3285 | """SCons.Tool.lex
Tool-specific initialization for lex.
There normally shouldn't be any need to import this module directly.
It will usually be imported through the generic SCons.Tool.Tool()
selection method.
"""
#
# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009 The SCons Foundation
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
__revision__ = "src/engine/SCons/Tool/lex.py 3897 2009/01/13 06:45:54 scons"
import os.path
import string
import SCons.Action
import SCons.Tool
import SCons.Util
LexAction = SCons.Action.Action("$LEXCOM", "$LEXCOMSTR")
def lexEmitter(target, source, env):
sourceBase, sourceExt = os.path.splitext(SCons.Util.to_String(source[0]))
if sourceExt == ".lm": # If using Objective-C
target = [sourceBase + ".m"] # the extension is ".m".
# This emitter essentially tries to add to the target all extra
# files generated by flex.
# Different options that are used to trigger the creation of extra files.
fileGenOptions = ["--header-file=", "--tables-file="]
lexflags = env.subst("$LEXFLAGS", target=target, source=source)
for option in SCons.Util.CLVar(lexflags):
for fileGenOption in fileGenOptions:
l = len(fileGenOption)
if option[:l] == fileGenOption:
# A file generating option is present, so add the
# file name to the target list.
fileName = string.strip(option[l:])
target.append(fileName)
return (target, source)
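# Hedged usage sketch (illustrative; the file names are hypothetical): with
#
#   env = Environment(tools=['lex'])
#   env.CFile('scanner.c', 'scanner.l', LEXFLAGS='--header-file=scanner.h')
#
# lexEmitter() above detects the --header-file= option in $LEXFLAGS and adds
# scanner.h to the target list, so the generated header is tracked by SCons.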
def generate(env):
"""Add Builders and construction variables for lex to an Environment."""
c_file, cxx_file = SCons.Tool.createCFileBuilders(env)
# C
c_file.add_action(".l", LexAction)
c_file.add_emitter(".l", lexEmitter)
c_file.add_action(".lex", LexAction)
c_file.add_emitter(".lex", lexEmitter)
# Objective-C
cxx_file.add_action(".lm", LexAction)
cxx_file.add_emitter(".lm", lexEmitter)
# C++
cxx_file.add_action(".ll", LexAction)
cxx_file.add_emitter(".ll", lexEmitter)
env["LEX"] = env.Detect("flex") or "lex"
env["LEXFLAGS"] = SCons.Util.CLVar("")
env["LEXCOM"] = "$LEX $LEXFLAGS -t $SOURCES > $TARGET"
def exists(env):
return env.Detect(["flex", "lex"])
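# Illustrative usage (not part of the original tool file): a minimal SConstruct
# fragment, assuming flex or lex is on the PATH. The file names 'scanner.l' and
# 'scanner.ll' are hypothetical; Environment() and the CFile/CXXFile builders this
# module extends are supplied by SCons when it executes an SConstruct.
#
#   env = Environment(tools=['default', 'lex'])
#   env.Append(LEXFLAGS=['--header-file=scanner.h'])     # lexEmitter also declares scanner.h as a target
#   env.CFile(target='scanner.c', source='scanner.l')    # runs "$LEX $LEXFLAGS -t scanner.l > scanner.c"
#   env.CXXFile(target='scanner.cc', source='scanner.ll')  # C++ scanners use the .ll suffix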
| bsd-3-clause |
czchen/debian-pgcli | pgcli/packages/expanded.py | 2 | 1174 | from .tabulate import _text_type
def pad(field, total, char=u" "):
return field + (char * (total - len(field)))
def get_separator(num, header_len, data_len):
total_len = header_len + data_len + 1
sep = u"-[ RECORD {0} ]".format(num)
if len(sep) < header_len:
sep = pad(sep, header_len - 1, u"-") + u"+"
if len(sep) < total_len:
sep = pad(sep, total_len, u"-")
return sep + u"\n"
def expanded_table(rows, headers):
header_len = max([len(x) for x in headers])
max_row_len = 0
results = []
padded_headers = [pad(x, header_len) + u" |" for x in headers]
header_len += 2
for row in rows:
row_len = max([len(_text_type(x)) for x in row])
row_result = []
if row_len > max_row_len:
max_row_len = row_len
for header, value in zip(padded_headers, row):
row_result.append(u"%s %s" % (header, value))
results.append('\n'.join(row_result))
output = []
for i, result in enumerate(results):
output.append(get_separator(i, header_len, max_row_len))
output.append(result)
output.append('\n')
return ''.join(output)
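# Small usage sketch (not part of pgcli): the relative import above means this module
# is normally used through the pgcli package, and the column names and rows below are
# made up.
#
#   print(expanded_table([(1, 'alice'), (2, 'bob')], ['id', 'name']))
#
# produces output along these lines, similar to psql's expanded (\x) mode:
#
#   -[ RECORD 0 ]
#   id   | 1
#   name | alice
#   -[ RECORD 1 ]
#   id   | 2
#   name | bob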
| bsd-3-clause |
spadae22/odoo | addons/procurement/__openerp__.py | 267 | 2661 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{
'name' : 'Procurements',
'version' : '1.0',
'author' : 'OpenERP SA',
'website': 'https://www.odoo.com/page/manufacturing',
'category' : 'Hidden/Dependency',
'depends' : ['base', 'product'],
'description': """
This is the module for computing Procurements.
==============================================
This procurement module only depends on the product module and is not useful
on its own. Procurements represent needs that have to be solved by a procurement
rule. When a procurement is created, it is confirmed. When a rule is found,
the procurement is put in the running state. The module then checks whether what
the rule required has been executed, and the procurement moves to the done state.
A procurement can also go into exception, for example when no rule can be found, and it can be cancelled.
The mechanism will be extended by several modules. The procurement rule of stock will
create a move and the procurement will be fulfilled when the move is done.
The procurement rule of sale_service will create a task. Those of purchase or
mrp will create a purchase order or a manufacturing order.
The scheduler checks whether it can assign a rule to confirmed procurements and whether
it can move running procurements to the done state.
Procurements in exception should be checked manually and can be re-run.
""",
'data': [
'security/ir.model.access.csv',
'security/procurement_security.xml',
'procurement_data.xml',
'wizard/schedulers_all_view.xml',
'procurement_view.xml',
'company_view.xml',
],
'demo': [],
'test': ['test/procurement.yml'],
'installable': True,
'auto_install': True,
}
| agpl-3.0 |
LuqueDaniel/ninja-ide | ninja_ide/gui/editor/checkers/__init__.py | 7 | 1735 | # -*- coding: utf-8 -*-
#
# This file is part of NINJA-IDE (http://ninja-ide.org).
#
# NINJA-IDE is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# any later version.
#
# NINJA-IDE is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with NINJA-IDE; If not, see <http://www.gnu.org/licenses/>.
from __future__ import absolute_import
from __future__ import unicode_literals
NOTIFICATIONS_CHECKERS = {}
def register_checker(lang='python', checker=None, color=None, priority=1):
"""Register a Checker (Like PEP8, Lint, etc) for some language.
@lang: language that the checker apply.
@checker: Class to be instantiated.
@color: the color that this checker will use.
@priority: the priority of this checker (1=LOW, >1 = HIGH...)"""
global NOTIFICATIONS_CHECKERS
checkers = NOTIFICATIONS_CHECKERS.get(lang, [])
checkers.append((checker, color, priority))
NOTIFICATIONS_CHECKERS[lang] = checkers
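# Illustrative sketch only (not part of NINJA-IDE): _DemoChecker is a hypothetical
# checker class; real checkers are instantiated by the editor, which passes itself
# to the constructor. The colour and priority values here are arbitrary.
if __name__ == '__main__':
    class _DemoChecker(object):
        def __init__(self, editor):
            self._editor = editor

    register_checker(lang='python', checker=_DemoChecker,
                     color='#ff0000', priority=2)
    print(NOTIFICATIONS_CHECKERS['python'])  # -> [(_DemoChecker, '#ff0000', 2)]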
def remove_checker(checker):
global NOTIFICATIONS_CHECKERS
checkers = NOTIFICATIONS_CHECKERS.get('python', [])
if checker in checkers:
checkers.remove(checker)
NOTIFICATIONS_CHECKERS['python'] = checkers
def get_checkers_for(lang='python'):
"""Get a registered checker for some language."""
global NOTIFICATIONS_CHECKERS
    return NOTIFICATIONS_CHECKERS.get(lang, [])
| gpl-3.0 |
primoz-k/cookiecutter-django | {{cookiecutter.repo_name}}/{{cookiecutter.repo_name}}/contrib/sites/migrations/0001_initial.py | 348 | 1025 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
import django.contrib.sites.models
class Migration(migrations.Migration):
dependencies = [
]
operations = [
migrations.CreateModel(
name='Site',
fields=[
('id', models.AutoField(verbose_name='ID', primary_key=True, serialize=False, auto_created=True)),
('domain', models.CharField(verbose_name='domain name', max_length=100, validators=[django.contrib.sites.models._simple_domain_name_validator])),
('name', models.CharField(verbose_name='display name', max_length=50)),
],
options={
'verbose_name_plural': 'sites',
'verbose_name': 'site',
'db_table': 'django_site',
'ordering': ('domain',),
},
managers=[
(b'objects', django.contrib.sites.models.SiteManager()),
],
),
]
| bsd-3-clause |
slightlymadphoenix/activityPointsApp | activitypoints/lib/python3.5/site-packages/django/db/backends/mysql/base.py | 44 | 16091 | """
MySQL database backend for Django.
Requires mysqlclient: https://pypi.python.org/pypi/mysqlclient/
MySQLdb is supported for Python 2 only: http://sourceforge.net/projects/mysql-python
"""
from __future__ import unicode_literals
import datetime
import re
import sys
import warnings
from django.conf import settings
from django.core.exceptions import ImproperlyConfigured
from django.db import utils
from django.db.backends import utils as backend_utils
from django.db.backends.base.base import BaseDatabaseWrapper
from django.utils import six, timezone
from django.utils.deprecation import RemovedInDjango20Warning
from django.utils.encoding import force_str
from django.utils.functional import cached_property
from django.utils.safestring import SafeBytes, SafeText
try:
import MySQLdb as Database
except ImportError as e:
raise ImproperlyConfigured(
'Error loading MySQLdb module: %s.\n'
'Did you install mysqlclient or MySQL-python?' % e
)
from MySQLdb.constants import CLIENT, FIELD_TYPE # isort:skip
from MySQLdb.converters import Thing2Literal, conversions # isort:skip
# Some of these import MySQLdb, so import them after checking if it's installed.
from .client import DatabaseClient # isort:skip
from .creation import DatabaseCreation # isort:skip
from .features import DatabaseFeatures # isort:skip
from .introspection import DatabaseIntrospection # isort:skip
from .operations import DatabaseOperations # isort:skip
from .schema import DatabaseSchemaEditor # isort:skip
from .validation import DatabaseValidation # isort:skip
version = Database.version_info
if version < (1, 2, 3):
raise ImproperlyConfigured(
"MySQLdb/mysqlclient 1.2.3 or newer is required; you have %s"
% Database.__version__
)
def adapt_datetime_warn_on_aware_datetime(value, conv):
# Remove this function and rely on the default adapter in Django 2.0.
if settings.USE_TZ and timezone.is_aware(value):
warnings.warn(
"The MySQL database adapter received an aware datetime (%s), "
"probably from cursor.execute(). Update your code to pass a "
"naive datetime in the database connection's time zone (UTC by "
"default).", RemovedInDjango20Warning)
# This doesn't account for the database connection's timezone,
# which isn't known. (That's why this adapter is deprecated.)
value = value.astimezone(timezone.utc).replace(tzinfo=None)
return Thing2Literal(value.strftime("%Y-%m-%d %H:%M:%S.%f"), conv)
# MySQLdb returns TIME columns as timedelta -- they are more like timedelta in
# terms of actual behavior as they are signed and include days -- and Django
# expects time, so we still need to override that. We also need to add special
# handling for SafeText and SafeBytes as MySQLdb's type checking is too tight
# to catch those (see Django ticket #6052).
django_conversions = conversions.copy()
django_conversions.update({
FIELD_TYPE.TIME: backend_utils.typecast_time,
FIELD_TYPE.DECIMAL: backend_utils.typecast_decimal,
FIELD_TYPE.NEWDECIMAL: backend_utils.typecast_decimal,
datetime.datetime: adapt_datetime_warn_on_aware_datetime,
})
# This should match the numerical portion of the version numbers (we can treat
# versions like 5.0.24 and 5.0.24a as the same).
server_version_re = re.compile(r'(\d{1,2})\.(\d{1,2})\.(\d{1,2})')
class CursorWrapper(object):
"""
A thin wrapper around MySQLdb's normal cursor class so that we can catch
particular exception instances and reraise them with the right types.
Implemented as a wrapper, rather than a subclass, so that we aren't stuck
to the particular underlying representation returned by Connection.cursor().
"""
codes_for_integrityerror = (1048,)
def __init__(self, cursor):
self.cursor = cursor
def execute(self, query, args=None):
try:
# args is None means no string interpolation
return self.cursor.execute(query, args)
except Database.OperationalError as e:
# Map some error codes to IntegrityError, since they seem to be
# misclassified and Django would prefer the more logical place.
if e.args[0] in self.codes_for_integrityerror:
six.reraise(utils.IntegrityError, utils.IntegrityError(*tuple(e.args)), sys.exc_info()[2])
raise
def executemany(self, query, args):
try:
return self.cursor.executemany(query, args)
except Database.OperationalError as e:
# Map some error codes to IntegrityError, since they seem to be
# misclassified and Django would prefer the more logical place.
if e.args[0] in self.codes_for_integrityerror:
six.reraise(utils.IntegrityError, utils.IntegrityError(*tuple(e.args)), sys.exc_info()[2])
raise
def __getattr__(self, attr):
if attr in self.__dict__:
return self.__dict__[attr]
else:
return getattr(self.cursor, attr)
def __iter__(self):
return iter(self.cursor)
def __enter__(self):
return self
def __exit__(self, type, value, traceback):
# Close instead of passing through to avoid backend-specific behavior
# (#17671).
self.close()
class DatabaseWrapper(BaseDatabaseWrapper):
vendor = 'mysql'
# This dictionary maps Field objects to their associated MySQL column
# types, as strings. Column-type strings can contain format strings; they'll
# be interpolated against the values of Field.__dict__ before being output.
# If a column type is set to None, it won't be included in the output.
_data_types = {
'AutoField': 'integer AUTO_INCREMENT',
'BigAutoField': 'bigint AUTO_INCREMENT',
'BinaryField': 'longblob',
'BooleanField': 'bool',
'CharField': 'varchar(%(max_length)s)',
'CommaSeparatedIntegerField': 'varchar(%(max_length)s)',
'DateField': 'date',
'DateTimeField': 'datetime',
'DecimalField': 'numeric(%(max_digits)s, %(decimal_places)s)',
'DurationField': 'bigint',
'FileField': 'varchar(%(max_length)s)',
'FilePathField': 'varchar(%(max_length)s)',
'FloatField': 'double precision',
'IntegerField': 'integer',
'BigIntegerField': 'bigint',
'IPAddressField': 'char(15)',
'GenericIPAddressField': 'char(39)',
'NullBooleanField': 'bool',
'OneToOneField': 'integer',
'PositiveIntegerField': 'integer UNSIGNED',
'PositiveSmallIntegerField': 'smallint UNSIGNED',
'SlugField': 'varchar(%(max_length)s)',
'SmallIntegerField': 'smallint',
'TextField': 'longtext',
'TimeField': 'time',
'UUIDField': 'char(32)',
}
@cached_property
def data_types(self):
if self.features.supports_microsecond_precision:
return dict(self._data_types, DateTimeField='datetime(6)', TimeField='time(6)')
else:
return self._data_types
operators = {
'exact': '= %s',
'iexact': 'LIKE %s',
'contains': 'LIKE BINARY %s',
'icontains': 'LIKE %s',
'regex': 'REGEXP BINARY %s',
'iregex': 'REGEXP %s',
'gt': '> %s',
'gte': '>= %s',
'lt': '< %s',
'lte': '<= %s',
'startswith': 'LIKE BINARY %s',
'endswith': 'LIKE BINARY %s',
'istartswith': 'LIKE %s',
'iendswith': 'LIKE %s',
}
# The patterns below are used to generate SQL pattern lookup clauses when
# the right-hand side of the lookup isn't a raw string (it might be an expression
# or the result of a bilateral transformation).
# In those cases, special characters for LIKE operators (e.g. \, *, _) should be
# escaped on database side.
#
# Note: we use str.format() here for readability as '%' is used as a wildcard for
# the LIKE operator.
pattern_esc = r"REPLACE(REPLACE(REPLACE({}, '\\', '\\\\'), '%%', '\%%'), '_', '\_')"
pattern_ops = {
'contains': "LIKE BINARY CONCAT('%%', {}, '%%')",
'icontains': "LIKE CONCAT('%%', {}, '%%')",
'startswith': "LIKE BINARY CONCAT({}, '%%')",
'istartswith': "LIKE CONCAT({}, '%%')",
'endswith': "LIKE BINARY CONCAT('%%', {})",
'iendswith': "LIKE CONCAT('%%', {})",
}
isolation_levels = {
'read uncommitted',
'read committed',
'repeatable read',
'serializable',
}
Database = Database
SchemaEditorClass = DatabaseSchemaEditor
# Classes instantiated in __init__().
client_class = DatabaseClient
creation_class = DatabaseCreation
features_class = DatabaseFeatures
introspection_class = DatabaseIntrospection
ops_class = DatabaseOperations
validation_class = DatabaseValidation
def get_connection_params(self):
kwargs = {
'conv': django_conversions,
'charset': 'utf8',
}
if six.PY2:
kwargs['use_unicode'] = True
settings_dict = self.settings_dict
if settings_dict['USER']:
kwargs['user'] = settings_dict['USER']
if settings_dict['NAME']:
kwargs['db'] = settings_dict['NAME']
if settings_dict['PASSWORD']:
kwargs['passwd'] = force_str(settings_dict['PASSWORD'])
if settings_dict['HOST'].startswith('/'):
kwargs['unix_socket'] = settings_dict['HOST']
elif settings_dict['HOST']:
kwargs['host'] = settings_dict['HOST']
if settings_dict['PORT']:
kwargs['port'] = int(settings_dict['PORT'])
# We need the number of potentially affected rows after an
# "UPDATE", not the number of changed rows.
kwargs['client_flag'] = CLIENT.FOUND_ROWS
# Validate the transaction isolation level, if specified.
options = settings_dict['OPTIONS'].copy()
isolation_level = options.pop('isolation_level', None)
if isolation_level:
isolation_level = isolation_level.lower()
if isolation_level not in self.isolation_levels:
raise ImproperlyConfigured(
"Invalid transaction isolation level '%s' specified.\n"
"Use one of %s, or None." % (
isolation_level,
', '.join("'%s'" % s for s in sorted(self.isolation_levels))
))
# The variable assignment form of setting transaction isolation
# levels will be used, e.g. "set tx_isolation='repeatable-read'".
isolation_level = isolation_level.replace(' ', '-')
self.isolation_level = isolation_level
kwargs.update(options)
return kwargs
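    # Illustrative settings sketch (not part of this backend): selecting the
    # transaction isolation level validated above from a project's settings module.
    # The database name and credentials are placeholders.
    #
    #   DATABASES = {
    #       'default': {
    #           'ENGINE': 'django.db.backends.mysql',
    #           'NAME': 'example_db',
    #           'USER': 'example_user',
    #           'PASSWORD': 'secret',
    #           'OPTIONS': {'isolation_level': 'read committed'},
    #       },
    #   }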
def get_new_connection(self, conn_params):
conn = Database.connect(**conn_params)
conn.encoders[SafeText] = conn.encoders[six.text_type]
conn.encoders[SafeBytes] = conn.encoders[bytes]
return conn
def init_connection_state(self):
assignments = []
if self.features.is_sql_auto_is_null_enabled:
# SQL_AUTO_IS_NULL controls whether an AUTO_INCREMENT column on
# a recently inserted row will return when the field is tested
# for NULL. Disabling this brings this aspect of MySQL in line
# with SQL standards.
assignments.append('SQL_AUTO_IS_NULL = 0')
if self.isolation_level:
assignments.append("TX_ISOLATION = '%s'" % self.isolation_level)
if assignments:
with self.cursor() as cursor:
cursor.execute('SET ' + ', '.join(assignments))
def create_cursor(self, name=None):
cursor = self.connection.cursor()
return CursorWrapper(cursor)
def _rollback(self):
try:
BaseDatabaseWrapper._rollback(self)
except Database.NotSupportedError:
pass
def _set_autocommit(self, autocommit):
with self.wrap_database_errors:
self.connection.autocommit(autocommit)
def disable_constraint_checking(self):
"""
Disables foreign key checks, primarily for use in adding rows with forward references. Always returns True,
to indicate constraint checks need to be re-enabled.
"""
self.cursor().execute('SET foreign_key_checks=0')
return True
def enable_constraint_checking(self):
"""
Re-enable foreign key checks after they have been disabled.
"""
# Override needs_rollback in case constraint_checks_disabled is
# nested inside transaction.atomic.
self.needs_rollback, needs_rollback = False, self.needs_rollback
try:
self.cursor().execute('SET foreign_key_checks=1')
finally:
self.needs_rollback = needs_rollback
def check_constraints(self, table_names=None):
"""
Checks each table name in `table_names` for rows with invalid foreign
key references. This method is intended to be used in conjunction with
`disable_constraint_checking()` and `enable_constraint_checking()`, to
determine if rows with invalid references were entered while constraint
checks were off.
Raises an IntegrityError on the first invalid foreign key reference
encountered (if any) and provides detailed information about the
invalid reference in the error message.
Backends can override this method if they can more directly apply
constraint checking (e.g. via "SET CONSTRAINTS ALL IMMEDIATE")
"""
cursor = self.cursor()
if table_names is None:
table_names = self.introspection.table_names(cursor)
for table_name in table_names:
primary_key_column_name = self.introspection.get_primary_key_column(cursor, table_name)
if not primary_key_column_name:
continue
key_columns = self.introspection.get_key_columns(cursor, table_name)
for column_name, referenced_table_name, referenced_column_name in key_columns:
cursor.execute(
"""
SELECT REFERRING.`%s`, REFERRING.`%s` FROM `%s` as REFERRING
LEFT JOIN `%s` as REFERRED
ON (REFERRING.`%s` = REFERRED.`%s`)
WHERE REFERRING.`%s` IS NOT NULL AND REFERRED.`%s` IS NULL
""" % (
primary_key_column_name, column_name, table_name,
referenced_table_name, column_name, referenced_column_name,
column_name, referenced_column_name,
)
)
for bad_row in cursor.fetchall():
raise utils.IntegrityError(
"The row in table '%s' with primary key '%s' has an invalid "
"foreign key: %s.%s contains a value '%s' that does not have a corresponding value in %s.%s."
% (
table_name, bad_row[0], table_name, column_name,
bad_row[1], referenced_table_name, referenced_column_name,
)
)
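    # Illustrative sketch (not part of this module): loading rows that contain forward
    # references and validating them afterwards with the three methods above.
    # 'connection' stands for django.db.connection and load_rows_out_of_order() is a
    # hypothetical helper.
    #
    #   from django.db import connection, transaction
    #   with transaction.atomic():
    #       connection.disable_constraint_checking()
    #       try:
    #           load_rows_out_of_order()        # may insert rows before their FK targets
    #       finally:
    #           connection.enable_constraint_checking()
    #       connection.check_constraints()      # raises IntegrityError on dangling references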
def is_usable(self):
try:
self.connection.ping()
except Database.Error:
return False
else:
return True
@cached_property
def mysql_version(self):
with self.temporary_connection() as cursor:
cursor.execute('SELECT VERSION()')
server_info = cursor.fetchone()[0]
match = server_version_re.match(server_info)
if not match:
raise Exception('Unable to determine MySQL version from version string %r' % server_info)
return tuple(int(x) for x in match.groups())
| mit |
resmo/ansible | test/units/modules/network/onyx/test_onyx_ospf.py | 68 | 4494 | #
# Copyright: Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
from units.compat.mock import patch
from ansible.modules.network.onyx import onyx_ospf
from units.modules.utils import set_module_args
from .onyx_module import TestOnyxModule, load_fixture
class TestOnyxOspfModule(TestOnyxModule):
module = onyx_ospf
def setUp(self):
super(TestOnyxOspfModule, self).setUp()
self._ospf_exists = True
self.mock_get_config = patch.object(
onyx_ospf.OnyxOspfModule,
"_get_ospf_config")
self.get_config = self.mock_get_config.start()
self.mock_get_interfaces_config = patch.object(
onyx_ospf.OnyxOspfModule,
"_get_ospf_interfaces_config")
self.get_interfaces_config = self.mock_get_interfaces_config.start()
self.mock_load_config = patch(
'ansible.module_utils.network.onyx.onyx.load_config')
self.load_config = self.mock_load_config.start()
def tearDown(self):
super(TestOnyxOspfModule, self).tearDown()
self.mock_get_config.stop()
self.mock_load_config.stop()
def load_fixtures(self, commands=None, transport='cli'):
if self._ospf_exists:
config_file = 'onyx_ospf_show.cfg'
self.get_config.return_value = load_fixture(config_file)
config_file = 'onyx_ospf_interfaces_show.cfg'
self.get_interfaces_config.return_value = load_fixture(config_file)
else:
self.get_config.return_value = None
self.get_interfaces_config.return_value = None
self.load_config.return_value = None
def test_ospf_absent_no_change(self):
set_module_args(dict(ospf=3, state='absent'))
self.execute_module(changed=False)
def test_ospf_present_no_change(self):
interface = dict(name='Loopback 1', area='0.0.0.0')
set_module_args(dict(ospf=2, router_id='10.2.3.4',
interfaces=[interface]))
self.execute_module(changed=False)
def test_ospf_present_remove(self):
set_module_args(dict(ospf=2, state='absent'))
commands = ['no router ospf 2']
self.execute_module(changed=True, commands=commands)
def test_ospf_change_router(self):
interface = dict(name='Loopback 1', area='0.0.0.0')
set_module_args(dict(ospf=2, router_id='10.2.3.5',
interfaces=[interface]))
commands = ['router ospf 2', 'router-id 10.2.3.5', 'exit']
self.execute_module(changed=True, commands=commands, sort=False)
def test_ospf_remove_router(self):
interface = dict(name='Loopback 1', area='0.0.0.0')
set_module_args(dict(ospf=2, interfaces=[interface]))
commands = ['router ospf 2', 'no router-id', 'exit']
self.execute_module(changed=True, commands=commands, sort=False)
def test_ospf_add_interface(self):
interfaces = [dict(name='Loopback 1', area='0.0.0.0'),
dict(name='Loopback 2', area='0.0.0.0')]
set_module_args(dict(ospf=2, router_id='10.2.3.4',
interfaces=interfaces))
commands = ['interface loopback 2 ip ospf area 0.0.0.0']
self.execute_module(changed=True, commands=commands)
def test_ospf_remove_interface(self):
set_module_args(dict(ospf=2, router_id='10.2.3.4'))
commands = ['interface loopback 1 no ip ospf area']
self.execute_module(changed=True, commands=commands)
def test_ospf_add(self):
self._ospf_exists = False
interfaces = [dict(name='Loopback 1', area='0.0.0.0'),
dict(name='Vlan 210', area='0.0.0.0'),
dict(name='Eth1/1', area='0.0.0.0'),
dict(name='Po1', area='0.0.0.0')]
set_module_args(dict(ospf=2, router_id='10.2.3.4',
interfaces=interfaces))
commands = ['router ospf 2', 'router-id 10.2.3.4', 'exit',
'interface loopback 1 ip ospf area 0.0.0.0',
'interface vlan 210 ip ospf area 0.0.0.0',
'interface ethernet 1/1 ip ospf area 0.0.0.0',
'interface port-channel 1 ip ospf area 0.0.0.0']
self.execute_module(changed=True, commands=commands)
| gpl-3.0 |
moreati/django-allauth | allauth/socialaccount/providers/twitch/views.py | 62 | 1037 | import requests
from allauth.socialaccount.providers.oauth2.views import (OAuth2Adapter,
OAuth2LoginView,
OAuth2CallbackView)
from .provider import TwitchProvider
class TwitchOAuth2Adapter(OAuth2Adapter):
provider_id = TwitchProvider.id
access_token_url = 'https://api.twitch.tv/kraken/oauth2/token'
authorize_url = 'https://api.twitch.tv/kraken/oauth2/authorize'
profile_url = 'https://api.twitch.tv/kraken/user'
def complete_login(self, request, app, token, **kwargs):
resp = requests.get(self.profile_url,
params={'oauth_token': token.token})
extra_data = resp.json()
return self.get_provider().sociallogin_from_response(request,
extra_data)
oauth2_login = OAuth2LoginView.adapter_view(TwitchOAuth2Adapter)
oauth2_callback = OAuth2CallbackView.adapter_view(TwitchOAuth2Adapter)
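# Illustrative configuration sketch (not part of this file): enabling the provider in
# a Django project. The app path mirrors this module's package location; the client
# id and secret are normally entered as a SocialApp through the admin rather than
# hard-coded in settings.
#
#   INSTALLED_APPS += [
#       'allauth',
#       'allauth.socialaccount',
#       'allauth.socialaccount.providers.twitch',
#   ]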
| mit |
papaloizouc/battlehack2014 | website/spameggs/spameggs/settings.py | 1 | 2945 | """
Django settings for spameggs project.
For more information on this file, see
https://docs.djangoproject.com/en/1.7/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.7/ref/settings/
"""
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
import os
BASE_DIR = os.path.dirname(os.path.dirname(__file__))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.7/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'ue(sv)*=^m!z&a!8t(f1&zvf__mpvi(jck+0w$%uo_07_k69x!'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
TEMPLATE_DEBUG = True
ALLOWED_HOSTS = []
BASE_DIR = os.path.dirname(os.path.dirname(__file__))
TOP_DIR = os.path.abspath(os.path.join(os.path.dirname(__file__), '..'))
def here(*args):
return os.path.realpath(os.path.join(TOP_DIR, *args))
TEMPLATE_DIRS = (
here('theapp/templates'),
)
STATICFILES_DIRS = (
here('theapp/static'),
)
# Application definition
INSTALLED_APPS = (
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'theapp',
)
MIDDLEWARE_CLASSES = (
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.auth.middleware.SessionAuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
)
AUTHENTICATION_BACKENDS = (
'django.contrib.auth.backends.ModelBackend',
)
ROOT_URLCONF = 'spameggs.urls'
WSGI_APPLICATION = 'spameggs.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.7/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql_psycopg2',
'NAME': 'battlehack',
'USER': 'battlehack',
'PASSWORD': 'battlehack',
'HOST': 'localhost',
'PORT': '',
}
}
# Internationalization
# https://docs.djangoproject.com/en/1.7/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.7/howto/static-files/
STATIC_URL = '/static/'
foo = {}
LOGGING = {
'version': 1,
'handlers': {
'console': {
'level': 'DEBUG',
'class': 'logging.StreamHandler',
},
},
'loggers': {
'django.request': {
'handlers': ['console'],
'propagate': True,
'level': 'DEBUG',
}
},
}
X_FRAME_OPTIONS = 'DENY'
AUTH_USER_MODEL = 'theapp.User'
| gpl-2.0 |
sugarlabs/sugar | src/jarabe/model/notifications.py | 4 | 4491 | # Copyright (C) 2008 One Laptop Per Child
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import sys
import logging
import dbus
from sugar3 import dispatch
from jarabe import config
_DBUS_SERVICE = 'org.freedesktop.Notifications'
_DBUS_IFACE = 'org.freedesktop.Notifications'
_DBUS_PATH = '/org/freedesktop/Notifications'
_instance = None
class NotificationService(dbus.service.Object):
def __init__(self):
bus = dbus.SessionBus()
bus_name = dbus.service.BusName(_DBUS_SERVICE, bus=bus)
dbus.service.Object.__init__(self, bus_name, _DBUS_PATH)
self._notification_counter = 0
self.notification_received = dispatch.Signal()
self.notification_cancelled = dispatch.Signal()
self._buffer = {}
self.buffer_cleared = dispatch.Signal()
def retrieve_by_name(self, name):
if name in self._buffer:
return self._buffer[name]
return None
def clear_by_name(self, name):
if name in self._buffer:
del self._buffer[name]
self.buffer_cleared.send(self, app_name=name)
@dbus.service.method(_DBUS_IFACE,
in_signature='susssava{sv}i', out_signature='u')
def Notify(self, app_name, replaces_id, app_icon, summary, body, actions,
hints, expire_timeout):
logging.debug('Received notification: %r',
[app_name, replaces_id,
'<app_icon>', summary, body, actions, '<hints>',
expire_timeout])
if replaces_id > 0:
notification_id = replaces_id
else:
if self._notification_counter == sys.maxsize:
self._notification_counter = 1
else:
self._notification_counter += 1
notification_id = self._notification_counter
if app_name not in self._buffer:
self._buffer[app_name] = []
self._buffer[app_name].append({'app_name': app_name,
'replaces_id': replaces_id,
'app_icon': app_icon,
'summary': summary,
'body': body,
'actions': actions,
'hints': hints,
'expire_timeout': expire_timeout})
self.notification_received.send(self,
app_name=app_name,
replaces_id=replaces_id,
app_icon=app_icon,
summary=summary,
body=body,
actions=actions,
hints=hints,
expire_timeout=expire_timeout)
return notification_id
@dbus.service.method(_DBUS_IFACE, in_signature='u', out_signature='')
def CloseNotification(self, notification_id):
self.notification_cancelled.send(self, notification_id=notification_id)
@dbus.service.method(_DBUS_IFACE, in_signature='', out_signature='as')
def GetCapabilities(self):
return []
@dbus.service.method(_DBUS_IFACE, in_signature='', out_signature='sss')
def GetServerInformation(self, name, vendor, version):
return 'Sugar Shell', 'Sugar', config.version
@dbus.service.signal(_DBUS_IFACE, signature='uu')
def NotificationClosed(self, notification_id, reason):
pass
@dbus.service.signal(_DBUS_IFACE, signature='us')
def ActionInvoked(self, notification_id, action_key):
pass
def get_service():
global _instance
if not _instance:
_instance = NotificationService()
return _instance
def init():
get_service()
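# Illustrative client sketch (not part of Sugar): posting a notification to this
# service over the session bus through the standard org.freedesktop.Notifications
# interface implemented above. It assumes a running Sugar session where this module's
# imports resolve; the summary and body text are made up.
if __name__ == '__main__':
    bus = dbus.SessionBus()
    obj = bus.get_object(_DBUS_SERVICE, _DBUS_PATH)
    notifications = dbus.Interface(obj, _DBUS_IFACE)
    notification_id = notifications.Notify(
        'example-app',                   # app_name
        0,                               # replaces_id (0 means a new notification)
        '',                              # app_icon
        'Hello',                         # summary
        'Sent from the example client',  # body
        [],                              # actions
        {},                              # hints
        -1)                              # expire_timeout (-1 lets the server decide)
    print('notification id: %s' % notification_id)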
| gpl-3.0 |
giorgiop/scikit-learn | doc/tutorial/text_analytics/solutions/exercise_01_language_train_model.py | 73 | 2264 | """Build a language detector model
The goal of this exercise is to train a linear classifier on text features
that represent sequences of up to 3 consecutive characters so as to
recognize natural languages by using the frequencies of short character
sequences as 'fingerprints'.
"""
# Author: Olivier Grisel <[email protected]>
# License: Simplified BSD
import sys
from sklearn.feature_extraction.text import TfidfVectorizer
from sklearn.linear_model import Perceptron
from sklearn.pipeline import Pipeline
from sklearn.datasets import load_files
from sklearn.model_selection import train_test_split
from sklearn import metrics
# The training data folder must be passed as first argument
languages_data_folder = sys.argv[1]
dataset = load_files(languages_data_folder)
# Split the dataset in training and test set:
docs_train, docs_test, y_train, y_test = train_test_split(
dataset.data, dataset.target, test_size=0.5)
# TASK: Build a vectorizer that splits strings into sequence of 1 to 3
# characters instead of word tokens
vectorizer = TfidfVectorizer(ngram_range=(1, 3), analyzer='char',
use_idf=False)
# TASK: Build a vectorizer / classifier pipeline using the previous analyzer
# the pipeline instance should stored in a variable named clf
clf = Pipeline([
('vec', vectorizer),
('clf', Perceptron()),
])
# TASK: Fit the pipeline on the training set
clf.fit(docs_train, y_train)
# TASK: Predict the outcome on the testing set in a variable named y_predicted
y_predicted = clf.predict(docs_test)
# Print the classification report
print(metrics.classification_report(y_test, y_predicted,
target_names=dataset.target_names))
# Plot the confusion matrix
cm = metrics.confusion_matrix(y_test, y_predicted)
print(cm)
#import matplotlib.pyplot as plt
#plt.matshow(cm, cmap=plt.cm.jet)
#plt.show()
# Predict the result on some short new sentences:
sentences = [
u'This is a language detection test.',
u'Ceci est un test de d\xe9tection de la langue.',
u'Dies ist ein Test, um die Sprache zu erkennen.',
]
predicted = clf.predict(sentences)
for s, p in zip(sentences, predicted):
print(u'The language of "%s" is "%s"' % (s, dataset.target_names[p]))
| bsd-3-clause |
sarakha63/persomov | libs/git/ref.py | 110 | 2981 | # Copyright (c) 2009, Rotem Yaari <[email protected]>
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of organization nor the
# names of its contributors may be used to endorse or promote products
# derived from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY Rotem Yaari ''AS IS'' AND ANY
# EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL Rotem Yaari BE LIABLE FOR ANY
# DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
class Ref(object):
def __init__(self, repo, name):
super(Ref, self).__init__()
self.repo = repo
self.name = name
def getHead(self):
return self.repo._getCommitByRefName(self.name)
def getNormalizedName(self):
return self.name
def getNewCommits(self, comparedTo, limit = ""):
returned = []
command = "cherry %s %s %s" % (self.repo._normalizeRefName(comparedTo),
self.getNormalizedName(),
self.repo._normalizeRefName(limit))
for line in self.repo._getOutputAssertSuccess(command).splitlines():
symbol, sha = line.split()
if symbol == '-':
#already has an equivalent commit
continue
returned.append(self.repo._getCommitByHash(sha.strip()))
return returned
def __eq__(self, ref):
return (type(ref) is type(self) and ref.name == self.name)
def __ne__(self, ref):
return not (self == ref)
def __repr__(self):
return "<%s %s>" % (type(self).__name__, self.getNormalizedName())
################################## Containment #################################
def getMergeBase(self, other):
return self.repo.getMergeBase(self, other)
__and__ = getMergeBase
def contains(self, other):
return self.getMergeBase(other) == other
__contains__ = contains
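# Illustrative sketch (not part of this module): 'repo' stands for the repository
# object this library builds Ref instances from; the branch names are hypothetical.
#
#   master = Ref(repo, 'master')
#   feature = Ref(repo, 'feature/login')
#   base = master & feature      # same as master.getMergeBase(feature)
#   if feature in master:        # True when feature is already fully merged into master
#       print('nothing left to merge')
#   new_commits = feature.getNewCommits(master)   # commits on feature not yet in master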
| gpl-3.0 |
RevolutionMC/Revolution | plugin.video.PsychoTV/resources/lib/libraries/f4mproxy/f4mDownloader.py | 55 | 38147 | import xml.etree.ElementTree as etree
import base64
from struct import unpack, pack
import sys
import io
import os
import time
import itertools
import xbmcaddon
import xbmc
import urllib2,urllib
import traceback
import urlparse
import posixpath
import re
import hmac
import hashlib
import binascii
import zlib
from hashlib import sha256
import cookielib
#import youtube_dl
#from youtube_dl.utils import *
addon_id = 'script.video.F4mProxy'
selfAddon = xbmcaddon.Addon()
__addonname__ = selfAddon.getAddonInfo('name')
__icon__ = selfAddon.getAddonInfo('icon')
downloadPath = xbmc.translatePath(selfAddon.getAddonInfo('profile'))#selfAddon["profile"])
F4Mversion=''
#from Crypto.Cipher import AES
value_unsafe = '%+&;#'
VALUE_SAFE = ''.join(chr(c) for c in range(33, 127)
if chr(c) not in value_unsafe)
def urlencode_param(value):
"""Minimal URL encoding for query parameter"""
return urllib.quote_plus(value, safe=VALUE_SAFE)
class FlvReader(io.BytesIO):
"""
Reader for Flv files
The file format is documented in https://www.adobe.com/devnet/f4v.html
"""
# Utility functions for reading numbers and strings
def read_unsigned_long_long(self):
return unpack('!Q', self.read(8))[0]
def read_unsigned_int(self):
return unpack('!I', self.read(4))[0]
def read_unsigned_char(self):
return unpack('!B', self.read(1))[0]
def read_string(self):
res = b''
while True:
char = self.read(1)
if char == b'\x00':
break
res+=char
return res
def read_box_info(self):
"""
Read a box and return the info as a tuple: (box_size, box_type, box_data)
"""
real_size = size = self.read_unsigned_int()
box_type = self.read(4)
header_end = 8
if size == 1:
real_size = self.read_unsigned_long_long()
header_end = 16
return real_size, box_type, self.read(real_size-header_end)
def read_asrt(self, debug=False):
version = self.read_unsigned_char()
self.read(3) # flags
quality_entry_count = self.read_unsigned_char()
quality_modifiers = []
for i in range(quality_entry_count):
quality_modifier = self.read_string()
quality_modifiers.append(quality_modifier)
segment_run_count = self.read_unsigned_int()
segments = []
#print 'segment_run_count',segment_run_count
for i in range(segment_run_count):
first_segment = self.read_unsigned_int()
fragments_per_segment = self.read_unsigned_int()
segments.append((first_segment, fragments_per_segment))
#print 'segments',segments
return {'version': version,
'quality_segment_modifiers': quality_modifiers,
'segment_run': segments,
}
def read_afrt(self, debug=False):
version = self.read_unsigned_char()
self.read(3) # flags
time_scale = self.read_unsigned_int()
quality_entry_count = self.read_unsigned_char()
quality_entries = []
for i in range(quality_entry_count):
mod = self.read_string()
quality_entries.append(mod)
fragments_count = self.read_unsigned_int()
#print 'fragments_count',fragments_count
fragments = []
for i in range(fragments_count):
first = self.read_unsigned_int()
first_ts = self.read_unsigned_long_long()
duration = self.read_unsigned_int()
if duration == 0:
discontinuity_indicator = self.read_unsigned_char()
else:
discontinuity_indicator = None
fragments.append({'first': first,
'ts': first_ts,
'duration': duration,
'discontinuity_indicator': discontinuity_indicator,
})
#print 'fragments',fragments
return {'version': version,
'time_scale': time_scale,
'fragments': fragments,
'quality_entries': quality_entries,
}
def read_abst(self, debug=False):
version = self.read_unsigned_char()
self.read(3) # flags
bootstrap_info_version = self.read_unsigned_int()
streamType=self.read_unsigned_char()#self.read(1) # Profile,Live,Update,Reserved
islive=False
if (streamType & 0x20) >> 5:
islive=True
print 'LIVE',streamType,islive
time_scale = self.read_unsigned_int()
current_media_time = self.read_unsigned_long_long()
smpteTimeCodeOffset = self.read_unsigned_long_long()
movie_identifier = self.read_string()
server_count = self.read_unsigned_char()
servers = []
for i in range(server_count):
server = self.read_string()
servers.append(server)
quality_count = self.read_unsigned_char()
qualities = []
for i in range(server_count):
quality = self.read_string()
qualities.append(server)
drm_data = self.read_string()
metadata = self.read_string()
segments_count = self.read_unsigned_char()
#print 'segments_count11',segments_count
segments = []
for i in range(segments_count):
box_size, box_type, box_data = self.read_box_info()
assert box_type == b'asrt'
segment = FlvReader(box_data).read_asrt()
segments.append(segment)
fragments_run_count = self.read_unsigned_char()
#print 'fragments_run_count11',fragments_run_count
fragments = []
for i in range(fragments_run_count):
# This info is only useful for the player, it doesn't give more info
# for the download process
box_size, box_type, box_data = self.read_box_info()
assert box_type == b'afrt'
fragments.append(FlvReader(box_data).read_afrt())
return {'segments': segments,
'movie_identifier': movie_identifier,
'drm_data': drm_data,
'fragments': fragments,
},islive
def read_bootstrap_info(self):
"""
Read the bootstrap information from the stream,
returns a dict with the following keys:
segments: A list of dicts with the following keys
segment_run: A list of (first_segment, fragments_per_segment) tuples
"""
total_size, box_type, box_data = self.read_box_info()
assert box_type == b'abst'
return FlvReader(box_data).read_abst()
def read_bootstrap_info(bootstrap_bytes):
return FlvReader(bootstrap_bytes).read_bootstrap_info()
def build_fragments_list(boot_info, startFromFregment=None, live=True):
""" Return a list of (segment, fragment) for each fragment in the video """
res = []
segment_run_table = boot_info['segments'][0]
#print 'segment_run_table',segment_run_table
# I've only found videos with one segment
#if len(segment_run_table['segment_run'])>1:
# segment_run_table['segment_run']=segment_run_table['segment_run'][-2:] #pick latest
frag_start = boot_info['fragments'][0]['fragments']
#print boot_info['fragments']
# sum(j for i, j in segment_run_table['segment_run'])
first_frag_number=frag_start[0]['first']
last_frag_number=frag_start[-1]['first']
if last_frag_number==0:
last_frag_number=frag_start[-2]['first']
endfragment=0
segment_to_start=None
for current in range (len(segment_run_table['segment_run'])):
seg,fregCount=segment_run_table['segment_run'][current]
#print 'segmcount',seg,fregCount
if (not live):
frag_end=last_frag_number
else:
frag_end=first_frag_number+fregCount-1
if fregCount>10000:
frag_end=last_frag_number
#if frag_end
segment_run_table['segment_run'][current]=(seg,fregCount,first_frag_number,frag_end)
if (not startFromFregment==None) and startFromFregment>=first_frag_number and startFromFregment<=frag_end:
segment_to_start=current
first_frag_number+=fregCount
print 'current status',segment_run_table['segment_run']
#if we have no index then take the last segment
if segment_to_start==None:
segment_to_start=len(segment_run_table['segment_run'])-1
#if len(segment_run_table['segment_run'])>2:
# segment_to_start=len(segment_run_table['segment_run'])-2;
if live:
startFromFregment=segment_run_table['segment_run'][-1][3]
# if len(boot_info['fragments'][0]['fragments'])>1: #go bit back
# startFromFregment= boot_info['fragments'][0]['fragments'][-1]['first']
else:
startFromFregment= boot_info['fragments'][0]['fragments'][0]['first'] #start from begining
#if len(boot_info['fragments'][0]['fragments'])>2: #go little bit back
# startFromFregment= boot_info['fragments'][0]['fragments'][-2]['first']
print 'startFromFregment',startFromFregment,boot_info,len(boot_info['fragments'][0]['fragments'])
#print 'segment_to_start',segment_to_start
for currentIndex in range (segment_to_start,len(segment_run_table['segment_run'])):
currentSegment=segment_run_table['segment_run'][currentIndex]
#print 'currentSegment',currentSegment
(seg,fregCount,frag_start,frag_end)=currentSegment
#print 'startFromFregment',startFromFregment,
if (not startFromFregment==None) and startFromFregment>=frag_start and startFromFregment<=frag_end:
frag_start=startFromFregment
#print 'frag_start',frag_start,frag_end
for currentFreg in range(frag_start,frag_end+1):
res.append((seg,currentFreg ))
print 'fragmentlist',res,boot_info
return res
#totalFrags=sum(j for i, j in segment_run_table['segment_run'])
#lastSegment=segment_run_table['segment_run'][-1]
#lastSegmentStart= lastSegment[0]
#lastSegmentFragCount = lastSegment[1]
#print 'totalFrags',totalFrags
#first_frag_number = frag_start[0]['first']
#startFragOfLastSegment= first_frag_number +totalFrags - lastSegmentFragCount
#for (i, frag_number) in zip(range(1, lastSegmentFragCount+1), itertools.count(startFragOfLastSegment)):
# res.append((lastSegmentStart,frag_number )) #this was i, i am using first segement start
#return res
#segment_run_entry = segment_run_table['segment_run'][0]
#print 'segment_run_entry',segment_run_entry,segment_run_table
#n_frags = segment_run_entry[1]
#startingPoint = segment_run_entry[0]
#fragment_run_entry_table = boot_info['fragments'][0]['fragments']
#frag_entry_index = 0
#first_frag_number = fragment_run_entry_table[0]['first']
#first_frag_number=(startingPoint*n_frags) -(n_frags)+1
#print 'THENUMBERS',startingPoint,n_frags,first_frag_number
#for (i, frag_number) in zip(range(1, n_frags+1), itertools.count(first_frag_number)):
# res.append((startingPoint,frag_number )) #this was i, i am using first segement start
#return res
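# Illustrative sketch (not part of the original file): parsing a standalone bootstrap
# box and turning it into a (segment, fragment) download list. 'bootstrap_b64' is a
# hypothetical base64-encoded <bootstrapInfo> payload taken from an f4m manifest.
#
#   bootstrap_bytes = base64.b64decode(bootstrap_b64)
#   boot_info, is_live = read_bootstrap_info(bootstrap_bytes)
#   fragments = build_fragments_list(boot_info, startFromFregment=None, live=is_live)
#   for seg_i, frag_i in fragments:
#       name = 'Seg%d-Frag%d' % (seg_i, frag_i)   # appended to the media base URL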
def join(base,url):
join = urlparse.urljoin(base,url)
url = urlparse.urlparse(join)
path = posixpath.normpath(url[2])
return urlparse.urlunparse(
(url.scheme,url.netloc,path,url.params,url.query,url.fragment)
)
def _add_ns(prop):
#print 'F4Mversion',F4Mversion
return '{http://ns.adobe.com/f4m/%s}%s' %(F4Mversion, prop)
#class ReallyQuietDownloader(youtube_dl.FileDownloader):
# def to_screen(sef, *args, **kargs):
# pass
class F4MDownloader():
"""
A downloader for f4m manifests or AdobeHDS.
"""
outputfile =''
clientHeader=None
cookieJar=cookielib.LWPCookieJar()
def __init__(self):
self.init_done=False
def getUrl(self,url, ischunkDownloading=False):
try:
post=None
print 'url',url
openner = urllib2.build_opener(urllib2.HTTPHandler, urllib2.HTTPSHandler)
#cookie_handler = urllib2.HTTPCookieProcessor(self.cookieJar)
#openner = urllib2.build_opener(cookie_handler, urllib2.HTTPBasicAuthHandler(), urllib2.HTTPHandler())
if post:
req = urllib2.Request(url, post)
else:
req = urllib2.Request(url)
ua_header=False
if self.clientHeader:
for n,v in self.clientHeader:
req.add_header(n,v)
if n=='User-Agent':
ua_header=True
if not ua_header:
req.add_header('User-Agent','Mozilla/5.0 (Windows NT 6.1; WOW64; rv:30.0) Gecko/20100101 Firefox/30.0')
#response = urllib2.urlopen(req)
if self.proxy and ( (not ischunkDownloading) or self.use_proxy_for_chunks ):
req.set_proxy(self.proxy, 'http')
response = openner.open(req)
data=response.read()
return data
except:
print 'Error in getUrl'
traceback.print_exc()
return None
def _write_flv_header2(self, stream):
"""Writes the FLV header and the metadata to stream"""
# FLV header
stream.write(b'FLV\x01')
stream.write(b'\x01')
stream.write(b'\x00\x00\x00\x09')
# FLV File body
stream.write(b'\x00\x00\x00\x09')
def _write_flv_header(self, stream, metadata):
"""Writes the FLV header and the metadata to stream"""
# FLV header
stream.write(b'FLV\x01')
stream.write(b'\x05')
stream.write(b'\x00\x00\x00\x09')
# FLV File body
stream.write(b'\x00\x00\x00\x00')
# FLVTAG
if metadata:
stream.write(b'\x12') # Script data
stream.write(pack('!L',len(metadata))[1:]) # Size of the metadata with 3 bytes
stream.write(b'\x00\x00\x00\x00\x00\x00\x00')
stream.write(metadata)
# All this magic numbers have been extracted from the output file
# produced by AdobeHDS.php (https://github.com/K-S-V/Scripts)
stream.write(b'\x00\x00\x01\x73')
def init(self, out_stream, url, proxy=None,use_proxy_for_chunks=True,g_stopEvent=None, maxbitrate=0, auth=''):
try:
self.init_done=False
self.total_frags=0
self.init_url=url
self.clientHeader=None
self.status='init'
self.proxy = proxy
self.auth=auth
#self.auth="pvtoken=exp%3D9999999999%7Eacl%3D%252f%252a%7Edata%3DZXhwPTE0MDYzMDMxMTV+YWNsPSUyZip+ZGF0YT1wdmMsc35obWFjPWQxODA5MWVkYTQ4NDI3NjFjODhjOWQwY2QxNTk3YTI0MWQwOWYwNWI1N2ZmMDE0ZjcxN2QyMTVjZTJkNmJjMDQ%3D%2196e4sdLWrezE46RaCBzzP43/LEM5en2KujAosbeDimQ%3D%7Ehmac%3DACF8A1E4467676C9BCE2721CA5EFF840BD6ED1780046954039373A3B0D942ADC&hdntl=exp=1406303115~acl=%2f*~data=hdntl~hmac=4ab96fa533fd7c40204e487bfc7befaf31dd1f49c27eb1f610673fed9ff97a5f&als=0,2,0,0,0,NaN,0,0,0,37,f,52293145.57,52293155.9,t,s,GARWLHLMHNGA,2.11.3,37&hdcore=2.11.3"
if self.auth ==None or self.auth =='None' :
self.auth=''
if self.proxy and len(self.proxy)==0:
self.proxy=None
self.use_proxy_for_chunks=use_proxy_for_chunks
self.out_stream=out_stream
self.g_stopEvent=g_stopEvent
self.maxbitrate=maxbitrate
if '|' in url:
sp = url.split('|')
url = sp[0]
self.clientHeader = sp[1]
self.clientHeader= urlparse.parse_qsl(self.clientHeader)
print 'header recieved now url and headers are',url, self.clientHeader
self.status='init done'
self.url=url
#self.downloadInternal( url)
return self.preDownoload()
#os.remove(self.outputfile)
except:
traceback.print_exc()
self.status='finished'
return False
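    # Illustrative note (not part of the original file): extra HTTP headers can be
    # appended to the manifest URL after a '|' as a query string, which init() splits
    # and parses above. The URL and header values below are hypothetical.
    #
    #   url = 'http://example.com/stream/manifest.f4m|' + urllib.urlencode(
    #       {'User-Agent': 'Mozilla/5.0', 'Referer': 'http://example.com/'})
    #   downloader = F4MDownloader()
    #   downloader.init(out_stream, url, proxy=None, use_proxy_for_chunks=True,
    #                   g_stopEvent=None, maxbitrate=0, auth='')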
def preDownoload(self):
global F4Mversion
try:
self.seqNumber=0
self.live=False #todo find if its Live or not
man_url = self.url
url=self.url
print 'Downloading f4m manifest'
manifest = self.getUrl(man_url)#.read()
if not manifest:
return False
print len(manifest)
try:
print manifest
except: pass
self.status='manifest done'
#self.report_destination(filename)
#dl = ReallyQuietDownloader(self.ydl, {'continuedl': True, 'quiet': True, 'noprogress':True})
version_fine="xmlns=\".*?\/([0-9].*?)\""
F4Mversion =re.findall(version_fine, manifest)[0]
#print F4Mversion,_add_ns('media')
auth_patt='<pv-2.0>(.*?)<'
auth_obj =re.findall(auth_patt, manifest)
self.auth20=''
if auth_obj and len(auth_obj)>0:
self.auth20=auth_obj[0] #not doing anything for time being
print 'auth',self.auth,self.auth20
#quick for one example where the xml was wrong.
if '\"bootstrapInfoId' in manifest:
manifest=manifest.replace('\"bootstrapInfoId','\" bootstrapInfoId')
doc = etree.fromstring(manifest)
print doc
# Added the-one 05082014
# START
# Check if manifest defines a baseURL tag
baseURL_tag = doc.find(_add_ns('baseURL'))
if baseURL_tag != None:
man_url = baseURL_tag.text
url = man_url
self.url = url
print 'base url defined as: %s' % man_url
# END
try:
#formats = [(int(f.attrib.get('bitrate', -1)),f) for f in doc.findall(_add_ns('media'))]
formats=[]
for f in doc.findall(_add_ns('media')):
vtype=f.attrib.get('type', '')
if f.attrib.get('type', '')=='video' or vtype=='' :
formats.append([int(f.attrib.get('bitrate', -1)),f])
print 'format works',formats
except:
formats=[(int(0),f) for f in doc.findall(_add_ns('media'))]
#print 'formats',formats
formats = sorted(formats, key=lambda f: f[0])
if self.maxbitrate==0:
rate, media = formats[-1]
elif self.maxbitrate==-1:
rate, media = formats[0]
else: #find bitrate
brselected=None
rate, media=None,None
for r, m in formats:
if r<=self.maxbitrate:
rate, media=r,m
else:
break
if media==None:
rate, media = formats[-1]
dest_stream = self.out_stream
print 'rate selected',rate
self.metadata=None
try:
self.metadata = base64.b64decode(media.find(_add_ns('metadata')).text)
print 'metadata stream read done'#,media.find(_add_ns('metadata')).text
#self._write_flv_header(dest_stream, metadata)
#dest_stream.flush()
except: pass
# Modified the-one 05082014
# START
# url and href can be used interchangeably
# so if url attribute is not present
# check for href attribute
try:
mediaUrl=media.attrib['url']
except:
mediaUrl=media.attrib['href']
# END
# Added the-one 05082014
# START
# if media url/href points to another f4m file
if '.f4m' in mediaUrl:
sub_f4m_url = join(man_url,mediaUrl)
print 'media points to another f4m file: %s' % sub_f4m_url
print 'Downloading f4m sub manifest'
sub_manifest = self.getUrl(sub_f4m_url)#.read()
if not sub_manifest:
return False
print len(sub_manifest)
try:
print sub_manifest
except: pass
self.status='sub manifest done'
F4Mversion =re.findall(version_fine, sub_manifest)[0]
doc = etree.fromstring(sub_manifest)
print doc
media = doc.find(_add_ns('media'))
if media == None:
return False
try:
self.metadata = base64.b64decode(media.find(_add_ns('metadata')).text)
print 'metadata stream read done'
except: pass
try:
mediaUrl=media.attrib['url']
except:
mediaUrl=media.attrib['href']
# END
try:
bootStrapID = media.attrib['bootstrapInfoId']
except: bootStrapID='xx'
#print 'mediaUrl',mediaUrl
base_url = join(man_url,mediaUrl)#compat_urlparse.urljoin(man_url,media.attrib['url'])
if mediaUrl.endswith('/') and not base_url.endswith('/'):
base_url += '/'
self.base_url=base_url
bsArray=doc.findall(_add_ns('bootstrapInfo'))
print 'bootStrapID',bootStrapID
#bootStrapID='bootstrap_450'
bootstrap=self.getBootStrapWithId(bsArray,bootStrapID)
if bootstrap==None: #if not available then find any!
print 'bootStrapID NOT Found'
bootstrap=doc.findall(_add_ns('bootstrapInfo'))[0]
else:
print 'found bootstrap with id',bootstrap
#print 'bootstrap',bootstrap
bootstrapURL1=''
try:
bootstrapURL1=bootstrap.attrib['url']
except: pass
bootstrapURL=''
bootstrapData=None
queryString=None
if bootstrapURL1=='':
bootstrapData=base64.b64decode(doc.findall(_add_ns('bootstrapInfo'))[0].text)
#
else:
from urlparse import urlparse
queryString = urlparse(url).query
print 'queryString11',queryString
if len(queryString)==0: queryString=None
if queryString==None or '?' in bootstrap.attrib['url']:
bootstrapURL = join(man_url,bootstrap.attrib['url'])# take out querystring for later
queryString = urlparse(bootstrapURL).query
print 'queryString override',queryString
if len(queryString)==0:
queryString=None
if len(self.auth)>0:
bootstrapURL+='?'+self.auth
queryString=self.auth#self._pv_params('',self.auth20)#not in use
else:
print 'queryString!!',queryString
bootstrapURL = join(man_url,bootstrap.attrib['url'])+'?'+queryString
if len(self.auth)>0:
authval=self.auth#self._pv_params('',self.auth20)#not in use
bootstrapURL = join(man_url,bootstrap.attrib['url'])+'?'+authval
queryString=authval
print 'bootstrapURL',bootstrapURL
if queryString==None:
queryString=''
self.bootstrapURL=bootstrapURL
self.queryString=queryString
self.bootstrap, self.boot_info, self.fragments_list,self.total_frags=self.readBootStrapInfo(bootstrapURL,bootstrapData)
self.init_done=True
return True
except:
traceback.print_exc()
return False
def keep_sending_video(self,dest_stream, segmentToStart=None, totalSegmentToSend=0):
try:
self.status='download Starting'
self.downloadInternal(self.url,dest_stream,segmentToStart,totalSegmentToSend)
except:
traceback.print_exc()
self.status='finished'
def downloadInternal(self,url,dest_stream ,segmentToStart=None,totalSegmentToSend=0):
global F4Mversion
try:
#dest_stream = self.out_stream
queryString=self.queryString
print 'segmentToStart',segmentToStart
if self.live or segmentToStart==0 or segmentToStart==None:
print 'writing metadata'#,len(self.metadata)
self._write_flv_header(dest_stream, self.metadata)
dest_stream.flush()
#elif segmentToStart>0 and not self.live:
# self._write_flv_header2(dest_stream)
# dest_stream.flush()
url=self.url
bootstrap, boot_info, fragments_list,total_frags=(self.bootstrap, self.boot_info, self.fragments_list,self.total_frags)
print boot_info, fragments_list,total_frags
self.status='bootstrap done'
self.status='file created'
self.downloaded_bytes = 0
self.bytes_in_disk = 0
self.frag_counter = 0
start = time.time()
frags_filenames = []
self.seqNumber=0
if segmentToStart and not self.live :
self.seqNumber=segmentToStart
if self.seqNumber>=total_frags:
self.seqNumber=total_frags-1
#for (seg_i, frag_i) in fragments_list:
#for seqNumber in range(0,len(fragments_list)):
self.segmentAvailable=0
frameSent=0
while True:
#if not self.live:
# _write_flv_header2
if self.g_stopEvent and self.g_stopEvent.isSet():
return
seg_i, frag_i=fragments_list[self.seqNumber]
self.seqNumber+=1
frameSent+=1
name = u'Seg%d-Frag%d' % (seg_i, frag_i)
#print 'base_url',base_url,name
url = self.base_url + name
if queryString and '?' not in url:
url+='?'+queryString
elif '?' in self.base_url:
url = self.base_url.split('?')[0] + name+'?'+self.base_url.split('?')[1]
#print(url),base_url,name
#frag_filename = u'%s-%s' % (tmpfilename, name)
#success = dl._do_download(frag_filename, {'url': url})
print 'downloading....',url
success=False
urlTry=0
while not success and urlTry<5:
success = self.getUrl(url,True)
if not success: xbmc.sleep(300)
urlTry+=1
print 'downloaded',not success==None,url
if not success:
return False
#with open(frag_filename, 'rb') as down:
if 1==1:
down_data = success#down.read()
reader = FlvReader(down_data)
while True:
_, box_type, box_data = reader.read_box_info()
print 'box_type',box_type,len(box_data)
#if box_type == b'afra':
# dest_stream.write(box_data)
# dest_stream.flush()
# break
if box_type == b'mdat':
isDrm=True if ord(box_data[0])&1 else False
#print 'isDrm',isDrm,repr(box_data)
if 1==2 and isDrm:
print 'drm',repr(box_data[1:17])
box_data=box_data[17:]
dest_stream.write(box_data)
dest_stream.flush()
break
# Using the following code may fix some videos, but
# only in mplayer, VLC won't play the sound.
# mdat_reader = FlvReader(box_data)
# media_type = mdat_reader.read_unsigned_char()
# while True:
# if mdat_reader.read_unsigned_char() == media_type:
# if mdat_reader.read_unsigned_char() == 0x00:
# break
# dest_stream.write(pack('!B', media_type))
# dest_stream.write(b'\x00')
# dest_stream.write(mdat_reader.read())
# break
self.status='play'
if self.seqNumber==len(fragments_list) or (totalSegmentToSend>0 and frameSent==totalSegmentToSend):
if not self.live:
break
self.seqNumber=0
#todo if the url not available then get manifest and get the data again
total_frags=None
try:
bootstrap, boot_info, fragments_list,total_frags=self.readBootStrapInfo(self.bootstrapURL,None,updateMode=True,lastSegment=seg_i, lastFragement=frag_i)
except:
traceback.print_exc()
pass
                    if total_frags is None:
break
del self.downloaded_bytes
del self.frag_counter
except:
traceback.print_exc()
return
def getBootStrapWithId (self,BSarray, id):
try:
for bs in BSarray:
print 'compare val is ',bs.attrib['id'], 'id', id
if bs.attrib['id']==id:
print 'gotcha'
return bs
except: pass
return None
def readBootStrapInfo(self,bootstrapUrl,bootStrapData, updateMode=False, lastFragement=None,lastSegment=None):
try:
retries=0
while retries<=10:
if self.g_stopEvent and self.g_stopEvent.isSet():
return
if not bootStrapData:
bootStrapData =self.getUrl(bootstrapUrl)
                if bootStrapData is None:
retries+=1
continue
#print 'bootstrapData',len(bootStrapData)
bootstrap = bootStrapData#base64.b64decode(bootStrapData)#doc.findall(_add_ns('bootstrapInfo'))[0].text)
#print 'boot stream read done'
boot_info,self.live = read_bootstrap_info(bootstrap)
#print 'boot_info read done',boot_info
newFragement=None
                if lastFragement is not None:
newFragement=lastFragement+1
fragments_list = build_fragments_list(boot_info,newFragement,self.live)
total_frags = len(fragments_list)
#print 'fragments_list',fragments_list, newFragement
#print lastSegment
if updateMode and (len(fragments_list)==0 or ( newFragement and newFragement>fragments_list[0][1])):
#todo check lastFragement to see if we got valid data
print 'retrying......'
bootStrapData=None
retries+=1
xbmc.sleep(2000)
continue
return bootstrap, boot_info, fragments_list,total_frags
except:
traceback.print_exc()
def _pv_params(self, pvswf, pv):
"""Returns any parameters needed for Akamai HD player verification.
Algorithm originally documented by KSV, source:
http://stream-recorder.com/forum/showpost.php?p=43761&postcount=13
"""
pv="ZXhwPTE0MDYyODMxOTF+YWNsPSUyZip+ZGF0YT1wdmMsc35obWFjPTgwNTA0N2E1Yjk5ZmFjMjMzMDY0N2MxMzkyNGM0MDNiYzY1YjZmYzgyYTZhMjYyZDIxNDdkZTExZjI1MzQ5ZDI=;hdntl=exp=1406283191~acl=%2f*~data=hdntl~hmac=b65dc0c5ae60570f105984f0cc5ec6ce3a51422a7a1442e09f55513718ba80bf"
(data, hdntl) = pv.split(";")
SWF_VERIFICATION_KEY = b"Genuine Adobe Flash Player 001"
#SWF_VERIFICATION_KEY=binascii.unhexlify("9b673b13fa4682ed14c3cfa5af5310274b514c4133e9b3a81e6e3aba009l2564")
SWF_VERIFICATION_KEY = binascii.unhexlify(b"BD938D5EE6D9F42016F9C56577B6FDCF415FE4B184932B785AB32BCADC9BB592")
swf = self.getUrl('http://www.wat.tv/images/v70/PlayerLite.swf',True)
#AKAMAIHD_PV_KEY = unhexlify(b"BD938D5EE6D9F42016F9C56577B6FDCF415FE4B184932B785AB32BCADC9BB592")
AKAMAIHD_PV_KEY = "9b673b13fa4682ed14c3cfa5af5310274b514c4133e9b3a81e6e3aba009l2564"
hash = hashlib.sha256()
hash.update(self.swfdecompress(swf))
hash = base64.b64encode(hash.digest()).decode("ascii")
print 'hash',hash
hash="96e4sdLWrezE46RaCBzzP43/LEM5en2KujAosbeDimQ="
print 'hash',hash
#data="ZXhwPTE0MDYyMDQ3NjB+YWNsPSUyZip+ZGF0YT1wdmMsc35obWFjPWEzMjBlZDI5YjI1MDkwN2ExODcyMTJlOWJjNGFlNGUzZjA3MTM3ODk1ZDk4NmI2ZDVkMzczNzNhYzNiNDgxOWU="
msg = "exp=9999999999~acl=%2f%2a~data={0}!{1}".format(data, hash)
auth = hmac.new(AKAMAIHD_PV_KEY, msg.encode("ascii"), sha256)
pvtoken = "{0}~hmac={1}".format(msg, auth.hexdigest())
# The "hdntl" parameter can be accepted as a cookie or passed in the
# query string, but the "pvtoken" parameter can only be in the query
# string
print 'pvtoken',pvtoken
#return "pvtoken={}&{}".format(
#urlencode_param(pvtoken), urlencode_param(hdntl))
params=urllib.urlencode({'pvtoken':pvtoken})+'&'+hdntl+'&hdcore=2.11.3'
#params='pvtoken=exp%3D9999999999%7Eacl%3D%252f%252a%7Edata%3DZXhwPTE0MDYwNDMzOTN+YWNsPSUyZip+ZGF0YT1wdmMsc35obWFjPWQxMTk0ZDc4NDExMDYwNjZlNDI5OWU2NTc3ODA0Mzk0ODU5NGZiMDQ5Njk2OGNiYzJiOGU2OTI2MjIzMjczZTA%3D%2196e4sdLWrezE46RaCBzzP43/LEM5en2KujAosbeDimQ%3D%7Ehmac%3D1BE9DEB8262AB4886A0CB9E8376D04652F015751B88DD3D2201DE463D9E47733&hdntl=exp=1406043393~acl=%2f*~data=hdntl~hmac=28d5e28f47b7b3821fafae0250ba37091f2fc66d1a9d39b76b925c423458c537'+'&hdcore=2.11.3'
#php AdobeHDS.php --manifest "http://nt1livhdsweb-lh.akamaihd.net/z/live_1@90590/manifest.f4m?hdnea=st=1405958620~exp=1405960420~acl=/*~hmac=5ca0d2521a99c897fb9ffaf6ed9c2e40e5d0300cdcdd9dfb7302d9e32a84f98d&hdcore=2.11.3&g=VQYTYCFRUDRA"
#params="pvtoken=exp%3D9999999999%7Eacl%3D%252f%252a%7Edata%3DZXhwPTE0MDYwNDUwNDZ+YWNsPSUyZip+ZGF0YT1wdmMsc35obWFjPWYwYWQ5ZGQyNDJlYjdiYjQ2YmZhMzk3MjY3MzE0ZWZiOWVlYTY5MDMzYWE2ODM5ZDM1ZWVjMWM1ZDUzZTk3ZjA%3D%2196e4sdLWrezE46RaCBzzP43/LEM5en2KujAosbeDimQ%3D%7Ehmac%3D9FCCB6BC90C17E8057EE52CD53DDF0C6D07B20638D68B8FFCE98ED74153AA960&hdntl=exp=1406045046~acl=%2f*~data=hdntl~hmac=11e323633ad708a11e57a91e8c685011292f42936f5f7f3b1cb0fb8d2266586a&als=0,2,0,0,0,NaN,0,0,0,52,f,52035079.57,52035089.9,t,s,VQYTYCFRUDRA,2.11.3,52&hdcore=2.11.3"
#--useragent "Mozilla/5.0 (Windows NT 6.1; WOW64; rv:30.0) Gecko/20100101 Firefox/30.0"
#+'&als=0,2,0,0,0,NaN,0,0,0,47,f,52018363.57,52018373.9,t,s,HPFXDUMCMNPG,2.11.3,47&hdcore=2.11.3'
params=params.replace('%2B','+')
params=params.replace('%2F','/')
#params='pvtoken=' +pvtoken+'&'+hdntl
#params = [("pvtoken", pvtoken)]
#params.extend(parse_qsl(hdntl, keep_blank_values=True))
#params='pvtoken=exp%3D9999999999%7Eacl%3D%252f%252a%7Edata%3DZXhwPTE0MDYwMzc2Njl+YWNsPSUyZip+ZGF0YT1wdmMsc35obWFjPWZjYzY5OTVkYjE5ODIxYTJlNDM4YTdhMWNmZjMyN2RhNTViOWNhMWM4NjZhZjYxM2ZkNDI4MTMwNjU4MjFjMjM%3D%2196e4sdLWrezE46RaCBzzP43/LEM5en2KujAosbeDimQ%3D%7Ehmac%3DFA3BCC1CF6466CAFFCC6EF5CB2855ED065F36687CBFCD11570B7D702F71F10A6&hdntl=exp=1406037669~acl=%2f*~data=hdntl~hmac=4ab5ad38849b952ae93721af7451936b4c5906258d575eda11e52a05f78c7d75&als=0,2,0,0,0,NaN,0,0,0,96,f,52027699.57,52027709.89,t,s,RUIDLGQGDHVH,2.11.3,90&hdcore=2.11.3'
#print '_pv_params params',params
print params
print "pvtoken=exp%3D9999999999%7Eacl%3D%252f%252a%7Edata%3DZXhwPTE0MDYyODMxOTF+YWNsPSUyZip+ZGF0YT1wdmMsc35obWFjPTgwNTA0N2E1Yjk5ZmFjMjMzMDY0N2MxMzkyNGM0MDNiYzY1YjZmYzgyYTZhMjYyZDIxNDdkZTExZjI1MzQ5ZDI%3D%2196e4sdLWrezE46RaCBzzP43/LEM5en2KujAosbeDimQ%3D%7Ehmac%3D47A2B2AA9570ECFB37966C884174D608D86A7DE2466DE7EB48A6F118A155BD80&hdntl=exp=1406283191~acl=%2f*~data=hdntl~hmac=b65dc0c5ae60570f105984f0cc5ec6ce3a51422a7a1442e09f55513718ba80bf"
return "pvtoken=exp%3D9999999999%7Eacl%3D%252f%252a%7Edata%3DZXhwPTE0MDYzMDMxMTV+YWNsPSUyZip+ZGF0YT1wdmMsc35obWFjPWQxODA5MWVkYTQ4NDI3NjFjODhjOWQwY2QxNTk3YTI0MWQwOWYwNWI1N2ZmMDE0ZjcxN2QyMTVjZTJkNmJjMDQ%3D%2196e4sdLWrezE46RaCBzzP43/LEM5en2KujAosbeDimQ%3D%7Ehmac%3DACF8A1E4467676C9BCE2721CA5EFF840BD6ED1780046954039373A3B0D942ADC&hdntl=exp=1406303115~acl=%2f*~data=hdntl~hmac=4ab96fa533fd7c40204e487bfc7befaf31dd1f49c27eb1f610673fed9ff97a5f&als=0,2,0,0,0,NaN,0,0,0,37,f,52293145.57,52293155.9,t,s,GARWLHLMHNGA,2.11.3,37&hdcore=2.11.3"
return params
def swfdecompress(self,data):
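        #SWF files whose signature is b"CWS" are zlib-compressed after the
        #first 8 header bytes; rewriting the tag to "F" plus the rest of the
        #header and inflating the body yields an uncompressed ("FWS") SWF
        #suitable for hashing above.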
if data[:3] == b"CWS":
data = b"F" + data[1:8] + zlib.decompress(data[8:])
return data
| gpl-2.0 |
bzennn/blog_flask | python/lib/python2.7/site-packages/pip/_vendor/html5lib/treewalkers/__init__.py | 354 | 5544 | """A collection of modules for iterating through different kinds of
tree, generating tokens identical to those produced by the tokenizer
module.
To create a tree walker for a new type of tree, you need to
implement a tree walker object (called TreeWalker by convention) that
implements a 'serialize' method taking a tree as sole argument and
returning an iterator generating tokens.
"""
from __future__ import absolute_import, division, unicode_literals
from .. import constants
from .._utils import default_etree
__all__ = ["getTreeWalker", "pprint", "dom", "etree", "genshi", "etree_lxml"]
treeWalkerCache = {}
def getTreeWalker(treeType, implementation=None, **kwargs):
"""Get a TreeWalker class for various types of tree with built-in support
Args:
treeType (str): the name of the tree type required (case-insensitive).
Supported values are:
- "dom": The xml.dom.minidom DOM implementation
- "etree": A generic walker for tree implementations exposing an
elementtree-like interface (known to work with
ElementTree, cElementTree and lxml.etree).
- "lxml": Optimized walker for lxml.etree
- "genshi": a Genshi stream
Implementation: A module implementing the tree type e.g.
xml.etree.ElementTree or cElementTree (Currently applies to the
"etree" tree type only).
"""
treeType = treeType.lower()
if treeType not in treeWalkerCache:
if treeType == "dom":
from . import dom
treeWalkerCache[treeType] = dom.TreeWalker
elif treeType == "genshi":
from . import genshi
treeWalkerCache[treeType] = genshi.TreeWalker
elif treeType == "lxml":
from . import etree_lxml
treeWalkerCache[treeType] = etree_lxml.TreeWalker
elif treeType == "etree":
from . import etree
if implementation is None:
implementation = default_etree
# XXX: NEVER cache here, caching is done in the etree submodule
return etree.getETreeModule(implementation, **kwargs).TreeWalker
return treeWalkerCache.get(treeType)
def concatenateCharacterTokens(tokens):
pendingCharacters = []
for token in tokens:
type = token["type"]
if type in ("Characters", "SpaceCharacters"):
pendingCharacters.append(token["data"])
else:
if pendingCharacters:
yield {"type": "Characters", "data": "".join(pendingCharacters)}
pendingCharacters = []
yield token
if pendingCharacters:
yield {"type": "Characters", "data": "".join(pendingCharacters)}
def pprint(walker):
"""Pretty printer for tree walkers"""
output = []
indent = 0
for token in concatenateCharacterTokens(walker):
type = token["type"]
if type in ("StartTag", "EmptyTag"):
# tag name
if token["namespace"] and token["namespace"] != constants.namespaces["html"]:
if token["namespace"] in constants.prefixes:
ns = constants.prefixes[token["namespace"]]
else:
ns = token["namespace"]
name = "%s %s" % (ns, token["name"])
else:
name = token["name"]
output.append("%s<%s>" % (" " * indent, name))
indent += 2
# attributes (sorted for consistent ordering)
attrs = token["data"]
for (namespace, localname), value in sorted(attrs.items()):
if namespace:
if namespace in constants.prefixes:
ns = constants.prefixes[namespace]
else:
ns = namespace
name = "%s %s" % (ns, localname)
else:
name = localname
output.append("%s%s=\"%s\"" % (" " * indent, name, value))
# self-closing
if type == "EmptyTag":
indent -= 2
elif type == "EndTag":
indent -= 2
elif type == "Comment":
output.append("%s<!-- %s -->" % (" " * indent, token["data"]))
elif type == "Doctype":
if token["name"]:
if token["publicId"]:
output.append("""%s<!DOCTYPE %s "%s" "%s">""" %
(" " * indent,
token["name"],
token["publicId"],
token["systemId"] if token["systemId"] else ""))
elif token["systemId"]:
output.append("""%s<!DOCTYPE %s "" "%s">""" %
(" " * indent,
token["name"],
token["systemId"]))
else:
output.append("%s<!DOCTYPE %s>" % (" " * indent,
token["name"]))
else:
output.append("%s<!DOCTYPE >" % (" " * indent,))
elif type == "Characters":
output.append("%s\"%s\"" % (" " * indent, token["data"]))
elif type == "SpaceCharacters":
assert False, "concatenateCharacterTokens should have got rid of all Space tokens"
else:
raise ValueError("Unknown token type, %s" % type)
return "\n".join(output)
| gpl-3.0 |
chys87/pyCxxLookup | cxxlookup/expr.py | 1 | 28952 | #!/usr/bin/env python3
# vim: set ts=4 sts=4 sw=4 expandtab cc=80:
# Copyright (c) 2014, 2016, chys <[email protected]>
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
#
# Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# Neither the name of chys <[email protected]> nor the names of other
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
from . import utils
try:
from . import _speedups
except ImportError:
_speedups = None
# Signed types only allowed for intermediate values
# Unsigned: Number of bits
# Signed: Number of bits - 1
# E.g.: 31 = int32_t; 32 = uint32_t
def type_name(type):
'''
>>> type_name(7), type_name(32)
('int8_t', 'uint32_t')
'''
if (type & 1):
return 'int{}_t'.format(type + 1)
else:
return 'uint{}_t'.format(type)
def type_bytes(type):
'''
>>> list(map(type_bytes, [7, 8, 15, 16, 31, 32, 63, 64]))
[1, 1, 2, 2, 4, 4, 8, 8]
'''
return (type + 7) // 8
def const_type(value):
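    '''Width in bits of the smallest unsigned type able to hold value.
    >>> const_type(255), const_type(256), const_type(2**16), const_type(2**32)
    (8, 16, 32, 64)
    '''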
if value >= 2**16:
if value >= 2**32:
return 64
else:
return 32
elif value >= 2**8:
return 16
else:
return 8
class ExprMeta(type):
"""This is for performance purpose.
Add IS_*** constants to Expr* classes to replace ininstance,
which turned out to be one of the bottlenecks of pyCxxLookup
>>> Expr.IS_CONST, Expr.IS_VAR, Expr.IS_RSHIFT
(False, False, False)
>>> ExprConst.IS_CONST, ExprConst.IS_VAR, ExprConst.IS_RSHIFT
(True, False, False)
>>> ExprVar.IS_CONST, ExprVar.IS_VAR, ExprVar.IS_RSHIFT
(False, True, False)
>>> ExprRShift.IS_CONST, ExprRShift.IS_VAR, ExprRShift.IS_RSHIFT
(False, False, True)
"""
def __new__(cls, name, bases, namespace, **kwds):
result = type.__new__(cls, name, bases, dict(namespace))
if name != 'Expr' and name.startswith('Expr'):
is_name = 'IS_' + name[4:].upper()
setattr(result, is_name, True)
setattr(Expr, is_name, False)
for extra_name in result.__dict__.get('IS_ALSO', ()):
is_name = 'IS_' + extra_name
setattr(result, is_name, True)
setattr(Expr, is_name, False)
return result
class Expr(metaclass=ExprMeta):
__slots__ = ()
def __str__(self):
raise NotImplementedError
def statics(self, vs):
return ''.join(filter(None, (x.statics(vs) for x in self.children)))
children = ()
rtype = None
@property
def optimized(self):
return self
def walk(self):
"""Recursively visit itself and all children."""
yield self
q = [self]
q_pop = q.pop
q_extend = q.extend
while q:
expr = q_pop()
children = expr.children
yield from children
q_extend(children)
def walk_tempvar(self):
"""Shortcut for filter(lambda x: x.IS_TEMPVAR, self.walk())
"""
return (x for x in self.walk() if x.IS_TEMPVAR)
def _complicated(self, threshold):
for expr in self.walk():
threshold -= 1
if not threshold:
return True
return False
def extract_subexprs(self, threshold, callback, allow_new):
for subexpr in self.children:
subexpr.extract_subexprs(threshold, callback, allow_new)
def __add__(self, r):
return Add(self, r)
def __mul__(self, r):
return ExprMul(self, exprize(r))
def __floordiv__(self, r):
return ExprDiv(self, exprize(r))
def __rfloordiv__(self, r):
return ExprDiv(exprize(r), self)
def __mod__(self, r):
return ExprMod(self, exprize(r))
def __rmod__(self, r):
return ExprMod(exprize(r), self)
def __sub__(self, r):
r = exprize(r)
if not r.IS_CONST:
return NotImplemented
return Add(self, -r)
def __and__(self, r):
return ExprAnd(self, exprize(r))
def __lshift__(self, r):
return ExprLShift(self, exprize(r))
def __rshift__(self, r):
return ExprRShift(self, exprize(r))
def __rlshift__(self, r):
return ExprLShift(exprize(r), self)
def __rrshift__(self, r):
return ExprRShift(exprize(r), self)
def __eq__(self, r):
return ExprCompare(self, '==', exprize(r))
def __ne__(self, r):
return ExprCompare(self, '!=', exprize(r))
def __lt__(self, r):
return ExprCompare(self, '<', exprize(r))
def __le__(self, r):
return ExprCompare(self, '<=', exprize(r))
def __gt__(self, r):
return ExprCompare(self, '>', exprize(r))
def __ge__(self, r):
return ExprCompare(self, '>=', exprize(r))
__radd__ = __add__
__rmul__ = __mul__
__rand__ = __and__
class ExprVar(Expr):
__slots__ = 'rtype',
def __init__(self, type):
self.rtype = type
class ExprFixedVar(ExprVar):
__slots__ = 'name',
def __init__(self, type, name):
super().__init__(type)
self.name = name
def __str__(self):
return self.name
class ExprTempVar(ExprVar):
__slots__ = 'var',
_name_cache = list('ABCDEFGHIJKLMNOPQRSTUVWXYZ')
def __init__(self, type, var):
super().__init__(type)
self.var = var
@classmethod
def get_name(cls, var):
'''
>>> list(map(ExprTempVar.get_name, (0, 26, 52)))
['A', 'AA', 'BA']
'''
cache = cls._name_cache
try:
s = cache[var]
except IndexError:
cache += [None] * (var + 1 - len(cache))
else:
if s is not None:
return s
length = 1
expressible = 26
ind = var
while ind >= expressible:
length += 1
ind -= expressible
expressible *= 26
s = ''
for _ in range(length):
s = chr(ord('A') + (ind % 26)) + s
ind //= 26
cache[var] = s
return s
def __str__(self):
return self.get_name(self.var)
class ExprConst(Expr):
__slots__ = 'rtype', 'value'
def __init__(self, type, value, *, int=int):
self.rtype = type
self.value = int(value)
def __str__(self):
if -10 < self.value < 10:
value_s = str(self.value)
else:
value_s = hex(self.value)
if self.rtype < 64:
return value_s + 'u'
else:
return 'UINT64_C({})'.format(value_s)
def _complicated(self, threshold):
# Always assign 64-bit constant to a variable for readability.
return (self.rtype == 64)
@staticmethod
def combine(const_exprs):
"""Combine multiple ExprConst into one."""
const_value = 0
const_type = 32
for expr in const_exprs:
const_value += expr.value
const_type = max(const_type, expr.rtype)
if const_value == 0:
return None
else:
return ExprConst(const_type, const_value)
def __neg__(self):
return ExprConst(self.rtype, -self.value)
class ExprAdd(Expr):
def __init__(self, exprs, const, *, max=max, tuple=tuple):
assert const is None or const.IS_CONST
self.exprs = tuple(exprs)
self.const = const
rtype = max([x.rtype for x in self.children])
self.rtype = max(rtype, 31) # C type-promotion rule
def __str__(self):
res = ' + '.join(map(str, self.exprs))
if self.const:
const_value = self.const.value
if const_value >= 0:
res += ' + ' + str(self.const)
else:
res += ' - ' + str(ExprConst(self.const.rtype,
-const_value))
return '(' + res + ')'
@property
def children(self):
const = self.const
if const:
return self.exprs + (const,)
else:
return self.exprs
@utils.cached_property
def optimized(self):
exprs = []
const_exprs = []
if self.const:
const_exprs.append(self.const)
for expr in self.exprs:
expr = expr.optimized
if expr.IS_ADD:
exprs.extend(expr.exprs)
if expr.const:
const_exprs.append(expr.const)
elif expr.IS_CONST:
const_exprs.append(expr)
else:
exprs.append(expr)
const = ExprConst.combine(const_exprs)
# (a ? c1 : c2) + c3 ==> (a ? c1 + c3 : c2 + c3)
if const:
const_value = const.value
for i, expr in enumerate(exprs):
if expr.IS_COND and \
expr.exprT.IS_CONST and expr.exprF.IS_CONST and \
(min(expr.exprT.value, expr.exprF.value) + const_value
>= 0):
expr = ExprCond(
expr.cond,
ExprConst(self.rtype, expr.exprT.value + const_value),
ExprConst(self.rtype, expr.exprF.value + const_value))
exprs[i] = expr.optimized
const = None
break
self.exprs = exprs = tuple(exprs)
self.const = const
if len(exprs) == 1 and not const:
return exprs[0]
return self
def extract_subexprs(self, threshold, callback, allow_new):
exprs = []
for expr in self.exprs:
expr.extract_subexprs(threshold, callback, allow_new)
expr = callback(expr, allow_new and expr._complicated(threshold))
exprs.append(expr)
self.exprs = tuple(exprs)
if self.const:
self.const = callback(
self.const, allow_new and self.const._complicated(threshold))
class ExprBinary(Expr):
__slots__ = 'left', 'right', 'rtype'
def __init__(self, left, right, rtype=None, *, max=max):
self.left = left = left.optimized
self.right = right = right.optimized
self.rtype = rtype or max(31, left.rtype, right.rtype)
@property
def children(self):
return self.left, self.right
def extract_subexprs(self, threshold, callback, allow_new):
super().extract_subexprs(threshold, callback, allow_new)
self.left = callback(self.left,
allow_new and self.left._complicated(threshold))
self.right = callback(self.right,
allow_new and self.right._complicated(threshold))
class ExprShift(ExprBinary):
def __init__(self, left, right):
super().__init__(left, right, max(31, left.rtype))
class ExprLShift(ExprShift):
def __str__(self):
# Avoid the spurious 'u' after the constant
right = self.right
if right.IS_CONST:
if right.value in (1, 2, 3):
return '{} * {}'.format(self.left, 1 << right.value)
return '({} << {})'.format(self.left, right.value)
else:
return '({} << {})'.format(self.left, right)
@utils.cached_property
def optimized(self):
left = self.left
right = self.right
right_const = right.IS_CONST
if right_const and left.IS_CONST:
return ExprConst(self.rtype, left.value << right.value)
# "(a & c1) << c2" ==> (a << c2) & (c1 << c2) (where c2 <= 3)
# This takes advantage of x86's LEA instruction
if right_const and right.value <= 3 and \
left.IS_AND and \
left.right.IS_CONST:
expr_left = ExprLShift(left.left, right)
expr_right = ExprConst(left.right.rtype,
left.right.value << right.value)
return ExprAnd(expr_left, expr_right).optimized
# (cond ? c1 : c2) << c3 ==> (cond ? c1 << c3 : c2 << c3)
if right_const and \
left.IS_COND and \
left.exprT.IS_CONST and \
left.exprF.IS_CONST:
expr = ExprCond(left.cond,
ExprLShift(left.exprT, right),
ExprLShift(left.exprF, right))
return expr.optimized
# (a >> c1) << c2
# (a << (c2 - c1)) & ~((1 << c2) - 1) (c2 > c1)
# (a >> (c1 - c2)) & ~((1 << c2) - 1) (c2 <= c1)
if right_const and \
left.IS_RSHIFT and \
left.right.IS_CONST:
c2 = right.value
c1 = left.right.value
if c2 > c1:
expr = ExprLShift(left.left, ExprConst(32, c2 - c1))
elif c2 == c1:
expr = left.left
else:
expr = ExprRShift(left.left, ExprConst(32, c1 - c2))
and_value = ((1 << c2) - 1) ^ ((1 << expr.rtype) - 1)
expr = ExprAnd(expr, Const(expr.rtype, and_value))
return expr.optimized
# "(a + c1) << c2" ==> (a << c2) + (c1 << c2)
if right_const and \
left.IS_ADD and len(left.exprs) == 1 and \
left.const:
expr_left = ExprLShift(left.exprs[0], right)
expr_right = ExprConst(left.const.rtype,
left.const.value << right.value)
return ExprAdd((expr_left,), expr_right).optimized
return self
class ExprRShift(ExprShift):
def __init__(self, left, right):
# Always logical shift
if left.rtype < 32 or left.rtype == 63:
left = ExprCast(max(32, left.rtype + 1), left)
super().__init__(left, right)
def __str__(self):
# Avoid the spurious 'u' after the constant
right = self.right
if right.IS_CONST:
right_s = str(right.value)
else:
right_s = str(right)
return '({} >> {})'.format(self.left, right_s)
@utils.cached_property
def optimized(self):
'''
>>> expr = (Add(FixedVar(32, 'c'), 30) >> 2)
>>> str(expr.optimized)
'(((c + 2u) >> 2) + 7u)'
>>> expr = (Add(FixedVar(32, 'c'), FixedVar(32, 'd'), -30) >> 2)
>>> str(expr.optimized)
'(((c + d + 2u) >> 2) - 8u)'
'''
left = self.left
right = self.right
right_const = right.IS_CONST
# (a + c1) >> c2
# Convert to ((a + c1 % (1 << c2)) >> c2) + (c1 >> c2).
if right_const and left.IS_ADD and left.const:
ctype = left.const.rtype
c1 = left.const.value
c2 = right.value
if c1 >> c2:
compensation = c1 >> c2
remainder = c1 - (compensation << c2)
if remainder < 0:
compensation += 1
remainder -= 1 << c2
expr = ExprAdd(left.exprs, ExprConst(ctype, remainder))
expr = ExprRShift(expr, ExprConst(32, c2))
expr = ExprAdd((expr,), ExprConst(ctype, compensation))
return expr.optimized
# (a >> c1) >> c2 ==> a >> (c1 + c2)
if right_const and \
left.IS_RSHIFT and \
left.right.IS_CONST:
self.right = right = Add(right, left.right).optimized
self.left = left = left.left
return self
class ExprMul(ExprBinary):
def __str__(self):
return '({} * {})'.format(self.left, self.right)
@utils.cached_property
def optimized(self):
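        '''Strength-reduce a power-of-two multiplier into a shift, e.g.:
        >>> str((FixedVar(32, 'a') * 16).optimized)
        '(a << 4)'
        '''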
left = self.left
right = self.right
right_const = right.IS_CONST
# Both constants
if right_const and left.IS_CONST:
return ExprConst(self.rtype,
left.value * right.value)
# Put constant on the right side
if not right_const and left.IS_CONST:
self.left, self.right = left, right = right, left
right_const = True
if right_const:
# Strength reduction (* => <<)
rv = right.value
if rv == 0:
return ExprConst(32, 0)
elif rv == 1:
return left
elif (rv > 0) and (rv & (rv - 1)) == 0: # Power of 2
expr = ExprLShift(left, ExprConst(32, rv.bit_length() - 1))
return expr.optimized
# (a + c1) * c2 ==> (a * c2 + c1 * c2)
if left.IS_ADD and len(left.exprs) == 1 and left.const:
expr_left = ExprMul(left.exprs[0], right)
expr_right = ExprMul(left.const, right)
return ExprAdd((expr_left, expr_right), None).optimized
# (cond ? c1 : c2) * c3 ==> (cond ? c1 * c3 : c2 * c3)
if left.IS_COND and \
left.exprT.IS_CONST and \
left.exprF.IS_CONST:
expr = ExprCond(left.cond,
ExprMul(left.exprT, right),
ExprMul(left.exprF, right))
return expr.optimized
# (a & 1) * c ==> (a & 1) ? c : 0
if left.IS_AND and \
left.right.IS_CONST and \
left.right.value == 1:
expr = ExprCond(left, right, ExprConst(self.rtype, 0))
return expr.optimized
return self
class ExprDiv(ExprBinary):
IS_ALSO = 'DIV_MOD',
def __init__(self, left, right):
if left.rtype < 32 or left.rtype == 63:
left = ExprCast(max(32, left.rtype + 1), left)
super().__init__(left, right)
def __str__(self):
return '({} / {})'.format(self.left, self.right)
@utils.cached_property
def optimized(self):
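        '''Division by a power of two becomes a logical right shift, e.g.:
        >>> str((FixedVar(32, 'a') // 8).optimized)
        '(a >> 3)'
        '''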
left = self.left
right = self.right
if right.IS_CONST:
rv = right.value
if rv == 0:
raise ZeroDivisionError
elif rv == 1:
return left
elif (rv & (rv - 1)) == 0:
expr = ExprRShift(left, ExprConst(32, rv.bit_length() - 1))
return expr.optimized
return self
class ExprMod(ExprBinary):
IS_ALSO = 'DIV_MOD',
def __str__(self):
return '({} % {})'.format(self.left, self.right)
@utils.cached_property
def optimized(self):
right = self.right
if right.IS_CONST:
value = right.value
if value and (value & (value - 1)) == 0:
return ExprAnd(self.left,
ExprConst(right.rtype, value - 1)).optimized
return self
class ExprAnd(ExprBinary):
def __str__(self):
return '({} & {})'.format(self.left, self.right)
@utils.cached_property
def optimized(self):
left = self.left
right = self.right
right_const = right.IS_CONST
right_value = None
if right_const:
right_value = right.value
# (a + c1) & c2 ==> (a + c1') & c2
# where c1' = c1 with high bits cleared
if right_const and right_value and \
left.IS_ADD and left.const:
rv = right_value
bt = rv.bit_length() + 1
c1 = left.const.value
c1p = c1 & ((1 << bt) - 1)
# If its high bit is set, make it negative
if c1p & (1 << (bt - 1)):
c1p |= ~((1 << bt) - 1)
if c1p != c1:
left = ExprAdd(left.exprs, ExprConst(left.const.rtype, c1p))
self.left = left = left.optimized
# (a & c1) & c2 ==> a & (c1 & c2)
if right_const and \
left.IS_AND and \
left.right.IS_CONST:
c1 = left.right.value
c2 = right_value
expr = ExprAnd(left.left,
Const(max(left.right.rtype, right.rtype), c1 & c2))
return expr.optimized
# (a & 0xff) ==> (uint8_t)a
# (a & 0xffff) ==> (uint16_t)a
if right_const:
# Must cast back
if right_value == 0xff:
expr = ExprCast(self.rtype, ExprCast(8, left))
return expr.optimized
elif right_value == 0xffff:
expr = ExprCast(self.rtype, ExprCast(16, left))
return expr.optimized
return self
class ExprCompare(ExprBinary):
def __init__(self, left, compare, right):
super().__init__(left, right, 31)
self.compare = compare
def __str__(self):
return '({} {} {})'.format(self.left, self.compare, self.right)
@utils.cached_property
def optimized(self):
left = self.left
right = self.right
right_const = right.IS_CONST
# (a >> c1) == c2
# a >= (c2 << c1) && a < ((c2 + 1) << c1)
        # unsigned(a - (c2 << c1)) < (1 << c1)
if right_const and self.compare == '==' and \
left.IS_RSHIFT and \
left.left.rtype == 32 and \
left.right.IS_CONST and \
right.rtype == 32:
c1 = left.right.value
c2 = right.value
if ((c2 + 1) << c1) <= 2**32:
expr = ExprAdd((left.left,), ExprConst(32, -(c2 << c1)))
expr = ExprCompare(expr, '<', ExprConst(32, 1 << c1))
return expr.optimized
# (a >> c1) < c2
# a < (c2 << c1)
if right_const and self.compare == '<' and \
left.IS_RSHIFT and \
left.left.rtype == 32 and \
left.right.IS_CONST and \
right.rtype == 32:
c1 = left.right.value
c2 = right.value
if (c2 << c1) < 2**32:
expr = ExprCompare(left.left, '<', ExprConst(32, c2 << c1))
return expr.optimized
return self
class ExprCast(Expr):
def __init__(self, type, value):
self.rtype = type
self.value = value.optimized
def __str__(self):
return '{}({})'.format(type_name(self.rtype),
utils.trim_brackets(str(self.value)))
@property
def children(self):
return self.value,
@utils.cached_property
def optimized(self):
rtype = self.rtype
value = self.value
if value.rtype == rtype:
return value
if value.IS_CAST and rtype <= value.rtype:
return ExprCast(rtype, value.value).optimized
return self
class ExprCond(Expr):
__slots__ = 'cond', 'exprT', 'exprF', 'rtype'
def __init__(self, cond, exprT, exprF):
self.cond = cond.optimized
self.exprT = exprT.optimized
self.exprF = exprF.optimized
self.rtype = max(31, self.exprT.rtype, self.exprF.rtype)
def __str__(self):
return '({} ? {} : {})'.format(self.cond, self.exprT, self.exprF)
@property
def children(self):
return self.cond, self.exprT, self.exprF
def extract_subexprs(self, threshold, callback, allow_new):
# It can be unsafe to evaluate exprT or exprF without first checking
# cond
if not self.cond.IS_VAR:
self.cond.extract_subexprs(threshold, callback, allow_new)
self.cond = callback(
self.cond, allow_new and self.cond._complicated(threshold))
if not allow_new:
self.exprT = callback(self.exprT, False)
self.exprF = callback(self.exprF, False)
self.exprT.extract_subexprs(threshold, callback, False)
self.exprF.extract_subexprs(threshold, callback, False)
class ExprTable(Expr):
def __init__(self, type, name, values, var, offset):
self.rtype = type
self.name = name
self.values = values
self.var = var = var.optimized
self.offset = offset
def __str__(self):
if self.offset > 0:
# Add an extra 'l' so that the constant is absorbed by the
# address of the array
offset_s = '{:#x}'.format(self.offset)
if self.var.rtype < 63:
offset_s += 'l'
return '{}[{} - {}]'.format(
self.name, self.var, offset_s)
elif self.offset < 0:
# Don't add 'l' in this case, to avoid signed/unsigned
# extension problems
return '{}[{} + {:#x}]'.format(self.name,
self.var, -self.offset)
else:
var = utils.trim_brackets(str(self.var))
return '{}[{}]'.format(self.name, var)
def statics(self, vs):
id_ = id(self)
if id_ in vs:
return ''
vs.add(id_)
var_statics = self.var.statics(vs)
if _speedups:
c_array = _speedups.format_c_array(
self.values, self.rtype, self.name)
if c_array is not None:
return var_statics + c_array
res = [var_statics]
res_append = res.append
indlen = len(hex(self.values.size))
maxlen = len(hex(utils.np_max(self.values)))
# I understand this is not the "correct" way to go, but this is
# for performance.
# If I don't care about performance, I could do '{:#0{}x}'.format(v, l)
line_start_format = ' /* {{:#0{}x}} */'.format(indlen).format
value_format = ' {{:#0{}x}},'.format(maxlen).format
line = 'alignas({type}) const {type} {name}[{size:#x}] = {{'.format(
type=type_name(self.rtype), name=self.name, size=self.values.size)
for i, v in enumerate(self.values):
if not (i & 7):
res_append(line + '\n')
line = line_start_format(i)
line += value_format(v)
res_append(line.rstrip(',') + '\n')
res_append('};\n\n')
return ''.join(res)
@property
def children(self):
return self.var,
@utils.cached_property
def optimized(self):
var = self.var
# Absorb constants into offset
if var.IS_ADD and var.const:
self.offset -= var.const.value
self.var = var = ExprAdd(var.exprs, None).optimized
return self
def table_bytes(self, *, type_bytes=type_bytes):
return self.values.size * type_bytes(self.rtype)
def extract_subexprs(self, threshold, callback, allow_new):
super().extract_subexprs(threshold, callback, allow_new)
self.var = callback(self.var,
allow_new and self.var._complicated(threshold))
def _complicated(self, _threshold):
return True
### Factory functions
def exprize(expr, *,
isinstance=isinstance, Expr=Expr, ExprConst=ExprConst):
'''Convert int to ExprConst'''
if isinstance(expr, Expr):
return expr
else:
return ExprConst(32, expr)
FixedVar = ExprFixedVar
TempVar = ExprTempVar
Const = ExprConst
def Add(*in_exprs):
exprs = []
const_exprs = []
for expr in in_exprs:
expr = exprize(expr)
if expr.IS_CONST:
const_exprs.append(expr)
elif expr.IS_ADD:
exprs.extend(expr.exprs)
if expr.const:
const_exprs.append(expr.const)
else:
exprs.append(expr)
const_expr = ExprConst.combine(const_exprs)
if not exprs:
return const_expr or ExprConst(32, 0)
elif len(exprs) == 1 and not const_expr:
return exprs[0]
else:
return ExprAdd(exprs, const_expr)
def Cast(type, value):
return ExprCast(type, exprize(value))
def Cond(cond, exprT, exprF):
return ExprCond(exprize(cond), exprize(exprT), exprize(exprF))
| bsd-3-clause |
crosswalk-project/crosswalk-test-suite | webapi/tct-csp-w3c-tests/csp-py/csp_ro_style-src_cross-origin.py | 23 | 3064 | def main(request, response):
import simplejson as json
f = file('config.json')
source = f.read()
s = json.JSONDecoder().decode(source)
url1 = "http://" + s['host'] + ":" + str(s['ports']['http'][1])
url2 = "http://" + s['host'] + ":" + str(s['ports']['http'][0])
_CSP = "style-src " + url1
response.headers.set("Content-Security-Policy", _CSP)
response.headers.set("X-Content-Security-Policy", _CSP)
response.headers.set("X-WebKit-CSP", _CSP)
return """<!DOCTYPE html>
<!--
Copyright (c) 2013 Samsung Electronics Co., Ltd.
Licensed under the Apache License, Version 2.0 (the License);
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
Authors:
Ran, Wang <[email protected]>
-->
<html>
<head>
<title>CSP Test: csp_ro_style-src_cross-origin</title>
<link rel="author" title="Samsung" href="http://www.Samsung.com"/>
<link rel="help" href="http://www.w3.org/TR/2012/CR-CSP-20121115/#style-src"/>
<meta name="flags" content=""/>
<meta name="assert" content='style-src """ + url1 + """'/>
<meta charset="utf-8"/>
<script src="../resources/testharness.js"></script>
<script src="../resources/testharnessreport.js"></script>
<link rel="stylesheet" type="text/css" href='""" + url1 + """/tests/csp/support/canvas-index.css'/>
<link rel="stylesheet" type="text/css" href='""" + url2 + """/tests/csp/support/a-green.css'/>
<link rel="stylesheet" type="text/css" href="support/blue-100x100.css"/>
<style>
#test-green {
background-color: green;
}
</style>
</head>
<body>
<div id="log"></div>
<div id="test-blue"></div>
<h3>ext-css:""" + url1 + """/tests/csp/support/canvas-index.css</h3>
<div id="test-ext-a" class="a"></div>
<div id="test-green"></div>
<script>
test(function() {
var div = document.querySelector("#test-ext-a");
var fix = getComputedStyle(div)["color"];
assert_equals(fix, "rgb(0, 128, 0)", "style setted correctly");
}, document.title + "_blocked");
test(function() {
var div = document.querySelector("#test-blue");
var fix = getComputedStyle(div)["backgroundColor"];
assert_equals(fix, "rgb(0, 0, 255)", "style setted correctly");
}, document.title + "_blocked_int");
test(function() {
var div = document.querySelector("#test-green");
var fix = getComputedStyle(div)["backgroundColor"];
assert_equals(fix, "rgb(0, 128, 0)", "style setted correctly");
}, document.title + "_blocked_inline");
</script>
</body>
</html>"""
| bsd-3-clause |
liuwenf/moose | framework/contrib/nsiqcppstyle/rules/RULE_4_1_A_A_use_tab_for_indentation.py | 43 | 1770 | """
Use tabs for indentation.
This rule checks whether each line starts with a space.
In addition, it suppresses the violation when the line contains only spaces and tabs.
== Violation ==
void Hello()
{
[SPACE][SPACE]Hello(); <== Violation. Spaces are used for indentation.
}
== Good ==
void Hello()
{
[TAB] <== Don't care if the line is empty
[TAB]Hello(); <== Good.
}
"""
from nsiqcppstyle_rulehelper import *
from nsiqcppstyle_reporter import *
from nsiqcppstyle_rulemanager import *
def RunRule(lexer, line, lineno) :
if not Match("^\s*$", line) :
if Search("^ ", line) :
nsiqcppstyle_reporter.Error(DummyToken(lexer.filename, line, lineno, 0), __name__, "Do not use space for indent")
ruleManager.AddLineRule(RunRule)
###########################################################################################
# Unit Test
###########################################################################################
from nsiqunittest.nsiqcppstyle_unittestbase import *
class testRule(nct):
def setUpRule(self):
ruleManager.AddLineRule(RunRule)
def test1(self):
self.Analyze("test/thisFile.c",
"\tbool CanHave() {\n\t}")
assert not CheckErrorContent(__name__)
def test2(self):
self.Analyze("test/thisFile.c",
"""
class K {
Hello
}""")
assert CheckErrorContent(__name__)
def test3(self):
self.Analyze("test/thisFile.c",
"""
class K {
Hello
}""")
assert not CheckErrorContent(__name__)
def test4(self):
self.Analyze("test/thisFile.c",
"""
/**
* Check for Doxygen Comment. This rule doesn't care about doxygen comment block.
*/
class K {
Hello
}""")
assert not CheckErrorContent(__name__)
| lgpl-2.1 |
Omegaphora/external_deqp | android/scripts/common.py | 6 | 5274 | # -*- coding: utf-8 -*-
import os
import re
import sys
import shlex
import subprocess
import multiprocessing
class NativeLib:
def __init__ (self, apiVersion, abiVersion):
self.apiVersion = apiVersion
self.abiVersion = abiVersion
def getPlatform ():
if sys.platform.startswith('linux'):
return 'linux'
else:
return sys.platform
def selectByOS (variants):
platform = getPlatform()
if platform in variants:
return variants[platform]
elif 'other' in variants:
return variants['other']
else:
raise Exception("No configuration for '%s'" % platform)
def isExecutable (path):
return os.path.isfile(path) and os.access(path, os.X_OK)
def which (binName):
for path in os.environ['PATH'].split(os.pathsep):
path = path.strip('"')
fullPath = os.path.join(path, binName)
if isExecutable(fullPath):
return fullPath
return None
def isBinaryInPath (binName):
return which(binName) != None
def selectFirstExistingBinary (filenames):
for filename in filenames:
if filename != None and isExecutable(filename):
return filename
return None
def selectFirstExistingDir (paths):
for path in paths:
if path != None and os.path.isdir(path):
return path
return None
def die (msg):
print msg
exit(-1)
def shellquote(s):
return '"%s"' % s.replace('\\', '\\\\').replace('"', '\"').replace('$', '\$').replace('`', '\`')
def execute (commandLine):
args = shlex.split(commandLine)
retcode = subprocess.call(args)
if retcode != 0:
raise Exception("Failed to execute '%s', got %d" % (commandLine, retcode))
def execArgs (args):
retcode = subprocess.call(args)
if retcode != 0:
raise Exception("Failed to execute '%s', got %d" % (str(args), retcode))
class Device:
def __init__(self, serial, product, model, device):
self.serial = serial
self.product = product
self.model = model
self.device = device
def __str__ (self):
return "%s: {product: %s, model: %s, device: %s}" % (self.serial, self.product, self.model, self.device)
def getDevices (adb):
proc = subprocess.Popen([adb, 'devices', '-l'], stdout=subprocess.PIPE)
(stdout, stderr) = proc.communicate()
if proc.returncode != 0:
raise Exception("adb devices -l failed, got %d" % retcode)
ptrn = re.compile(r'^([a-zA-Z0-9]+)\s+.*product:([^\s]+)\s+model:([^\s]+)\s+device:([^\s]+)')
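    # A line from "adb devices -l" typically looks like (illustrative):
    #   0123456789ABCDEF  device usb:1-1.2 product:hammerhead model:Nexus_5 device:hammerhead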
devices = []
for line in stdout.splitlines()[1:]:
if len(line.strip()) == 0:
continue
m = ptrn.match(line)
if m == None:
print "WARNING: Failed to parse device info '%s'" % line
continue
devices.append(Device(m.group(1), m.group(2), m.group(3), m.group(4)))
return devices
def getWin32Generator ():
if which("jom.exe") != None:
return "NMake Makefiles JOM"
else:
return "NMake Makefiles"
def isNinjaSupported ():
return which("ninja") != None
def getUnixGenerator ():
if isNinjaSupported():
return "Ninja"
else:
return "Unix Makefiles"
def getExtraBuildArgs (generator):
if generator == "Unix Makefiles":
return ["--", "-j%d" % multiprocessing.cpu_count()]
else:
return []
NDK_HOST_OS_NAMES = [
"windows",
"windows_x86-64",
"darwin-x86",
"darwin-x86-64",
"linux-x86",
"linux-x86_64"
]
def getNDKHostOsName (ndkPath):
for name in NDK_HOST_OS_NAMES:
if os.path.exists(os.path.join(ndkPath, "prebuilt", name)):
return name
raise Exception("Couldn't determine NDK host OS")
# deqp/android path
ANDROID_DIR = os.path.realpath(os.path.join(os.path.dirname(os.path.abspath(__file__)), ".."))
# Build configuration
NATIVE_LIBS = [
# API ABI
NativeLib(13, "armeabi-v7a"), # ARM v7a ABI
NativeLib(13, "x86"), # x86
NativeLib(21, "arm64-v8a"), # ARM64 v8a ABI
]
ANDROID_JAVA_API = "android-13"
NATIVE_LIB_NAME = "libdeqp.so"
# NDK paths
ANDROID_NDK_PATH = selectFirstExistingDir([
os.path.expanduser("~/android-ndk-r10c"),
"C:/android/android-ndk-r10c",
])
ANDROID_NDK_HOST_OS = getNDKHostOsName(ANDROID_NDK_PATH)
ANDROID_NDK_TOOLCHAIN_VERSION = "r10c" # Toolchain file is selected based on this
# Native code build settings
CMAKE_GENERATOR = selectByOS({
'win32': getWin32Generator(),
'other': getUnixGenerator()
})
EXTRA_BUILD_ARGS = getExtraBuildArgs(CMAKE_GENERATOR)
# SDK paths
ANDROID_SDK_PATH = selectFirstExistingDir([
os.path.expanduser("~/android-sdk-linux"),
os.path.expanduser("~/android-sdk-mac_x86"),
"C:/android/android-sdk-windows",
])
ANDROID_BIN = selectFirstExistingBinary([
os.path.join(ANDROID_SDK_PATH, "tools", "android"),
os.path.join(ANDROID_SDK_PATH, "tools", "android.bat"),
which('android'),
])
ADB_BIN = selectFirstExistingBinary([
which('adb'), # \note Prefer adb in path to avoid version issues on dev machines
os.path.join(ANDROID_SDK_PATH, "platform-tools", "adb"),
os.path.join(ANDROID_SDK_PATH, "platform-tools", "adb.exe"),
])
ZIPALIGN_BIN = selectFirstExistingBinary([
os.path.join(ANDROID_SDK_PATH, "tools", "zipalign"),
os.path.join(ANDROID_SDK_PATH, "tools", "zipalign.exe"),
which('zipalign'),
])
JARSIGNER_BIN = which('jarsigner')
# Apache ant
ANT_BIN = selectFirstExistingBinary([
which('ant'),
"C:/android/apache-ant-1.8.4/bin/ant.bat",
"C:/android/apache-ant-1.9.2/bin/ant.bat",
"C:/android/apache-ant-1.9.3/bin/ant.bat",
"C:/android/apache-ant-1.9.4/bin/ant.bat",
])
| apache-2.0 |
Nikoala/CouchPotatoServer | couchpotato/core/media/movie/providers/trailer/youtube_dl/extractor/vidme.py | 36 | 2580 | from __future__ import unicode_literals
import re
from .common import InfoExtractor
from ..utils import (
int_or_none,
float_or_none,
str_to_int,
)
class VidmeIE(InfoExtractor):
_VALID_URL = r'https?://vid\.me/(?:e/)?(?P<id>[\da-zA-Z]+)'
_TEST = {
'url': 'https://vid.me/QNB',
'md5': 'f42d05e7149aeaec5c037b17e5d3dc82',
'info_dict': {
'id': 'QNB',
'ext': 'mp4',
'title': 'Fishing for piranha - the easy way',
'description': 'source: https://www.facebook.com/photo.php?v=312276045600871',
'duration': 119.92,
'timestamp': 1406313244,
'upload_date': '20140725',
'thumbnail': 're:^https?://.*\.jpg',
},
}
def _real_extract(self, url):
mobj = re.match(self._VALID_URL, url)
video_id = mobj.group('id')
webpage = self._download_webpage(url, video_id)
video_url = self._html_search_regex(r'<source src="([^"]+)"', webpage, 'video URL')
title = self._og_search_title(webpage)
description = self._og_search_description(webpage, default='')
thumbnail = self._og_search_thumbnail(webpage)
timestamp = int_or_none(self._og_search_property('updated_time', webpage, fatal=False))
width = int_or_none(self._og_search_property('video:width', webpage, fatal=False))
height = int_or_none(self._og_search_property('video:height', webpage, fatal=False))
duration = float_or_none(self._html_search_regex(
r'data-duration="([^"]+)"', webpage, 'duration', fatal=False))
view_count = str_to_int(self._html_search_regex(
r'<span class="video_views">\s*([\d,\.]+)\s*plays?', webpage, 'view count', fatal=False))
like_count = str_to_int(self._html_search_regex(
r'class="score js-video-vote-score"[^>]+data-score="([\d,\.\s]+)">',
webpage, 'like count', fatal=False))
comment_count = str_to_int(self._html_search_regex(
r'class="js-comment-count"[^>]+data-count="([\d,\.\s]+)">',
webpage, 'comment count', fatal=False))
return {
'id': video_id,
'url': video_url,
'title': title,
'description': description,
'thumbnail': thumbnail,
'timestamp': timestamp,
'width': width,
'height': height,
'duration': duration,
'view_count': view_count,
'like_count': like_count,
'comment_count': comment_count,
}
| gpl-3.0 |
lseyesl/phantomjs | src/qt/qtwebkit/Tools/Scripts/webkitpy/tool/comments.py | 148 | 2030 | # Copyright (c) 2009 Google Inc. All rights reserved.
# Copyright (c) 2009 Apple Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# A tool for automating dealing with bugzilla, posting patches, committing
# patches, etc.
from webkitpy.common.config import urls
def bug_comment_from_svn_revision(svn_revision):
return "Committed r%s: <%s>" % (svn_revision, urls.view_revision_url(svn_revision))
def bug_comment_from_commit_text(scm, commit_text):
svn_revision = scm.svn_revision_from_commit_text(commit_text)
return bug_comment_from_svn_revision(svn_revision)
| bsd-3-clause |
Daksh/sugar-toolkit-gtk3 | src/sugar3/graphics/palettemenu.py | 2 | 7416 | # Copyright 2012 One Laptop Per Child
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
from gi.repository import GObject
from gi.repository import Gtk
from sugar3.graphics.icon import Icon
from sugar3.graphics import style
class PaletteMenuBox(Gtk.VBox):
def __init__(self):
Gtk.VBox.__init__(self)
def append_item(self, item_or_widget, horizontal_padding=None,
vertical_padding=None):
item = None
if (isinstance(item_or_widget, PaletteMenuItem) or
isinstance(item_or_widget, PaletteMenuItemSeparator)):
item = item_or_widget
else:
item = self._wrap_widget(item_or_widget, horizontal_padding,
vertical_padding)
self.pack_start(item, False, False, 0)
def _wrap_widget(self, widget, horizontal_padding, vertical_padding):
vbox = Gtk.VBox()
vbox.show()
if horizontal_padding is None:
horizontal_padding = style.DEFAULT_SPACING
if vertical_padding is None:
vertical_padding = style.DEFAULT_SPACING
hbox = Gtk.HBox()
vbox.pack_start(hbox, True, True, vertical_padding)
hbox.show()
hbox.pack_start(widget, True, True, horizontal_padding)
return vbox
class PaletteMenuItemSeparator(Gtk.EventBox):
"""Contains a HSeparator and has the proper height for the menu."""
__gtype_name__ = 'SugarPaletteMenuItemSeparator'
def __init__(self):
Gtk.EventBox.__init__(self)
separator = Gtk.HSeparator()
self.add(separator)
separator.show()
self.set_size_request(-1, style.DEFAULT_SPACING * 2)
class PaletteMenuItem(Gtk.EventBox):
__gtype_name__ = 'SugarPaletteMenuItem'
__gsignals__ = {
'activate': (GObject.SignalFlags.RUN_FIRST, None, [])
}
def __init__(self, text_label=None, icon_name=None, text_maxlen=60,
xo_color=None, file_name=None, accelerator=None):
"""
text_label -- str
a text to display in the menu.
icon_name -- str
            the name of a sugar icon to be displayed. Takes precedence
over file_name.
text_maxlen -- int
the desired maximum width of the label, in characters.
            The default is 60.
xo_color -- sugar.graphics.XoColor
the color to be applied to the icon.
file_name -- str
the path to a svg file used as icon.
accelerator -- str
a text used to display the keyboard shortcut associated
to the menu.
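        Example (illustrative; the icon name and callback are placeholders):
            box = PaletteMenuBox()
            item = PaletteMenuItem(text_label='Resume', icon_name='activity-start')
            item.connect('activate', on_activate_cb)
            box.append_item(item)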
"""
Gtk.EventBox.__init__(self)
self.set_above_child(True)
self.icon = None
self._hbox = Gtk.HBox()
vbox = Gtk.VBox()
self.add(vbox)
vbox.show()
hbox = Gtk.HBox()
vbox.pack_start(hbox, True, True, style.DEFAULT_PADDING)
hbox.show()
hbox.pack_start(self._hbox, True, True, style.DEFAULT_PADDING)
if icon_name is not None:
self.icon = Icon(icon_name=icon_name,
pixel_size=style.SMALL_ICON_SIZE)
if xo_color is not None:
self.icon.props.xo_color = xo_color
self._hbox.pack_start(self.icon, expand=False, fill=False,
padding=style.DEFAULT_PADDING)
elif file_name is not None:
self.icon = Icon(file=file_name,
pixel_size=style.SMALL_ICON_SIZE)
if xo_color is not None:
self.icon.props.xo_color = xo_color
self._hbox.pack_start(self.icon, expand=False, fill=False,
padding=style.DEFAULT_PADDING)
align = Gtk.Alignment(xalign=0.0, yalign=0.5, xscale=0.0, yscale=0.0)
self.label = Gtk.Label(text_label)
if text_maxlen > 0:
self.label.set_max_width_chars(text_maxlen)
self.label.set_ellipsize(style.ELLIPSIZE_MODE_DEFAULT)
align.add(self.label)
self._hbox.pack_start(align, expand=True, fill=True,
padding=style.DEFAULT_PADDING)
self._accelerator_label = Gtk.AccelLabel('')
if accelerator is not None:
self._accelerator_label.set_text(accelerator)
self._hbox.pack_start(self._accelerator_label, expand=False,
fill=False, padding=style.DEFAULT_PADDING)
self.id_bt_release_cb = self.connect('button-release-event',
self.__button_release_cb)
self.id_enter_notify_cb = self.connect('enter-notify-event',
self.__enter_notify_cb)
self.id_leave_notify_cb = self.connect('leave-notify-event',
self.__leave_notify_cb)
self.show_all()
def __button_release_cb(self, widget, event):
alloc = self.get_allocation()
if 0 < event.x < alloc.width and 0 < event.y < alloc.height:
self.emit('activate')
def __enter_notify_cb(self, widget, event):
self.modify_bg(Gtk.StateType.NORMAL,
style.COLOR_BUTTON_GREY.get_gdk_color())
def __leave_notify_cb(self, widget, event):
self.modify_bg(Gtk.StateType.NORMAL,
style.COLOR_BLACK.get_gdk_color())
def set_label(self, text_label):
text = '<span foreground="%s">' % style.COLOR_WHITE.get_html() + \
text_label + '</span>'
self.label.set_markup(text)
def set_image(self, icon):
self._hbox.pack_start(icon, expand=False, fill=False,
padding=style.DEFAULT_PADDING)
self._hbox.reorder_child(icon, 0)
def set_accelerator(self, text):
self._accelerator_label.set_text(text)
def set_sensitive(self, sensitive):
is_sensitive = bool(not self.get_state_flags() &
Gtk.StateFlags.INSENSITIVE)
if is_sensitive == sensitive:
return
if sensitive:
self.handler_unblock(self.id_bt_release_cb)
self.handler_unblock(self.id_enter_notify_cb)
self.handler_unblock(self.id_leave_notify_cb)
self.unset_state_flags(Gtk.StateFlags.INSENSITIVE)
else:
self.handler_block(self.id_bt_release_cb)
self.handler_block(self.id_enter_notify_cb)
self.handler_block(self.id_leave_notify_cb)
self.set_state_flags(self.get_state_flags() |
Gtk.StateFlags.INSENSITIVE,
clear=True)
| lgpl-2.1 |
Vixionar/django | tests/forms_tests/widget_tests/test_selectmultiple.py | 145 | 4951 | from django.forms import SelectMultiple
from .base import WidgetTest
class SelectMultipleTest(WidgetTest):
widget = SelectMultiple()
numeric_choices = (('0', '0'), ('1', '1'), ('2', '2'), ('3', '3'), ('0', 'extra'))
def test_render_selected(self):
self.check_html(self.widget, 'beatles', ['J'], choices=self.beatles, html=(
"""<select multiple="multiple" name="beatles">
<option value="J" selected="selected">John</option>
<option value="P">Paul</option>
<option value="G">George</option>
<option value="R">Ringo</option>
</select>"""
))
def test_render_multiple_selected(self):
self.check_html(self.widget, 'beatles', ['J', 'P'], choices=self.beatles, html=(
"""<select multiple="multiple" name="beatles">
<option value="J" selected="selected">John</option>
<option value="P" selected="selected">Paul</option>
<option value="G">George</option>
<option value="R">Ringo</option>
</select>"""
))
def test_render_none(self):
"""
If the value is None, none of the options are selected.
"""
self.check_html(self.widget, 'beatles', None, choices=self.beatles, html=(
"""<select multiple="multiple" name="beatles">
<option value="J">John</option>
<option value="P">Paul</option>
<option value="G">George</option>
<option value="R">Ringo</option>
</select>"""
))
def test_render_value_label(self):
"""
If the value corresponds to a label (but not to an option value), none
of the options are selected.
"""
self.check_html(self.widget, 'beatles', ['John'], choices=self.beatles, html=(
"""<select multiple="multiple" name="beatles">
<option value="J">John</option>
<option value="P">Paul</option>
<option value="G">George</option>
<option value="R">Ringo</option>
</select>"""
))
def test_multiple_options_same_value(self):
"""
Multiple options with the same value can be selected (#8103).
"""
self.check_html(self.widget, 'choices', ['0'], choices=self.numeric_choices, html=(
"""<select multiple="multiple" name="choices">
<option value="0" selected="selected">0</option>
<option value="1">1</option>
<option value="2">2</option>
<option value="3">3</option>
<option value="0" selected="selected">extra</option>
</select>"""
))
def test_multiple_values_invalid(self):
"""
If multiple values are given, but some of them are not valid, the valid
ones are selected.
"""
self.check_html(self.widget, 'beatles', ['J', 'G', 'foo'], choices=self.beatles, html=(
"""<select multiple="multiple" name="beatles">
<option value="J" selected="selected">John</option>
<option value="P">Paul</option>
<option value="G" selected="selected">George</option>
<option value="R">Ringo</option>
</select>"""
))
def test_compare_string(self):
choices = [('1', '1'), ('2', '2'), ('3', '3')]
self.check_html(self.widget, 'nums', [2], choices=choices, html=(
"""<select multiple="multiple" name="nums">
<option value="1">1</option>
<option value="2" selected="selected">2</option>
<option value="3">3</option>
</select>"""
))
self.check_html(self.widget, 'nums', ['2'], choices=choices, html=(
"""<select multiple="multiple" name="nums">
<option value="1">1</option>
<option value="2" selected="selected">2</option>
<option value="3">3</option>
</select>"""
))
self.check_html(self.widget, 'nums', [2], choices=choices, html=(
"""<select multiple="multiple" name="nums">
<option value="1">1</option>
<option value="2" selected="selected">2</option>
<option value="3">3</option>
</select>"""
))
def test_optgroup_select_multiple(self):
widget = SelectMultiple(choices=(
('outer1', 'Outer 1'),
('Group "1"', (('inner1', 'Inner 1'), ('inner2', 'Inner 2'))),
))
self.check_html(widget, 'nestchoice', ['outer1', 'inner2'], html=(
"""<select multiple="multiple" name="nestchoice">
<option value="outer1" selected="selected">Outer 1</option>
<optgroup label="Group "1"">
<option value="inner1">Inner 1</option>
<option value="inner2" selected="selected">Inner 2</option>
</optgroup>
</select>"""
))
| bsd-3-clause |
wzbozon/statsmodels | statsmodels/sandbox/km_class.py | 31 | 11748 | #a class for the Kaplan-Meier estimator
from statsmodels.compat.python import range
import numpy as np
from math import sqrt
import matplotlib.pyplot as plt
class KAPLAN_MEIER(object):
def __init__(self, data, timesIn, groupIn, censoringIn):
raise RuntimeError('Newer version of Kaplan-Meier class available in survival2.py')
#store the inputs
self.data = data
self.timesIn = timesIn
self.groupIn = groupIn
self.censoringIn = censoringIn
def fit(self):
#split the data into groups based on the predicting variable
#get a set of all the groups
groups = list(set(self.data[:,self.groupIn]))
#create an empty list to store the data for different groups
groupList = []
#create an empty list for each group and add it to groupList
for i in range(len(groups)):
groupList.append([])
#iterate through all the groups in groups
for i in range(len(groups)):
#iterate though the rows of dataArray
for j in range(len(self.data)):
#test if this row has the correct group
if self.data[j,self.groupIn] == groups[i]:
#add the row to groupList
groupList[i].append(self.data[j])
#create an empty list to store the times for each group
timeList = []
#iterate through all the groups
for i in range(len(groupList)):
#create an empty list
times = []
#iterate through all the rows of the group
for j in range(len(groupList[i])):
#get a list of all the times in the group
times.append(groupList[i][j][self.timesIn])
#get a sorted set of the times and store it in timeList
times = list(sorted(set(times)))
timeList.append(times)
#get a list of the number at risk and events at each time
#create an empty list to store the results in
timeCounts = []
#create an empty list to hold points for plotting
points = []
#create a list for points where censoring occurs
censoredPoints = []
#iterate through each group
for i in range(len(groupList)):
#initialize a variable to estimate the survival function
survival = 1
#initialize a variable to estimate the variance of
#the survival function
varSum = 0
#initialize a counter for the number at risk
riskCounter = len(groupList[i])
#create a list for the counts for this group
counts = []
##create a list for points to plot
x = []
y = []
#iterate through the list of times
for j in range(len(timeList[i])):
if j != 0:
if j == 1:
#add an indicator to tell if the time
#starts a new group
groupInd = 1
#add (0,1) to the list of points
x.append(0)
y.append(1)
#add the point time to the right of that
x.append(timeList[i][j-1])
y.append(1)
#add the point below that at survival
x.append(timeList[i][j-1])
y.append(survival)
#add the survival to y
y.append(survival)
else:
groupInd = 0
#add survival twice to y
y.append(survival)
y.append(survival)
#add the time twice to x
x.append(timeList[i][j-1])
x.append(timeList[i][j-1])
#add each censored time, number of censorings and
#its survival to censoredPoints
censoredPoints.append([timeList[i][j-1],
censoringNum,survival,groupInd])
#add the count to the list
counts.append([timeList[i][j-1],riskCounter,
eventCounter,survival,
sqrt(((survival)**2)*varSum)])
#reduce the number at risk by the change at the previous time
riskCounter += -1*(riskChange)
#initialize a counter for the change in the number at risk
riskChange = 0
#initialize a counter to zero
eventCounter = 0
#initialize a counter to tell when censoring occurs
censoringCounter = 0
censoringNum = 0
#iterate through the observations in each group
for k in range(len(groupList[i])):
#check if the observation has the given time
if (groupList[i][k][self.timesIn]) == (timeList[i][j]):
#increment the number at risk counter
riskChange += 1
#check if this is an event or censoring
if groupList[i][k][self.censoringIn] == 1:
#add 1 to the counter
eventCounter += 1
else:
censoringNum += 1
#check if there are any events at this time
if eventCounter != censoringCounter:
censoringCounter = eventCounter
#calculate the estimate of the survival function
survival *= ((float(riskCounter) -
eventCounter)/(riskCounter))
try:
#calculate the estimate of the variance
varSum += (eventCounter)/((riskCounter)
*(float(riskCounter)-
eventCounter))
except ZeroDivisionError:
varSum = 0
#append the last row to counts
counts.append([timeList[i][len(timeList[i])-1],
riskCounter,eventCounter,survival,
sqrt(((survival)**2)*varSum)])
#add the last time once to x
x.append(timeList[i][len(timeList[i])-1])
x.append(timeList[i][len(timeList[i])-1])
#add the last survival twice to y
y.append(survival)
#y.append(survival)
censoredPoints.append([timeList[i][len(timeList[i])-1],
censoringNum,survival,1])
#add the list for the group to a list for all the groups
timeCounts.append(np.array(counts))
points.append([x,y])
#returns a list of arrays, where each array has as its columns: the time,
#the number at risk, the number of events, the estimated value of the
#survival function at that time, and the estimated standard error at
#that time, in that order
self.results = timeCounts
self.points = points
self.censoredPoints = censoredPoints
def plot(self):
x = []
#iterate through the groups
for i in range(len(self.points)):
#plot x and y
plt.plot(np.array(self.points[i][0]),np.array(self.points[i][1]))
#create lists of all the x and y values
x += self.points[i][0]
for j in range(len(self.censoredPoints)):
#check if censoring is occurring
if (self.censoredPoints[j][1] != 0):
#if this is the first censored point
if (self.censoredPoints[j][3] == 1) and (j == 0):
#calculate a distance beyond 1 to place it
#so all the points will fit
dx = ((1./((self.censoredPoints[j][1])+1.))
*(float(self.censoredPoints[j][0])))
#iterate through all the censored points at this time
for k in range(self.censoredPoints[j][1]):
#plot a vertical line for censoring
plt.vlines((1+((k+1)*dx)),
self.censoredPoints[j][2]-0.03,
self.censoredPoints[j][2]+0.03)
#if this censored point starts a new group
elif ((self.censoredPoints[j][3] == 1) and
(self.censoredPoints[j-1][3] == 1)):
#calculate a distance beyond 1 to place it
#so all the points will fit
dx = ((1./((self.censoredPoints[j][1])+1.))
*(float(self.censoredPoints[j][0])))
#iterate through all the censored points at this time
for k in range(self.censoredPoints[j][1]):
#plot a vertical line for censoring
plt.vlines((1+((k+1)*dx)),
self.censoredPoints[j][2]-0.03,
self.censoredPoints[j][2]+0.03)
#if this is the last censored point
elif j == (len(self.censoredPoints) - 1):
#calculate a distance beyond the previous time
#so that all the points will fit
dx = ((1./((self.censoredPoints[j][1])+1.))
*(float(self.censoredPoints[j][0])))
#iterate through all the points at this time
for k in range(self.censoredPoints[j][1]):
#plot a vertical line for censoring
plt.vlines((self.censoredPoints[j-1][0]+((k+1)*dx)),
self.censoredPoints[j][2]-0.03,
self.censoredPoints[j][2]+0.03)
#if this is a point in the middle of the group
else:
#calculate a distance beyond the current time
#to place the point, so they all fit
dx = ((1./((self.censoredPoints[j][1])+1.))
*(float(self.censoredPoints[j+1][0])
- self.censoredPoints[j][0]))
#iterate through all the points at this time
for k in range(self.censoredPoints[j][1]):
#plot a vertical line for censoring
plt.vlines((self.censoredPoints[j][0]+((k+1)*dx)),
self.censoredPoints[j][2]-0.03,
self.censoredPoints[j][2]+0.03)
#set the size of the plot so it extends to the max x and above 1 for y
plt.xlim((0,np.max(x)))
plt.ylim((0,1.05))
#label the axes
plt.xlabel('time')
plt.ylabel('survival')
plt.show()
def show_results(self):
#start a string that will be a table of the results
resultsString = ''
#iterate through all the groups
for i in range(len(self.results)):
#label the group and header
resultsString += ('Group {0}\n\n'.format(i) +
'Time At Risk Events Survival Std. Err\n')
for j in self.results[i]:
#add the results to the string
resultsString += (
'{0:<9d}{1:<12d}{2:<11d}{3:<13.4f}{4:<6.4f}\n'.format(
int(j[0]),int(j[1]),int(j[2]),j[3],j[4]))
print(resultsString)
| bsd-3-clause |
pdellaert/ansible | contrib/vault/azure_vault.py | 37 | 23745 | #!/usr/bin/env python
#
# This script borrows a great deal of code from the azure_rm.py dynamic inventory script
# that is packaged with Ansible. This can be found in the Ansible GitHub project at:
# https://github.com/ansible/ansible/blob/devel/contrib/inventory/azure_rm.py
#
# The Azure Dynamic Inventory script was written by:
# Copyright (c) 2016 Matt Davis, <[email protected]>
# Chris Houseknecht, <[email protected]>
# Altered/Added for Vault functionality:
# Austin Hobbs, GitHub: @OxHobbs
'''
Ansible Vault Password with Azure Key Vault Secret Script
=========================================================
This script is designed to be used with Ansible Vault. It can be supplied as
the password file to the ansible-vault command. This script uses the Azure
Python SDK. For instructions on installing
the Azure Python SDK see http://azure-sdk-for-python.readthedocs.org/
Authentication
--------------
The order of precedence is command line arguments, environment variables,
and finally the [default] profile found in ~/.azure/credentials for all
authentication parameters.
If using a credentials file, it should be an ini formatted file with one or
more sections, which we refer to as profiles. The script looks for a
[default] section, if a profile is not specified either on the command line
or with an environment variable. The keys in a profile will match the
list of command line arguments below.
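For example, a minimal ~/.azure/credentials profile for a service principal might
look like the following (the ids and secret shown are placeholders, not working
values):
[default]
subscription_id=00000000-0000-0000-0000-000000000000
client_id=00000000-0000-0000-0000-000000000000
secret=my-client-secret
tenant=00000000-0000-0000-0000-000000000000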
For command line arguments and environment variables specify a profile found
in your ~/.azure/credentials file, or a service principal or Active Directory
user.
Command line arguments:
- profile
- client_id
- secret
- subscription_id
- tenant
- ad_user
- password
- cloud_environment
- adfs_authority_url
- vault-name
- secret-name
- secret-version
Environment variables:
- AZURE_PROFILE
- AZURE_CLIENT_ID
- AZURE_SECRET
- AZURE_SUBSCRIPTION_ID
- AZURE_TENANT
- AZURE_AD_USER
- AZURE_PASSWORD
- AZURE_CLOUD_ENVIRONMENT
- AZURE_ADFS_AUTHORITY_URL
- AZURE_VAULT_NAME
- AZURE_VAULT_SECRET_NAME
- AZURE_VAULT_SECRET_VERSION
Vault
-----
The order of precedence of Azure Key Vault Secret information is the same.
Command line arguments, environment variables, and finally the azure_vault.ini
file with the [azure_keyvault] section.
azure_vault.ini (or azure_rm.ini if merged with Azure Dynamic Inventory Script)
------------------------------------------------------------------------------
As mentioned above, you can control execution using environment variables or a .ini file. A sample
azure_vault.ini is included. The name of the .ini file is the basename of the inventory script (in this case
'azure_vault') with a .ini extension. It also assumes the .ini file is alongside the script. To specify
a different path for the .ini file, define the AZURE_VAULT_INI_PATH environment variable:
export AZURE_VAULT_INI_PATH=/path/to/custom.ini
or
export AZURE_VAULT_INI_PATH=[same path as azure_rm.ini if merged]
__NOTE__: If using the azure_rm.py dynamic inventory script, it is possible to use the same .ini
file for both the azure_rm dynamic inventory and the azure_vault password file. Simply add a section
named [azure_keyvault] to the ini file with the following properties: vault_name, secret_name and
secret_version.
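As an illustration, an [azure_keyvault] section using the same sample vault name,
secret name and secret version as the Examples section below might look like:
[azure_keyvault]
vault_name=mydjangovault
secret_name=vaultpw
secret_version=6b6w7f7252b44eac8ee726b3698009f3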
Examples:
---------
Validate the vault_pw script with Python
$ python azure_vault.py -n mydjangovault -s vaultpw -v 6b6w7f7252b44eac8ee726b3698009f3
$ python azure_vault.py --vault-name 'mydjangovault' --secret-name 'vaultpw' \
--secret-version 6b6w7f7252b44eac8ee726b3698009f3
Use with a playbook
$ ansible-playbook -i ./azure_rm.py my_playbook.yml --limit galaxy-qa --vault-password-file ./azure_vault.py
Insecure Platform Warning
-------------------------
If you receive InsecurePlatformWarning from urllib3, install the
requests security packages:
pip install requests[security]
author:
- Chris Houseknecht (@chouseknecht)
- Matt Davis (@nitzmahone)
- Austin Hobbs (@OxHobbs)
Company: Ansible by Red Hat, Microsoft
Version: 0.1.0
'''
import argparse
import os
import re
import sys
import inspect
from azure.keyvault import KeyVaultClient
from ansible.module_utils.six.moves import configparser as cp
from os.path import expanduser
import ansible.module_utils.six.moves.urllib.parse as urlparse
HAS_AZURE = True
HAS_AZURE_EXC = None
HAS_AZURE_CLI_CORE = True
CLIError = None
try:
from msrestazure.azure_active_directory import AADTokenCredentials
from msrestazure.azure_exceptions import CloudError
from msrestazure.azure_active_directory import MSIAuthentication
from msrestazure import azure_cloud
from azure.mgmt.compute import __version__ as azure_compute_version
from azure.common import AzureMissingResourceHttpError, AzureHttpError
from azure.common.credentials import ServicePrincipalCredentials, UserPassCredentials
from azure.mgmt.network import NetworkManagementClient
from azure.mgmt.resource.resources import ResourceManagementClient
from azure.mgmt.resource.subscriptions import SubscriptionClient
from azure.mgmt.compute import ComputeManagementClient
from adal.authentication_context import AuthenticationContext
except ImportError as exc:
HAS_AZURE_EXC = exc
HAS_AZURE = False
try:
from azure.cli.core.util import CLIError
from azure.common.credentials import get_azure_cli_credentials, get_cli_profile
from azure.common.cloud import get_cli_active_cloud
except ImportError:
HAS_AZURE_CLI_CORE = False
CLIError = Exception
try:
from ansible.release import __version__ as ansible_version
except ImportError:
ansible_version = 'unknown'
AZURE_CREDENTIAL_ENV_MAPPING = dict(
profile='AZURE_PROFILE',
subscription_id='AZURE_SUBSCRIPTION_ID',
client_id='AZURE_CLIENT_ID',
secret='AZURE_SECRET',
tenant='AZURE_TENANT',
ad_user='AZURE_AD_USER',
password='AZURE_PASSWORD',
cloud_environment='AZURE_CLOUD_ENVIRONMENT',
adfs_authority_url='AZURE_ADFS_AUTHORITY_URL'
)
AZURE_VAULT_SETTINGS = dict(
vault_name='AZURE_VAULT_NAME',
secret_name='AZURE_VAULT_SECRET_NAME',
secret_version='AZURE_VAULT_SECRET_VERSION',
)
AZURE_MIN_VERSION = "2.0.0"
ANSIBLE_USER_AGENT = 'Ansible/{0}'.format(ansible_version)
class AzureRM(object):
def __init__(self, args):
self._args = args
self._cloud_environment = None
self._compute_client = None
self._resource_client = None
self._network_client = None
self._adfs_authority_url = None
self._vault_client = None
self._resource = None
self.debug = False
if args.debug:
self.debug = True
self.credentials = self._get_credentials(args)
if not self.credentials:
self.fail("Failed to get credentials. Either pass as parameters, set environment variables, "
"or define a profile in ~/.azure/credentials.")
# if cloud_environment specified, look up/build Cloud object
raw_cloud_env = self.credentials.get('cloud_environment')
if not raw_cloud_env:
self._cloud_environment = azure_cloud.AZURE_PUBLIC_CLOUD # SDK default
else:
# try to look up "well-known" values via the name attribute on azure_cloud members
all_clouds = [x[1] for x in inspect.getmembers(azure_cloud) if isinstance(x[1], azure_cloud.Cloud)]
matched_clouds = [x for x in all_clouds if x.name == raw_cloud_env]
if len(matched_clouds) == 1:
self._cloud_environment = matched_clouds[0]
elif len(matched_clouds) > 1:
self.fail("Azure SDK failure: more than one cloud matched for cloud_environment name '{0}'".format(
raw_cloud_env))
else:
if not urlparse.urlparse(raw_cloud_env).scheme:
self.fail("cloud_environment must be an endpoint discovery URL or one of {0}".format(
[x.name for x in all_clouds]))
try:
self._cloud_environment = azure_cloud.get_cloud_from_metadata_endpoint(raw_cloud_env)
except Exception as e:
self.fail("cloud_environment {0} could not be resolved: {1}".format(raw_cloud_env, e.message))
if self.credentials.get('subscription_id', None) is None:
self.fail("Credentials did not include a subscription_id value.")
self.log("setting subscription_id")
self.subscription_id = self.credentials['subscription_id']
# get authentication authority
# for adfs, user could pass in authority or not.
# for others, use default authority from cloud environment
if self.credentials.get('adfs_authority_url'):
self._adfs_authority_url = self.credentials.get('adfs_authority_url')
else:
self._adfs_authority_url = self._cloud_environment.endpoints.active_directory
# get resource from cloud environment
self._resource = self._cloud_environment.endpoints.active_directory_resource_id
if self.credentials.get('credentials'):
self.azure_credentials = self.credentials.get('credentials')
elif self.credentials.get('client_id') and self.credentials.get('secret') and self.credentials.get('tenant'):
self.azure_credentials = ServicePrincipalCredentials(client_id=self.credentials['client_id'],
secret=self.credentials['secret'],
tenant=self.credentials['tenant'],
cloud_environment=self._cloud_environment)
elif self.credentials.get('ad_user') is not None and \
self.credentials.get('password') is not None and \
self.credentials.get('client_id') is not None and \
self.credentials.get('tenant') is not None:
self.azure_credentials = self.acquire_token_with_username_password(
self._adfs_authority_url,
self._resource,
self.credentials['ad_user'],
self.credentials['password'],
self.credentials['client_id'],
self.credentials['tenant'])
elif self.credentials.get('ad_user') is not None and self.credentials.get('password') is not None:
tenant = self.credentials.get('tenant')
if not tenant:
tenant = 'common'
self.azure_credentials = UserPassCredentials(self.credentials['ad_user'],
self.credentials['password'],
tenant=tenant,
cloud_environment=self._cloud_environment)
else:
self.fail("Failed to authenticate with provided credentials. Some attributes were missing. "
"Credentials must include client_id, secret and tenant or ad_user and password, or "
"ad_user, password, client_id, tenant and adfs_authority_url(optional) for ADFS authentication, "
"or be logged in using AzureCLI.")
def log(self, msg):
if self.debug:
print(msg + u'\n')
def fail(self, msg):
raise Exception(msg)
def _get_profile(self, profile="default"):
path = expanduser("~")
path += "/.azure/credentials"
try:
config = cp.ConfigParser()
config.read(path)
except Exception as exc:
self.fail("Failed to access {0}. Check that the file exists and you have read "
"access. {1}".format(path, str(exc)))
credentials = dict()
for key in AZURE_CREDENTIAL_ENV_MAPPING:
try:
credentials[key] = config.get(profile, key, raw=True)
except Exception:
pass
if credentials.get('client_id') is not None or credentials.get('ad_user') is not None:
return credentials
return None
def _get_env_credentials(self):
env_credentials = dict()
for attribute, env_variable in AZURE_CREDENTIAL_ENV_MAPPING.items():
env_credentials[attribute] = os.environ.get(env_variable, None)
if env_credentials['profile'] is not None:
credentials = self._get_profile(env_credentials['profile'])
return credentials
if env_credentials['client_id'] is not None or env_credentials['ad_user'] is not None:
return env_credentials
return None
def _get_azure_cli_credentials(self):
credentials, subscription_id = get_azure_cli_credentials()
cloud_environment = get_cli_active_cloud()
cli_credentials = {
'credentials': credentials,
'subscription_id': subscription_id,
'cloud_environment': cloud_environment
}
return cli_credentials
def _get_msi_credentials(self, subscription_id_param=None):
credentials = MSIAuthentication()
try:
# try to get the subscription in MSI to test whether MSI is enabled
subscription_client = SubscriptionClient(credentials)
subscription = next(subscription_client.subscriptions.list())
subscription_id = str(subscription.subscription_id)
return {
'credentials': credentials,
'subscription_id': subscription_id_param or subscription_id
}
except Exception as exc:
return None
def _get_credentials(self, params):
# Get authentication credentials.
# Precedence: cmd line parameters -> environment variables -> default profile in ~/.azure/credentials.
self.log('Getting credentials')
arg_credentials = dict()
for attribute, env_variable in AZURE_CREDENTIAL_ENV_MAPPING.items():
arg_credentials[attribute] = getattr(params, attribute)
# try module params
if arg_credentials['profile'] is not None:
self.log('Retrieving credentials with profile parameter.')
credentials = self._get_profile(arg_credentials['profile'])
return credentials
if arg_credentials['client_id'] is not None:
self.log('Received credentials from parameters.')
return arg_credentials
if arg_credentials['ad_user'] is not None:
self.log('Received credentials from parameters.')
return arg_credentials
# try environment
env_credentials = self._get_env_credentials()
if env_credentials:
self.log('Received credentials from env.')
return env_credentials
# try default profile from ~/.azure/credentials
default_credentials = self._get_profile()
if default_credentials:
self.log('Retrieved default profile credentials from ~/.azure/credentials.')
return default_credentials
msi_credentials = self._get_msi_credentials(arg_credentials.get('subscription_id'))
if msi_credentials:
self.log('Retrieved credentials from MSI.')
return msi_credentials
try:
if HAS_AZURE_CLI_CORE:
self.log('Retrieving credentials from AzureCLI profile')
cli_credentials = self._get_azure_cli_credentials()
return cli_credentials
except CLIError as ce:
self.log('Error getting AzureCLI profile credentials - {0}'.format(ce))
return None
def acquire_token_with_username_password(self, authority, resource, username, password, client_id, tenant):
authority_uri = authority
if tenant is not None:
authority_uri = authority + '/' + tenant
context = AuthenticationContext(authority_uri)
token_response = context.acquire_token_with_username_password(resource, username, password, client_id)
return AADTokenCredentials(token_response)
def _register(self, key):
try:
# We have to perform the one-time registration here. Otherwise, we receive an error the first
# time we attempt to use the requested client.
resource_client = self.rm_client
resource_client.providers.register(key)
except Exception as exc:
self.log("One-time registration of {0} failed - {1}".format(key, str(exc)))
self.log("You might need to register {0} using an admin account".format(key))
self.log(("To register a provider using the Python CLI: "
"https://docs.microsoft.com/azure/azure-resource-manager/"
"resource-manager-common-deployment-errors#noregisteredproviderfound"))
def get_mgmt_svc_client(self, client_type, base_url, api_version):
client = client_type(self.azure_credentials,
self.subscription_id,
base_url=base_url,
api_version=api_version)
client.config.add_user_agent(ANSIBLE_USER_AGENT)
return client
def get_vault_client(self):
return KeyVaultClient(self.azure_credentials)
def get_vault_suffix(self):
return self._cloud_environment.suffixes.keyvault_dns
@property
def network_client(self):
self.log('Getting network client')
if not self._network_client:
self._network_client = self.get_mgmt_svc_client(NetworkManagementClient,
self._cloud_environment.endpoints.resource_manager,
'2017-06-01')
self._register('Microsoft.Network')
return self._network_client
@property
def rm_client(self):
self.log('Getting resource manager client')
if not self._resource_client:
self._resource_client = self.get_mgmt_svc_client(ResourceManagementClient,
self._cloud_environment.endpoints.resource_manager,
'2017-05-10')
return self._resource_client
@property
def compute_client(self):
self.log('Getting compute client')
if not self._compute_client:
self._compute_client = self.get_mgmt_svc_client(ComputeManagementClient,
self._cloud_environment.endpoints.resource_manager,
'2017-03-30')
self._register('Microsoft.Compute')
return self._compute_client
@property
def vault_client(self):
self.log('Getting the Key Vault client')
if not self._vault_client:
self._vault_client = self.get_vault_client()
return self._vault_client
class AzureKeyVaultSecret:
def __init__(self):
self._args = self._parse_cli_args()
try:
rm = AzureRM(self._args)
except Exception as e:
sys.exit("{0}".format(str(e)))
self._get_vault_settings()
if self._args.vault_name:
self.vault_name = self._args.vault_name
if self._args.secret_name:
self.secret_name = self._args.secret_name
if self._args.secret_version:
self.secret_version = self._args.secret_version
self._vault_suffix = rm.get_vault_suffix()
self._vault_client = rm.vault_client
print(self.get_password_from_vault())
def _parse_cli_args(self):
parser = argparse.ArgumentParser(
description='Obtain the vault password used to secure your Ansible secrets'
)
parser.add_argument('-n', '--vault-name', action='store', help='Name of Azure Key Vault')
parser.add_argument('-s', '--secret-name', action='store',
help='Name of the secret stored in Azure Key Vault')
parser.add_argument('-v', '--secret-version', action='store',
help='Version of the secret to be retrieved')
parser.add_argument('--debug', action='store_true', default=False,
help='Send the debug messages to STDOUT')
parser.add_argument('--profile', action='store',
help='Azure profile contained in ~/.azure/credentials')
parser.add_argument('--subscription_id', action='store',
help='Azure Subscription Id')
parser.add_argument('--client_id', action='store',
help='Azure Client Id ')
parser.add_argument('--secret', action='store',
help='Azure Client Secret')
parser.add_argument('--tenant', action='store',
help='Azure Tenant Id')
parser.add_argument('--ad_user', action='store',
help='Active Directory User')
parser.add_argument('--password', action='store',
help='password')
parser.add_argument('--adfs_authority_url', action='store',
help='Azure ADFS authority url')
parser.add_argument('--cloud_environment', action='store',
help='Azure Cloud Environment name or metadata discovery URL')
return parser.parse_args()
def get_password_from_vault(self):
vault_url = 'https://{0}{1}'.format(self.vault_name, self._vault_suffix)
secret = self._vault_client.get_secret(vault_url, self.secret_name, self.secret_version)
return secret.value
def _get_vault_settings(self):
env_settings = self._get_vault_env_settings()
if None not in set(env_settings.values()):
for key in AZURE_VAULT_SETTINGS:
setattr(self, key, env_settings.get(key, None))
else:
file_settings = self._load_vault_settings()
if not file_settings:
return
for key in AZURE_VAULT_SETTINGS:
if file_settings.get(key):
setattr(self, key, file_settings.get(key))
def _get_vault_env_settings(self):
env_settings = dict()
for attribute, env_variable in AZURE_VAULT_SETTINGS.items():
env_settings[attribute] = os.environ.get(env_variable, None)
return env_settings
def _load_vault_settings(self):
basename = os.path.splitext(os.path.basename(__file__))[0]
default_path = os.path.join(os.path.dirname(__file__), (basename + '.ini'))
path = os.path.expanduser(os.path.expandvars(os.environ.get('AZURE_VAULT_INI_PATH', default_path)))
config = None
settings = None
try:
config = cp.ConfigParser()
config.read(path)
except Exception:
pass
if config is not None:
settings = dict()
for key in AZURE_VAULT_SETTINGS:
try:
settings[key] = config.get('azure_keyvault', key, raw=True)
except Exception:
pass
return settings
def main():
if not HAS_AZURE:
sys.exit("The Azure python sdk is not installed (try `pip install 'azure>={0}' --upgrade`) - {1}".format(
AZURE_MIN_VERSION, HAS_AZURE_EXC))
AzureKeyVaultSecret()
if __name__ == '__main__':
main()
| gpl-3.0 |
ypu/tp-qemu | qemu/tests/timerdevice_tscwrite.py | 3 | 1850 | import logging
from autotest.client.shared import error
from autotest.client import utils
@error.context_aware
def run(test, params, env):
"""
Timer device tscwrite test:
1) Check for an appropriate clocksource on host.
2) Boot the guest.
3) Download and compile the newest msr-tools.
4) Execute cmd in guest.
:param test: QEMU test object.
:param params: Dictionary with test parameters.
:param env: Dictionary with the test environment.
"""
error.context("Check for an appropriate clocksource on host", logging.info)
host_cmd = "cat /sys/devices/system/clocksource/"
host_cmd += "clocksource0/current_clocksource"
if not "tsc" in utils.system_output(host_cmd):
raise error.TestNAError("Host must use 'tsc' clocksource")
error.context("Boot the guest", logging.info)
vm = env.get_vm(params["main_vm"])
vm.verify_alive()
timeout = int(params.get("login_timeout", 360))
session = vm.wait_for_login(timeout=timeout)
error.context("Download and compile the newest msr-tools", logging.info)
msr_tools_install_cmd = params["msr_tools_install_cmd"]
session.cmd(msr_tools_install_cmd)
error.context("Execute cmd in guest", logging.info)
cmd = "dmesg -c > /dev/null"
session.cmd(cmd)
date_cmd = "strace date 2>&1 | egrep 'clock_gettime|gettimeofday' | wc -l"
output = session.cmd(date_cmd)
if '0' not in output:
raise error.TestFail("Test failed before running msr-tools."
" Output: '%s'" % output)
msr_tools_cmd = params["msr_tools_cmd"]
session.cmd(msr_tools_cmd)
cmd = "dmesg"
session.cmd(cmd)
output = session.cmd(date_cmd)
if not "1" in output:
raise error.TestFail("Test failed after run msr tools."
" Output: '%s'" % output)
| gpl-2.0 |
timcera/hspfbintoolbox | tests/test_catalog.py | 1 | 115314 | # -*- coding: utf-8 -*-
"""
catalog
----------------------------------
Tests for `hspfbintoolbox` module.
"""
import csv
import shlex
import subprocess
import sys
from unittest import TestCase
from pandas.testing import assert_frame_equal
try:
from cStringIO import StringIO
except:
from io import StringIO
import pandas as pd
from hspfbintoolbox import hspfbintoolbox
interval2codemap = {"yearly": 5, "monthly": 4, "daily": 3, "bivl": 2}
def capture(func, *args, **kwds):
sys.stdout = StringIO() # capture output
out = func(*args, **kwds)
out = sys.stdout.getvalue() # release output
try:
out = bytes(out, "utf-8")
except:
pass
return out
def read_unicode_csv(
filename,
delimiter=",",
quotechar='"',
quoting=csv.QUOTE_MINIMAL,
lineterminator="\n",
encoding="utf-8",
):
# Python 3 version
if sys.version_info[0] >= 3:
# Open the file in text mode with given encoding
# Set newline arg to ''
# (see https://docs.python.org/3/library/csv.html)
# Next, get the csv reader, with unicode delimiter and quotechar
csv_reader = csv.reader(
filename,
delimiter=delimiter,
quotechar=quotechar,
quoting=quoting,
lineterminator=lineterminator,
)
# Now, iterate over the (already decoded) csv_reader generator
for row in csv_reader:
yield row
# Python 2 version
else:
# Next, get the csv reader, passing delimiter and quotechar as
# bytestrings rather than unicode
csv_reader = csv.reader(
filename,
delimiter=delimiter.encode(encoding),
quotechar=quotechar.encode(encoding),
quoting=quoting,
lineterminator=lineterminator,
)
# Iterate over the file and decode each string into unicode
for row in csv_reader:
yield [cell.decode(encoding) for cell in row]
class TestDescribe(TestCase):
def setUp(self):
self.catalog = b"""\
LUE , LC,GROUP ,VAR , TC,START ,END ,TC
IMPLND, 11,IWATER ,IMPEV, 5,1951 ,2001 ,yearly
IMPLND, 11,IWATER ,PET , 5,1951 ,2001 ,yearly
IMPLND, 11,IWATER ,RETS , 5,1951 ,2001 ,yearly
IMPLND, 11,IWATER ,SUPY , 5,1951 ,2001 ,yearly
IMPLND, 11,IWATER ,SURO , 5,1951 ,2001 ,yearly
IMPLND, 11,IWATER ,SURS , 5,1951 ,2001 ,yearly
IMPLND, 12,IWATER ,IMPEV, 5,1951 ,2001 ,yearly
IMPLND, 12,IWATER ,PET , 5,1951 ,2001 ,yearly
IMPLND, 12,IWATER ,RETS , 5,1951 ,2001 ,yearly
IMPLND, 12,IWATER ,SUPY , 5,1951 ,2001 ,yearly
IMPLND, 12,IWATER ,SURO , 5,1951 ,2001 ,yearly
IMPLND, 12,IWATER ,SURS , 5,1951 ,2001 ,yearly
IMPLND, 13,IWATER ,IMPEV, 5,1951 ,2001 ,yearly
IMPLND, 13,IWATER ,PET , 5,1951 ,2001 ,yearly
IMPLND, 13,IWATER ,RETS , 5,1951 ,2001 ,yearly
IMPLND, 13,IWATER ,SUPY , 5,1951 ,2001 ,yearly
IMPLND, 13,IWATER ,SURO , 5,1951 ,2001 ,yearly
IMPLND, 13,IWATER ,SURS , 5,1951 ,2001 ,yearly
IMPLND, 14,IWATER ,IMPEV, 5,1951 ,2001 ,yearly
IMPLND, 14,IWATER ,PET , 5,1951 ,2001 ,yearly
IMPLND, 14,IWATER ,RETS , 5,1951 ,2001 ,yearly
IMPLND, 14,IWATER ,SUPY , 5,1951 ,2001 ,yearly
IMPLND, 14,IWATER ,SURO , 5,1951 ,2001 ,yearly
IMPLND, 14,IWATER ,SURS , 5,1951 ,2001 ,yearly
IMPLND, 21,IWATER ,IMPEV, 5,1951 ,2001 ,yearly
IMPLND, 21,IWATER ,PET , 5,1951 ,2001 ,yearly
IMPLND, 21,IWATER ,RETS , 5,1951 ,2001 ,yearly
IMPLND, 21,IWATER ,SUPY , 5,1951 ,2001 ,yearly
IMPLND, 21,IWATER ,SURO , 5,1951 ,2001 ,yearly
IMPLND, 21,IWATER ,SURS , 5,1951 ,2001 ,yearly
IMPLND, 22,IWATER ,IMPEV, 5,1951 ,2001 ,yearly
IMPLND, 22,IWATER ,PET , 5,1951 ,2001 ,yearly
IMPLND, 22,IWATER ,RETS , 5,1951 ,2001 ,yearly
IMPLND, 22,IWATER ,SUPY , 5,1951 ,2001 ,yearly
IMPLND, 22,IWATER ,SURO , 5,1951 ,2001 ,yearly
IMPLND, 22,IWATER ,SURS , 5,1951 ,2001 ,yearly
IMPLND, 23,IWATER ,IMPEV, 5,1951 ,2001 ,yearly
IMPLND, 23,IWATER ,PET , 5,1951 ,2001 ,yearly
IMPLND, 23,IWATER ,RETS , 5,1951 ,2001 ,yearly
IMPLND, 23,IWATER ,SUPY , 5,1951 ,2001 ,yearly
IMPLND, 23,IWATER ,SURO , 5,1951 ,2001 ,yearly
IMPLND, 23,IWATER ,SURS , 5,1951 ,2001 ,yearly
IMPLND, 24,IWATER ,IMPEV, 5,1951 ,2001 ,yearly
IMPLND, 24,IWATER ,PET , 5,1951 ,2001 ,yearly
IMPLND, 24,IWATER ,RETS , 5,1951 ,2001 ,yearly
IMPLND, 24,IWATER ,SUPY , 5,1951 ,2001 ,yearly
IMPLND, 24,IWATER ,SURO , 5,1951 ,2001 ,yearly
IMPLND, 24,IWATER ,SURS , 5,1951 ,2001 ,yearly
IMPLND, 31,IWATER ,IMPEV, 5,1951 ,2001 ,yearly
IMPLND, 31,IWATER ,PET , 5,1951 ,2001 ,yearly
IMPLND, 31,IWATER ,RETS , 5,1951 ,2001 ,yearly
IMPLND, 31,IWATER ,SUPY , 5,1951 ,2001 ,yearly
IMPLND, 31,IWATER ,SURO , 5,1951 ,2001 ,yearly
IMPLND, 31,IWATER ,SURS , 5,1951 ,2001 ,yearly
IMPLND, 32,IWATER ,IMPEV, 5,1951 ,2001 ,yearly
IMPLND, 32,IWATER ,PET , 5,1951 ,2001 ,yearly
IMPLND, 32,IWATER ,RETS , 5,1951 ,2001 ,yearly
IMPLND, 32,IWATER ,SUPY , 5,1951 ,2001 ,yearly
IMPLND, 32,IWATER ,SURO , 5,1951 ,2001 ,yearly
IMPLND, 32,IWATER ,SURS , 5,1951 ,2001 ,yearly
IMPLND, 33,IWATER ,IMPEV, 5,1951 ,2001 ,yearly
IMPLND, 33,IWATER ,PET , 5,1951 ,2001 ,yearly
IMPLND, 33,IWATER ,RETS , 5,1951 ,2001 ,yearly
IMPLND, 33,IWATER ,SUPY , 5,1951 ,2001 ,yearly
IMPLND, 33,IWATER ,SURO , 5,1951 ,2001 ,yearly
IMPLND, 33,IWATER ,SURS , 5,1951 ,2001 ,yearly
IMPLND, 111,IWATER ,IMPEV, 5,1951 ,2001 ,yearly
IMPLND, 111,IWATER ,PET , 5,1951 ,2001 ,yearly
IMPLND, 111,IWATER ,RETS , 5,1951 ,2001 ,yearly
IMPLND, 111,IWATER ,SUPY , 5,1951 ,2001 ,yearly
IMPLND, 111,IWATER ,SURO , 5,1951 ,2001 ,yearly
IMPLND, 111,IWATER ,SURS , 5,1951 ,2001 ,yearly
IMPLND, 112,IWATER ,IMPEV, 5,1951 ,2001 ,yearly
IMPLND, 112,IWATER ,PET , 5,1951 ,2001 ,yearly
IMPLND, 112,IWATER ,RETS , 5,1951 ,2001 ,yearly
IMPLND, 112,IWATER ,SUPY , 5,1951 ,2001 ,yearly
IMPLND, 112,IWATER ,SURO , 5,1951 ,2001 ,yearly
IMPLND, 112,IWATER ,SURS , 5,1951 ,2001 ,yearly
IMPLND, 113,IWATER ,IMPEV, 5,1951 ,2001 ,yearly
IMPLND, 113,IWATER ,PET , 5,1951 ,2001 ,yearly
IMPLND, 113,IWATER ,RETS , 5,1951 ,2001 ,yearly
IMPLND, 113,IWATER ,SUPY , 5,1951 ,2001 ,yearly
IMPLND, 113,IWATER ,SURO , 5,1951 ,2001 ,yearly
IMPLND, 113,IWATER ,SURS , 5,1951 ,2001 ,yearly
IMPLND, 114,IWATER ,IMPEV, 5,1951 ,2001 ,yearly
IMPLND, 114,IWATER ,PET , 5,1951 ,2001 ,yearly
IMPLND, 114,IWATER ,RETS , 5,1951 ,2001 ,yearly
IMPLND, 114,IWATER ,SUPY , 5,1951 ,2001 ,yearly
IMPLND, 114,IWATER ,SURO , 5,1951 ,2001 ,yearly
IMPLND, 114,IWATER ,SURS , 5,1951 ,2001 ,yearly
IMPLND, 211,IWATER ,IMPEV, 5,1951 ,2001 ,yearly
IMPLND, 211,IWATER ,PET , 5,1951 ,2001 ,yearly
IMPLND, 211,IWATER ,RETS , 5,1951 ,2001 ,yearly
IMPLND, 211,IWATER ,SUPY , 5,1951 ,2001 ,yearly
IMPLND, 211,IWATER ,SURO , 5,1951 ,2001 ,yearly
IMPLND, 211,IWATER ,SURS , 5,1951 ,2001 ,yearly
IMPLND, 212,IWATER ,IMPEV, 5,1951 ,2001 ,yearly
IMPLND, 212,IWATER ,PET , 5,1951 ,2001 ,yearly
IMPLND, 212,IWATER ,RETS , 5,1951 ,2001 ,yearly
IMPLND, 212,IWATER ,SUPY , 5,1951 ,2001 ,yearly
IMPLND, 212,IWATER ,SURO , 5,1951 ,2001 ,yearly
IMPLND, 212,IWATER ,SURS , 5,1951 ,2001 ,yearly
IMPLND, 213,IWATER ,IMPEV, 5,1951 ,2001 ,yearly
IMPLND, 213,IWATER ,PET , 5,1951 ,2001 ,yearly
IMPLND, 213,IWATER ,RETS , 5,1951 ,2001 ,yearly
IMPLND, 213,IWATER ,SUPY , 5,1951 ,2001 ,yearly
IMPLND, 213,IWATER ,SURO , 5,1951 ,2001 ,yearly
IMPLND, 213,IWATER ,SURS , 5,1951 ,2001 ,yearly
IMPLND, 214,IWATER ,IMPEV, 5,1951 ,2001 ,yearly
IMPLND, 214,IWATER ,PET , 5,1951 ,2001 ,yearly
IMPLND, 214,IWATER ,RETS , 5,1951 ,2001 ,yearly
IMPLND, 214,IWATER ,SUPY , 5,1951 ,2001 ,yearly
IMPLND, 214,IWATER ,SURO , 5,1951 ,2001 ,yearly
IMPLND, 214,IWATER ,SURS , 5,1951 ,2001 ,yearly
IMPLND, 301,IWATER ,IMPEV, 5,1951 ,2001 ,yearly
IMPLND, 301,IWATER ,PET , 5,1951 ,2001 ,yearly
IMPLND, 301,IWATER ,RETS , 5,1951 ,2001 ,yearly
IMPLND, 301,IWATER ,SUPY , 5,1951 ,2001 ,yearly
IMPLND, 301,IWATER ,SURO , 5,1951 ,2001 ,yearly
IMPLND, 301,IWATER ,SURS , 5,1951 ,2001 ,yearly
IMPLND, 302,IWATER ,IMPEV, 5,1951 ,2001 ,yearly
IMPLND, 302,IWATER ,PET , 5,1951 ,2001 ,yearly
IMPLND, 302,IWATER ,RETS , 5,1951 ,2001 ,yearly
IMPLND, 302,IWATER ,SUPY , 5,1951 ,2001 ,yearly
IMPLND, 302,IWATER ,SURO , 5,1951 ,2001 ,yearly
IMPLND, 302,IWATER ,SURS , 5,1951 ,2001 ,yearly
IMPLND, 303,IWATER ,IMPEV, 5,1951 ,2001 ,yearly
IMPLND, 303,IWATER ,PET , 5,1951 ,2001 ,yearly
IMPLND, 303,IWATER ,RETS , 5,1951 ,2001 ,yearly
IMPLND, 303,IWATER ,SUPY , 5,1951 ,2001 ,yearly
IMPLND, 303,IWATER ,SURO , 5,1951 ,2001 ,yearly
IMPLND, 303,IWATER ,SURS , 5,1951 ,2001 ,yearly
IMPLND, 304,IWATER ,IMPEV, 5,1951 ,2001 ,yearly
IMPLND, 304,IWATER ,PET , 5,1951 ,2001 ,yearly
IMPLND, 304,IWATER ,RETS , 5,1951 ,2001 ,yearly
IMPLND, 304,IWATER ,SUPY , 5,1951 ,2001 ,yearly
IMPLND, 304,IWATER ,SURO , 5,1951 ,2001 ,yearly
IMPLND, 304,IWATER ,SURS , 5,1951 ,2001 ,yearly
IMPLND, 311,IWATER ,IMPEV, 5,1951 ,2001 ,yearly
IMPLND, 311,IWATER ,PET , 5,1951 ,2001 ,yearly
IMPLND, 311,IWATER ,RETS , 5,1951 ,2001 ,yearly
IMPLND, 311,IWATER ,SUPY , 5,1951 ,2001 ,yearly
IMPLND, 311,IWATER ,SURO , 5,1951 ,2001 ,yearly
IMPLND, 311,IWATER ,SURS , 5,1951 ,2001 ,yearly
IMPLND, 312,IWATER ,IMPEV, 5,1951 ,2001 ,yearly
IMPLND, 312,IWATER ,PET , 5,1951 ,2001 ,yearly
IMPLND, 312,IWATER ,RETS , 5,1951 ,2001 ,yearly
IMPLND, 312,IWATER ,SUPY , 5,1951 ,2001 ,yearly
IMPLND, 312,IWATER ,SURO , 5,1951 ,2001 ,yearly
IMPLND, 312,IWATER ,SURS , 5,1951 ,2001 ,yearly
IMPLND, 313,IWATER ,IMPEV, 5,1951 ,2001 ,yearly
IMPLND, 313,IWATER ,PET , 5,1951 ,2001 ,yearly
IMPLND, 313,IWATER ,RETS , 5,1951 ,2001 ,yearly
IMPLND, 313,IWATER ,SUPY , 5,1951 ,2001 ,yearly
IMPLND, 313,IWATER ,SURO , 5,1951 ,2001 ,yearly
IMPLND, 313,IWATER ,SURS , 5,1951 ,2001 ,yearly
IMPLND, 314,IWATER ,IMPEV, 5,1951 ,2001 ,yearly
IMPLND, 314,IWATER ,PET , 5,1951 ,2001 ,yearly
IMPLND, 314,IWATER ,RETS , 5,1951 ,2001 ,yearly
IMPLND, 314,IWATER ,SUPY , 5,1951 ,2001 ,yearly
IMPLND, 314,IWATER ,SURO , 5,1951 ,2001 ,yearly
IMPLND, 314,IWATER ,SURS , 5,1951 ,2001 ,yearly
IMPLND, 411,IWATER ,IMPEV, 5,1951 ,2001 ,yearly
IMPLND, 411,IWATER ,PET , 5,1951 ,2001 ,yearly
IMPLND, 411,IWATER ,RETS , 5,1951 ,2001 ,yearly
IMPLND, 411,IWATER ,SUPY , 5,1951 ,2001 ,yearly
IMPLND, 411,IWATER ,SURO , 5,1951 ,2001 ,yearly
IMPLND, 411,IWATER ,SURS , 5,1951 ,2001 ,yearly
IMPLND, 412,IWATER ,IMPEV, 5,1951 ,2001 ,yearly
IMPLND, 412,IWATER ,PET , 5,1951 ,2001 ,yearly
IMPLND, 412,IWATER ,RETS , 5,1951 ,2001 ,yearly
IMPLND, 412,IWATER ,SUPY , 5,1951 ,2001 ,yearly
IMPLND, 412,IWATER ,SURO , 5,1951 ,2001 ,yearly
IMPLND, 412,IWATER ,SURS , 5,1951 ,2001 ,yearly
IMPLND, 413,IWATER ,IMPEV, 5,1951 ,2001 ,yearly
IMPLND, 413,IWATER ,PET , 5,1951 ,2001 ,yearly
IMPLND, 413,IWATER ,RETS , 5,1951 ,2001 ,yearly
IMPLND, 413,IWATER ,SUPY , 5,1951 ,2001 ,yearly
IMPLND, 413,IWATER ,SURO , 5,1951 ,2001 ,yearly
IMPLND, 413,IWATER ,SURS , 5,1951 ,2001 ,yearly
IMPLND, 414,IWATER ,IMPEV, 5,1951 ,2001 ,yearly
IMPLND, 414,IWATER ,PET , 5,1951 ,2001 ,yearly
IMPLND, 414,IWATER ,RETS , 5,1951 ,2001 ,yearly
IMPLND, 414,IWATER ,SUPY , 5,1951 ,2001 ,yearly
IMPLND, 414,IWATER ,SURO , 5,1951 ,2001 ,yearly
IMPLND, 414,IWATER ,SURS , 5,1951 ,2001 ,yearly
IMPLND, 511,IWATER ,IMPEV, 5,1951 ,2001 ,yearly
IMPLND, 511,IWATER ,PET , 5,1951 ,2001 ,yearly
IMPLND, 511,IWATER ,RETS , 5,1951 ,2001 ,yearly
IMPLND, 511,IWATER ,SUPY , 5,1951 ,2001 ,yearly
IMPLND, 511,IWATER ,SURO , 5,1951 ,2001 ,yearly
IMPLND, 511,IWATER ,SURS , 5,1951 ,2001 ,yearly
IMPLND, 512,IWATER ,IMPEV, 5,1951 ,2001 ,yearly
IMPLND, 512,IWATER ,PET , 5,1951 ,2001 ,yearly
IMPLND, 512,IWATER ,RETS , 5,1951 ,2001 ,yearly
IMPLND, 512,IWATER ,SUPY , 5,1951 ,2001 ,yearly
IMPLND, 512,IWATER ,SURO , 5,1951 ,2001 ,yearly
IMPLND, 512,IWATER ,SURS , 5,1951 ,2001 ,yearly
IMPLND, 513,IWATER ,IMPEV, 5,1951 ,2001 ,yearly
IMPLND, 513,IWATER ,PET , 5,1951 ,2001 ,yearly
IMPLND, 513,IWATER ,RETS , 5,1951 ,2001 ,yearly
IMPLND, 513,IWATER ,SUPY , 5,1951 ,2001 ,yearly
IMPLND, 513,IWATER ,SURO , 5,1951 ,2001 ,yearly
IMPLND, 513,IWATER ,SURS , 5,1951 ,2001 ,yearly
IMPLND, 514,IWATER ,IMPEV, 5,1951 ,2001 ,yearly
IMPLND, 514,IWATER ,PET , 5,1951 ,2001 ,yearly
IMPLND, 514,IWATER ,RETS , 5,1951 ,2001 ,yearly
IMPLND, 514,IWATER ,SUPY , 5,1951 ,2001 ,yearly
IMPLND, 514,IWATER ,SURO , 5,1951 ,2001 ,yearly
IMPLND, 514,IWATER ,SURS , 5,1951 ,2001 ,yearly
IMPLND, 611,IWATER ,IMPEV, 5,1951 ,2001 ,yearly
IMPLND, 611,IWATER ,PET , 5,1951 ,2001 ,yearly
IMPLND, 611,IWATER ,RETS , 5,1951 ,2001 ,yearly
IMPLND, 611,IWATER ,SUPY , 5,1951 ,2001 ,yearly
IMPLND, 611,IWATER ,SURO , 5,1951 ,2001 ,yearly
IMPLND, 611,IWATER ,SURS , 5,1951 ,2001 ,yearly
IMPLND, 612,IWATER ,IMPEV, 5,1951 ,2001 ,yearly
IMPLND, 612,IWATER ,PET , 5,1951 ,2001 ,yearly
IMPLND, 612,IWATER ,RETS , 5,1951 ,2001 ,yearly
IMPLND, 612,IWATER ,SUPY , 5,1951 ,2001 ,yearly
IMPLND, 612,IWATER ,SURO , 5,1951 ,2001 ,yearly
IMPLND, 612,IWATER ,SURS , 5,1951 ,2001 ,yearly
IMPLND, 613,IWATER ,IMPEV, 5,1951 ,2001 ,yearly
IMPLND, 613,IWATER ,PET , 5,1951 ,2001 ,yearly
IMPLND, 613,IWATER ,RETS , 5,1951 ,2001 ,yearly
IMPLND, 613,IWATER ,SUPY , 5,1951 ,2001 ,yearly
IMPLND, 613,IWATER ,SURO , 5,1951 ,2001 ,yearly
IMPLND, 613,IWATER ,SURS , 5,1951 ,2001 ,yearly
IMPLND, 614,IWATER ,IMPEV, 5,1951 ,2001 ,yearly
IMPLND, 614,IWATER ,PET , 5,1951 ,2001 ,yearly
IMPLND, 614,IWATER ,RETS , 5,1951 ,2001 ,yearly
IMPLND, 614,IWATER ,SUPY , 5,1951 ,2001 ,yearly
IMPLND, 614,IWATER ,SURO , 5,1951 ,2001 ,yearly
IMPLND, 614,IWATER ,SURS , 5,1951 ,2001 ,yearly
IMPLND, 711,IWATER ,IMPEV, 5,1951 ,2001 ,yearly
IMPLND, 711,IWATER ,PET , 5,1951 ,2001 ,yearly
IMPLND, 711,IWATER ,RETS , 5,1951 ,2001 ,yearly
IMPLND, 711,IWATER ,SUPY , 5,1951 ,2001 ,yearly
IMPLND, 711,IWATER ,SURO , 5,1951 ,2001 ,yearly
IMPLND, 711,IWATER ,SURS , 5,1951 ,2001 ,yearly
IMPLND, 712,IWATER ,IMPEV, 5,1951 ,2001 ,yearly
IMPLND, 712,IWATER ,PET , 5,1951 ,2001 ,yearly
IMPLND, 712,IWATER ,RETS , 5,1951 ,2001 ,yearly
IMPLND, 712,IWATER ,SUPY , 5,1951 ,2001 ,yearly
IMPLND, 712,IWATER ,SURO , 5,1951 ,2001 ,yearly
IMPLND, 712,IWATER ,SURS , 5,1951 ,2001 ,yearly
IMPLND, 713,IWATER ,IMPEV, 5,1951 ,2001 ,yearly
IMPLND, 713,IWATER ,PET , 5,1951 ,2001 ,yearly
IMPLND, 713,IWATER ,RETS , 5,1951 ,2001 ,yearly
IMPLND, 713,IWATER ,SUPY , 5,1951 ,2001 ,yearly
IMPLND, 713,IWATER ,SURO , 5,1951 ,2001 ,yearly
IMPLND, 713,IWATER ,SURS , 5,1951 ,2001 ,yearly
IMPLND, 714,IWATER ,IMPEV, 5,1951 ,2001 ,yearly
IMPLND, 714,IWATER ,PET , 5,1951 ,2001 ,yearly
IMPLND, 714,IWATER ,RETS , 5,1951 ,2001 ,yearly
IMPLND, 714,IWATER ,SUPY , 5,1951 ,2001 ,yearly
IMPLND, 714,IWATER ,SURO , 5,1951 ,2001 ,yearly
IMPLND, 714,IWATER ,SURS , 5,1951 ,2001 ,yearly
IMPLND, 811,IWATER ,IMPEV, 5,1951 ,2001 ,yearly
IMPLND, 811,IWATER ,PET , 5,1951 ,2001 ,yearly
IMPLND, 811,IWATER ,RETS , 5,1951 ,2001 ,yearly
IMPLND, 811,IWATER ,SUPY , 5,1951 ,2001 ,yearly
IMPLND, 811,IWATER ,SURO , 5,1951 ,2001 ,yearly
IMPLND, 811,IWATER ,SURS , 5,1951 ,2001 ,yearly
IMPLND, 812,IWATER ,IMPEV, 5,1951 ,2001 ,yearly
IMPLND, 812,IWATER ,PET , 5,1951 ,2001 ,yearly
IMPLND, 812,IWATER ,RETS , 5,1951 ,2001 ,yearly
IMPLND, 812,IWATER ,SUPY , 5,1951 ,2001 ,yearly
IMPLND, 812,IWATER ,SURO , 5,1951 ,2001 ,yearly
IMPLND, 812,IWATER ,SURS , 5,1951 ,2001 ,yearly
IMPLND, 813,IWATER ,IMPEV, 5,1951 ,2001 ,yearly
IMPLND, 813,IWATER ,PET , 5,1951 ,2001 ,yearly
IMPLND, 813,IWATER ,RETS , 5,1951 ,2001 ,yearly
IMPLND, 813,IWATER ,SUPY , 5,1951 ,2001 ,yearly
IMPLND, 813,IWATER ,SURO , 5,1951 ,2001 ,yearly
IMPLND, 813,IWATER ,SURS , 5,1951 ,2001 ,yearly
IMPLND, 814,IWATER ,IMPEV, 5,1951 ,2001 ,yearly
IMPLND, 814,IWATER ,PET , 5,1951 ,2001 ,yearly
IMPLND, 814,IWATER ,RETS , 5,1951 ,2001 ,yearly
IMPLND, 814,IWATER ,SUPY , 5,1951 ,2001 ,yearly
IMPLND, 814,IWATER ,SURO , 5,1951 ,2001 ,yearly
IMPLND, 814,IWATER ,SURS , 5,1951 ,2001 ,yearly
IMPLND, 822,IWATER ,IMPEV, 5,1951 ,2001 ,yearly
IMPLND, 822,IWATER ,PET , 5,1951 ,2001 ,yearly
IMPLND, 822,IWATER ,RETS , 5,1951 ,2001 ,yearly
IMPLND, 822,IWATER ,SUPY , 5,1951 ,2001 ,yearly
IMPLND, 822,IWATER ,SURO , 5,1951 ,2001 ,yearly
IMPLND, 822,IWATER ,SURS , 5,1951 ,2001 ,yearly
IMPLND, 823,IWATER ,IMPEV, 5,1951 ,2001 ,yearly
IMPLND, 823,IWATER ,PET , 5,1951 ,2001 ,yearly
IMPLND, 823,IWATER ,RETS , 5,1951 ,2001 ,yearly
IMPLND, 823,IWATER ,SUPY , 5,1951 ,2001 ,yearly
IMPLND, 823,IWATER ,SURO , 5,1951 ,2001 ,yearly
IMPLND, 823,IWATER ,SURS , 5,1951 ,2001 ,yearly
IMPLND, 824,IWATER ,IMPEV, 5,1951 ,2001 ,yearly
IMPLND, 824,IWATER ,PET , 5,1951 ,2001 ,yearly
IMPLND, 824,IWATER ,RETS , 5,1951 ,2001 ,yearly
IMPLND, 824,IWATER ,SUPY , 5,1951 ,2001 ,yearly
IMPLND, 824,IWATER ,SURO , 5,1951 ,2001 ,yearly
IMPLND, 824,IWATER ,SURS , 5,1951 ,2001 ,yearly
IMPLND, 901,IWATER ,IMPEV, 5,1951 ,2001 ,yearly
IMPLND, 901,IWATER ,PET , 5,1951 ,2001 ,yearly
IMPLND, 901,IWATER ,RETS , 5,1951 ,2001 ,yearly
IMPLND, 901,IWATER ,SUPY , 5,1951 ,2001 ,yearly
IMPLND, 901,IWATER ,SURO , 5,1951 ,2001 ,yearly
IMPLND, 901,IWATER ,SURS , 5,1951 ,2001 ,yearly
IMPLND, 902,IWATER ,IMPEV, 5,1951 ,2001 ,yearly
IMPLND, 902,IWATER ,PET , 5,1951 ,2001 ,yearly
IMPLND, 902,IWATER ,RETS , 5,1951 ,2001 ,yearly
IMPLND, 902,IWATER ,SUPY , 5,1951 ,2001 ,yearly
IMPLND, 902,IWATER ,SURO , 5,1951 ,2001 ,yearly
IMPLND, 902,IWATER ,SURS , 5,1951 ,2001 ,yearly
IMPLND, 903,IWATER ,IMPEV, 5,1951 ,2001 ,yearly
IMPLND, 903,IWATER ,PET , 5,1951 ,2001 ,yearly
IMPLND, 903,IWATER ,RETS , 5,1951 ,2001 ,yearly
IMPLND, 903,IWATER ,SUPY , 5,1951 ,2001 ,yearly
IMPLND, 903,IWATER ,SURO , 5,1951 ,2001 ,yearly
IMPLND, 903,IWATER ,SURS , 5,1951 ,2001 ,yearly
IMPLND, 904,IWATER ,IMPEV, 5,1951 ,2001 ,yearly
IMPLND, 904,IWATER ,PET , 5,1951 ,2001 ,yearly
IMPLND, 904,IWATER ,RETS , 5,1951 ,2001 ,yearly
IMPLND, 904,IWATER ,SUPY , 5,1951 ,2001 ,yearly
IMPLND, 904,IWATER ,SURO , 5,1951 ,2001 ,yearly
IMPLND, 904,IWATER ,SURS , 5,1951 ,2001 ,yearly
PERLND, 11,PWATER ,AGWET, 5,1951 ,2001 ,yearly
PERLND, 11,PWATER ,AGWI , 5,1951 ,2001 ,yearly
PERLND, 11,PWATER ,AGWO , 5,1951 ,2001 ,yearly
PERLND, 11,PWATER ,AGWS , 5,1951 ,2001 ,yearly
PERLND, 11,PWATER ,BASET, 5,1951 ,2001 ,yearly
PERLND, 11,PWATER ,CEPE , 5,1951 ,2001 ,yearly
PERLND, 11,PWATER ,CEPS , 5,1951 ,2001 ,yearly
PERLND, 11,PWATER ,GWVS , 5,1951 ,2001 ,yearly
PERLND, 11,PWATER ,IFWI , 5,1951 ,2001 ,yearly
PERLND, 11,PWATER ,IFWO , 5,1951 ,2001 ,yearly
PERLND, 11,PWATER ,IFWS , 5,1951 ,2001 ,yearly
PERLND, 11,PWATER ,IGWI , 5,1951 ,2001 ,yearly
PERLND, 11,PWATER ,INFIL, 5,1951 ,2001 ,yearly
PERLND, 11,PWATER ,LZET , 5,1951 ,2001 ,yearly
PERLND, 11,PWATER ,LZI , 5,1951 ,2001 ,yearly
PERLND, 11,PWATER ,LZS , 5,1951 ,2001 ,yearly
PERLND, 11,PWATER ,PERC , 5,1951 ,2001 ,yearly
PERLND, 11,PWATER ,PERO , 5,1951 ,2001 ,yearly
PERLND, 11,PWATER ,PERS , 5,1951 ,2001 ,yearly
PERLND, 11,PWATER ,PET , 5,1951 ,2001 ,yearly
PERLND, 11,PWATER ,SUPY , 5,1951 ,2001 ,yearly
PERLND, 11,PWATER ,SURO , 5,1951 ,2001 ,yearly
PERLND, 11,PWATER ,SURS , 5,1951 ,2001 ,yearly
PERLND, 11,PWATER ,TAET , 5,1951 ,2001 ,yearly
PERLND, 11,PWATER ,UZET , 5,1951 ,2001 ,yearly
PERLND, 11,PWATER ,UZI , 5,1951 ,2001 ,yearly
PERLND, 11,PWATER ,UZS , 5,1951 ,2001 ,yearly
PERLND, 12,PWATER ,AGWET, 5,1951 ,2001 ,yearly
PERLND, 12,PWATER ,AGWI , 5,1951 ,2001 ,yearly
PERLND, 12,PWATER ,AGWO , 5,1951 ,2001 ,yearly
PERLND, 12,PWATER ,AGWS , 5,1951 ,2001 ,yearly
PERLND, 12,PWATER ,BASET, 5,1951 ,2001 ,yearly
PERLND, 12,PWATER ,CEPE , 5,1951 ,2001 ,yearly
PERLND, 12,PWATER ,CEPS , 5,1951 ,2001 ,yearly
PERLND, 12,PWATER ,GWVS , 5,1951 ,2001 ,yearly
PERLND, 12,PWATER ,IFWI , 5,1951 ,2001 ,yearly
PERLND, 12,PWATER ,IFWO , 5,1951 ,2001 ,yearly
PERLND, 12,PWATER ,IFWS , 5,1951 ,2001 ,yearly
PERLND, 12,PWATER ,IGWI , 5,1951 ,2001 ,yearly
PERLND, 12,PWATER ,INFIL, 5,1951 ,2001 ,yearly
PERLND, 12,PWATER ,LZET , 5,1951 ,2001 ,yearly
PERLND, 12,PWATER ,LZI , 5,1951 ,2001 ,yearly
PERLND, 12,PWATER ,LZS , 5,1951 ,2001 ,yearly
PERLND, 12,PWATER ,PERC , 5,1951 ,2001 ,yearly
PERLND, 12,PWATER ,PERO , 5,1951 ,2001 ,yearly
PERLND, 12,PWATER ,PERS , 5,1951 ,2001 ,yearly
PERLND, 12,PWATER ,PET , 5,1951 ,2001 ,yearly
PERLND, 12,PWATER ,SUPY , 5,1951 ,2001 ,yearly
PERLND, 12,PWATER ,SURO , 5,1951 ,2001 ,yearly
PERLND, 12,PWATER ,SURS , 5,1951 ,2001 ,yearly
PERLND, 12,PWATER ,TAET , 5,1951 ,2001 ,yearly
PERLND, 12,PWATER ,UZET , 5,1951 ,2001 ,yearly
PERLND, 12,PWATER ,UZI , 5,1951 ,2001 ,yearly
PERLND, 12,PWATER ,UZS , 5,1951 ,2001 ,yearly
PERLND, 13,PWATER ,AGWET, 5,1951 ,2001 ,yearly
PERLND, 13,PWATER ,AGWI , 5,1951 ,2001 ,yearly
PERLND, 13,PWATER ,AGWO , 5,1951 ,2001 ,yearly
PERLND, 13,PWATER ,AGWS , 5,1951 ,2001 ,yearly
PERLND, 13,PWATER ,BASET, 5,1951 ,2001 ,yearly
PERLND, 13,PWATER ,CEPE , 5,1951 ,2001 ,yearly
PERLND, 13,PWATER ,CEPS , 5,1951 ,2001 ,yearly
PERLND, 13,PWATER ,GWVS , 5,1951 ,2001 ,yearly
PERLND, 13,PWATER ,IFWI , 5,1951 ,2001 ,yearly
PERLND, 13,PWATER ,IFWO , 5,1951 ,2001 ,yearly
PERLND, 13,PWATER ,IFWS , 5,1951 ,2001 ,yearly
PERLND, 13,PWATER ,IGWI , 5,1951 ,2001 ,yearly
PERLND, 13,PWATER ,INFIL, 5,1951 ,2001 ,yearly
PERLND, 13,PWATER ,LZET , 5,1951 ,2001 ,yearly
PERLND, 13,PWATER ,LZI , 5,1951 ,2001 ,yearly
PERLND, 13,PWATER ,LZS , 5,1951 ,2001 ,yearly
PERLND, 13,PWATER ,PERC , 5,1951 ,2001 ,yearly
PERLND, 13,PWATER ,PERO , 5,1951 ,2001 ,yearly
PERLND, 13,PWATER ,PERS , 5,1951 ,2001 ,yearly
PERLND, 13,PWATER ,PET , 5,1951 ,2001 ,yearly
PERLND, 13,PWATER ,SUPY , 5,1951 ,2001 ,yearly
PERLND, 13,PWATER ,SURO , 5,1951 ,2001 ,yearly
PERLND, 13,PWATER ,SURS , 5,1951 ,2001 ,yearly
PERLND, 13,PWATER ,TAET , 5,1951 ,2001 ,yearly
PERLND, 13,PWATER ,UZET , 5,1951 ,2001 ,yearly
PERLND, 13,PWATER ,UZI , 5,1951 ,2001 ,yearly
PERLND, 13,PWATER ,UZS , 5,1951 ,2001 ,yearly
PERLND, 14,PWATER ,AGWET, 5,1951 ,2001 ,yearly
PERLND, 14,PWATER ,AGWI , 5,1951 ,2001 ,yearly
PERLND, 14,PWATER ,AGWO , 5,1951 ,2001 ,yearly
PERLND, 14,PWATER ,AGWS , 5,1951 ,2001 ,yearly
PERLND, 14,PWATER ,BASET, 5,1951 ,2001 ,yearly
PERLND, 14,PWATER ,CEPE , 5,1951 ,2001 ,yearly
PERLND, 14,PWATER ,CEPS , 5,1951 ,2001 ,yearly
PERLND, 14,PWATER ,GWVS , 5,1951 ,2001 ,yearly
PERLND, 14,PWATER ,IFWI , 5,1951 ,2001 ,yearly
PERLND, 14,PWATER ,IFWO , 5,1951 ,2001 ,yearly
PERLND, 14,PWATER ,IFWS , 5,1951 ,2001 ,yearly
PERLND, 14,PWATER ,IGWI , 5,1951 ,2001 ,yearly
PERLND, 14,PWATER ,INFIL, 5,1951 ,2001 ,yearly
PERLND, 14,PWATER ,LZET , 5,1951 ,2001 ,yearly
PERLND, 14,PWATER ,LZI , 5,1951 ,2001 ,yearly
PERLND, 14,PWATER ,LZS , 5,1951 ,2001 ,yearly
PERLND, 14,PWATER ,PERC , 5,1951 ,2001 ,yearly
PERLND, 14,PWATER ,PERO , 5,1951 ,2001 ,yearly
PERLND, 14,PWATER ,PERS , 5,1951 ,2001 ,yearly
PERLND, 14,PWATER ,PET , 5,1951 ,2001 ,yearly
PERLND, 14,PWATER ,SUPY , 5,1951 ,2001 ,yearly
PERLND, 14,PWATER ,SURO , 5,1951 ,2001 ,yearly
PERLND, 14,PWATER ,SURS , 5,1951 ,2001 ,yearly
PERLND, 14,PWATER ,TAET , 5,1951 ,2001 ,yearly
PERLND, 14,PWATER ,UZET , 5,1951 ,2001 ,yearly
PERLND, 14,PWATER ,UZI , 5,1951 ,2001 ,yearly
PERLND, 14,PWATER ,UZS , 5,1951 ,2001 ,yearly
PERLND, 15,PWATER ,AGWET, 5,1951 ,2001 ,yearly
PERLND, 15,PWATER ,AGWI , 5,1951 ,2001 ,yearly
PERLND, 15,PWATER ,AGWO , 5,1951 ,2001 ,yearly
PERLND, 15,PWATER ,AGWS , 5,1951 ,2001 ,yearly
PERLND, 15,PWATER ,BASET, 5,1951 ,2001 ,yearly
PERLND, 15,PWATER ,CEPE , 5,1951 ,2001 ,yearly
PERLND, 15,PWATER ,CEPS , 5,1951 ,2001 ,yearly
PERLND, 15,PWATER ,GWVS , 5,1951 ,2001 ,yearly
PERLND, 15,PWATER ,IFWI , 5,1951 ,2001 ,yearly
PERLND, 15,PWATER ,IFWO , 5,1951 ,2001 ,yearly
PERLND, 15,PWATER ,IFWS , 5,1951 ,2001 ,yearly
PERLND, 15,PWATER ,IGWI , 5,1951 ,2001 ,yearly
PERLND, 15,PWATER ,INFIL, 5,1951 ,2001 ,yearly
PERLND, 15,PWATER ,LZET , 5,1951 ,2001 ,yearly
PERLND, 15,PWATER ,LZI , 5,1951 ,2001 ,yearly
PERLND, 15,PWATER ,LZS , 5,1951 ,2001 ,yearly
PERLND, 15,PWATER ,PERC , 5,1951 ,2001 ,yearly
PERLND, 15,PWATER ,PERO , 5,1951 ,2001 ,yearly
PERLND, 15,PWATER ,PERS , 5,1951 ,2001 ,yearly
PERLND, 15,PWATER ,PET , 5,1951 ,2001 ,yearly
PERLND, 15,PWATER ,SUPY , 5,1951 ,2001 ,yearly
PERLND, 15,PWATER ,SURO , 5,1951 ,2001 ,yearly
PERLND, 15,PWATER ,SURS , 5,1951 ,2001 ,yearly
PERLND, 15,PWATER ,TAET , 5,1951 ,2001 ,yearly
PERLND, 15,PWATER ,UZET , 5,1951 ,2001 ,yearly
PERLND, 15,PWATER ,UZI , 5,1951 ,2001 ,yearly
PERLND, 15,PWATER ,UZS , 5,1951 ,2001 ,yearly
PERLND, 21,PWATER ,AGWET, 5,1951 ,2001 ,yearly
PERLND, 21,PWATER ,AGWI , 5,1951 ,2001 ,yearly
PERLND, 21,PWATER ,AGWO , 5,1951 ,2001 ,yearly
PERLND, 21,PWATER ,AGWS , 5,1951 ,2001 ,yearly
PERLND, 21,PWATER ,BASET, 5,1951 ,2001 ,yearly
PERLND, 21,PWATER ,CEPE , 5,1951 ,2001 ,yearly
PERLND, 21,PWATER ,CEPS , 5,1951 ,2001 ,yearly
PERLND, 21,PWATER ,GWVS , 5,1951 ,2001 ,yearly
PERLND, 21,PWATER ,IFWI , 5,1951 ,2001 ,yearly
PERLND, 21,PWATER ,IFWO , 5,1951 ,2001 ,yearly
PERLND, 21,PWATER ,IFWS , 5,1951 ,2001 ,yearly
PERLND, 21,PWATER ,IGWI , 5,1951 ,2001 ,yearly
PERLND, 21,PWATER ,INFIL, 5,1951 ,2001 ,yearly
PERLND, 21,PWATER ,LZET , 5,1951 ,2001 ,yearly
PERLND, 21,PWATER ,LZI , 5,1951 ,2001 ,yearly
PERLND, 21,PWATER ,LZS , 5,1951 ,2001 ,yearly
PERLND, 21,PWATER ,PERC , 5,1951 ,2001 ,yearly
PERLND, 21,PWATER ,PERO , 5,1951 ,2001 ,yearly
PERLND, 21,PWATER ,PERS , 5,1951 ,2001 ,yearly
PERLND, 21,PWATER ,PET , 5,1951 ,2001 ,yearly
PERLND, 21,PWATER ,SUPY , 5,1951 ,2001 ,yearly
PERLND, 21,PWATER ,SURO , 5,1951 ,2001 ,yearly
PERLND, 21,PWATER ,SURS , 5,1951 ,2001 ,yearly
PERLND, 21,PWATER ,TAET , 5,1951 ,2001 ,yearly
PERLND, 21,PWATER ,UZET , 5,1951 ,2001 ,yearly
PERLND, 21,PWATER ,UZI , 5,1951 ,2001 ,yearly
PERLND, 21,PWATER ,UZS , 5,1951 ,2001 ,yearly
PERLND, 22,PWATER ,AGWET, 5,1951 ,2001 ,yearly
PERLND, 22,PWATER ,AGWI , 5,1951 ,2001 ,yearly
PERLND, 22,PWATER ,AGWO , 5,1951 ,2001 ,yearly
PERLND, 22,PWATER ,AGWS , 5,1951 ,2001 ,yearly
PERLND, 22,PWATER ,BASET, 5,1951 ,2001 ,yearly
PERLND, 22,PWATER ,CEPE , 5,1951 ,2001 ,yearly
PERLND, 22,PWATER ,CEPS , 5,1951 ,2001 ,yearly
PERLND, 22,PWATER ,GWVS , 5,1951 ,2001 ,yearly
PERLND, 22,PWATER ,IFWI , 5,1951 ,2001 ,yearly
PERLND, 22,PWATER ,IFWO , 5,1951 ,2001 ,yearly
PERLND, 22,PWATER ,IFWS , 5,1951 ,2001 ,yearly
PERLND, 22,PWATER ,IGWI , 5,1951 ,2001 ,yearly
PERLND, 22,PWATER ,INFIL, 5,1951 ,2001 ,yearly
PERLND, 22,PWATER ,LZET , 5,1951 ,2001 ,yearly
PERLND, 22,PWATER ,LZI , 5,1951 ,2001 ,yearly
PERLND, 22,PWATER ,LZS , 5,1951 ,2001 ,yearly
PERLND, 22,PWATER ,PERC , 5,1951 ,2001 ,yearly
PERLND, 22,PWATER ,PERO , 5,1951 ,2001 ,yearly
PERLND, 22,PWATER ,PERS , 5,1951 ,2001 ,yearly
PERLND, 22,PWATER ,PET , 5,1951 ,2001 ,yearly
PERLND, 22,PWATER ,SUPY , 5,1951 ,2001 ,yearly
PERLND, 22,PWATER ,SURO , 5,1951 ,2001 ,yearly
PERLND, 22,PWATER ,SURS , 5,1951 ,2001 ,yearly
PERLND, 22,PWATER ,TAET , 5,1951 ,2001 ,yearly
PERLND, 22,PWATER ,UZET , 5,1951 ,2001 ,yearly
PERLND, 22,PWATER ,UZI , 5,1951 ,2001 ,yearly
PERLND, 22,PWATER ,UZS , 5,1951 ,2001 ,yearly
PERLND, 23,PWATER ,AGWET, 5,1951 ,2001 ,yearly
PERLND, 23,PWATER ,AGWI , 5,1951 ,2001 ,yearly
PERLND, 23,PWATER ,AGWO , 5,1951 ,2001 ,yearly
PERLND, 23,PWATER ,AGWS , 5,1951 ,2001 ,yearly
PERLND, 23,PWATER ,BASET, 5,1951 ,2001 ,yearly
PERLND, 23,PWATER ,CEPE , 5,1951 ,2001 ,yearly
PERLND, 23,PWATER ,CEPS , 5,1951 ,2001 ,yearly
PERLND, 23,PWATER ,GWVS , 5,1951 ,2001 ,yearly
PERLND, 23,PWATER ,IFWI , 5,1951 ,2001 ,yearly
PERLND, 23,PWATER ,IFWO , 5,1951 ,2001 ,yearly
PERLND, 23,PWATER ,IFWS , 5,1951 ,2001 ,yearly
PERLND, 23,PWATER ,IGWI , 5,1951 ,2001 ,yearly
PERLND, 23,PWATER ,INFIL, 5,1951 ,2001 ,yearly
PERLND, 23,PWATER ,LZET , 5,1951 ,2001 ,yearly
PERLND, 23,PWATER ,LZI , 5,1951 ,2001 ,yearly
PERLND, 23,PWATER ,LZS , 5,1951 ,2001 ,yearly
PERLND, 23,PWATER ,PERC , 5,1951 ,2001 ,yearly
PERLND, 23,PWATER ,PERO , 5,1951 ,2001 ,yearly
PERLND, 23,PWATER ,PERS , 5,1951 ,2001 ,yearly
PERLND, 23,PWATER ,PET , 5,1951 ,2001 ,yearly
PERLND, 23,PWATER ,SUPY , 5,1951 ,2001 ,yearly
PERLND, 23,PWATER ,SURO , 5,1951 ,2001 ,yearly
PERLND, 23,PWATER ,SURS , 5,1951 ,2001 ,yearly
PERLND, 23,PWATER ,TAET , 5,1951 ,2001 ,yearly
PERLND, 23,PWATER ,UZET , 5,1951 ,2001 ,yearly
PERLND, 23,PWATER ,UZI , 5,1951 ,2001 ,yearly
PERLND, 23,PWATER ,UZS , 5,1951 ,2001 ,yearly
PERLND, 24,PWATER ,AGWET, 5,1951 ,2001 ,yearly
PERLND, 24,PWATER ,AGWI , 5,1951 ,2001 ,yearly
PERLND, 24,PWATER ,AGWO , 5,1951 ,2001 ,yearly
PERLND, 24,PWATER ,AGWS , 5,1951 ,2001 ,yearly
PERLND, 24,PWATER ,BASET, 5,1951 ,2001 ,yearly
PERLND, 24,PWATER ,CEPE , 5,1951 ,2001 ,yearly
PERLND, 24,PWATER ,CEPS , 5,1951 ,2001 ,yearly
PERLND, 24,PWATER ,GWVS , 5,1951 ,2001 ,yearly
PERLND, 24,PWATER ,IFWI , 5,1951 ,2001 ,yearly
PERLND, 24,PWATER ,IFWO , 5,1951 ,2001 ,yearly
PERLND, 24,PWATER ,IFWS , 5,1951 ,2001 ,yearly
PERLND, 24,PWATER ,IGWI , 5,1951 ,2001 ,yearly
PERLND, 24,PWATER ,INFIL, 5,1951 ,2001 ,yearly
PERLND, 24,PWATER ,LZET , 5,1951 ,2001 ,yearly
PERLND, 24,PWATER ,LZI , 5,1951 ,2001 ,yearly
PERLND, 24,PWATER ,LZS , 5,1951 ,2001 ,yearly
PERLND, 24,PWATER ,PERC , 5,1951 ,2001 ,yearly
PERLND, 24,PWATER ,PERO , 5,1951 ,2001 ,yearly
PERLND, 24,PWATER ,PERS , 5,1951 ,2001 ,yearly
PERLND, 24,PWATER ,PET , 5,1951 ,2001 ,yearly
PERLND, 24,PWATER ,SUPY , 5,1951 ,2001 ,yearly
PERLND, 24,PWATER ,SURO , 5,1951 ,2001 ,yearly
PERLND, 24,PWATER ,SURS , 5,1951 ,2001 ,yearly
PERLND, 24,PWATER ,TAET , 5,1951 ,2001 ,yearly
PERLND, 24,PWATER ,UZET , 5,1951 ,2001 ,yearly
PERLND, 24,PWATER ,UZI , 5,1951 ,2001 ,yearly
PERLND, 24,PWATER ,UZS , 5,1951 ,2001 ,yearly
PERLND, 25,PWATER ,AGWET, 5,1951 ,2001 ,yearly
PERLND, 25,PWATER ,AGWI , 5,1951 ,2001 ,yearly
PERLND, 25,PWATER ,AGWO , 5,1951 ,2001 ,yearly
PERLND, 25,PWATER ,AGWS , 5,1951 ,2001 ,yearly
PERLND, 25,PWATER ,BASET, 5,1951 ,2001 ,yearly
PERLND, 25,PWATER ,CEPE , 5,1951 ,2001 ,yearly
PERLND, 25,PWATER ,CEPS , 5,1951 ,2001 ,yearly
PERLND, 25,PWATER ,GWVS , 5,1951 ,2001 ,yearly
PERLND, 25,PWATER ,IFWI , 5,1951 ,2001 ,yearly
PERLND, 25,PWATER ,IFWO , 5,1951 ,2001 ,yearly
PERLND, 25,PWATER ,IFWS , 5,1951 ,2001 ,yearly
PERLND, 25,PWATER ,IGWI , 5,1951 ,2001 ,yearly
PERLND, 25,PWATER ,INFIL, 5,1951 ,2001 ,yearly
PERLND, 25,PWATER ,LZET , 5,1951 ,2001 ,yearly
PERLND, 25,PWATER ,LZI , 5,1951 ,2001 ,yearly
PERLND, 25,PWATER ,LZS , 5,1951 ,2001 ,yearly
PERLND, 25,PWATER ,PERC , 5,1951 ,2001 ,yearly
PERLND, 25,PWATER ,PERO , 5,1951 ,2001 ,yearly
PERLND, 25,PWATER ,PERS , 5,1951 ,2001 ,yearly
PERLND, 25,PWATER ,PET , 5,1951 ,2001 ,yearly
PERLND, 25,PWATER ,SUPY , 5,1951 ,2001 ,yearly
PERLND, 25,PWATER ,SURO , 5,1951 ,2001 ,yearly
PERLND, 25,PWATER ,SURS , 5,1951 ,2001 ,yearly
PERLND, 25,PWATER ,TAET , 5,1951 ,2001 ,yearly
PERLND, 25,PWATER ,UZET , 5,1951 ,2001 ,yearly
PERLND, 25,PWATER ,UZI , 5,1951 ,2001 ,yearly
PERLND, 25,PWATER ,UZS , 5,1951 ,2001 ,yearly
PERLND, 31,PWATER ,AGWET, 5,1951 ,2001 ,yearly
PERLND, 31,PWATER ,AGWI , 5,1951 ,2001 ,yearly
PERLND, 31,PWATER ,AGWO , 5,1951 ,2001 ,yearly
PERLND, 31,PWATER ,AGWS , 5,1951 ,2001 ,yearly
PERLND, 31,PWATER ,BASET, 5,1951 ,2001 ,yearly
PERLND, 31,PWATER ,CEPE , 5,1951 ,2001 ,yearly
PERLND, 31,PWATER ,CEPS , 5,1951 ,2001 ,yearly
PERLND, 31,PWATER ,GWVS , 5,1951 ,2001 ,yearly
PERLND, 31,PWATER ,IFWI , 5,1951 ,2001 ,yearly
PERLND, 31,PWATER ,IFWO , 5,1951 ,2001 ,yearly
PERLND, 31,PWATER ,IFWS , 5,1951 ,2001 ,yearly
PERLND, 31,PWATER ,IGWI , 5,1951 ,2001 ,yearly
PERLND, 31,PWATER ,INFIL, 5,1951 ,2001 ,yearly
PERLND, 31,PWATER ,LZET , 5,1951 ,2001 ,yearly
PERLND, 31,PWATER ,LZI , 5,1951 ,2001 ,yearly
PERLND, 31,PWATER ,LZS , 5,1951 ,2001 ,yearly
PERLND, 31,PWATER ,PERC , 5,1951 ,2001 ,yearly
PERLND, 31,PWATER ,PERO , 5,1951 ,2001 ,yearly
PERLND, 31,PWATER ,PERS , 5,1951 ,2001 ,yearly
PERLND, 31,PWATER ,PET , 5,1951 ,2001 ,yearly
PERLND, 31,PWATER ,SUPY , 5,1951 ,2001 ,yearly
PERLND, 31,PWATER ,SURO , 5,1951 ,2001 ,yearly
PERLND, 31,PWATER ,SURS , 5,1951 ,2001 ,yearly
PERLND, 31,PWATER ,TAET , 5,1951 ,2001 ,yearly
PERLND, 31,PWATER ,UZET , 5,1951 ,2001 ,yearly
PERLND, 31,PWATER ,UZI , 5,1951 ,2001 ,yearly
PERLND, 31,PWATER ,UZS , 5,1951 ,2001 ,yearly
PERLND, 32,PWATER ,AGWET, 5,1951 ,2001 ,yearly
PERLND, 32,PWATER ,AGWI , 5,1951 ,2001 ,yearly
PERLND, 32,PWATER ,AGWO , 5,1951 ,2001 ,yearly
PERLND, 32,PWATER ,AGWS , 5,1951 ,2001 ,yearly
PERLND, 32,PWATER ,BASET, 5,1951 ,2001 ,yearly
PERLND, 32,PWATER ,CEPE , 5,1951 ,2001 ,yearly
PERLND, 32,PWATER ,CEPS , 5,1951 ,2001 ,yearly
PERLND, 32,PWATER ,GWVS , 5,1951 ,2001 ,yearly
PERLND, 32,PWATER ,IFWI , 5,1951 ,2001 ,yearly
PERLND, 32,PWATER ,IFWO , 5,1951 ,2001 ,yearly
PERLND, 32,PWATER ,IFWS , 5,1951 ,2001 ,yearly
PERLND, 32,PWATER ,IGWI , 5,1951 ,2001 ,yearly
PERLND, 32,PWATER ,INFIL, 5,1951 ,2001 ,yearly
PERLND, 32,PWATER ,LZET , 5,1951 ,2001 ,yearly
PERLND, 32,PWATER ,LZI , 5,1951 ,2001 ,yearly
PERLND, 32,PWATER ,LZS , 5,1951 ,2001 ,yearly
PERLND, 32,PWATER ,PERC , 5,1951 ,2001 ,yearly
PERLND, 32,PWATER ,PERO , 5,1951 ,2001 ,yearly
PERLND, 32,PWATER ,PERS , 5,1951 ,2001 ,yearly
PERLND, 32,PWATER ,PET , 5,1951 ,2001 ,yearly
PERLND, 32,PWATER ,SUPY , 5,1951 ,2001 ,yearly
PERLND, 32,PWATER ,SURO , 5,1951 ,2001 ,yearly
PERLND, 32,PWATER ,SURS , 5,1951 ,2001 ,yearly
PERLND, 32,PWATER ,TAET , 5,1951 ,2001 ,yearly
PERLND, 32,PWATER ,UZET , 5,1951 ,2001 ,yearly
PERLND, 32,PWATER ,UZI , 5,1951 ,2001 ,yearly
PERLND, 32,PWATER ,UZS , 5,1951 ,2001 ,yearly
PERLND, 33,PWATER ,AGWET, 5,1951 ,2001 ,yearly
PERLND, 33,PWATER ,AGWI , 5,1951 ,2001 ,yearly
PERLND, 33,PWATER ,AGWO , 5,1951 ,2001 ,yearly
PERLND, 33,PWATER ,AGWS , 5,1951 ,2001 ,yearly
PERLND, 33,PWATER ,BASET, 5,1951 ,2001 ,yearly
PERLND, 33,PWATER ,CEPE , 5,1951 ,2001 ,yearly
PERLND, 33,PWATER ,CEPS , 5,1951 ,2001 ,yearly
PERLND, 33,PWATER ,GWVS , 5,1951 ,2001 ,yearly
PERLND, 33,PWATER ,IFWI , 5,1951 ,2001 ,yearly
PERLND, 33,PWATER ,IFWO , 5,1951 ,2001 ,yearly
PERLND, 33,PWATER ,IFWS , 5,1951 ,2001 ,yearly
PERLND, 33,PWATER ,IGWI , 5,1951 ,2001 ,yearly
PERLND, 33,PWATER ,INFIL, 5,1951 ,2001 ,yearly
PERLND, 33,PWATER ,LZET , 5,1951 ,2001 ,yearly
PERLND, 33,PWATER ,LZI , 5,1951 ,2001 ,yearly
PERLND, 33,PWATER ,LZS , 5,1951 ,2001 ,yearly
PERLND, 33,PWATER ,PERC , 5,1951 ,2001 ,yearly
PERLND, 33,PWATER ,PERO , 5,1951 ,2001 ,yearly
PERLND, 33,PWATER ,PERS , 5,1951 ,2001 ,yearly
PERLND, 33,PWATER ,PET , 5,1951 ,2001 ,yearly
PERLND, 33,PWATER ,SUPY , 5,1951 ,2001 ,yearly
PERLND, 33,PWATER ,SURO , 5,1951 ,2001 ,yearly
PERLND, 33,PWATER ,SURS , 5,1951 ,2001 ,yearly
PERLND, 33,PWATER ,TAET , 5,1951 ,2001 ,yearly
PERLND, 33,PWATER ,UZET , 5,1951 ,2001 ,yearly
PERLND, 33,PWATER ,UZI , 5,1951 ,2001 ,yearly
PERLND, 33,PWATER ,UZS , 5,1951 ,2001 ,yearly
PERLND, 35,PWATER ,AGWET, 5,1951 ,2001 ,yearly
PERLND, 35,PWATER ,AGWI , 5,1951 ,2001 ,yearly
PERLND, 35,PWATER ,AGWO , 5,1951 ,2001 ,yearly
PERLND, 35,PWATER ,AGWS , 5,1951 ,2001 ,yearly
PERLND, 35,PWATER ,BASET, 5,1951 ,2001 ,yearly
PERLND, 35,PWATER ,CEPE , 5,1951 ,2001 ,yearly
PERLND, 35,PWATER ,CEPS , 5,1951 ,2001 ,yearly
PERLND, 35,PWATER ,GWVS , 5,1951 ,2001 ,yearly
PERLND, 35,PWATER ,IFWI , 5,1951 ,2001 ,yearly
PERLND, 35,PWATER ,IFWO , 5,1951 ,2001 ,yearly
PERLND, 35,PWATER ,IFWS , 5,1951 ,2001 ,yearly
PERLND, 35,PWATER ,IGWI , 5,1951 ,2001 ,yearly
PERLND, 35,PWATER ,INFIL, 5,1951 ,2001 ,yearly
PERLND, 35,PWATER ,LZET , 5,1951 ,2001 ,yearly
PERLND, 35,PWATER ,LZI , 5,1951 ,2001 ,yearly
PERLND, 35,PWATER ,LZS , 5,1951 ,2001 ,yearly
PERLND, 35,PWATER ,PERC , 5,1951 ,2001 ,yearly
PERLND, 35,PWATER ,PERO , 5,1951 ,2001 ,yearly
PERLND, 35,PWATER ,PERS , 5,1951 ,2001 ,yearly
PERLND, 35,PWATER ,PET , 5,1951 ,2001 ,yearly
PERLND, 35,PWATER ,SUPY , 5,1951 ,2001 ,yearly
PERLND, 35,PWATER ,SURO , 5,1951 ,2001 ,yearly
PERLND, 35,PWATER ,SURS , 5,1951 ,2001 ,yearly
PERLND, 35,PWATER ,TAET , 5,1951 ,2001 ,yearly
PERLND, 35,PWATER ,UZET , 5,1951 ,2001 ,yearly
PERLND, 35,PWATER ,UZI , 5,1951 ,2001 ,yearly
PERLND, 35,PWATER ,UZS , 5,1951 ,2001 ,yearly
PERLND, 111,PWATER ,AGWET, 5,1951 ,2001 ,yearly
PERLND, 111,PWATER ,AGWI , 5,1951 ,2001 ,yearly
PERLND, 111,PWATER ,AGWO , 5,1951 ,2001 ,yearly
PERLND, 111,PWATER ,AGWS , 5,1951 ,2001 ,yearly
PERLND, 111,PWATER ,BASET, 5,1951 ,2001 ,yearly
PERLND, 111,PWATER ,CEPE , 5,1951 ,2001 ,yearly
PERLND, 111,PWATER ,CEPS , 5,1951 ,2001 ,yearly
PERLND, 111,PWATER ,GWVS , 5,1951 ,2001 ,yearly
PERLND, 111,PWATER ,IFWI , 5,1951 ,2001 ,yearly
PERLND, 111,PWATER ,IFWO , 5,1951 ,2001 ,yearly
PERLND, 111,PWATER ,IFWS , 5,1951 ,2001 ,yearly
PERLND, 111,PWATER ,IGWI , 5,1951 ,2001 ,yearly
PERLND, 111,PWATER ,INFIL, 5,1951 ,2001 ,yearly
PERLND, 111,PWATER ,LZET , 5,1951 ,2001 ,yearly
PERLND, 111,PWATER ,LZI , 5,1951 ,2001 ,yearly
PERLND, 111,PWATER ,LZS , 5,1951 ,2001 ,yearly
PERLND, 111,PWATER ,PERC , 5,1951 ,2001 ,yearly
PERLND, 111,PWATER ,PERO , 5,1951 ,2001 ,yearly
PERLND, 111,PWATER ,PERS , 5,1951 ,2001 ,yearly
PERLND, 111,PWATER ,PET , 5,1951 ,2001 ,yearly
PERLND, 111,PWATER ,SUPY , 5,1951 ,2001 ,yearly
PERLND, 111,PWATER ,SURO , 5,1951 ,2001 ,yearly
PERLND, 111,PWATER ,SURS , 5,1951 ,2001 ,yearly
PERLND, 111,PWATER ,TAET , 5,1951 ,2001 ,yearly
PERLND, 111,PWATER ,UZET , 5,1951 ,2001 ,yearly
PERLND, 111,PWATER ,UZI , 5,1951 ,2001 ,yearly
PERLND, 111,PWATER ,UZS , 5,1951 ,2001 ,yearly
PERLND, 112,PWATER ,AGWET, 5,1951 ,2001 ,yearly
PERLND, 112,PWATER ,AGWI , 5,1951 ,2001 ,yearly
PERLND, 112,PWATER ,AGWO , 5,1951 ,2001 ,yearly
PERLND, 112,PWATER ,AGWS , 5,1951 ,2001 ,yearly
PERLND, 112,PWATER ,BASET, 5,1951 ,2001 ,yearly
PERLND, 112,PWATER ,CEPE , 5,1951 ,2001 ,yearly
PERLND, 112,PWATER ,CEPS , 5,1951 ,2001 ,yearly
PERLND, 112,PWATER ,GWVS , 5,1951 ,2001 ,yearly
PERLND, 112,PWATER ,IFWI , 5,1951 ,2001 ,yearly
PERLND, 112,PWATER ,IFWO , 5,1951 ,2001 ,yearly
PERLND, 112,PWATER ,IFWS , 5,1951 ,2001 ,yearly
PERLND, 112,PWATER ,IGWI , 5,1951 ,2001 ,yearly
PERLND, 112,PWATER ,INFIL, 5,1951 ,2001 ,yearly
PERLND, 112,PWATER ,LZET , 5,1951 ,2001 ,yearly
PERLND, 112,PWATER ,LZI , 5,1951 ,2001 ,yearly
PERLND, 112,PWATER ,LZS , 5,1951 ,2001 ,yearly
PERLND, 112,PWATER ,PERC , 5,1951 ,2001 ,yearly
PERLND, 112,PWATER ,PERO , 5,1951 ,2001 ,yearly
PERLND, 112,PWATER ,PERS , 5,1951 ,2001 ,yearly
PERLND, 112,PWATER ,PET , 5,1951 ,2001 ,yearly
PERLND, 112,PWATER ,SUPY , 5,1951 ,2001 ,yearly
PERLND, 112,PWATER ,SURO , 5,1951 ,2001 ,yearly
PERLND, 112,PWATER ,SURS , 5,1951 ,2001 ,yearly
PERLND, 112,PWATER ,TAET , 5,1951 ,2001 ,yearly
PERLND, 112,PWATER ,UZET , 5,1951 ,2001 ,yearly
PERLND, 112,PWATER ,UZI , 5,1951 ,2001 ,yearly
PERLND, 112,PWATER ,UZS , 5,1951 ,2001 ,yearly
PERLND, 113,PWATER ,AGWET, 5,1951 ,2001 ,yearly
PERLND, 113,PWATER ,AGWI , 5,1951 ,2001 ,yearly
PERLND, 113,PWATER ,AGWO , 5,1951 ,2001 ,yearly
PERLND, 113,PWATER ,AGWS , 5,1951 ,2001 ,yearly
PERLND, 113,PWATER ,BASET, 5,1951 ,2001 ,yearly
PERLND, 113,PWATER ,CEPE , 5,1951 ,2001 ,yearly
PERLND, 113,PWATER ,CEPS , 5,1951 ,2001 ,yearly
PERLND, 113,PWATER ,GWVS , 5,1951 ,2001 ,yearly
PERLND, 113,PWATER ,IFWI , 5,1951 ,2001 ,yearly
PERLND, 113,PWATER ,IFWO , 5,1951 ,2001 ,yearly
PERLND, 113,PWATER ,IFWS , 5,1951 ,2001 ,yearly
PERLND, 113,PWATER ,IGWI , 5,1951 ,2001 ,yearly
PERLND, 113,PWATER ,INFIL, 5,1951 ,2001 ,yearly
PERLND, 113,PWATER ,LZET , 5,1951 ,2001 ,yearly
PERLND, 113,PWATER ,LZI , 5,1951 ,2001 ,yearly
PERLND, 113,PWATER ,LZS , 5,1951 ,2001 ,yearly
PERLND, 113,PWATER ,PERC , 5,1951 ,2001 ,yearly
PERLND, 113,PWATER ,PERO , 5,1951 ,2001 ,yearly
PERLND, 113,PWATER ,PERS , 5,1951 ,2001 ,yearly
PERLND, 113,PWATER ,PET , 5,1951 ,2001 ,yearly
PERLND, 113,PWATER ,SUPY , 5,1951 ,2001 ,yearly
PERLND, 113,PWATER ,SURO , 5,1951 ,2001 ,yearly
PERLND, 113,PWATER ,SURS , 5,1951 ,2001 ,yearly
PERLND, 113,PWATER ,TAET , 5,1951 ,2001 ,yearly
PERLND, 113,PWATER ,UZET , 5,1951 ,2001 ,yearly
PERLND, 113,PWATER ,UZI , 5,1951 ,2001 ,yearly
PERLND, 113,PWATER ,UZS , 5,1951 ,2001 ,yearly
PERLND, 114,PWATER ,AGWET, 5,1951 ,2001 ,yearly
PERLND, 114,PWATER ,AGWI , 5,1951 ,2001 ,yearly
PERLND, 114,PWATER ,AGWO , 5,1951 ,2001 ,yearly
PERLND, 114,PWATER ,AGWS , 5,1951 ,2001 ,yearly
PERLND, 114,PWATER ,BASET, 5,1951 ,2001 ,yearly
PERLND, 114,PWATER ,CEPE , 5,1951 ,2001 ,yearly
PERLND, 114,PWATER ,CEPS , 5,1951 ,2001 ,yearly
PERLND, 114,PWATER ,GWVS , 5,1951 ,2001 ,yearly
PERLND, 114,PWATER ,IFWI , 5,1951 ,2001 ,yearly
PERLND, 114,PWATER ,IFWO , 5,1951 ,2001 ,yearly
PERLND, 114,PWATER ,IFWS , 5,1951 ,2001 ,yearly
PERLND, 114,PWATER ,IGWI , 5,1951 ,2001 ,yearly
PERLND, 114,PWATER ,INFIL, 5,1951 ,2001 ,yearly
PERLND, 114,PWATER ,LZET , 5,1951 ,2001 ,yearly
PERLND, 114,PWATER ,LZI , 5,1951 ,2001 ,yearly
PERLND, 114,PWATER ,LZS , 5,1951 ,2001 ,yearly
PERLND, 114,PWATER ,PERC , 5,1951 ,2001 ,yearly
PERLND, 114,PWATER ,PERO , 5,1951 ,2001 ,yearly
PERLND, 114,PWATER ,PERS , 5,1951 ,2001 ,yearly
PERLND, 114,PWATER ,PET , 5,1951 ,2001 ,yearly
PERLND, 114,PWATER ,SUPY , 5,1951 ,2001 ,yearly
PERLND, 114,PWATER ,SURO , 5,1951 ,2001 ,yearly
PERLND, 114,PWATER ,SURS , 5,1951 ,2001 ,yearly
PERLND, 114,PWATER ,TAET , 5,1951 ,2001 ,yearly
PERLND, 114,PWATER ,UZET , 5,1951 ,2001 ,yearly
PERLND, 114,PWATER ,UZI , 5,1951 ,2001 ,yearly
PERLND, 114,PWATER ,UZS , 5,1951 ,2001 ,yearly
PERLND, 115,PWATER ,AGWET, 5,1951 ,2001 ,yearly
PERLND, 115,PWATER ,AGWI , 5,1951 ,2001 ,yearly
PERLND, 115,PWATER ,AGWO , 5,1951 ,2001 ,yearly
PERLND, 115,PWATER ,AGWS , 5,1951 ,2001 ,yearly
PERLND, 115,PWATER ,BASET, 5,1951 ,2001 ,yearly
PERLND, 115,PWATER ,CEPE , 5,1951 ,2001 ,yearly
PERLND, 115,PWATER ,CEPS , 5,1951 ,2001 ,yearly
PERLND, 115,PWATER ,GWVS , 5,1951 ,2001 ,yearly
PERLND, 115,PWATER ,IFWI , 5,1951 ,2001 ,yearly
PERLND, 115,PWATER ,IFWO , 5,1951 ,2001 ,yearly
PERLND, 115,PWATER ,IFWS , 5,1951 ,2001 ,yearly
PERLND, 115,PWATER ,IGWI , 5,1951 ,2001 ,yearly
PERLND, 115,PWATER ,INFIL, 5,1951 ,2001 ,yearly
PERLND, 115,PWATER ,LZET , 5,1951 ,2001 ,yearly
PERLND, 115,PWATER ,LZI , 5,1951 ,2001 ,yearly
PERLND, 115,PWATER ,LZS , 5,1951 ,2001 ,yearly
PERLND, 115,PWATER ,PERC , 5,1951 ,2001 ,yearly
PERLND, 115,PWATER ,PERO , 5,1951 ,2001 ,yearly
PERLND, 115,PWATER ,PERS , 5,1951 ,2001 ,yearly
PERLND, 115,PWATER ,PET , 5,1951 ,2001 ,yearly
PERLND, 115,PWATER ,SUPY , 5,1951 ,2001 ,yearly
PERLND, 115,PWATER ,SURO , 5,1951 ,2001 ,yearly
PERLND, 115,PWATER ,SURS , 5,1951 ,2001 ,yearly
PERLND, 115,PWATER ,TAET , 5,1951 ,2001 ,yearly
PERLND, 115,PWATER ,UZET , 5,1951 ,2001 ,yearly
PERLND, 115,PWATER ,UZI , 5,1951 ,2001 ,yearly
PERLND, 115,PWATER ,UZS , 5,1951 ,2001 ,yearly
PERLND, 211,PWATER ,AGWET, 5,1951 ,2001 ,yearly
PERLND, 211,PWATER ,AGWI , 5,1951 ,2001 ,yearly
PERLND, 211,PWATER ,AGWO , 5,1951 ,2001 ,yearly
PERLND, 211,PWATER ,AGWS , 5,1951 ,2001 ,yearly
PERLND, 211,PWATER ,BASET, 5,1951 ,2001 ,yearly
PERLND, 211,PWATER ,CEPE , 5,1951 ,2001 ,yearly
PERLND, 211,PWATER ,CEPS , 5,1951 ,2001 ,yearly
PERLND, 211,PWATER ,GWVS , 5,1951 ,2001 ,yearly
PERLND, 211,PWATER ,IFWI , 5,1951 ,2001 ,yearly
PERLND, 211,PWATER ,IFWO , 5,1951 ,2001 ,yearly
PERLND, 211,PWATER ,IFWS , 5,1951 ,2001 ,yearly
PERLND, 211,PWATER ,IGWI , 5,1951 ,2001 ,yearly
PERLND, 211,PWATER ,INFIL, 5,1951 ,2001 ,yearly
PERLND, 211,PWATER ,LZET , 5,1951 ,2001 ,yearly
PERLND, 211,PWATER ,LZI , 5,1951 ,2001 ,yearly
PERLND, 211,PWATER ,LZS , 5,1951 ,2001 ,yearly
PERLND, 211,PWATER ,PERC , 5,1951 ,2001 ,yearly
PERLND, 211,PWATER ,PERO , 5,1951 ,2001 ,yearly
PERLND, 211,PWATER ,PERS , 5,1951 ,2001 ,yearly
PERLND, 211,PWATER ,PET , 5,1951 ,2001 ,yearly
PERLND, 211,PWATER ,SUPY , 5,1951 ,2001 ,yearly
PERLND, 211,PWATER ,SURO , 5,1951 ,2001 ,yearly
PERLND, 211,PWATER ,SURS , 5,1951 ,2001 ,yearly
PERLND, 211,PWATER ,TAET , 5,1951 ,2001 ,yearly
PERLND, 211,PWATER ,UZET , 5,1951 ,2001 ,yearly
PERLND, 211,PWATER ,UZI , 5,1951 ,2001 ,yearly
PERLND, 211,PWATER ,UZS , 5,1951 ,2001 ,yearly
PERLND, 212,PWATER ,AGWET, 5,1951 ,2001 ,yearly
PERLND, 212,PWATER ,AGWI , 5,1951 ,2001 ,yearly
PERLND, 212,PWATER ,AGWO , 5,1951 ,2001 ,yearly
PERLND, 212,PWATER ,AGWS , 5,1951 ,2001 ,yearly
PERLND, 212,PWATER ,BASET, 5,1951 ,2001 ,yearly
PERLND, 212,PWATER ,CEPE , 5,1951 ,2001 ,yearly
PERLND, 212,PWATER ,CEPS , 5,1951 ,2001 ,yearly
PERLND, 212,PWATER ,GWVS , 5,1951 ,2001 ,yearly
PERLND, 212,PWATER ,IFWI , 5,1951 ,2001 ,yearly
PERLND, 212,PWATER ,IFWO , 5,1951 ,2001 ,yearly
PERLND, 212,PWATER ,IFWS , 5,1951 ,2001 ,yearly
PERLND, 212,PWATER ,IGWI , 5,1951 ,2001 ,yearly
PERLND, 212,PWATER ,INFIL, 5,1951 ,2001 ,yearly
PERLND, 212,PWATER ,LZET , 5,1951 ,2001 ,yearly
PERLND, 212,PWATER ,LZI , 5,1951 ,2001 ,yearly
PERLND, 212,PWATER ,LZS , 5,1951 ,2001 ,yearly
PERLND, 212,PWATER ,PERC , 5,1951 ,2001 ,yearly
PERLND, 212,PWATER ,PERO , 5,1951 ,2001 ,yearly
PERLND, 212,PWATER ,PERS , 5,1951 ,2001 ,yearly
PERLND, 212,PWATER ,PET , 5,1951 ,2001 ,yearly
PERLND, 212,PWATER ,SUPY , 5,1951 ,2001 ,yearly
PERLND, 212,PWATER ,SURO , 5,1951 ,2001 ,yearly
PERLND, 212,PWATER ,SURS , 5,1951 ,2001 ,yearly
PERLND, 212,PWATER ,TAET , 5,1951 ,2001 ,yearly
PERLND, 212,PWATER ,UZET , 5,1951 ,2001 ,yearly
PERLND, 212,PWATER ,UZI , 5,1951 ,2001 ,yearly
PERLND, 212,PWATER ,UZS , 5,1951 ,2001 ,yearly
PERLND, 213,PWATER ,AGWET, 5,1951 ,2001 ,yearly
PERLND, 213,PWATER ,AGWI , 5,1951 ,2001 ,yearly
PERLND, 213,PWATER ,AGWO , 5,1951 ,2001 ,yearly
PERLND, 213,PWATER ,AGWS , 5,1951 ,2001 ,yearly
PERLND, 213,PWATER ,BASET, 5,1951 ,2001 ,yearly
PERLND, 213,PWATER ,CEPE , 5,1951 ,2001 ,yearly
PERLND, 213,PWATER ,CEPS , 5,1951 ,2001 ,yearly
PERLND, 213,PWATER ,GWVS , 5,1951 ,2001 ,yearly
PERLND, 213,PWATER ,IFWI , 5,1951 ,2001 ,yearly
PERLND, 213,PWATER ,IFWO , 5,1951 ,2001 ,yearly
PERLND, 213,PWATER ,IFWS , 5,1951 ,2001 ,yearly
PERLND, 213,PWATER ,IGWI , 5,1951 ,2001 ,yearly
PERLND, 213,PWATER ,INFIL, 5,1951 ,2001 ,yearly
PERLND, 213,PWATER ,LZET , 5,1951 ,2001 ,yearly
PERLND, 213,PWATER ,LZI , 5,1951 ,2001 ,yearly
PERLND, 213,PWATER ,LZS , 5,1951 ,2001 ,yearly
PERLND, 213,PWATER ,PERC , 5,1951 ,2001 ,yearly
PERLND, 213,PWATER ,PERO , 5,1951 ,2001 ,yearly
PERLND, 213,PWATER ,PERS , 5,1951 ,2001 ,yearly
PERLND, 213,PWATER ,PET , 5,1951 ,2001 ,yearly
PERLND, 213,PWATER ,SUPY , 5,1951 ,2001 ,yearly
PERLND, 213,PWATER ,SURO , 5,1951 ,2001 ,yearly
PERLND, 213,PWATER ,SURS , 5,1951 ,2001 ,yearly
PERLND, 213,PWATER ,TAET , 5,1951 ,2001 ,yearly
PERLND, 213,PWATER ,UZET , 5,1951 ,2001 ,yearly
PERLND, 213,PWATER ,UZI , 5,1951 ,2001 ,yearly
PERLND, 213,PWATER ,UZS , 5,1951 ,2001 ,yearly
PERLND, 214,PWATER ,AGWET, 5,1951 ,2001 ,yearly
PERLND, 214,PWATER ,AGWI , 5,1951 ,2001 ,yearly
PERLND, 214,PWATER ,AGWO , 5,1951 ,2001 ,yearly
PERLND, 214,PWATER ,AGWS , 5,1951 ,2001 ,yearly
PERLND, 214,PWATER ,BASET, 5,1951 ,2001 ,yearly
PERLND, 214,PWATER ,CEPE , 5,1951 ,2001 ,yearly
PERLND, 214,PWATER ,CEPS , 5,1951 ,2001 ,yearly
PERLND, 214,PWATER ,GWVS , 5,1951 ,2001 ,yearly
PERLND, 214,PWATER ,IFWI , 5,1951 ,2001 ,yearly
PERLND, 214,PWATER ,IFWO , 5,1951 ,2001 ,yearly
PERLND, 214,PWATER ,IFWS , 5,1951 ,2001 ,yearly
PERLND, 214,PWATER ,IGWI , 5,1951 ,2001 ,yearly
PERLND, 214,PWATER ,INFIL, 5,1951 ,2001 ,yearly
PERLND, 214,PWATER ,LZET , 5,1951 ,2001 ,yearly
PERLND, 214,PWATER ,LZI , 5,1951 ,2001 ,yearly
PERLND, 214,PWATER ,LZS , 5,1951 ,2001 ,yearly
PERLND, 214,PWATER ,PERC , 5,1951 ,2001 ,yearly
PERLND, 214,PWATER ,PERO , 5,1951 ,2001 ,yearly
PERLND, 214,PWATER ,PERS , 5,1951 ,2001 ,yearly
PERLND, 214,PWATER ,PET , 5,1951 ,2001 ,yearly
PERLND, 214,PWATER ,SUPY , 5,1951 ,2001 ,yearly
PERLND, 214,PWATER ,SURO , 5,1951 ,2001 ,yearly
PERLND, 214,PWATER ,SURS , 5,1951 ,2001 ,yearly
PERLND, 214,PWATER ,TAET , 5,1951 ,2001 ,yearly
PERLND, 214,PWATER ,UZET , 5,1951 ,2001 ,yearly
PERLND, 214,PWATER ,UZI , 5,1951 ,2001 ,yearly
PERLND, 214,PWATER ,UZS , 5,1951 ,2001 ,yearly
PERLND, 215,PWATER ,AGWET, 5,1951 ,2001 ,yearly
PERLND, 215,PWATER ,AGWI , 5,1951 ,2001 ,yearly
PERLND, 215,PWATER ,AGWO , 5,1951 ,2001 ,yearly
PERLND, 215,PWATER ,AGWS , 5,1951 ,2001 ,yearly
PERLND, 215,PWATER ,BASET, 5,1951 ,2001 ,yearly
PERLND, 215,PWATER ,CEPE , 5,1951 ,2001 ,yearly
PERLND, 215,PWATER ,CEPS , 5,1951 ,2001 ,yearly
PERLND, 215,PWATER ,GWVS , 5,1951 ,2001 ,yearly
PERLND, 215,PWATER ,IFWI , 5,1951 ,2001 ,yearly
PERLND, 215,PWATER ,IFWO , 5,1951 ,2001 ,yearly
PERLND, 215,PWATER ,IFWS , 5,1951 ,2001 ,yearly
PERLND, 215,PWATER ,IGWI , 5,1951 ,2001 ,yearly
PERLND, 215,PWATER ,INFIL, 5,1951 ,2001 ,yearly
PERLND, 215,PWATER ,LZET , 5,1951 ,2001 ,yearly
PERLND, 215,PWATER ,LZI , 5,1951 ,2001 ,yearly
PERLND, 215,PWATER ,LZS , 5,1951 ,2001 ,yearly
PERLND, 215,PWATER ,PERC , 5,1951 ,2001 ,yearly
PERLND, 215,PWATER ,PERO , 5,1951 ,2001 ,yearly
PERLND, 215,PWATER ,PERS , 5,1951 ,2001 ,yearly
PERLND, 215,PWATER ,PET , 5,1951 ,2001 ,yearly
PERLND, 215,PWATER ,SUPY , 5,1951 ,2001 ,yearly
PERLND, 215,PWATER ,SURO , 5,1951 ,2001 ,yearly
PERLND, 215,PWATER ,SURS , 5,1951 ,2001 ,yearly
PERLND, 215,PWATER ,TAET , 5,1951 ,2001 ,yearly
PERLND, 215,PWATER ,UZET , 5,1951 ,2001 ,yearly
PERLND, 215,PWATER ,UZI , 5,1951 ,2001 ,yearly
PERLND, 215,PWATER ,UZS , 5,1951 ,2001 ,yearly
PERLND, 301,PWATER ,AGWET, 5,1951 ,2001 ,yearly
PERLND, 301,PWATER ,AGWI , 5,1951 ,2001 ,yearly
PERLND, 301,PWATER ,AGWO , 5,1951 ,2001 ,yearly
PERLND, 301,PWATER ,AGWS , 5,1951 ,2001 ,yearly
PERLND, 301,PWATER ,BASET, 5,1951 ,2001 ,yearly
PERLND, 301,PWATER ,CEPE , 5,1951 ,2001 ,yearly
PERLND, 301,PWATER ,CEPS , 5,1951 ,2001 ,yearly
PERLND, 301,PWATER ,GWVS , 5,1951 ,2001 ,yearly
PERLND, 301,PWATER ,IFWI , 5,1951 ,2001 ,yearly
PERLND, 301,PWATER ,IFWO , 5,1951 ,2001 ,yearly
PERLND, 301,PWATER ,IFWS , 5,1951 ,2001 ,yearly
PERLND, 301,PWATER ,IGWI , 5,1951 ,2001 ,yearly
PERLND, 301,PWATER ,INFIL, 5,1951 ,2001 ,yearly
PERLND, 301,PWATER ,LZET , 5,1951 ,2001 ,yearly
PERLND, 301,PWATER ,LZI , 5,1951 ,2001 ,yearly
PERLND, 301,PWATER ,LZS , 5,1951 ,2001 ,yearly
PERLND, 301,PWATER ,PERC , 5,1951 ,2001 ,yearly
PERLND, 301,PWATER ,PERO , 5,1951 ,2001 ,yearly
PERLND, 301,PWATER ,PERS , 5,1951 ,2001 ,yearly
PERLND, 301,PWATER ,PET , 5,1951 ,2001 ,yearly
PERLND, 301,PWATER ,SUPY , 5,1951 ,2001 ,yearly
PERLND, 301,PWATER ,SURO , 5,1951 ,2001 ,yearly
PERLND, 301,PWATER ,SURS , 5,1951 ,2001 ,yearly
PERLND, 301,PWATER ,TAET , 5,1951 ,2001 ,yearly
PERLND, 301,PWATER ,UZET , 5,1951 ,2001 ,yearly
PERLND, 301,PWATER ,UZI , 5,1951 ,2001 ,yearly
PERLND, 301,PWATER ,UZS , 5,1951 ,2001 ,yearly
PERLND, 302,PWATER ,AGWET, 5,1951 ,2001 ,yearly
PERLND, 302,PWATER ,AGWI , 5,1951 ,2001 ,yearly
PERLND, 302,PWATER ,AGWO , 5,1951 ,2001 ,yearly
PERLND, 302,PWATER ,AGWS , 5,1951 ,2001 ,yearly
PERLND, 302,PWATER ,BASET, 5,1951 ,2001 ,yearly
PERLND, 302,PWATER ,CEPE , 5,1951 ,2001 ,yearly
PERLND, 302,PWATER ,CEPS , 5,1951 ,2001 ,yearly
PERLND, 302,PWATER ,GWVS , 5,1951 ,2001 ,yearly
PERLND, 302,PWATER ,IFWI , 5,1951 ,2001 ,yearly
PERLND, 302,PWATER ,IFWO , 5,1951 ,2001 ,yearly
PERLND, 302,PWATER ,IFWS , 5,1951 ,2001 ,yearly
PERLND, 302,PWATER ,IGWI , 5,1951 ,2001 ,yearly
PERLND, 302,PWATER ,INFIL, 5,1951 ,2001 ,yearly
PERLND, 302,PWATER ,LZET , 5,1951 ,2001 ,yearly
PERLND, 302,PWATER ,LZI , 5,1951 ,2001 ,yearly
PERLND, 302,PWATER ,LZS , 5,1951 ,2001 ,yearly
PERLND, 302,PWATER ,PERC , 5,1951 ,2001 ,yearly
PERLND, 302,PWATER ,PERO , 5,1951 ,2001 ,yearly
PERLND, 302,PWATER ,PERS , 5,1951 ,2001 ,yearly
PERLND, 302,PWATER ,PET , 5,1951 ,2001 ,yearly
PERLND, 302,PWATER ,SUPY , 5,1951 ,2001 ,yearly
PERLND, 302,PWATER ,SURO , 5,1951 ,2001 ,yearly
PERLND, 302,PWATER ,SURS , 5,1951 ,2001 ,yearly
PERLND, 302,PWATER ,TAET , 5,1951 ,2001 ,yearly
PERLND, 302,PWATER ,UZET , 5,1951 ,2001 ,yearly
PERLND, 302,PWATER ,UZI , 5,1951 ,2001 ,yearly
PERLND, 302,PWATER ,UZS , 5,1951 ,2001 ,yearly
PERLND, 303,PWATER ,AGWET, 5,1951 ,2001 ,yearly
PERLND, 303,PWATER ,AGWI , 5,1951 ,2001 ,yearly
PERLND, 303,PWATER ,AGWO , 5,1951 ,2001 ,yearly
PERLND, 303,PWATER ,AGWS , 5,1951 ,2001 ,yearly
PERLND, 303,PWATER ,BASET, 5,1951 ,2001 ,yearly
PERLND, 303,PWATER ,CEPE , 5,1951 ,2001 ,yearly
PERLND, 303,PWATER ,CEPS , 5,1951 ,2001 ,yearly
PERLND, 303,PWATER ,GWVS , 5,1951 ,2001 ,yearly
PERLND, 303,PWATER ,IFWI , 5,1951 ,2001 ,yearly
PERLND, 303,PWATER ,IFWO , 5,1951 ,2001 ,yearly
PERLND, 303,PWATER ,IFWS , 5,1951 ,2001 ,yearly
PERLND, 303,PWATER ,IGWI , 5,1951 ,2001 ,yearly
PERLND, 303,PWATER ,INFIL, 5,1951 ,2001 ,yearly
PERLND, 303,PWATER ,LZET , 5,1951 ,2001 ,yearly
PERLND, 303,PWATER ,LZI , 5,1951 ,2001 ,yearly
PERLND, 303,PWATER ,LZS , 5,1951 ,2001 ,yearly
PERLND, 303,PWATER ,PERC , 5,1951 ,2001 ,yearly
PERLND, 303,PWATER ,PERO , 5,1951 ,2001 ,yearly
PERLND, 303,PWATER ,PERS , 5,1951 ,2001 ,yearly
PERLND, 303,PWATER ,PET , 5,1951 ,2001 ,yearly
PERLND, 303,PWATER ,SUPY , 5,1951 ,2001 ,yearly
PERLND, 303,PWATER ,SURO , 5,1951 ,2001 ,yearly
PERLND, 303,PWATER ,SURS , 5,1951 ,2001 ,yearly
PERLND, 303,PWATER ,TAET , 5,1951 ,2001 ,yearly
PERLND, 303,PWATER ,UZET , 5,1951 ,2001 ,yearly
PERLND, 303,PWATER ,UZI , 5,1951 ,2001 ,yearly
PERLND, 303,PWATER ,UZS , 5,1951 ,2001 ,yearly
PERLND, 304,PWATER ,AGWET, 5,1951 ,2001 ,yearly
PERLND, 304,PWATER ,AGWI , 5,1951 ,2001 ,yearly
PERLND, 304,PWATER ,AGWO , 5,1951 ,2001 ,yearly
PERLND, 304,PWATER ,AGWS , 5,1951 ,2001 ,yearly
PERLND, 304,PWATER ,BASET, 5,1951 ,2001 ,yearly
PERLND, 304,PWATER ,CEPE , 5,1951 ,2001 ,yearly
PERLND, 304,PWATER ,CEPS , 5,1951 ,2001 ,yearly
PERLND, 304,PWATER ,GWVS , 5,1951 ,2001 ,yearly
PERLND, 304,PWATER ,IFWI , 5,1951 ,2001 ,yearly
PERLND, 304,PWATER ,IFWO , 5,1951 ,2001 ,yearly
PERLND, 304,PWATER ,IFWS , 5,1951 ,2001 ,yearly
PERLND, 304,PWATER ,IGWI , 5,1951 ,2001 ,yearly
PERLND, 304,PWATER ,INFIL, 5,1951 ,2001 ,yearly
PERLND, 304,PWATER ,LZET , 5,1951 ,2001 ,yearly
PERLND, 304,PWATER ,LZI , 5,1951 ,2001 ,yearly
PERLND, 304,PWATER ,LZS , 5,1951 ,2001 ,yearly
PERLND, 304,PWATER ,PERC , 5,1951 ,2001 ,yearly
PERLND, 304,PWATER ,PERO , 5,1951 ,2001 ,yearly
PERLND, 304,PWATER ,PERS , 5,1951 ,2001 ,yearly
PERLND, 304,PWATER ,PET , 5,1951 ,2001 ,yearly
PERLND, 304,PWATER ,SUPY , 5,1951 ,2001 ,yearly
PERLND, 304,PWATER ,SURO , 5,1951 ,2001 ,yearly
PERLND, 304,PWATER ,SURS , 5,1951 ,2001 ,yearly
PERLND, 304,PWATER ,TAET , 5,1951 ,2001 ,yearly
PERLND, 304,PWATER ,UZET , 5,1951 ,2001 ,yearly
PERLND, 304,PWATER ,UZI , 5,1951 ,2001 ,yearly
PERLND, 304,PWATER ,UZS , 5,1951 ,2001 ,yearly
PERLND, 305,PWATER ,AGWET, 5,1951 ,2001 ,yearly
PERLND, 305,PWATER ,AGWI , 5,1951 ,2001 ,yearly
PERLND, 305,PWATER ,AGWO , 5,1951 ,2001 ,yearly
PERLND, 305,PWATER ,AGWS , 5,1951 ,2001 ,yearly
PERLND, 305,PWATER ,BASET, 5,1951 ,2001 ,yearly
PERLND, 305,PWATER ,CEPE , 5,1951 ,2001 ,yearly
PERLND, 305,PWATER ,CEPS , 5,1951 ,2001 ,yearly
PERLND, 305,PWATER ,GWVS , 5,1951 ,2001 ,yearly
PERLND, 305,PWATER ,IFWI , 5,1951 ,2001 ,yearly
PERLND, 305,PWATER ,IFWO , 5,1951 ,2001 ,yearly
PERLND, 305,PWATER ,IFWS , 5,1951 ,2001 ,yearly
PERLND, 305,PWATER ,IGWI , 5,1951 ,2001 ,yearly
PERLND, 305,PWATER ,INFIL, 5,1951 ,2001 ,yearly
PERLND, 305,PWATER ,LZET , 5,1951 ,2001 ,yearly
PERLND, 305,PWATER ,LZI , 5,1951 ,2001 ,yearly
PERLND, 305,PWATER ,LZS , 5,1951 ,2001 ,yearly
PERLND, 305,PWATER ,PERC , 5,1951 ,2001 ,yearly
PERLND, 305,PWATER ,PERO , 5,1951 ,2001 ,yearly
PERLND, 305,PWATER ,PERS , 5,1951 ,2001 ,yearly
PERLND, 305,PWATER ,PET , 5,1951 ,2001 ,yearly
PERLND, 305,PWATER ,SUPY , 5,1951 ,2001 ,yearly
PERLND, 305,PWATER ,SURO , 5,1951 ,2001 ,yearly
PERLND, 305,PWATER ,SURS , 5,1951 ,2001 ,yearly
PERLND, 305,PWATER ,TAET , 5,1951 ,2001 ,yearly
PERLND, 305,PWATER ,UZET , 5,1951 ,2001 ,yearly
PERLND, 305,PWATER ,UZI , 5,1951 ,2001 ,yearly
PERLND, 305,PWATER ,UZS , 5,1951 ,2001 ,yearly
PERLND, 311,PWATER ,AGWET, 5,1951 ,2001 ,yearly
PERLND, 311,PWATER ,AGWI , 5,1951 ,2001 ,yearly
PERLND, 311,PWATER ,AGWO , 5,1951 ,2001 ,yearly
PERLND, 311,PWATER ,AGWS , 5,1951 ,2001 ,yearly
PERLND, 311,PWATER ,BASET, 5,1951 ,2001 ,yearly
PERLND, 311,PWATER ,CEPE , 5,1951 ,2001 ,yearly
PERLND, 311,PWATER ,CEPS , 5,1951 ,2001 ,yearly
PERLND, 311,PWATER ,GWVS , 5,1951 ,2001 ,yearly
PERLND, 311,PWATER ,IFWI , 5,1951 ,2001 ,yearly
PERLND, 311,PWATER ,IFWO , 5,1951 ,2001 ,yearly
PERLND, 311,PWATER ,IFWS , 5,1951 ,2001 ,yearly
PERLND, 311,PWATER ,IGWI , 5,1951 ,2001 ,yearly
PERLND, 311,PWATER ,INFIL, 5,1951 ,2001 ,yearly
PERLND, 311,PWATER ,LZET , 5,1951 ,2001 ,yearly
PERLND, 311,PWATER ,LZI , 5,1951 ,2001 ,yearly
PERLND, 311,PWATER ,LZS , 5,1951 ,2001 ,yearly
PERLND, 311,PWATER ,PERC , 5,1951 ,2001 ,yearly
PERLND, 311,PWATER ,PERO , 5,1951 ,2001 ,yearly
PERLND, 311,PWATER ,PERS , 5,1951 ,2001 ,yearly
PERLND, 311,PWATER ,PET , 5,1951 ,2001 ,yearly
PERLND, 311,PWATER ,SUPY , 5,1951 ,2001 ,yearly
PERLND, 311,PWATER ,SURO , 5,1951 ,2001 ,yearly
PERLND, 311,PWATER ,SURS , 5,1951 ,2001 ,yearly
PERLND, 311,PWATER ,TAET , 5,1951 ,2001 ,yearly
PERLND, 311,PWATER ,UZET , 5,1951 ,2001 ,yearly
PERLND, 311,PWATER ,UZI , 5,1951 ,2001 ,yearly
PERLND, 311,PWATER ,UZS , 5,1951 ,2001 ,yearly
PERLND, 312,PWATER ,AGWET, 5,1951 ,2001 ,yearly
PERLND, 312,PWATER ,AGWI , 5,1951 ,2001 ,yearly
PERLND, 312,PWATER ,AGWO , 5,1951 ,2001 ,yearly
PERLND, 312,PWATER ,AGWS , 5,1951 ,2001 ,yearly
PERLND, 312,PWATER ,BASET, 5,1951 ,2001 ,yearly
PERLND, 312,PWATER ,CEPE , 5,1951 ,2001 ,yearly
PERLND, 312,PWATER ,CEPS , 5,1951 ,2001 ,yearly
PERLND, 312,PWATER ,GWVS , 5,1951 ,2001 ,yearly
PERLND, 312,PWATER ,IFWI , 5,1951 ,2001 ,yearly
PERLND, 312,PWATER ,IFWO , 5,1951 ,2001 ,yearly
PERLND, 312,PWATER ,IFWS , 5,1951 ,2001 ,yearly
PERLND, 312,PWATER ,IGWI , 5,1951 ,2001 ,yearly
PERLND, 312,PWATER ,INFIL, 5,1951 ,2001 ,yearly
PERLND, 312,PWATER ,LZET , 5,1951 ,2001 ,yearly
PERLND, 312,PWATER ,LZI , 5,1951 ,2001 ,yearly
PERLND, 312,PWATER ,LZS , 5,1951 ,2001 ,yearly
PERLND, 312,PWATER ,PERC , 5,1951 ,2001 ,yearly
PERLND, 312,PWATER ,PERO , 5,1951 ,2001 ,yearly
PERLND, 312,PWATER ,PERS , 5,1951 ,2001 ,yearly
PERLND, 312,PWATER ,PET , 5,1951 ,2001 ,yearly
PERLND, 312,PWATER ,SUPY , 5,1951 ,2001 ,yearly
PERLND, 312,PWATER ,SURO , 5,1951 ,2001 ,yearly
PERLND, 312,PWATER ,SURS , 5,1951 ,2001 ,yearly
PERLND, 312,PWATER ,TAET , 5,1951 ,2001 ,yearly
PERLND, 312,PWATER ,UZET , 5,1951 ,2001 ,yearly
PERLND, 312,PWATER ,UZI , 5,1951 ,2001 ,yearly
PERLND, 312,PWATER ,UZS , 5,1951 ,2001 ,yearly
PERLND, 313,PWATER ,AGWET, 5,1951 ,2001 ,yearly
PERLND, 313,PWATER ,AGWI , 5,1951 ,2001 ,yearly
PERLND, 313,PWATER ,AGWO , 5,1951 ,2001 ,yearly
PERLND, 313,PWATER ,AGWS , 5,1951 ,2001 ,yearly
PERLND, 313,PWATER ,BASET, 5,1951 ,2001 ,yearly
PERLND, 313,PWATER ,CEPE , 5,1951 ,2001 ,yearly
PERLND, 313,PWATER ,CEPS , 5,1951 ,2001 ,yearly
PERLND, 313,PWATER ,GWVS , 5,1951 ,2001 ,yearly
PERLND, 313,PWATER ,IFWI , 5,1951 ,2001 ,yearly
PERLND, 313,PWATER ,IFWO , 5,1951 ,2001 ,yearly
PERLND, 313,PWATER ,IFWS , 5,1951 ,2001 ,yearly
PERLND, 313,PWATER ,IGWI , 5,1951 ,2001 ,yearly
PERLND, 313,PWATER ,INFIL, 5,1951 ,2001 ,yearly
PERLND, 313,PWATER ,LZET , 5,1951 ,2001 ,yearly
PERLND, 313,PWATER ,LZI , 5,1951 ,2001 ,yearly
PERLND, 313,PWATER ,LZS , 5,1951 ,2001 ,yearly
PERLND, 313,PWATER ,PERC , 5,1951 ,2001 ,yearly
PERLND, 313,PWATER ,PERO , 5,1951 ,2001 ,yearly
PERLND, 313,PWATER ,PERS , 5,1951 ,2001 ,yearly
PERLND, 313,PWATER ,PET , 5,1951 ,2001 ,yearly
PERLND, 313,PWATER ,SUPY , 5,1951 ,2001 ,yearly
PERLND, 313,PWATER ,SURO , 5,1951 ,2001 ,yearly
PERLND, 313,PWATER ,SURS , 5,1951 ,2001 ,yearly
PERLND, 313,PWATER ,TAET , 5,1951 ,2001 ,yearly
PERLND, 313,PWATER ,UZET , 5,1951 ,2001 ,yearly
PERLND, 313,PWATER ,UZI , 5,1951 ,2001 ,yearly
PERLND, 313,PWATER ,UZS , 5,1951 ,2001 ,yearly
PERLND, 314,PWATER ,AGWET, 5,1951 ,2001 ,yearly
PERLND, 314,PWATER ,AGWI , 5,1951 ,2001 ,yearly
PERLND, 314,PWATER ,AGWO , 5,1951 ,2001 ,yearly
PERLND, 314,PWATER ,AGWS , 5,1951 ,2001 ,yearly
PERLND, 314,PWATER ,BASET, 5,1951 ,2001 ,yearly
PERLND, 314,PWATER ,CEPE , 5,1951 ,2001 ,yearly
PERLND, 314,PWATER ,CEPS , 5,1951 ,2001 ,yearly
PERLND, 314,PWATER ,GWVS , 5,1951 ,2001 ,yearly
PERLND, 314,PWATER ,IFWI , 5,1951 ,2001 ,yearly
PERLND, 314,PWATER ,IFWO , 5,1951 ,2001 ,yearly
PERLND, 314,PWATER ,IFWS , 5,1951 ,2001 ,yearly
PERLND, 314,PWATER ,IGWI , 5,1951 ,2001 ,yearly
PERLND, 314,PWATER ,INFIL, 5,1951 ,2001 ,yearly
PERLND, 314,PWATER ,LZET , 5,1951 ,2001 ,yearly
PERLND, 314,PWATER ,LZI , 5,1951 ,2001 ,yearly
PERLND, 314,PWATER ,LZS , 5,1951 ,2001 ,yearly
PERLND, 314,PWATER ,PERC , 5,1951 ,2001 ,yearly
PERLND, 314,PWATER ,PERO , 5,1951 ,2001 ,yearly
PERLND, 314,PWATER ,PERS , 5,1951 ,2001 ,yearly
PERLND, 314,PWATER ,PET , 5,1951 ,2001 ,yearly
PERLND, 314,PWATER ,SUPY , 5,1951 ,2001 ,yearly
PERLND, 314,PWATER ,SURO , 5,1951 ,2001 ,yearly
PERLND, 314,PWATER ,SURS , 5,1951 ,2001 ,yearly
PERLND, 314,PWATER ,TAET , 5,1951 ,2001 ,yearly
PERLND, 314,PWATER ,UZET , 5,1951 ,2001 ,yearly
PERLND, 314,PWATER ,UZI , 5,1951 ,2001 ,yearly
PERLND, 314,PWATER ,UZS , 5,1951 ,2001 ,yearly
PERLND, 315,PWATER ,AGWET, 5,1951 ,2001 ,yearly
PERLND, 315,PWATER ,AGWI , 5,1951 ,2001 ,yearly
PERLND, 315,PWATER ,AGWO , 5,1951 ,2001 ,yearly
PERLND, 315,PWATER ,AGWS , 5,1951 ,2001 ,yearly
PERLND, 315,PWATER ,BASET, 5,1951 ,2001 ,yearly
PERLND, 315,PWATER ,CEPE , 5,1951 ,2001 ,yearly
PERLND, 315,PWATER ,CEPS , 5,1951 ,2001 ,yearly
PERLND, 315,PWATER ,GWVS , 5,1951 ,2001 ,yearly
PERLND, 315,PWATER ,IFWI , 5,1951 ,2001 ,yearly
PERLND, 315,PWATER ,IFWO , 5,1951 ,2001 ,yearly
PERLND, 315,PWATER ,IFWS , 5,1951 ,2001 ,yearly
PERLND, 315,PWATER ,IGWI , 5,1951 ,2001 ,yearly
PERLND, 315,PWATER ,INFIL, 5,1951 ,2001 ,yearly
PERLND, 315,PWATER ,LZET , 5,1951 ,2001 ,yearly
PERLND, 315,PWATER ,LZI , 5,1951 ,2001 ,yearly
PERLND, 315,PWATER ,LZS , 5,1951 ,2001 ,yearly
PERLND, 315,PWATER ,PERC , 5,1951 ,2001 ,yearly
PERLND, 315,PWATER ,PERO , 5,1951 ,2001 ,yearly
PERLND, 315,PWATER ,PERS , 5,1951 ,2001 ,yearly
PERLND, 315,PWATER ,PET , 5,1951 ,2001 ,yearly
PERLND, 315,PWATER ,SUPY , 5,1951 ,2001 ,yearly
PERLND, 315,PWATER ,SURO , 5,1951 ,2001 ,yearly
PERLND, 315,PWATER ,SURS , 5,1951 ,2001 ,yearly
PERLND, 315,PWATER ,TAET , 5,1951 ,2001 ,yearly
PERLND, 315,PWATER ,UZET , 5,1951 ,2001 ,yearly
PERLND, 315,PWATER ,UZI , 5,1951 ,2001 ,yearly
PERLND, 315,PWATER ,UZS , 5,1951 ,2001 ,yearly
PERLND, 411,PWATER ,AGWET, 5,1951 ,2001 ,yearly
PERLND, 411,PWATER ,AGWI , 5,1951 ,2001 ,yearly
PERLND, 411,PWATER ,AGWO , 5,1951 ,2001 ,yearly
PERLND, 411,PWATER ,AGWS , 5,1951 ,2001 ,yearly
PERLND, 411,PWATER ,BASET, 5,1951 ,2001 ,yearly
PERLND, 411,PWATER ,CEPE , 5,1951 ,2001 ,yearly
PERLND, 411,PWATER ,CEPS , 5,1951 ,2001 ,yearly
PERLND, 411,PWATER ,GWVS , 5,1951 ,2001 ,yearly
PERLND, 411,PWATER ,IFWI , 5,1951 ,2001 ,yearly
PERLND, 411,PWATER ,IFWO , 5,1951 ,2001 ,yearly
PERLND, 411,PWATER ,IFWS , 5,1951 ,2001 ,yearly
PERLND, 411,PWATER ,IGWI , 5,1951 ,2001 ,yearly
PERLND, 411,PWATER ,INFIL, 5,1951 ,2001 ,yearly
PERLND, 411,PWATER ,LZET , 5,1951 ,2001 ,yearly
PERLND, 411,PWATER ,LZI , 5,1951 ,2001 ,yearly
PERLND, 411,PWATER ,LZS , 5,1951 ,2001 ,yearly
PERLND, 411,PWATER ,PERC , 5,1951 ,2001 ,yearly
PERLND, 411,PWATER ,PERO , 5,1951 ,2001 ,yearly
PERLND, 411,PWATER ,PERS , 5,1951 ,2001 ,yearly
PERLND, 411,PWATER ,PET , 5,1951 ,2001 ,yearly
PERLND, 411,PWATER ,SUPY , 5,1951 ,2001 ,yearly
PERLND, 411,PWATER ,SURO , 5,1951 ,2001 ,yearly
PERLND, 411,PWATER ,SURS , 5,1951 ,2001 ,yearly
PERLND, 411,PWATER ,TAET , 5,1951 ,2001 ,yearly
PERLND, 411,PWATER ,UZET , 5,1951 ,2001 ,yearly
PERLND, 411,PWATER ,UZI , 5,1951 ,2001 ,yearly
PERLND, 411,PWATER ,UZS , 5,1951 ,2001 ,yearly
PERLND, 412,PWATER ,AGWET, 5,1951 ,2001 ,yearly
PERLND, 412,PWATER ,AGWI , 5,1951 ,2001 ,yearly
PERLND, 412,PWATER ,AGWO , 5,1951 ,2001 ,yearly
PERLND, 412,PWATER ,AGWS , 5,1951 ,2001 ,yearly
PERLND, 412,PWATER ,BASET, 5,1951 ,2001 ,yearly
PERLND, 412,PWATER ,CEPE , 5,1951 ,2001 ,yearly
PERLND, 412,PWATER ,CEPS , 5,1951 ,2001 ,yearly
PERLND, 412,PWATER ,GWVS , 5,1951 ,2001 ,yearly
PERLND, 412,PWATER ,IFWI , 5,1951 ,2001 ,yearly
PERLND, 412,PWATER ,IFWO , 5,1951 ,2001 ,yearly
PERLND, 412,PWATER ,IFWS , 5,1951 ,2001 ,yearly
PERLND, 412,PWATER ,IGWI , 5,1951 ,2001 ,yearly
PERLND, 412,PWATER ,INFIL, 5,1951 ,2001 ,yearly
PERLND, 412,PWATER ,LZET , 5,1951 ,2001 ,yearly
PERLND, 412,PWATER ,LZI , 5,1951 ,2001 ,yearly
PERLND, 412,PWATER ,LZS , 5,1951 ,2001 ,yearly
PERLND, 412,PWATER ,PERC , 5,1951 ,2001 ,yearly
PERLND, 412,PWATER ,PERO , 5,1951 ,2001 ,yearly
PERLND, 412,PWATER ,PERS , 5,1951 ,2001 ,yearly
PERLND, 412,PWATER ,PET , 5,1951 ,2001 ,yearly
PERLND, 412,PWATER ,SUPY , 5,1951 ,2001 ,yearly
PERLND, 412,PWATER ,SURO , 5,1951 ,2001 ,yearly
PERLND, 412,PWATER ,SURS , 5,1951 ,2001 ,yearly
PERLND, 412,PWATER ,TAET , 5,1951 ,2001 ,yearly
PERLND, 412,PWATER ,UZET , 5,1951 ,2001 ,yearly
PERLND, 412,PWATER ,UZI , 5,1951 ,2001 ,yearly
PERLND, 412,PWATER ,UZS , 5,1951 ,2001 ,yearly
PERLND, 413,PWATER ,AGWET, 5,1951 ,2001 ,yearly
PERLND, 413,PWATER ,AGWI , 5,1951 ,2001 ,yearly
PERLND, 413,PWATER ,AGWO , 5,1951 ,2001 ,yearly
PERLND, 413,PWATER ,AGWS , 5,1951 ,2001 ,yearly
PERLND, 413,PWATER ,BASET, 5,1951 ,2001 ,yearly
PERLND, 413,PWATER ,CEPE , 5,1951 ,2001 ,yearly
PERLND, 413,PWATER ,CEPS , 5,1951 ,2001 ,yearly
PERLND, 413,PWATER ,GWVS , 5,1951 ,2001 ,yearly
PERLND, 413,PWATER ,IFWI , 5,1951 ,2001 ,yearly
PERLND, 413,PWATER ,IFWO , 5,1951 ,2001 ,yearly
PERLND, 413,PWATER ,IFWS , 5,1951 ,2001 ,yearly
PERLND, 413,PWATER ,IGWI , 5,1951 ,2001 ,yearly
PERLND, 413,PWATER ,INFIL, 5,1951 ,2001 ,yearly
PERLND, 413,PWATER ,LZET , 5,1951 ,2001 ,yearly
PERLND, 413,PWATER ,LZI , 5,1951 ,2001 ,yearly
PERLND, 413,PWATER ,LZS , 5,1951 ,2001 ,yearly
PERLND, 413,PWATER ,PERC , 5,1951 ,2001 ,yearly
PERLND, 413,PWATER ,PERO , 5,1951 ,2001 ,yearly
PERLND, 413,PWATER ,PERS , 5,1951 ,2001 ,yearly
PERLND, 413,PWATER ,PET , 5,1951 ,2001 ,yearly
PERLND, 413,PWATER ,SUPY , 5,1951 ,2001 ,yearly
PERLND, 413,PWATER ,SURO , 5,1951 ,2001 ,yearly
PERLND, 413,PWATER ,SURS , 5,1951 ,2001 ,yearly
PERLND, 413,PWATER ,TAET , 5,1951 ,2001 ,yearly
PERLND, 413,PWATER ,UZET , 5,1951 ,2001 ,yearly
PERLND, 413,PWATER ,UZI , 5,1951 ,2001 ,yearly
PERLND, 413,PWATER ,UZS , 5,1951 ,2001 ,yearly
PERLND, 414,PWATER ,AGWET, 5,1951 ,2001 ,yearly
PERLND, 414,PWATER ,AGWI , 5,1951 ,2001 ,yearly
PERLND, 414,PWATER ,AGWO , 5,1951 ,2001 ,yearly
PERLND, 414,PWATER ,AGWS , 5,1951 ,2001 ,yearly
PERLND, 414,PWATER ,BASET, 5,1951 ,2001 ,yearly
PERLND, 414,PWATER ,CEPE , 5,1951 ,2001 ,yearly
PERLND, 414,PWATER ,CEPS , 5,1951 ,2001 ,yearly
PERLND, 414,PWATER ,GWVS , 5,1951 ,2001 ,yearly
PERLND, 414,PWATER ,IFWI , 5,1951 ,2001 ,yearly
PERLND, 414,PWATER ,IFWO , 5,1951 ,2001 ,yearly
PERLND, 414,PWATER ,IFWS , 5,1951 ,2001 ,yearly
PERLND, 414,PWATER ,IGWI , 5,1951 ,2001 ,yearly
PERLND, 414,PWATER ,INFIL, 5,1951 ,2001 ,yearly
PERLND, 414,PWATER ,LZET , 5,1951 ,2001 ,yearly
PERLND, 414,PWATER ,LZI , 5,1951 ,2001 ,yearly
PERLND, 414,PWATER ,LZS , 5,1951 ,2001 ,yearly
PERLND, 414,PWATER ,PERC , 5,1951 ,2001 ,yearly
PERLND, 414,PWATER ,PERO , 5,1951 ,2001 ,yearly
PERLND, 414,PWATER ,PERS , 5,1951 ,2001 ,yearly
PERLND, 414,PWATER ,PET , 5,1951 ,2001 ,yearly
PERLND, 414,PWATER ,SUPY , 5,1951 ,2001 ,yearly
PERLND, 414,PWATER ,SURO , 5,1951 ,2001 ,yearly
PERLND, 414,PWATER ,SURS , 5,1951 ,2001 ,yearly
PERLND, 414,PWATER ,TAET , 5,1951 ,2001 ,yearly
PERLND, 414,PWATER ,UZET , 5,1951 ,2001 ,yearly
PERLND, 414,PWATER ,UZI , 5,1951 ,2001 ,yearly
PERLND, 414,PWATER ,UZS , 5,1951 ,2001 ,yearly
PERLND, 415,PWATER ,AGWET, 5,1951 ,2001 ,yearly
PERLND, 415,PWATER ,AGWI , 5,1951 ,2001 ,yearly
PERLND, 415,PWATER ,AGWO , 5,1951 ,2001 ,yearly
PERLND, 415,PWATER ,AGWS , 5,1951 ,2001 ,yearly
PERLND, 415,PWATER ,BASET, 5,1951 ,2001 ,yearly
PERLND, 415,PWATER ,CEPE , 5,1951 ,2001 ,yearly
PERLND, 415,PWATER ,CEPS , 5,1951 ,2001 ,yearly
PERLND, 415,PWATER ,GWVS , 5,1951 ,2001 ,yearly
PERLND, 415,PWATER ,IFWI , 5,1951 ,2001 ,yearly
PERLND, 415,PWATER ,IFWO , 5,1951 ,2001 ,yearly
PERLND, 415,PWATER ,IFWS , 5,1951 ,2001 ,yearly
PERLND, 415,PWATER ,IGWI , 5,1951 ,2001 ,yearly
PERLND, 415,PWATER ,INFIL, 5,1951 ,2001 ,yearly
PERLND, 415,PWATER ,LZET , 5,1951 ,2001 ,yearly
PERLND, 415,PWATER ,LZI , 5,1951 ,2001 ,yearly
PERLND, 415,PWATER ,LZS , 5,1951 ,2001 ,yearly
PERLND, 415,PWATER ,PERC , 5,1951 ,2001 ,yearly
PERLND, 415,PWATER ,PERO , 5,1951 ,2001 ,yearly
PERLND, 415,PWATER ,PERS , 5,1951 ,2001 ,yearly
PERLND, 415,PWATER ,PET , 5,1951 ,2001 ,yearly
PERLND, 415,PWATER ,SUPY , 5,1951 ,2001 ,yearly
PERLND, 415,PWATER ,SURO , 5,1951 ,2001 ,yearly
PERLND, 415,PWATER ,SURS , 5,1951 ,2001 ,yearly
PERLND, 415,PWATER ,TAET , 5,1951 ,2001 ,yearly
PERLND, 415,PWATER ,UZET , 5,1951 ,2001 ,yearly
PERLND, 415,PWATER ,UZI , 5,1951 ,2001 ,yearly
PERLND, 415,PWATER ,UZS , 5,1951 ,2001 ,yearly
PERLND, 511,PWATER ,AGWET, 5,1951 ,2001 ,yearly
PERLND, 511,PWATER ,AGWI , 5,1951 ,2001 ,yearly
PERLND, 511,PWATER ,AGWO , 5,1951 ,2001 ,yearly
PERLND, 511,PWATER ,AGWS , 5,1951 ,2001 ,yearly
PERLND, 511,PWATER ,BASET, 5,1951 ,2001 ,yearly
PERLND, 511,PWATER ,CEPE , 5,1951 ,2001 ,yearly
PERLND, 511,PWATER ,CEPS , 5,1951 ,2001 ,yearly
PERLND, 511,PWATER ,GWVS , 5,1951 ,2001 ,yearly
PERLND, 511,PWATER ,IFWI , 5,1951 ,2001 ,yearly
PERLND, 511,PWATER ,IFWO , 5,1951 ,2001 ,yearly
PERLND, 511,PWATER ,IFWS , 5,1951 ,2001 ,yearly
PERLND, 511,PWATER ,IGWI , 5,1951 ,2001 ,yearly
PERLND, 511,PWATER ,INFIL, 5,1951 ,2001 ,yearly
PERLND, 511,PWATER ,LZET , 5,1951 ,2001 ,yearly
PERLND, 511,PWATER ,LZI , 5,1951 ,2001 ,yearly
PERLND, 511,PWATER ,LZS , 5,1951 ,2001 ,yearly
PERLND, 511,PWATER ,PERC , 5,1951 ,2001 ,yearly
PERLND, 511,PWATER ,PERO , 5,1951 ,2001 ,yearly
PERLND, 511,PWATER ,PERS , 5,1951 ,2001 ,yearly
PERLND, 511,PWATER ,PET , 5,1951 ,2001 ,yearly
PERLND, 511,PWATER ,SUPY , 5,1951 ,2001 ,yearly
PERLND, 511,PWATER ,SURO , 5,1951 ,2001 ,yearly
PERLND, 511,PWATER ,SURS , 5,1951 ,2001 ,yearly
PERLND, 511,PWATER ,TAET , 5,1951 ,2001 ,yearly
PERLND, 511,PWATER ,UZET , 5,1951 ,2001 ,yearly
PERLND, 511,PWATER ,UZI , 5,1951 ,2001 ,yearly
PERLND, 511,PWATER ,UZS , 5,1951 ,2001 ,yearly
PERLND, 512,PWATER ,AGWET, 5,1951 ,2001 ,yearly
PERLND, 512,PWATER ,AGWI , 5,1951 ,2001 ,yearly
PERLND, 512,PWATER ,AGWO , 5,1951 ,2001 ,yearly
PERLND, 512,PWATER ,AGWS , 5,1951 ,2001 ,yearly
PERLND, 512,PWATER ,BASET, 5,1951 ,2001 ,yearly
PERLND, 512,PWATER ,CEPE , 5,1951 ,2001 ,yearly
PERLND, 512,PWATER ,CEPS , 5,1951 ,2001 ,yearly
PERLND, 512,PWATER ,GWVS , 5,1951 ,2001 ,yearly
PERLND, 512,PWATER ,IFWI , 5,1951 ,2001 ,yearly
PERLND, 512,PWATER ,IFWO , 5,1951 ,2001 ,yearly
PERLND, 512,PWATER ,IFWS , 5,1951 ,2001 ,yearly
PERLND, 512,PWATER ,IGWI , 5,1951 ,2001 ,yearly
PERLND, 512,PWATER ,INFIL, 5,1951 ,2001 ,yearly
PERLND, 512,PWATER ,LZET , 5,1951 ,2001 ,yearly
PERLND, 512,PWATER ,LZI , 5,1951 ,2001 ,yearly
PERLND, 512,PWATER ,LZS , 5,1951 ,2001 ,yearly
PERLND, 512,PWATER ,PERC , 5,1951 ,2001 ,yearly
PERLND, 512,PWATER ,PERO , 5,1951 ,2001 ,yearly
PERLND, 512,PWATER ,PERS , 5,1951 ,2001 ,yearly
PERLND, 512,PWATER ,PET , 5,1951 ,2001 ,yearly
PERLND, 512,PWATER ,SUPY , 5,1951 ,2001 ,yearly
PERLND, 512,PWATER ,SURO , 5,1951 ,2001 ,yearly
PERLND, 512,PWATER ,SURS , 5,1951 ,2001 ,yearly
PERLND, 512,PWATER ,TAET , 5,1951 ,2001 ,yearly
PERLND, 512,PWATER ,UZET , 5,1951 ,2001 ,yearly
PERLND, 512,PWATER ,UZI , 5,1951 ,2001 ,yearly
PERLND, 512,PWATER ,UZS , 5,1951 ,2001 ,yearly
PERLND, 513,PWATER ,AGWET, 5,1951 ,2001 ,yearly
PERLND, 513,PWATER ,AGWI , 5,1951 ,2001 ,yearly
PERLND, 513,PWATER ,AGWO , 5,1951 ,2001 ,yearly
PERLND, 513,PWATER ,AGWS , 5,1951 ,2001 ,yearly
PERLND, 513,PWATER ,BASET, 5,1951 ,2001 ,yearly
PERLND, 513,PWATER ,CEPE , 5,1951 ,2001 ,yearly
PERLND, 513,PWATER ,CEPS , 5,1951 ,2001 ,yearly
PERLND, 513,PWATER ,GWVS , 5,1951 ,2001 ,yearly
PERLND, 513,PWATER ,IFWI , 5,1951 ,2001 ,yearly
PERLND, 513,PWATER ,IFWO , 5,1951 ,2001 ,yearly
PERLND, 513,PWATER ,IFWS , 5,1951 ,2001 ,yearly
PERLND, 513,PWATER ,IGWI , 5,1951 ,2001 ,yearly
PERLND, 513,PWATER ,INFIL, 5,1951 ,2001 ,yearly
PERLND, 513,PWATER ,LZET , 5,1951 ,2001 ,yearly
PERLND, 513,PWATER ,LZI , 5,1951 ,2001 ,yearly
PERLND, 513,PWATER ,LZS , 5,1951 ,2001 ,yearly
PERLND, 513,PWATER ,PERC , 5,1951 ,2001 ,yearly
PERLND, 513,PWATER ,PERO , 5,1951 ,2001 ,yearly
PERLND, 513,PWATER ,PERS , 5,1951 ,2001 ,yearly
PERLND, 513,PWATER ,PET , 5,1951 ,2001 ,yearly
PERLND, 513,PWATER ,SUPY , 5,1951 ,2001 ,yearly
PERLND, 513,PWATER ,SURO , 5,1951 ,2001 ,yearly
PERLND, 513,PWATER ,SURS , 5,1951 ,2001 ,yearly
PERLND, 513,PWATER ,TAET , 5,1951 ,2001 ,yearly
PERLND, 513,PWATER ,UZET , 5,1951 ,2001 ,yearly
PERLND, 513,PWATER ,UZI , 5,1951 ,2001 ,yearly
PERLND, 513,PWATER ,UZS , 5,1951 ,2001 ,yearly
PERLND, 514,PWATER ,AGWET, 5,1951 ,2001 ,yearly
PERLND, 514,PWATER ,AGWI , 5,1951 ,2001 ,yearly
PERLND, 514,PWATER ,AGWO , 5,1951 ,2001 ,yearly
PERLND, 514,PWATER ,AGWS , 5,1951 ,2001 ,yearly
PERLND, 514,PWATER ,BASET, 5,1951 ,2001 ,yearly
PERLND, 514,PWATER ,CEPE , 5,1951 ,2001 ,yearly
PERLND, 514,PWATER ,CEPS , 5,1951 ,2001 ,yearly
PERLND, 514,PWATER ,GWVS , 5,1951 ,2001 ,yearly
PERLND, 514,PWATER ,IFWI , 5,1951 ,2001 ,yearly
PERLND, 514,PWATER ,IFWO , 5,1951 ,2001 ,yearly
PERLND, 514,PWATER ,IFWS , 5,1951 ,2001 ,yearly
PERLND, 514,PWATER ,IGWI , 5,1951 ,2001 ,yearly
PERLND, 514,PWATER ,INFIL, 5,1951 ,2001 ,yearly
PERLND, 514,PWATER ,LZET , 5,1951 ,2001 ,yearly
PERLND, 514,PWATER ,LZI , 5,1951 ,2001 ,yearly
PERLND, 514,PWATER ,LZS , 5,1951 ,2001 ,yearly
PERLND, 514,PWATER ,PERC , 5,1951 ,2001 ,yearly
PERLND, 514,PWATER ,PERO , 5,1951 ,2001 ,yearly
PERLND, 514,PWATER ,PERS , 5,1951 ,2001 ,yearly
PERLND, 514,PWATER ,PET , 5,1951 ,2001 ,yearly
PERLND, 514,PWATER ,SUPY , 5,1951 ,2001 ,yearly
PERLND, 514,PWATER ,SURO , 5,1951 ,2001 ,yearly
PERLND, 514,PWATER ,SURS , 5,1951 ,2001 ,yearly
PERLND, 514,PWATER ,TAET , 5,1951 ,2001 ,yearly
PERLND, 514,PWATER ,UZET , 5,1951 ,2001 ,yearly
PERLND, 514,PWATER ,UZI , 5,1951 ,2001 ,yearly
PERLND, 514,PWATER ,UZS , 5,1951 ,2001 ,yearly
PERLND, 515,PWATER ,AGWET, 5,1951 ,2001 ,yearly
PERLND, 515,PWATER ,AGWI , 5,1951 ,2001 ,yearly
PERLND, 515,PWATER ,AGWO , 5,1951 ,2001 ,yearly
PERLND, 515,PWATER ,AGWS , 5,1951 ,2001 ,yearly
PERLND, 515,PWATER ,BASET, 5,1951 ,2001 ,yearly
PERLND, 515,PWATER ,CEPE , 5,1951 ,2001 ,yearly
PERLND, 515,PWATER ,CEPS , 5,1951 ,2001 ,yearly
PERLND, 515,PWATER ,GWVS , 5,1951 ,2001 ,yearly
PERLND, 515,PWATER ,IFWI , 5,1951 ,2001 ,yearly
PERLND, 515,PWATER ,IFWO , 5,1951 ,2001 ,yearly
PERLND, 515,PWATER ,IFWS , 5,1951 ,2001 ,yearly
PERLND, 515,PWATER ,IGWI , 5,1951 ,2001 ,yearly
PERLND, 515,PWATER ,INFIL, 5,1951 ,2001 ,yearly
PERLND, 515,PWATER ,LZET , 5,1951 ,2001 ,yearly
PERLND, 515,PWATER ,LZI , 5,1951 ,2001 ,yearly
PERLND, 515,PWATER ,LZS , 5,1951 ,2001 ,yearly
PERLND, 515,PWATER ,PERC , 5,1951 ,2001 ,yearly
PERLND, 515,PWATER ,PERO , 5,1951 ,2001 ,yearly
PERLND, 515,PWATER ,PERS , 5,1951 ,2001 ,yearly
PERLND, 515,PWATER ,PET , 5,1951 ,2001 ,yearly
PERLND, 515,PWATER ,SUPY , 5,1951 ,2001 ,yearly
PERLND, 515,PWATER ,SURO , 5,1951 ,2001 ,yearly
PERLND, 515,PWATER ,SURS , 5,1951 ,2001 ,yearly
PERLND, 515,PWATER ,TAET , 5,1951 ,2001 ,yearly
PERLND, 515,PWATER ,UZET , 5,1951 ,2001 ,yearly
PERLND, 515,PWATER ,UZI , 5,1951 ,2001 ,yearly
PERLND, 515,PWATER ,UZS , 5,1951 ,2001 ,yearly
PERLND, 611,PWATER ,AGWET, 5,1951 ,2001 ,yearly
PERLND, 611,PWATER ,AGWI , 5,1951 ,2001 ,yearly
PERLND, 611,PWATER ,AGWO , 5,1951 ,2001 ,yearly
PERLND, 611,PWATER ,AGWS , 5,1951 ,2001 ,yearly
PERLND, 611,PWATER ,BASET, 5,1951 ,2001 ,yearly
PERLND, 611,PWATER ,CEPE , 5,1951 ,2001 ,yearly
PERLND, 611,PWATER ,CEPS , 5,1951 ,2001 ,yearly
PERLND, 611,PWATER ,GWVS , 5,1951 ,2001 ,yearly
PERLND, 611,PWATER ,IFWI , 5,1951 ,2001 ,yearly
PERLND, 611,PWATER ,IFWO , 5,1951 ,2001 ,yearly
PERLND, 611,PWATER ,IFWS , 5,1951 ,2001 ,yearly
PERLND, 611,PWATER ,IGWI , 5,1951 ,2001 ,yearly
PERLND, 611,PWATER ,INFIL, 5,1951 ,2001 ,yearly
PERLND, 611,PWATER ,LZET , 5,1951 ,2001 ,yearly
PERLND, 611,PWATER ,LZI , 5,1951 ,2001 ,yearly
PERLND, 611,PWATER ,LZS , 5,1951 ,2001 ,yearly
PERLND, 611,PWATER ,PERC , 5,1951 ,2001 ,yearly
PERLND, 611,PWATER ,PERO , 5,1951 ,2001 ,yearly
PERLND, 611,PWATER ,PERS , 5,1951 ,2001 ,yearly
PERLND, 611,PWATER ,PET , 5,1951 ,2001 ,yearly
PERLND, 611,PWATER ,SUPY , 5,1951 ,2001 ,yearly
PERLND, 611,PWATER ,SURO , 5,1951 ,2001 ,yearly
PERLND, 611,PWATER ,SURS , 5,1951 ,2001 ,yearly
PERLND, 611,PWATER ,TAET , 5,1951 ,2001 ,yearly
PERLND, 611,PWATER ,UZET , 5,1951 ,2001 ,yearly
PERLND, 611,PWATER ,UZI , 5,1951 ,2001 ,yearly
PERLND, 611,PWATER ,UZS , 5,1951 ,2001 ,yearly
PERLND, 612,PWATER ,AGWET, 5,1951 ,2001 ,yearly
PERLND, 612,PWATER ,AGWI , 5,1951 ,2001 ,yearly
PERLND, 612,PWATER ,AGWO , 5,1951 ,2001 ,yearly
PERLND, 612,PWATER ,AGWS , 5,1951 ,2001 ,yearly
PERLND, 612,PWATER ,BASET, 5,1951 ,2001 ,yearly
PERLND, 612,PWATER ,CEPE , 5,1951 ,2001 ,yearly
PERLND, 612,PWATER ,CEPS , 5,1951 ,2001 ,yearly
PERLND, 612,PWATER ,GWVS , 5,1951 ,2001 ,yearly
PERLND, 612,PWATER ,IFWI , 5,1951 ,2001 ,yearly
PERLND, 612,PWATER ,IFWO , 5,1951 ,2001 ,yearly
PERLND, 612,PWATER ,IFWS , 5,1951 ,2001 ,yearly
PERLND, 612,PWATER ,IGWI , 5,1951 ,2001 ,yearly
PERLND, 612,PWATER ,INFIL, 5,1951 ,2001 ,yearly
PERLND, 612,PWATER ,LZET , 5,1951 ,2001 ,yearly
PERLND, 612,PWATER ,LZI , 5,1951 ,2001 ,yearly
PERLND, 612,PWATER ,LZS , 5,1951 ,2001 ,yearly
PERLND, 612,PWATER ,PERC , 5,1951 ,2001 ,yearly
PERLND, 612,PWATER ,PERO , 5,1951 ,2001 ,yearly
PERLND, 612,PWATER ,PERS , 5,1951 ,2001 ,yearly
PERLND, 612,PWATER ,PET , 5,1951 ,2001 ,yearly
PERLND, 612,PWATER ,SUPY , 5,1951 ,2001 ,yearly
PERLND, 612,PWATER ,SURO , 5,1951 ,2001 ,yearly
PERLND, 612,PWATER ,SURS , 5,1951 ,2001 ,yearly
PERLND, 612,PWATER ,TAET , 5,1951 ,2001 ,yearly
PERLND, 612,PWATER ,UZET , 5,1951 ,2001 ,yearly
PERLND, 612,PWATER ,UZI , 5,1951 ,2001 ,yearly
PERLND, 612,PWATER ,UZS , 5,1951 ,2001 ,yearly
PERLND, 613,PWATER ,AGWET, 5,1951 ,2001 ,yearly
PERLND, 613,PWATER ,AGWI , 5,1951 ,2001 ,yearly
PERLND, 613,PWATER ,AGWO , 5,1951 ,2001 ,yearly
PERLND, 613,PWATER ,AGWS , 5,1951 ,2001 ,yearly
PERLND, 613,PWATER ,BASET, 5,1951 ,2001 ,yearly
PERLND, 613,PWATER ,CEPE , 5,1951 ,2001 ,yearly
PERLND, 613,PWATER ,CEPS , 5,1951 ,2001 ,yearly
PERLND, 613,PWATER ,GWVS , 5,1951 ,2001 ,yearly
PERLND, 613,PWATER ,IFWI , 5,1951 ,2001 ,yearly
PERLND, 613,PWATER ,IFWO , 5,1951 ,2001 ,yearly
PERLND, 613,PWATER ,IFWS , 5,1951 ,2001 ,yearly
PERLND, 613,PWATER ,IGWI , 5,1951 ,2001 ,yearly
PERLND, 613,PWATER ,INFIL, 5,1951 ,2001 ,yearly
PERLND, 613,PWATER ,LZET , 5,1951 ,2001 ,yearly
PERLND, 613,PWATER ,LZI , 5,1951 ,2001 ,yearly
PERLND, 613,PWATER ,LZS , 5,1951 ,2001 ,yearly
PERLND, 613,PWATER ,PERC , 5,1951 ,2001 ,yearly
PERLND, 613,PWATER ,PERO , 5,1951 ,2001 ,yearly
PERLND, 613,PWATER ,PERS , 5,1951 ,2001 ,yearly
PERLND, 613,PWATER ,PET , 5,1951 ,2001 ,yearly
PERLND, 613,PWATER ,SUPY , 5,1951 ,2001 ,yearly
PERLND, 613,PWATER ,SURO , 5,1951 ,2001 ,yearly
PERLND, 613,PWATER ,SURS , 5,1951 ,2001 ,yearly
PERLND, 613,PWATER ,TAET , 5,1951 ,2001 ,yearly
PERLND, 613,PWATER ,UZET , 5,1951 ,2001 ,yearly
PERLND, 613,PWATER ,UZI , 5,1951 ,2001 ,yearly
PERLND, 613,PWATER ,UZS , 5,1951 ,2001 ,yearly
PERLND, 614,PWATER ,AGWET, 5,1951 ,2001 ,yearly
PERLND, 614,PWATER ,AGWI , 5,1951 ,2001 ,yearly
PERLND, 614,PWATER ,AGWO , 5,1951 ,2001 ,yearly
PERLND, 614,PWATER ,AGWS , 5,1951 ,2001 ,yearly
PERLND, 614,PWATER ,BASET, 5,1951 ,2001 ,yearly
PERLND, 614,PWATER ,CEPE , 5,1951 ,2001 ,yearly
PERLND, 614,PWATER ,CEPS , 5,1951 ,2001 ,yearly
PERLND, 614,PWATER ,GWVS , 5,1951 ,2001 ,yearly
PERLND, 614,PWATER ,IFWI , 5,1951 ,2001 ,yearly
PERLND, 614,PWATER ,IFWO , 5,1951 ,2001 ,yearly
PERLND, 614,PWATER ,IFWS , 5,1951 ,2001 ,yearly
PERLND, 614,PWATER ,IGWI , 5,1951 ,2001 ,yearly
PERLND, 614,PWATER ,INFIL, 5,1951 ,2001 ,yearly
PERLND, 614,PWATER ,LZET , 5,1951 ,2001 ,yearly
PERLND, 614,PWATER ,LZI , 5,1951 ,2001 ,yearly
PERLND, 614,PWATER ,LZS , 5,1951 ,2001 ,yearly
PERLND, 614,PWATER ,PERC , 5,1951 ,2001 ,yearly
PERLND, 614,PWATER ,PERO , 5,1951 ,2001 ,yearly
PERLND, 614,PWATER ,PERS , 5,1951 ,2001 ,yearly
PERLND, 614,PWATER ,PET , 5,1951 ,2001 ,yearly
PERLND, 614,PWATER ,SUPY , 5,1951 ,2001 ,yearly
PERLND, 614,PWATER ,SURO , 5,1951 ,2001 ,yearly
PERLND, 614,PWATER ,SURS , 5,1951 ,2001 ,yearly
PERLND, 614,PWATER ,TAET , 5,1951 ,2001 ,yearly
PERLND, 614,PWATER ,UZET , 5,1951 ,2001 ,yearly
PERLND, 614,PWATER ,UZI , 5,1951 ,2001 ,yearly
PERLND, 614,PWATER ,UZS , 5,1951 ,2001 ,yearly
PERLND, 615,PWATER ,AGWET, 5,1951 ,2001 ,yearly
PERLND, 615,PWATER ,AGWI , 5,1951 ,2001 ,yearly
PERLND, 615,PWATER ,AGWO , 5,1951 ,2001 ,yearly
PERLND, 615,PWATER ,AGWS , 5,1951 ,2001 ,yearly
PERLND, 615,PWATER ,BASET, 5,1951 ,2001 ,yearly
PERLND, 615,PWATER ,CEPE , 5,1951 ,2001 ,yearly
PERLND, 615,PWATER ,CEPS , 5,1951 ,2001 ,yearly
PERLND, 615,PWATER ,GWVS , 5,1951 ,2001 ,yearly
PERLND, 615,PWATER ,IFWI , 5,1951 ,2001 ,yearly
PERLND, 615,PWATER ,IFWO , 5,1951 ,2001 ,yearly
PERLND, 615,PWATER ,IFWS , 5,1951 ,2001 ,yearly
PERLND, 615,PWATER ,IGWI , 5,1951 ,2001 ,yearly
PERLND, 615,PWATER ,INFIL, 5,1951 ,2001 ,yearly
PERLND, 615,PWATER ,LZET , 5,1951 ,2001 ,yearly
PERLND, 615,PWATER ,LZI , 5,1951 ,2001 ,yearly
PERLND, 615,PWATER ,LZS , 5,1951 ,2001 ,yearly
PERLND, 615,PWATER ,PERC , 5,1951 ,2001 ,yearly
PERLND, 615,PWATER ,PERO , 5,1951 ,2001 ,yearly
PERLND, 615,PWATER ,PERS , 5,1951 ,2001 ,yearly
PERLND, 615,PWATER ,PET , 5,1951 ,2001 ,yearly
PERLND, 615,PWATER ,SUPY , 5,1951 ,2001 ,yearly
PERLND, 615,PWATER ,SURO , 5,1951 ,2001 ,yearly
PERLND, 615,PWATER ,SURS , 5,1951 ,2001 ,yearly
PERLND, 615,PWATER ,TAET , 5,1951 ,2001 ,yearly
PERLND, 615,PWATER ,UZET , 5,1951 ,2001 ,yearly
PERLND, 615,PWATER ,UZI , 5,1951 ,2001 ,yearly
PERLND, 615,PWATER ,UZS , 5,1951 ,2001 ,yearly
PERLND, 711,PWATER ,AGWET, 5,1951 ,2001 ,yearly
PERLND, 711,PWATER ,AGWI , 5,1951 ,2001 ,yearly
PERLND, 711,PWATER ,AGWO , 5,1951 ,2001 ,yearly
PERLND, 711,PWATER ,AGWS , 5,1951 ,2001 ,yearly
PERLND, 711,PWATER ,BASET, 5,1951 ,2001 ,yearly
PERLND, 711,PWATER ,CEPE , 5,1951 ,2001 ,yearly
PERLND, 711,PWATER ,CEPS , 5,1951 ,2001 ,yearly
PERLND, 711,PWATER ,GWVS , 5,1951 ,2001 ,yearly
PERLND, 711,PWATER ,IFWI , 5,1951 ,2001 ,yearly
PERLND, 711,PWATER ,IFWO , 5,1951 ,2001 ,yearly
PERLND, 711,PWATER ,IFWS , 5,1951 ,2001 ,yearly
PERLND, 711,PWATER ,IGWI , 5,1951 ,2001 ,yearly
PERLND, 711,PWATER ,INFIL, 5,1951 ,2001 ,yearly
PERLND, 711,PWATER ,LZET , 5,1951 ,2001 ,yearly
PERLND, 711,PWATER ,LZI , 5,1951 ,2001 ,yearly
PERLND, 711,PWATER ,LZS , 5,1951 ,2001 ,yearly
PERLND, 711,PWATER ,PERC , 5,1951 ,2001 ,yearly
PERLND, 711,PWATER ,PERO , 5,1951 ,2001 ,yearly
PERLND, 711,PWATER ,PERS , 5,1951 ,2001 ,yearly
PERLND, 711,PWATER ,PET , 5,1951 ,2001 ,yearly
PERLND, 711,PWATER ,SUPY , 5,1951 ,2001 ,yearly
PERLND, 711,PWATER ,SURO , 5,1951 ,2001 ,yearly
PERLND, 711,PWATER ,SURS , 5,1951 ,2001 ,yearly
PERLND, 711,PWATER ,TAET , 5,1951 ,2001 ,yearly
PERLND, 711,PWATER ,UZET , 5,1951 ,2001 ,yearly
PERLND, 711,PWATER ,UZI , 5,1951 ,2001 ,yearly
PERLND, 711,PWATER ,UZS , 5,1951 ,2001 ,yearly
PERLND, 712,PWATER ,AGWET, 5,1951 ,2001 ,yearly
PERLND, 712,PWATER ,AGWI , 5,1951 ,2001 ,yearly
PERLND, 712,PWATER ,AGWO , 5,1951 ,2001 ,yearly
PERLND, 712,PWATER ,AGWS , 5,1951 ,2001 ,yearly
PERLND, 712,PWATER ,BASET, 5,1951 ,2001 ,yearly
PERLND, 712,PWATER ,CEPE , 5,1951 ,2001 ,yearly
PERLND, 712,PWATER ,CEPS , 5,1951 ,2001 ,yearly
PERLND, 712,PWATER ,GWVS , 5,1951 ,2001 ,yearly
PERLND, 712,PWATER ,IFWI , 5,1951 ,2001 ,yearly
PERLND, 712,PWATER ,IFWO , 5,1951 ,2001 ,yearly
PERLND, 712,PWATER ,IFWS , 5,1951 ,2001 ,yearly
PERLND, 712,PWATER ,IGWI , 5,1951 ,2001 ,yearly
PERLND, 712,PWATER ,INFIL, 5,1951 ,2001 ,yearly
PERLND, 712,PWATER ,LZET , 5,1951 ,2001 ,yearly
PERLND, 712,PWATER ,LZI , 5,1951 ,2001 ,yearly
PERLND, 712,PWATER ,LZS , 5,1951 ,2001 ,yearly
PERLND, 712,PWATER ,PERC , 5,1951 ,2001 ,yearly
PERLND, 712,PWATER ,PERO , 5,1951 ,2001 ,yearly
PERLND, 712,PWATER ,PERS , 5,1951 ,2001 ,yearly
PERLND, 712,PWATER ,PET , 5,1951 ,2001 ,yearly
PERLND, 712,PWATER ,SUPY , 5,1951 ,2001 ,yearly
PERLND, 712,PWATER ,SURO , 5,1951 ,2001 ,yearly
PERLND, 712,PWATER ,SURS , 5,1951 ,2001 ,yearly
PERLND, 712,PWATER ,TAET , 5,1951 ,2001 ,yearly
PERLND, 712,PWATER ,UZET , 5,1951 ,2001 ,yearly
PERLND, 712,PWATER ,UZI , 5,1951 ,2001 ,yearly
PERLND, 712,PWATER ,UZS , 5,1951 ,2001 ,yearly
PERLND, 713,PWATER ,AGWET, 5,1951 ,2001 ,yearly
PERLND, 713,PWATER ,AGWI , 5,1951 ,2001 ,yearly
PERLND, 713,PWATER ,AGWO , 5,1951 ,2001 ,yearly
PERLND, 713,PWATER ,AGWS , 5,1951 ,2001 ,yearly
PERLND, 713,PWATER ,BASET, 5,1951 ,2001 ,yearly
PERLND, 713,PWATER ,CEPE , 5,1951 ,2001 ,yearly
PERLND, 713,PWATER ,CEPS , 5,1951 ,2001 ,yearly
PERLND, 713,PWATER ,GWVS , 5,1951 ,2001 ,yearly
PERLND, 713,PWATER ,IFWI , 5,1951 ,2001 ,yearly
PERLND, 713,PWATER ,IFWO , 5,1951 ,2001 ,yearly
PERLND, 713,PWATER ,IFWS , 5,1951 ,2001 ,yearly
PERLND, 713,PWATER ,IGWI , 5,1951 ,2001 ,yearly
PERLND, 713,PWATER ,INFIL, 5,1951 ,2001 ,yearly
PERLND, 713,PWATER ,LZET , 5,1951 ,2001 ,yearly
PERLND, 713,PWATER ,LZI , 5,1951 ,2001 ,yearly
PERLND, 713,PWATER ,LZS , 5,1951 ,2001 ,yearly
PERLND, 713,PWATER ,PERC , 5,1951 ,2001 ,yearly
PERLND, 713,PWATER ,PERO , 5,1951 ,2001 ,yearly
PERLND, 713,PWATER ,PERS , 5,1951 ,2001 ,yearly
PERLND, 713,PWATER ,PET , 5,1951 ,2001 ,yearly
PERLND, 713,PWATER ,SUPY , 5,1951 ,2001 ,yearly
PERLND, 713,PWATER ,SURO , 5,1951 ,2001 ,yearly
PERLND, 713,PWATER ,SURS , 5,1951 ,2001 ,yearly
PERLND, 713,PWATER ,TAET , 5,1951 ,2001 ,yearly
PERLND, 713,PWATER ,UZET , 5,1951 ,2001 ,yearly
PERLND, 713,PWATER ,UZI , 5,1951 ,2001 ,yearly
PERLND, 713,PWATER ,UZS , 5,1951 ,2001 ,yearly
PERLND, 714,PWATER ,AGWET, 5,1951 ,2001 ,yearly
PERLND, 714,PWATER ,AGWI , 5,1951 ,2001 ,yearly
PERLND, 714,PWATER ,AGWO , 5,1951 ,2001 ,yearly
PERLND, 714,PWATER ,AGWS , 5,1951 ,2001 ,yearly
PERLND, 714,PWATER ,BASET, 5,1951 ,2001 ,yearly
PERLND, 714,PWATER ,CEPE , 5,1951 ,2001 ,yearly
PERLND, 714,PWATER ,CEPS , 5,1951 ,2001 ,yearly
PERLND, 714,PWATER ,GWVS , 5,1951 ,2001 ,yearly
PERLND, 714,PWATER ,IFWI , 5,1951 ,2001 ,yearly
PERLND, 714,PWATER ,IFWO , 5,1951 ,2001 ,yearly
PERLND, 714,PWATER ,IFWS , 5,1951 ,2001 ,yearly
PERLND, 714,PWATER ,IGWI , 5,1951 ,2001 ,yearly
PERLND, 714,PWATER ,INFIL, 5,1951 ,2001 ,yearly
PERLND, 714,PWATER ,LZET , 5,1951 ,2001 ,yearly
PERLND, 714,PWATER ,LZI , 5,1951 ,2001 ,yearly
PERLND, 714,PWATER ,LZS , 5,1951 ,2001 ,yearly
PERLND, 714,PWATER ,PERC , 5,1951 ,2001 ,yearly
PERLND, 714,PWATER ,PERO , 5,1951 ,2001 ,yearly
PERLND, 714,PWATER ,PERS , 5,1951 ,2001 ,yearly
PERLND, 714,PWATER ,PET , 5,1951 ,2001 ,yearly
PERLND, 714,PWATER ,SUPY , 5,1951 ,2001 ,yearly
PERLND, 714,PWATER ,SURO , 5,1951 ,2001 ,yearly
PERLND, 714,PWATER ,SURS , 5,1951 ,2001 ,yearly
PERLND, 714,PWATER ,TAET , 5,1951 ,2001 ,yearly
PERLND, 714,PWATER ,UZET , 5,1951 ,2001 ,yearly
PERLND, 714,PWATER ,UZI , 5,1951 ,2001 ,yearly
PERLND, 714,PWATER ,UZS , 5,1951 ,2001 ,yearly
PERLND, 715,PWATER ,AGWET, 5,1951 ,2001 ,yearly
PERLND, 715,PWATER ,AGWI , 5,1951 ,2001 ,yearly
PERLND, 715,PWATER ,AGWO , 5,1951 ,2001 ,yearly
PERLND, 715,PWATER ,AGWS , 5,1951 ,2001 ,yearly
PERLND, 715,PWATER ,BASET, 5,1951 ,2001 ,yearly
PERLND, 715,PWATER ,CEPE , 5,1951 ,2001 ,yearly
PERLND, 715,PWATER ,CEPS , 5,1951 ,2001 ,yearly
PERLND, 715,PWATER ,GWVS , 5,1951 ,2001 ,yearly
PERLND, 715,PWATER ,IFWI , 5,1951 ,2001 ,yearly
PERLND, 715,PWATER ,IFWO , 5,1951 ,2001 ,yearly
PERLND, 715,PWATER ,IFWS , 5,1951 ,2001 ,yearly
PERLND, 715,PWATER ,IGWI , 5,1951 ,2001 ,yearly
PERLND, 715,PWATER ,INFIL, 5,1951 ,2001 ,yearly
PERLND, 715,PWATER ,LZET , 5,1951 ,2001 ,yearly
PERLND, 715,PWATER ,LZI , 5,1951 ,2001 ,yearly
PERLND, 715,PWATER ,LZS , 5,1951 ,2001 ,yearly
PERLND, 715,PWATER ,PERC , 5,1951 ,2001 ,yearly
PERLND, 715,PWATER ,PERO , 5,1951 ,2001 ,yearly
PERLND, 715,PWATER ,PERS , 5,1951 ,2001 ,yearly
PERLND, 715,PWATER ,PET , 5,1951 ,2001 ,yearly
PERLND, 715,PWATER ,SUPY , 5,1951 ,2001 ,yearly
PERLND, 715,PWATER ,SURO , 5,1951 ,2001 ,yearly
PERLND, 715,PWATER ,SURS , 5,1951 ,2001 ,yearly
PERLND, 715,PWATER ,TAET , 5,1951 ,2001 ,yearly
PERLND, 715,PWATER ,UZET , 5,1951 ,2001 ,yearly
PERLND, 715,PWATER ,UZI , 5,1951 ,2001 ,yearly
PERLND, 715,PWATER ,UZS , 5,1951 ,2001 ,yearly
PERLND, 811,PWATER ,AGWET, 5,1951 ,2001 ,yearly
PERLND, 811,PWATER ,AGWI , 5,1951 ,2001 ,yearly
PERLND, 811,PWATER ,AGWO , 5,1951 ,2001 ,yearly
PERLND, 811,PWATER ,AGWS , 5,1951 ,2001 ,yearly
PERLND, 811,PWATER ,BASET, 5,1951 ,2001 ,yearly
PERLND, 811,PWATER ,CEPE , 5,1951 ,2001 ,yearly
PERLND, 811,PWATER ,CEPS , 5,1951 ,2001 ,yearly
PERLND, 811,PWATER ,GWVS , 5,1951 ,2001 ,yearly
PERLND, 811,PWATER ,IFWI , 5,1951 ,2001 ,yearly
PERLND, 811,PWATER ,IFWO , 5,1951 ,2001 ,yearly
PERLND, 811,PWATER ,IFWS , 5,1951 ,2001 ,yearly
PERLND, 811,PWATER ,IGWI , 5,1951 ,2001 ,yearly
PERLND, 811,PWATER ,INFIL, 5,1951 ,2001 ,yearly
PERLND, 811,PWATER ,LZET , 5,1951 ,2001 ,yearly
PERLND, 811,PWATER ,LZI , 5,1951 ,2001 ,yearly
PERLND, 811,PWATER ,LZS , 5,1951 ,2001 ,yearly
PERLND, 811,PWATER ,PERC , 5,1951 ,2001 ,yearly
PERLND, 811,PWATER ,PERO , 5,1951 ,2001 ,yearly
PERLND, 811,PWATER ,PERS , 5,1951 ,2001 ,yearly
PERLND, 811,PWATER ,PET , 5,1951 ,2001 ,yearly
PERLND, 811,PWATER ,SUPY , 5,1951 ,2001 ,yearly
PERLND, 811,PWATER ,SURO , 5,1951 ,2001 ,yearly
PERLND, 811,PWATER ,SURS , 5,1951 ,2001 ,yearly
PERLND, 811,PWATER ,TAET , 5,1951 ,2001 ,yearly
PERLND, 811,PWATER ,UZET , 5,1951 ,2001 ,yearly
PERLND, 811,PWATER ,UZI , 5,1951 ,2001 ,yearly
PERLND, 811,PWATER ,UZS , 5,1951 ,2001 ,yearly
PERLND, 812,PWATER ,AGWET, 5,1951 ,2001 ,yearly
PERLND, 812,PWATER ,AGWI , 5,1951 ,2001 ,yearly
PERLND, 812,PWATER ,AGWO , 5,1951 ,2001 ,yearly
PERLND, 812,PWATER ,AGWS , 5,1951 ,2001 ,yearly
PERLND, 812,PWATER ,BASET, 5,1951 ,2001 ,yearly
PERLND, 812,PWATER ,CEPE , 5,1951 ,2001 ,yearly
PERLND, 812,PWATER ,CEPS , 5,1951 ,2001 ,yearly
PERLND, 812,PWATER ,GWVS , 5,1951 ,2001 ,yearly
PERLND, 812,PWATER ,IFWI , 5,1951 ,2001 ,yearly
PERLND, 812,PWATER ,IFWO , 5,1951 ,2001 ,yearly
PERLND, 812,PWATER ,IFWS , 5,1951 ,2001 ,yearly
PERLND, 812,PWATER ,IGWI , 5,1951 ,2001 ,yearly
PERLND, 812,PWATER ,INFIL, 5,1951 ,2001 ,yearly
PERLND, 812,PWATER ,LZET , 5,1951 ,2001 ,yearly
PERLND, 812,PWATER ,LZI , 5,1951 ,2001 ,yearly
PERLND, 812,PWATER ,LZS , 5,1951 ,2001 ,yearly
PERLND, 812,PWATER ,PERC , 5,1951 ,2001 ,yearly
PERLND, 812,PWATER ,PERO , 5,1951 ,2001 ,yearly
PERLND, 812,PWATER ,PERS , 5,1951 ,2001 ,yearly
PERLND, 812,PWATER ,PET , 5,1951 ,2001 ,yearly
PERLND, 812,PWATER ,SUPY , 5,1951 ,2001 ,yearly
PERLND, 812,PWATER ,SURO , 5,1951 ,2001 ,yearly
PERLND, 812,PWATER ,SURS , 5,1951 ,2001 ,yearly
PERLND, 812,PWATER ,TAET , 5,1951 ,2001 ,yearly
PERLND, 812,PWATER ,UZET , 5,1951 ,2001 ,yearly
PERLND, 812,PWATER ,UZI , 5,1951 ,2001 ,yearly
PERLND, 812,PWATER ,UZS , 5,1951 ,2001 ,yearly
PERLND, 813,PWATER ,AGWET, 5,1951 ,2001 ,yearly
PERLND, 813,PWATER ,AGWI , 5,1951 ,2001 ,yearly
PERLND, 813,PWATER ,AGWO , 5,1951 ,2001 ,yearly
PERLND, 813,PWATER ,AGWS , 5,1951 ,2001 ,yearly
PERLND, 813,PWATER ,BASET, 5,1951 ,2001 ,yearly
PERLND, 813,PWATER ,CEPE , 5,1951 ,2001 ,yearly
PERLND, 813,PWATER ,CEPS , 5,1951 ,2001 ,yearly
PERLND, 813,PWATER ,GWVS , 5,1951 ,2001 ,yearly
PERLND, 813,PWATER ,IFWI , 5,1951 ,2001 ,yearly
PERLND, 813,PWATER ,IFWO , 5,1951 ,2001 ,yearly
PERLND, 813,PWATER ,IFWS , 5,1951 ,2001 ,yearly
PERLND, 813,PWATER ,IGWI , 5,1951 ,2001 ,yearly
PERLND, 813,PWATER ,INFIL, 5,1951 ,2001 ,yearly
PERLND, 813,PWATER ,LZET , 5,1951 ,2001 ,yearly
PERLND, 813,PWATER ,LZI , 5,1951 ,2001 ,yearly
PERLND, 813,PWATER ,LZS , 5,1951 ,2001 ,yearly
PERLND, 813,PWATER ,PERC , 5,1951 ,2001 ,yearly
PERLND, 813,PWATER ,PERO , 5,1951 ,2001 ,yearly
PERLND, 813,PWATER ,PERS , 5,1951 ,2001 ,yearly
PERLND, 813,PWATER ,PET , 5,1951 ,2001 ,yearly
PERLND, 813,PWATER ,SUPY , 5,1951 ,2001 ,yearly
PERLND, 813,PWATER ,SURO , 5,1951 ,2001 ,yearly
PERLND, 813,PWATER ,SURS , 5,1951 ,2001 ,yearly
PERLND, 813,PWATER ,TAET , 5,1951 ,2001 ,yearly
PERLND, 813,PWATER ,UZET , 5,1951 ,2001 ,yearly
PERLND, 813,PWATER ,UZI , 5,1951 ,2001 ,yearly
PERLND, 813,PWATER ,UZS , 5,1951 ,2001 ,yearly
PERLND, 814,PWATER ,AGWET, 5,1951 ,2001 ,yearly
PERLND, 814,PWATER ,AGWI , 5,1951 ,2001 ,yearly
PERLND, 814,PWATER ,AGWO , 5,1951 ,2001 ,yearly
PERLND, 814,PWATER ,AGWS , 5,1951 ,2001 ,yearly
PERLND, 814,PWATER ,BASET, 5,1951 ,2001 ,yearly
PERLND, 814,PWATER ,CEPE , 5,1951 ,2001 ,yearly
PERLND, 814,PWATER ,CEPS , 5,1951 ,2001 ,yearly
PERLND, 814,PWATER ,GWVS , 5,1951 ,2001 ,yearly
PERLND, 814,PWATER ,IFWI , 5,1951 ,2001 ,yearly
PERLND, 814,PWATER ,IFWO , 5,1951 ,2001 ,yearly
PERLND, 814,PWATER ,IFWS , 5,1951 ,2001 ,yearly
PERLND, 814,PWATER ,IGWI , 5,1951 ,2001 ,yearly
PERLND, 814,PWATER ,INFIL, 5,1951 ,2001 ,yearly
PERLND, 814,PWATER ,LZET , 5,1951 ,2001 ,yearly
PERLND, 814,PWATER ,LZI , 5,1951 ,2001 ,yearly
PERLND, 814,PWATER ,LZS , 5,1951 ,2001 ,yearly
PERLND, 814,PWATER ,PERC , 5,1951 ,2001 ,yearly
PERLND, 814,PWATER ,PERO , 5,1951 ,2001 ,yearly
PERLND, 814,PWATER ,PERS , 5,1951 ,2001 ,yearly
PERLND, 814,PWATER ,PET , 5,1951 ,2001 ,yearly
PERLND, 814,PWATER ,SUPY , 5,1951 ,2001 ,yearly
PERLND, 814,PWATER ,SURO , 5,1951 ,2001 ,yearly
PERLND, 814,PWATER ,SURS , 5,1951 ,2001 ,yearly
PERLND, 814,PWATER ,TAET , 5,1951 ,2001 ,yearly
PERLND, 814,PWATER ,UZET , 5,1951 ,2001 ,yearly
PERLND, 814,PWATER ,UZI , 5,1951 ,2001 ,yearly
PERLND, 814,PWATER ,UZS , 5,1951 ,2001 ,yearly
PERLND, 815,PWATER ,AGWET, 5,1951 ,2001 ,yearly
PERLND, 815,PWATER ,AGWI , 5,1951 ,2001 ,yearly
PERLND, 815,PWATER ,AGWO , 5,1951 ,2001 ,yearly
PERLND, 815,PWATER ,AGWS , 5,1951 ,2001 ,yearly
PERLND, 815,PWATER ,BASET, 5,1951 ,2001 ,yearly
PERLND, 815,PWATER ,CEPE , 5,1951 ,2001 ,yearly
PERLND, 815,PWATER ,CEPS , 5,1951 ,2001 ,yearly
PERLND, 815,PWATER ,GWVS , 5,1951 ,2001 ,yearly
PERLND, 815,PWATER ,IFWI , 5,1951 ,2001 ,yearly
PERLND, 815,PWATER ,IFWO , 5,1951 ,2001 ,yearly
PERLND, 815,PWATER ,IFWS , 5,1951 ,2001 ,yearly
PERLND, 815,PWATER ,IGWI , 5,1951 ,2001 ,yearly
PERLND, 815,PWATER ,INFIL, 5,1951 ,2001 ,yearly
PERLND, 815,PWATER ,LZET , 5,1951 ,2001 ,yearly
PERLND, 815,PWATER ,LZI , 5,1951 ,2001 ,yearly
PERLND, 815,PWATER ,LZS , 5,1951 ,2001 ,yearly
PERLND, 815,PWATER ,PERC , 5,1951 ,2001 ,yearly
PERLND, 815,PWATER ,PERO , 5,1951 ,2001 ,yearly
PERLND, 815,PWATER ,PERS , 5,1951 ,2001 ,yearly
PERLND, 815,PWATER ,PET , 5,1951 ,2001 ,yearly
PERLND, 815,PWATER ,SUPY , 5,1951 ,2001 ,yearly
PERLND, 815,PWATER ,SURO , 5,1951 ,2001 ,yearly
PERLND, 815,PWATER ,SURS , 5,1951 ,2001 ,yearly
PERLND, 815,PWATER ,TAET , 5,1951 ,2001 ,yearly
PERLND, 815,PWATER ,UZET , 5,1951 ,2001 ,yearly
PERLND, 815,PWATER ,UZI , 5,1951 ,2001 ,yearly
PERLND, 815,PWATER ,UZS , 5,1951 ,2001 ,yearly
PERLND, 822,PWATER ,AGWET, 5,1951 ,2001 ,yearly
PERLND, 822,PWATER ,AGWI , 5,1951 ,2001 ,yearly
PERLND, 822,PWATER ,AGWO , 5,1951 ,2001 ,yearly
PERLND, 822,PWATER ,AGWS , 5,1951 ,2001 ,yearly
PERLND, 822,PWATER ,BASET, 5,1951 ,2001 ,yearly
PERLND, 822,PWATER ,CEPE , 5,1951 ,2001 ,yearly
PERLND, 822,PWATER ,CEPS , 5,1951 ,2001 ,yearly
PERLND, 822,PWATER ,GWVS , 5,1951 ,2001 ,yearly
PERLND, 822,PWATER ,IFWI , 5,1951 ,2001 ,yearly
PERLND, 822,PWATER ,IFWO , 5,1951 ,2001 ,yearly
PERLND, 822,PWATER ,IFWS , 5,1951 ,2001 ,yearly
PERLND, 822,PWATER ,IGWI , 5,1951 ,2001 ,yearly
PERLND, 822,PWATER ,INFIL, 5,1951 ,2001 ,yearly
PERLND, 822,PWATER ,LZET , 5,1951 ,2001 ,yearly
PERLND, 822,PWATER ,LZI , 5,1951 ,2001 ,yearly
PERLND, 822,PWATER ,LZS , 5,1951 ,2001 ,yearly
PERLND, 822,PWATER ,PERC , 5,1951 ,2001 ,yearly
PERLND, 822,PWATER ,PERO , 5,1951 ,2001 ,yearly
PERLND, 822,PWATER ,PERS , 5,1951 ,2001 ,yearly
PERLND, 822,PWATER ,PET , 5,1951 ,2001 ,yearly
PERLND, 822,PWATER ,SUPY , 5,1951 ,2001 ,yearly
PERLND, 822,PWATER ,SURO , 5,1951 ,2001 ,yearly
PERLND, 822,PWATER ,SURS , 5,1951 ,2001 ,yearly
PERLND, 822,PWATER ,TAET , 5,1951 ,2001 ,yearly
PERLND, 822,PWATER ,UZET , 5,1951 ,2001 ,yearly
PERLND, 822,PWATER ,UZI , 5,1951 ,2001 ,yearly
PERLND, 822,PWATER ,UZS , 5,1951 ,2001 ,yearly
PERLND, 823,PWATER ,AGWET, 5,1951 ,2001 ,yearly
PERLND, 823,PWATER ,AGWI , 5,1951 ,2001 ,yearly
PERLND, 823,PWATER ,AGWO , 5,1951 ,2001 ,yearly
PERLND, 823,PWATER ,AGWS , 5,1951 ,2001 ,yearly
PERLND, 823,PWATER ,BASET, 5,1951 ,2001 ,yearly
PERLND, 823,PWATER ,CEPE , 5,1951 ,2001 ,yearly
PERLND, 823,PWATER ,CEPS , 5,1951 ,2001 ,yearly
PERLND, 823,PWATER ,GWVS , 5,1951 ,2001 ,yearly
PERLND, 823,PWATER ,IFWI , 5,1951 ,2001 ,yearly
PERLND, 823,PWATER ,IFWO , 5,1951 ,2001 ,yearly
PERLND, 823,PWATER ,IFWS , 5,1951 ,2001 ,yearly
PERLND, 823,PWATER ,IGWI , 5,1951 ,2001 ,yearly
PERLND, 823,PWATER ,INFIL, 5,1951 ,2001 ,yearly
PERLND, 823,PWATER ,LZET , 5,1951 ,2001 ,yearly
PERLND, 823,PWATER ,LZI , 5,1951 ,2001 ,yearly
PERLND, 823,PWATER ,LZS , 5,1951 ,2001 ,yearly
PERLND, 823,PWATER ,PERC , 5,1951 ,2001 ,yearly
PERLND, 823,PWATER ,PERO , 5,1951 ,2001 ,yearly
PERLND, 823,PWATER ,PERS , 5,1951 ,2001 ,yearly
PERLND, 823,PWATER ,PET , 5,1951 ,2001 ,yearly
PERLND, 823,PWATER ,SUPY , 5,1951 ,2001 ,yearly
PERLND, 823,PWATER ,SURO , 5,1951 ,2001 ,yearly
PERLND, 823,PWATER ,SURS , 5,1951 ,2001 ,yearly
PERLND, 823,PWATER ,TAET , 5,1951 ,2001 ,yearly
PERLND, 823,PWATER ,UZET , 5,1951 ,2001 ,yearly
PERLND, 823,PWATER ,UZI , 5,1951 ,2001 ,yearly
PERLND, 823,PWATER ,UZS , 5,1951 ,2001 ,yearly
PERLND, 824,PWATER ,AGWET, 5,1951 ,2001 ,yearly
PERLND, 824,PWATER ,AGWI , 5,1951 ,2001 ,yearly
PERLND, 824,PWATER ,AGWO , 5,1951 ,2001 ,yearly
PERLND, 824,PWATER ,AGWS , 5,1951 ,2001 ,yearly
PERLND, 824,PWATER ,BASET, 5,1951 ,2001 ,yearly
PERLND, 824,PWATER ,CEPE , 5,1951 ,2001 ,yearly
PERLND, 824,PWATER ,CEPS , 5,1951 ,2001 ,yearly
PERLND, 824,PWATER ,GWVS , 5,1951 ,2001 ,yearly
PERLND, 824,PWATER ,IFWI , 5,1951 ,2001 ,yearly
PERLND, 824,PWATER ,IFWO , 5,1951 ,2001 ,yearly
PERLND, 824,PWATER ,IFWS , 5,1951 ,2001 ,yearly
PERLND, 824,PWATER ,IGWI , 5,1951 ,2001 ,yearly
PERLND, 824,PWATER ,INFIL, 5,1951 ,2001 ,yearly
PERLND, 824,PWATER ,LZET , 5,1951 ,2001 ,yearly
PERLND, 824,PWATER ,LZI , 5,1951 ,2001 ,yearly
PERLND, 824,PWATER ,LZS , 5,1951 ,2001 ,yearly
PERLND, 824,PWATER ,PERC , 5,1951 ,2001 ,yearly
PERLND, 824,PWATER ,PERO , 5,1951 ,2001 ,yearly
PERLND, 824,PWATER ,PERS , 5,1951 ,2001 ,yearly
PERLND, 824,PWATER ,PET , 5,1951 ,2001 ,yearly
PERLND, 824,PWATER ,SUPY , 5,1951 ,2001 ,yearly
PERLND, 824,PWATER ,SURO , 5,1951 ,2001 ,yearly
PERLND, 824,PWATER ,SURS , 5,1951 ,2001 ,yearly
PERLND, 824,PWATER ,TAET , 5,1951 ,2001 ,yearly
PERLND, 824,PWATER ,UZET , 5,1951 ,2001 ,yearly
PERLND, 824,PWATER ,UZI , 5,1951 ,2001 ,yearly
PERLND, 824,PWATER ,UZS , 5,1951 ,2001 ,yearly
PERLND, 825,PWATER ,AGWET, 5,1951 ,2001 ,yearly
PERLND, 825,PWATER ,AGWI , 5,1951 ,2001 ,yearly
PERLND, 825,PWATER ,AGWO , 5,1951 ,2001 ,yearly
PERLND, 825,PWATER ,AGWS , 5,1951 ,2001 ,yearly
PERLND, 825,PWATER ,BASET, 5,1951 ,2001 ,yearly
PERLND, 825,PWATER ,CEPE , 5,1951 ,2001 ,yearly
PERLND, 825,PWATER ,CEPS , 5,1951 ,2001 ,yearly
PERLND, 825,PWATER ,GWVS , 5,1951 ,2001 ,yearly
PERLND, 825,PWATER ,IFWI , 5,1951 ,2001 ,yearly
PERLND, 825,PWATER ,IFWO , 5,1951 ,2001 ,yearly
PERLND, 825,PWATER ,IFWS , 5,1951 ,2001 ,yearly
PERLND, 825,PWATER ,IGWI , 5,1951 ,2001 ,yearly
PERLND, 825,PWATER ,INFIL, 5,1951 ,2001 ,yearly
PERLND, 825,PWATER ,LZET , 5,1951 ,2001 ,yearly
PERLND, 825,PWATER ,LZI , 5,1951 ,2001 ,yearly
PERLND, 825,PWATER ,LZS , 5,1951 ,2001 ,yearly
PERLND, 825,PWATER ,PERC , 5,1951 ,2001 ,yearly
PERLND, 825,PWATER ,PERO , 5,1951 ,2001 ,yearly
PERLND, 825,PWATER ,PERS , 5,1951 ,2001 ,yearly
PERLND, 825,PWATER ,PET , 5,1951 ,2001 ,yearly
PERLND, 825,PWATER ,SUPY , 5,1951 ,2001 ,yearly
PERLND, 825,PWATER ,SURO , 5,1951 ,2001 ,yearly
PERLND, 825,PWATER ,SURS , 5,1951 ,2001 ,yearly
PERLND, 825,PWATER ,TAET , 5,1951 ,2001 ,yearly
PERLND, 825,PWATER ,UZET , 5,1951 ,2001 ,yearly
PERLND, 825,PWATER ,UZI , 5,1951 ,2001 ,yearly
PERLND, 825,PWATER ,UZS , 5,1951 ,2001 ,yearly
PERLND, 901,PWATER ,AGWET, 5,1951 ,2001 ,yearly
PERLND, 901,PWATER ,AGWI , 5,1951 ,2001 ,yearly
PERLND, 901,PWATER ,AGWO , 5,1951 ,2001 ,yearly
PERLND, 901,PWATER ,AGWS , 5,1951 ,2001 ,yearly
PERLND, 901,PWATER ,BASET, 5,1951 ,2001 ,yearly
PERLND, 901,PWATER ,CEPE , 5,1951 ,2001 ,yearly
PERLND, 901,PWATER ,CEPS , 5,1951 ,2001 ,yearly
PERLND, 901,PWATER ,GWVS , 5,1951 ,2001 ,yearly
PERLND, 901,PWATER ,IFWI , 5,1951 ,2001 ,yearly
PERLND, 901,PWATER ,IFWO , 5,1951 ,2001 ,yearly
PERLND, 901,PWATER ,IFWS , 5,1951 ,2001 ,yearly
PERLND, 901,PWATER ,IGWI , 5,1951 ,2001 ,yearly
PERLND, 901,PWATER ,INFIL, 5,1951 ,2001 ,yearly
PERLND, 901,PWATER ,LZET , 5,1951 ,2001 ,yearly
PERLND, 901,PWATER ,LZI , 5,1951 ,2001 ,yearly
PERLND, 901,PWATER ,LZS , 5,1951 ,2001 ,yearly
PERLND, 901,PWATER ,PERC , 5,1951 ,2001 ,yearly
PERLND, 901,PWATER ,PERO , 5,1951 ,2001 ,yearly
PERLND, 901,PWATER ,PERS , 5,1951 ,2001 ,yearly
PERLND, 901,PWATER ,PET , 5,1951 ,2001 ,yearly
PERLND, 901,PWATER ,SUPY , 5,1951 ,2001 ,yearly
PERLND, 901,PWATER ,SURO , 5,1951 ,2001 ,yearly
PERLND, 901,PWATER ,SURS , 5,1951 ,2001 ,yearly
PERLND, 901,PWATER ,TAET , 5,1951 ,2001 ,yearly
PERLND, 901,PWATER ,UZET , 5,1951 ,2001 ,yearly
PERLND, 901,PWATER ,UZI , 5,1951 ,2001 ,yearly
PERLND, 901,PWATER ,UZS , 5,1951 ,2001 ,yearly
PERLND, 902,PWATER ,AGWET, 5,1951 ,2001 ,yearly
PERLND, 902,PWATER ,AGWI , 5,1951 ,2001 ,yearly
PERLND, 902,PWATER ,AGWO , 5,1951 ,2001 ,yearly
PERLND, 902,PWATER ,AGWS , 5,1951 ,2001 ,yearly
PERLND, 902,PWATER ,BASET, 5,1951 ,2001 ,yearly
PERLND, 902,PWATER ,CEPE , 5,1951 ,2001 ,yearly
PERLND, 902,PWATER ,CEPS , 5,1951 ,2001 ,yearly
PERLND, 902,PWATER ,GWVS , 5,1951 ,2001 ,yearly
PERLND, 902,PWATER ,IFWI , 5,1951 ,2001 ,yearly
PERLND, 902,PWATER ,IFWO , 5,1951 ,2001 ,yearly
PERLND, 902,PWATER ,IFWS , 5,1951 ,2001 ,yearly
PERLND, 902,PWATER ,IGWI , 5,1951 ,2001 ,yearly
PERLND, 902,PWATER ,INFIL, 5,1951 ,2001 ,yearly
PERLND, 902,PWATER ,LZET , 5,1951 ,2001 ,yearly
PERLND, 902,PWATER ,LZI , 5,1951 ,2001 ,yearly
PERLND, 902,PWATER ,LZS , 5,1951 ,2001 ,yearly
PERLND, 902,PWATER ,PERC , 5,1951 ,2001 ,yearly
PERLND, 902,PWATER ,PERO , 5,1951 ,2001 ,yearly
PERLND, 902,PWATER ,PERS , 5,1951 ,2001 ,yearly
PERLND, 902,PWATER ,PET , 5,1951 ,2001 ,yearly
PERLND, 902,PWATER ,SUPY , 5,1951 ,2001 ,yearly
PERLND, 902,PWATER ,SURO , 5,1951 ,2001 ,yearly
PERLND, 902,PWATER ,SURS , 5,1951 ,2001 ,yearly
PERLND, 902,PWATER ,TAET , 5,1951 ,2001 ,yearly
PERLND, 902,PWATER ,UZET , 5,1951 ,2001 ,yearly
PERLND, 902,PWATER ,UZI , 5,1951 ,2001 ,yearly
PERLND, 902,PWATER ,UZS , 5,1951 ,2001 ,yearly
PERLND, 903,PWATER ,AGWET, 5,1951 ,2001 ,yearly
PERLND, 903,PWATER ,AGWI , 5,1951 ,2001 ,yearly
PERLND, 903,PWATER ,AGWO , 5,1951 ,2001 ,yearly
PERLND, 903,PWATER ,AGWS , 5,1951 ,2001 ,yearly
PERLND, 903,PWATER ,BASET, 5,1951 ,2001 ,yearly
PERLND, 903,PWATER ,CEPE , 5,1951 ,2001 ,yearly
PERLND, 903,PWATER ,CEPS , 5,1951 ,2001 ,yearly
PERLND, 903,PWATER ,GWVS , 5,1951 ,2001 ,yearly
PERLND, 903,PWATER ,IFWI , 5,1951 ,2001 ,yearly
PERLND, 903,PWATER ,IFWO , 5,1951 ,2001 ,yearly
PERLND, 903,PWATER ,IFWS , 5,1951 ,2001 ,yearly
PERLND, 903,PWATER ,IGWI , 5,1951 ,2001 ,yearly
PERLND, 903,PWATER ,INFIL, 5,1951 ,2001 ,yearly
PERLND, 903,PWATER ,LZET , 5,1951 ,2001 ,yearly
PERLND, 903,PWATER ,LZI , 5,1951 ,2001 ,yearly
PERLND, 903,PWATER ,LZS , 5,1951 ,2001 ,yearly
PERLND, 903,PWATER ,PERC , 5,1951 ,2001 ,yearly
PERLND, 903,PWATER ,PERO , 5,1951 ,2001 ,yearly
PERLND, 903,PWATER ,PERS , 5,1951 ,2001 ,yearly
PERLND, 903,PWATER ,PET , 5,1951 ,2001 ,yearly
PERLND, 903,PWATER ,SUPY , 5,1951 ,2001 ,yearly
PERLND, 903,PWATER ,SURO , 5,1951 ,2001 ,yearly
PERLND, 903,PWATER ,SURS , 5,1951 ,2001 ,yearly
PERLND, 903,PWATER ,TAET , 5,1951 ,2001 ,yearly
PERLND, 903,PWATER ,UZET , 5,1951 ,2001 ,yearly
PERLND, 903,PWATER ,UZI , 5,1951 ,2001 ,yearly
PERLND, 903,PWATER ,UZS , 5,1951 ,2001 ,yearly
PERLND, 904,PWATER ,AGWET, 5,1951 ,2001 ,yearly
PERLND, 904,PWATER ,AGWI , 5,1951 ,2001 ,yearly
PERLND, 904,PWATER ,AGWO , 5,1951 ,2001 ,yearly
PERLND, 904,PWATER ,AGWS , 5,1951 ,2001 ,yearly
PERLND, 904,PWATER ,BASET, 5,1951 ,2001 ,yearly
PERLND, 904,PWATER ,CEPE , 5,1951 ,2001 ,yearly
PERLND, 904,PWATER ,CEPS , 5,1951 ,2001 ,yearly
PERLND, 904,PWATER ,GWVS , 5,1951 ,2001 ,yearly
PERLND, 904,PWATER ,IFWI , 5,1951 ,2001 ,yearly
PERLND, 904,PWATER ,IFWO , 5,1951 ,2001 ,yearly
PERLND, 904,PWATER ,IFWS , 5,1951 ,2001 ,yearly
PERLND, 904,PWATER ,IGWI , 5,1951 ,2001 ,yearly
PERLND, 904,PWATER ,INFIL, 5,1951 ,2001 ,yearly
PERLND, 904,PWATER ,LZET , 5,1951 ,2001 ,yearly
PERLND, 904,PWATER ,LZI , 5,1951 ,2001 ,yearly
PERLND, 904,PWATER ,LZS , 5,1951 ,2001 ,yearly
PERLND, 904,PWATER ,PERC , 5,1951 ,2001 ,yearly
PERLND, 904,PWATER ,PERO , 5,1951 ,2001 ,yearly
PERLND, 904,PWATER ,PERS , 5,1951 ,2001 ,yearly
PERLND, 904,PWATER ,PET , 5,1951 ,2001 ,yearly
PERLND, 904,PWATER ,SUPY , 5,1951 ,2001 ,yearly
PERLND, 904,PWATER ,SURO , 5,1951 ,2001 ,yearly
PERLND, 904,PWATER ,SURS , 5,1951 ,2001 ,yearly
PERLND, 904,PWATER ,TAET , 5,1951 ,2001 ,yearly
PERLND, 904,PWATER ,UZET , 5,1951 ,2001 ,yearly
PERLND, 904,PWATER ,UZI , 5,1951 ,2001 ,yearly
PERLND, 904,PWATER ,UZS , 5,1951 ,2001 ,yearly
PERLND, 905,PWATER ,AGWET, 5,1951 ,2001 ,yearly
PERLND, 905,PWATER ,AGWI , 5,1951 ,2001 ,yearly
PERLND, 905,PWATER ,AGWO , 5,1951 ,2001 ,yearly
PERLND, 905,PWATER ,AGWS , 5,1951 ,2001 ,yearly
PERLND, 905,PWATER ,BASET, 5,1951 ,2001 ,yearly
PERLND, 905,PWATER ,CEPE , 5,1951 ,2001 ,yearly
PERLND, 905,PWATER ,CEPS , 5,1951 ,2001 ,yearly
PERLND, 905,PWATER ,GWVS , 5,1951 ,2001 ,yearly
PERLND, 905,PWATER ,IFWI , 5,1951 ,2001 ,yearly
PERLND, 905,PWATER ,IFWO , 5,1951 ,2001 ,yearly
PERLND, 905,PWATER ,IFWS , 5,1951 ,2001 ,yearly
PERLND, 905,PWATER ,IGWI , 5,1951 ,2001 ,yearly
PERLND, 905,PWATER ,INFIL, 5,1951 ,2001 ,yearly
PERLND, 905,PWATER ,LZET , 5,1951 ,2001 ,yearly
PERLND, 905,PWATER ,LZI , 5,1951 ,2001 ,yearly
PERLND, 905,PWATER ,LZS , 5,1951 ,2001 ,yearly
PERLND, 905,PWATER ,PERC , 5,1951 ,2001 ,yearly
PERLND, 905,PWATER ,PERO , 5,1951 ,2001 ,yearly
PERLND, 905,PWATER ,PERS , 5,1951 ,2001 ,yearly
PERLND, 905,PWATER ,PET , 5,1951 ,2001 ,yearly
PERLND, 905,PWATER ,SUPY , 5,1951 ,2001 ,yearly
PERLND, 905,PWATER ,SURO , 5,1951 ,2001 ,yearly
PERLND, 905,PWATER ,SURS , 5,1951 ,2001 ,yearly
PERLND, 905,PWATER ,TAET , 5,1951 ,2001 ,yearly
PERLND, 905,PWATER ,UZET , 5,1951 ,2001 ,yearly
PERLND, 905,PWATER ,UZI , 5,1951 ,2001 ,yearly
PERLND, 905,PWATER ,UZS , 5,1951 ,2001 ,yearly
"""
ndict = []
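        # Parse the expected catalog embedded above: keep the operation type, id,
        # activity group and variable name (columns 0-3) plus the interval name
        # (last column) mapped to its integer code via interval2codemap.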
rd = read_unicode_csv(StringIO(self.catalog.decode()))
next(rd)
for row in rd:
if len(row) == 0:
continue
nrow = [i.strip() for i in row]
ndict.append(
(nrow[0], int(nrow[1]), nrow[2], nrow[3], interval2codemap[nrow[7]])
)
self.ncatalog = sorted(ndict)
def test_catalog_api(self):
out = hspfbintoolbox.catalog("tests/6b_np1.hbn")
out = [i[:5] for i in out]
self.assertEqual(out, self.ncatalog)
def test_catalog_cli(self):
args = "hspfbintoolbox catalog --tablefmt csv tests/6b_np1.hbn"
args = shlex.split(args)
out = subprocess.Popen(
args, stdout=subprocess.PIPE, stdin=subprocess.PIPE
).communicate()[0]
self.assertEqual(out, self.catalog)
| bsd-3-clause |
rossburton/yocto-autobuilder | lib/python2.7/site-packages/buildbot-0.8.8-py2.7.egg/buildbot/process/users/manual.py | 4 | 9675 | # This file is part of Buildbot. Buildbot is free software: you can
# redistribute it and/or modify it under the terms of the GNU General Public
# License as published by the Free Software Foundation, version 2.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc., 51
# Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# Copyright Buildbot Team Members
# this class is known to contain cruft and will be looked at later, so
# no current implementation utilizes it aside from scripts.runner.
from twisted.python import log
from twisted.internet import defer
from twisted.application import service
from buildbot import pbutil
class UsersBase(service.MultiService):
"""
Base class for services that manage users manually. This takes care
of the service.MultiService work needed by all the services that
subclass it.
"""
def __init__(self):
service.MultiService.__init__(self)
self.master = None
def startService(self):
service.MultiService.startService(self)
def stopService(self):
return service.MultiService.stopService(self)
class CommandlineUserManagerPerspective(pbutil.NewCredPerspective):
"""
Perspective registered in buildbot.pbmanager and contains the real
workings of `buildbot user` by working with the database when
perspective_commandline is called.
"""
def __init__(self, master):
self.master = master
def formatResults(self, op, results):
"""
This formats the results of the database operations for printing
back to the caller
@param op: operation to perform (add, remove, update, get)
@type op: string
@param results: results from db queries in perspective_commandline
@type results: list
@returns: string containing formatted results
"""
formatted_results = ""
if op == 'add':
# list, alternating ident, uid
formatted_results += "user(s) added:\n"
for user in results:
if isinstance(user, basestring):
formatted_results += "identifier: %s\n" % user
else:
formatted_results += "uid: %d\n\n" % user
elif op == 'remove':
# list of dictionaries
formatted_results += "user(s) removed:\n"
for user in results:
if user:
formatted_results += "identifier: %s\n" % (user)
elif op == 'update':
# list, alternating ident, None
formatted_results += "user(s) updated:\n"
for user in results:
if user:
formatted_results += "identifier: %s\n" % (user)
elif op == 'get':
# list of dictionaries
formatted_results += "user(s) found:\n"
for user in results:
if user:
for key in user:
if key != 'bb_password':
formatted_results += "%s: %s\n" % (key, user[key])
formatted_results += "\n"
else:
formatted_results += "no match found\n"
return formatted_results
@defer.inlineCallbacks
def perspective_commandline(self, op, bb_username, bb_password, ids, info):
"""
This performs the requested operations from the `buildbot user`
call by calling the proper buildbot.db.users methods based on
the operation. It yields a deferred instance with the results
from the database methods.
@param op: operation to perform (add, remove, update, get)
@type op: string
@param bb_username: username portion of auth credentials
@type bb_username: string
@param bb_password: hashed password portion of auth credentials
@type bb_password: hashed string
@param ids: user identifiers used to find existing users
@type ids: list of strings or None
@param info: type/value pairs for each user that will be added
or updated in the database
@type info: list of dictionaries or None
@returns: results from db.users methods via deferred
"""
log.msg("perspective_commandline called")
results = []
if ids:
for user in ids:
# get identifier, guaranteed to be in user from checks
# done in C{scripts.runner}
uid = yield self.master.db.users.identifierToUid(
identifier=user)
result = None
if op == 'remove':
if uid:
yield self.master.db.users.removeUser(uid)
result = user
else:
log.msg("Unable to find uid for identifier %s" % user)
elif op == 'get':
if uid:
result = yield self.master.db.users.getUser(uid)
else:
log.msg("Unable to find uid for identifier %s" % user)
results.append(result)
else:
for user in info:
# get identifier, guaranteed to be in user from checks
# done in C{scripts.runner}
ident = user.pop('identifier')
uid = yield self.master.db.users.identifierToUid(
identifier=ident)
# if only an identifier was in user, we're updating only
# the bb_username and bb_password.
if not user:
if uid:
result = yield self.master.db.users.updateUser(
uid=uid,
identifier=ident,
bb_username=bb_username,
bb_password=bb_password)
results.append(ident)
else:
log.msg("Unable to find uid for identifier %s"
% user)
else:
# when adding, we update the user after the first attr
once_through = False
for attr in user:
if op == 'update' or once_through:
if uid:
result = yield self.master.db.users.updateUser(
uid=uid,
identifier=ident,
bb_username=bb_username,
bb_password=bb_password,
attr_type=attr,
attr_data=user[attr])
else:
log.msg("Unable to find uid for identifier %s"
% user)
elif op == 'add':
result = yield self.master.db.users.findUserByAttr(
identifier=ident,
attr_type=attr,
attr_data=user[attr])
once_through = True
results.append(ident)
# result is None from updateUser calls
if result:
results.append(result)
uid = result
results = self.formatResults(op, results)
defer.returnValue(results)
class CommandlineUserManager(UsersBase):
"""
Service that runs to set up and register CommandlineUserManagerPerspective
so `buildbot user` calls get to perspective_commandline.
"""
def __init__(self, username=None, passwd=None, port=None):
UsersBase.__init__(self)
assert username and passwd, ("A username and password pair must be given "
"to connect and use `buildbot user`")
self.username = username
self.passwd = passwd
assert port, "A port must be specified for a PB connection"
self.port = port
self.registration = None
def startService(self):
UsersBase.startService(self)
# set up factory and register with buildbot.pbmanager
def factory(mind, username):
return CommandlineUserManagerPerspective(self.master)
self.registration = self.master.pbmanager.register(self.port,
self.username,
self.passwd,
factory)
def stopService(self):
d = defer.maybeDeferred(UsersBase.stopService, self)
def unreg(_):
if self.registration:
return self.registration.unregister()
d.addCallback(unreg)
return d
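# Hedged configuration sketch (values are illustrative, not from this repo):
# attaching a CommandlineUserManager in master.cfg lets `buildbot user` connect
# over PB and reach perspective_commandline above.
#
#   from buildbot.process.users import manual
#   c['user_managers'] = []
#   c['user_managers'].append(
#       manual.CommandlineUserManager(username="user", passwd="userpw", port=9990))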
| gpl-2.0 |
jaap-karssenberg/zim-desktop-wiki | zim/gui/templateeditordialog.py | 1 | 6069 |
# Copyright 2012 Jaap Karssenberg <[email protected]>
from gi.repository import Gtk
from zim.newfs import LocalFile
from zim.newfs.helpers import TrashHelper, TrashNotSupportedError
from zim.config import XDG_DATA_HOME, data_file
from zim.templates import list_template_categories, list_templates
from zim.gui.widgets import Dialog, BrowserTreeView, ScrolledWindow
from zim.gui.applications import open_folder_prompt_create, open_file, edit_file
class TemplateEditorDialog(Dialog):
'''Dialog with a tree of available templates for export and new pages.
Allows edit, delete, and create new templates. Uses external editor.
'''
def __init__(self, parent):
Dialog.__init__(self, parent,
_('Templates'), help='Help:Templates', buttons=Gtk.ButtonsType.CLOSE,
defaultwindowsize=(400, 450))
# T: Dialog title
label = Gtk.Label()
label.set_markup('<b>' + _('Templates') + '</b>')
# T: Section in dialog
label.set_alignment(0.0, 0.5)
self.vbox.pack_start(label, False, True, 0)
hbox = Gtk.HBox()
self.vbox.pack_start(hbox, True, True, 0)
self.view = TemplateListView()
self.view.connect('row-activated', self.on_selection_changed)
hbox.pack_start(ScrolledWindow(self.view), True, True, 0)
vbbox = Gtk.VButtonBox()
vbbox.set_layout(Gtk.ButtonBoxStyle.START)
hbox.pack_start(vbbox, False, True, 0)
view_button = Gtk.Button.new_with_mnemonic(_('_View')) # T: button label
view_button.connect('clicked', self.on_view)
copy_button = Gtk.Button.new_with_mnemonic(_('_Copy')) # T: Button label
copy_button.connect('clicked', self.on_copy)
edit_button = Gtk.Button.new_with_mnemonic(_('_Edit')) # T: Button label
edit_button.connect('clicked', self.on_edit)
delete_button = Gtk.Button.new_with_mnemonic(_('_Remove')) # T: Button label
delete_button.connect('clicked', self.on_delete)
for b in (view_button, copy_button, edit_button, delete_button):
b.set_alignment(0.0, 0.5)
vbbox.add(b)
browse_button = Gtk.Button.new_with_mnemonic(_('Browse')) # T: button label
browse_button.connect('clicked', self.on_browse)
self.add_extra_button(browse_button)
self._buttonbox = vbbox
self._delete_button = delete_button
self.on_selection_changed()
## Same button appears in export dialog
url_button = Gtk.LinkButton(
'https://zim-wiki.org/more_templates.html',
_('Get more templates online') # T: label for button with URL
)
self.vbox.pack_start(url_button, False, True, 0)
def on_selection_changed(self, *a):
# Set sensitivity of the buttons
# All insensitive if category (folder) is selected
# Delete insensitive if only a default
custom, default = self.view.get_selected()
for button in self._buttonbox.get_children():
button.set_sensitive(custom is not None)
if custom is None:
return
if not custom.exists():
self._delete_button.set_sensitive(False)
def on_view(self, *a):
# Open the file, without waiting for editor to return
custom, default = self.view.get_selected()
if custom is None:
return # Should not have been sensitive
if custom.exists():
open_file(self, custom)
else:
assert default and default.exists()
open_file(self, default)
def on_copy(self, *a):
# Create a new template in this category
custom, default = self.view.get_selected()
if custom is None:
return # Should not have been sensitive
if custom.exists():
source = custom
else:
assert default and default.exists()
source = default
name = PromptNameDialog(self).run()
assert name is not None
_, ext = custom.basename.rsplit('.', 1)
basename = name + '.' + ext
newfile = custom.dir.file(basename)
source.copyto(newfile)
self.view.refresh()
def on_edit(self, *a):
custom, default = self.view.get_selected()
if custom is None:
return # Should not have been sensitive
if not custom.exists():
# Copy default
default.copyto(custom)
edit_file(self, custom, istextfile=True)
self.view.refresh()
def on_delete(self, *a):
# Only delete custom, may result in reset to default
custom, default = self.view.get_selected()
if custom is None or not custom.exists():
return # Should not have been sensitive
try:
TrashHelper().trash(LocalFile(custom.path))
except TrashNotSupportedError:
# TODO warnings
custom.remove()
self.view.refresh()
def on_browse(self, *a):
dir = XDG_DATA_HOME.subdir(('zim', 'templates'))
open_folder_prompt_create(self, dir)
class PromptNameDialog(Dialog):
def __init__(self, parent):
Dialog.__init__(self, parent, _('Copy Template')) # T: Dialog title
self.add_form([
('name', 'string', _('Name')),
# T: Input label for the new name when copying a template
])
def do_response_ok(self):
self.result = self.form['name']
if self.result:
return True
class TemplateListView(BrowserTreeView):
BASENAME_COL = 0
FILE_COL = 1
DEFAULT_COL = 2
def __init__(self):
BrowserTreeView.__init__(self)
model = Gtk.TreeStore(str, object, object)
# BASENAME_COL, FILE_COL, DEFAULT_COL
self.set_model(model)
self.set_headers_visible(False)
cell_renderer = Gtk.CellRendererText()
column = Gtk.TreeViewColumn('_template_', cell_renderer, text=self.BASENAME_COL)
self.append_column(column)
self.refresh()
def get_selected(self):
# Returns (base, default file) or (None, None)
model, iter = self.get_selection().get_selected()
if model is None or iter is None:
return None, None
else:
return model[iter][self.FILE_COL], model[iter][self.DEFAULT_COL]
def select(self, path):
self.get_selection().select_path(path)
def refresh(self):
model = self.get_model()
model.clear()
for category in list_template_categories():
parent = model.append(None, (category, None, None))
for name, basename in list_templates(category):
base = XDG_DATA_HOME.file(('zim', 'templates', category, basename))
default = data_file(('templates', category, basename)) # None if not existing
#~ print('>>>', name, base, default)
model.append(parent, (name, base, default))
self.expand_all()
| gpl-2.0 |
joeythesaint/yocto-autobuilder | lib/python2.7/site-packages/Twisted-12.2.0-py2.7-linux-x86_64.egg/twisted/test/test_dirdbm.py | 41 | 5859 | # Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.
"""
Test cases for dirdbm module.
"""
import os, shutil, glob
from twisted.trial import unittest
from twisted.persisted import dirdbm
class DirDbmTestCase(unittest.TestCase):
def setUp(self):
self.path = self.mktemp()
self.dbm = dirdbm.open(self.path)
self.items = (('abc', 'foo'), ('/lalal', '\000\001'), ('\000\012', 'baz'))
def testAll(self):
k = "//==".decode("base64")
self.dbm[k] = "a"
self.dbm[k] = "a"
self.assertEqual(self.dbm[k], "a")
def testRebuildInteraction(self):
from twisted.persisted import dirdbm
from twisted.python import rebuild
s = dirdbm.Shelf('dirdbm.rebuild.test')
s['key'] = 'value'
rebuild.rebuild(dirdbm)
# print s['key']
def testDbm(self):
d = self.dbm
# insert keys
keys = []
values = set()
for k, v in self.items:
d[k] = v
keys.append(k)
values.add(v)
keys.sort()
# check they exist
for k, v in self.items:
assert d.has_key(k), "has_key() failed"
assert d[k] == v, "database has wrong value"
# check non existent key
try:
d["XXX"]
except KeyError:
pass
else:
assert 0, "didn't raise KeyError on non-existent key"
# check keys(), values() and items()
dbkeys = list(d.keys())
dbvalues = set(d.values())
dbitems = set(d.items())
dbkeys.sort()
items = set(self.items)
assert keys == dbkeys, ".keys() output didn't match: %s != %s" % (repr(keys), repr(dbkeys))
assert values == dbvalues, ".values() output didn't match: %s != %s" % (repr(values), repr(dbvalues))
assert items == dbitems, "items() didn't match: %s != %s" % (repr(items), repr(dbitems))
copyPath = self.mktemp()
d2 = d.copyTo(copyPath)
copykeys = list(d.keys())
copyvalues = set(d.values())
copyitems = set(d.items())
copykeys.sort()
assert dbkeys == copykeys, ".copyTo().keys() didn't match: %s != %s" % (repr(dbkeys), repr(copykeys))
assert dbvalues == copyvalues, ".copyTo().values() didn't match: %s != %s" % (repr(dbvalues), repr(copyvalues))
assert dbitems == copyitems, ".copyTo().items() didn't match: %s != %s" % (repr(dbkeys), repr(copyitems))
d2.clear()
assert len(d2.keys()) == len(d2.values()) == len(d2.items()) == 0, ".clear() failed"
shutil.rmtree(copyPath)
# delete items
for k, v in self.items:
del d[k]
assert not d.has_key(k), "has_key() even though we deleted it"
assert len(d.keys()) == 0, "database has keys"
assert len(d.values()) == 0, "database has values"
assert len(d.items()) == 0, "database has items"
def testModificationTime(self):
import time
# the mtime value for files comes from a different place than the
# gettimeofday() system call. On linux, gettimeofday() can be
# slightly ahead (due to clock drift which gettimeofday() takes into
# account but which open()/write()/close() do not), and if we are
# close to the edge of the next second, time.time() can give a value
# which is larger than the mtime which results from a subsequent
# write(). I consider this a kernel bug, but it is beyond the scope
        # of this test. Thus we keep the range of acceptability to 3 seconds.
# -warner
self.dbm["k"] = "v"
self.assert_(abs(time.time() - self.dbm.getModificationTime("k")) <= 3)
def testRecovery(self):
"""DirDBM: test recovery from directory after a faked crash"""
k = self.dbm._encode("key1")
f = open(os.path.join(self.path, k + ".rpl"), "wb")
f.write("value")
f.close()
k2 = self.dbm._encode("key2")
f = open(os.path.join(self.path, k2), "wb")
f.write("correct")
f.close()
f = open(os.path.join(self.path, k2 + ".rpl"), "wb")
f.write("wrong")
f.close()
f = open(os.path.join(self.path, "aa.new"), "wb")
f.write("deleted")
f.close()
dbm = dirdbm.DirDBM(self.path)
assert dbm["key1"] == "value"
assert dbm["key2"] == "correct"
assert not glob.glob(os.path.join(self.path, "*.new"))
assert not glob.glob(os.path.join(self.path, "*.rpl"))
def test_nonStringKeys(self):
"""
L{dirdbm.DirDBM} operations only support string keys: other types
should raise a C{AssertionError}. This really ought to be a
C{TypeError}, but it'll stay like this for backward compatibility.
"""
self.assertRaises(AssertionError, self.dbm.__setitem__, 2, "3")
try:
self.assertRaises(AssertionError, self.dbm.__setitem__, "2", 3)
except unittest.FailTest:
# dirdbm.Shelf.__setitem__ supports non-string values
self.assertIsInstance(self.dbm, dirdbm.Shelf)
self.assertRaises(AssertionError, self.dbm.__getitem__, 2)
self.assertRaises(AssertionError, self.dbm.__delitem__, 2)
self.assertRaises(AssertionError, self.dbm.has_key, 2)
self.assertRaises(AssertionError, self.dbm.__contains__, 2)
self.assertRaises(AssertionError, self.dbm.getModificationTime, 2)
class ShelfTestCase(DirDbmTestCase):
def setUp(self):
self.path = self.mktemp()
self.dbm = dirdbm.Shelf(self.path)
self.items = (('abc', 'foo'), ('/lalal', '\000\001'), ('\000\012', 'baz'),
('int', 12), ('float', 12.0), ('tuple', (None, 12)))
testCases = [DirDbmTestCase, ShelfTestCase]
| gpl-2.0 |
colinnewell/odoo | addons/website/tests/test_crawl.py | 251 | 3415 | # -*- coding: utf-8 -*-
import logging
import urlparse
import time
import lxml.html
import openerp
import re
_logger = logging.getLogger(__name__)
class Crawler(openerp.tests.HttpCase):
""" Test suite crawling an openerp CMS instance and checking that all
internal links lead to a 200 response.
If a username and a password are provided, authenticates the user before
starting the crawl
"""
at_install = False
post_install = True
def crawl(self, url, seen=None, msg=''):
        if seen is None:
seen = set()
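        # Normalize numeric ids/slugs and query-string values so URLs that differ
        # only in those parts are treated as one pattern and crawled only once.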
url_slug = re.sub(r"[/](([^/=?&]+-)?[0-9]+)([/]|$)", '/<slug>/', url)
url_slug = re.sub(r"([^/=?&]+)=[^/=?&]+", '\g<1>=param', url_slug)
if url_slug in seen:
return seen
else:
seen.add(url_slug)
_logger.info("%s %s", msg, url)
r = self.url_open(url)
code = r.getcode()
self.assertIn( code, xrange(200, 300), "%s Fetching %s returned error response (%d)" % (msg, url, code))
if r.info().gettype() == 'text/html':
doc = lxml.html.fromstring(r.read())
for link in doc.xpath('//a[@href]'):
href = link.get('href')
parts = urlparse.urlsplit(href)
# href with any fragment removed
href = urlparse.urlunsplit((
parts.scheme,
parts.netloc,
parts.path,
parts.query,
''
))
# FIXME: handle relative link (not parts.path.startswith /)
if parts.netloc or \
not parts.path.startswith('/') or \
parts.path == '/web' or\
parts.path.startswith('/web/') or \
parts.path.startswith('/en_US/') or \
(parts.scheme and parts.scheme not in ('http', 'https')):
continue
self.crawl(href, seen, msg)
return seen
def test_10_crawl_public(self):
t0 = time.time()
t0_sql = self.registry.test_cr.sql_log_count
seen = self.crawl('/', msg='Anonymous Coward')
count = len(seen)
duration = time.time() - t0
sql = self.registry.test_cr.sql_log_count - t0_sql
_logger.log(25, "public crawled %s urls in %.2fs %s queries, %.3fs %.2fq per request, ", count, duration, sql, duration/count, float(sql)/count)
def test_20_crawl_demo(self):
t0 = time.time()
t0_sql = self.registry.test_cr.sql_log_count
self.authenticate('demo', 'demo')
seen = self.crawl('/', msg='demo')
count = len(seen)
duration = time.time() - t0
sql = self.registry.test_cr.sql_log_count - t0_sql
_logger.log(25, "demo crawled %s urls in %.2fs %s queries, %.3fs %.2fq per request", count, duration, sql, duration/count, float(sql)/count)
def test_30_crawl_admin(self):
t0 = time.time()
t0_sql = self.registry.test_cr.sql_log_count
self.authenticate('admin', 'admin')
seen = self.crawl('/', msg='admin')
count = len(seen)
duration = time.time() - t0
sql = self.registry.test_cr.sql_log_count - t0_sql
_logger.log(25, "admin crawled %s urls in %.2fs %s queries, %.3fs %.2fq per request", count, duration, sql, duration/count, float(sql)/count)
| agpl-3.0 |
drduh/config | lighttpd/upload.py | 1 | 1501 | #!/usr/bin/env python3
# https://github.com/drduh/config/blob/master/lighttpd/upload.py
# Simple file uploader
# Put into /var/www/cgi-bin/, make executable and enable CGI
import cgi
import os
CHUNK_SIZE = 100000
UPLOAD = "/var/www/upload/"
HEADER = """
<html><head><title>%s</title>
<style type="text/css">
body {
background-color: #002b36;
color: #839496;
font-family: "Open Sans", "Helvetica Neue",
"Helvetica", "Arial", "sans-serif";
}
div {
background-color: #073642;
border-radius: 0.5em;
margin: 1em auto;
padding: 2em;
width: 600px;
}
h1 {
font-size: 2em;
padding-bottom: 1em;
}
</style>
</head><body><div>"""
ERROR = """
<h1>Error: %s</h1>
</div></body></html>"""
SUCCESS = """
<h1>Saved <a href="../upload/%s">%s</a></h1>
<h2><a href="../index.html">Upload another file</a></h2>
<h2><a href="../upload/">Download files</a></h2>
</div></body></html>"""
def main():
"""File uploader static pages and form handler."""
print(HEADER % "File upload")
form = cgi.FieldStorage()
ff = form["file"]
fl = ff.file
fn = ff.filename
if not fn:
print(ERROR % "No file selected")
return
with open(
os.path.join(
UPLOAD, os.path.basename(fn)), "wb") as out:
while True:
content = fl.read(CHUNK_SIZE)
if not content:
break
out.write(content)
print(SUCCESS % (fn, fn))
if __name__ == "__main__":
main()
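# Hedged lighttpd sketch (paths and interpreter are assumptions): mod_cgi must be
# enabled so this script runs from /var/www/cgi-bin/, and /var/www/upload/ must be
# writable by the web server user:
#
#   server.modules += ( "mod_cgi" )
#   $HTTP["url"] =~ "^/cgi-bin/" {
#       cgi.assign = ( ".py" => "/usr/bin/python3" )
#   }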
| mit |
switowski/invenio | invenio/legacy/ckeditor/connector.py | 13 | 5954 | # -*- coding: utf-8 -*-
# Comments and reviews for records.
# This file is part of Invenio.
# Copyright (C) 2008, 2010, 2011 CERN.
#
# Invenio is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 2 of the
# License, or (at your option) any later version.
#
# Invenio is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Invenio; if not, write to the Free Software Foundation, Inc.,
# 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.
"""
Invenio implementation of the connector to CKEditor for file upload.
This is heavily borrowed from FCKeditor 'upload.py' sample connector.
"""
import os
import re
from invenio.legacy.bibdocfile.api import decompose_file, propose_next_docname
allowed_extensions = {}
allowed_extensions['File'] = ['7z','aiff','asf','avi','bmp','csv','doc','fla','flv','gif','gz','gzip','jpeg','jpg','mid','mov','mp3','mp4','mpc','mpeg','mpg','ods','odt','pdf','png','ppt','pxd','qt','ram','rar','rm','rmi','rmvb','rtf','sdc','sitd','swf','sxc','sxw','tar','tgz','tif','tiff','txt','vsd','wav','wma','wmv','xls','xml','zip']
allowed_extensions['Image'] = ['bmp','gif','jpeg','jpg','png']
allowed_extensions['Flash'] = ['swf','flv']
allowed_extensions['Media'] = ['aiff','asf','avi','bmp','fla', 'flv','gif','jpeg','jpg','mid','mov','mp3','mp4','mpc','mpeg','mpg','png','qt','ram','rm','rmi','rmvb','swf','tif','tiff','wav','wma','wmv']
default_allowed_types = ['File', 'Image', 'Flash', 'Media']
def process_CKEditor_upload(form, uid, user_files_path, user_files_absolute_path,
recid=None, allowed_types=default_allowed_types):
"""
Process a file upload request.
@param form: the form as in req object.
@type form: dict
@param uid: the user ID of the user uploading the file.
@type uid: int
@param user_files_path: the base URL where the file can be
accessed from the web after upload.
Note that you have to implement your own handler to stream the files from the directory
C{user_files_absolute_path} if you set this value.
@type user_files_path: string
@param user_files_absolute_path: the base path on the server where
the files should be saved.
Eg:C{%(CFG_PREFIX)s/var/data/comments/%(recid)s/%(uid)s}
@type user_files_absolute_path: string
@param recid: the record ID for which we upload a file. Leave None if not relevant.
@type recid: int
@param allowed_types: types allowed for uploading. These
are supported by CKEditor: ['File', 'Image', 'Flash', 'Media']
@type allowed_types: list of strings
@return: (msg, uploaded_file_path, uploaded_file_name, uploaded_file_url, callback_function)
"""
msg = ''
filename = ''
formfile = None
uploaded_file_path = ''
user_files_path = ''
for key, formfields in form.items():
if key != 'upload':
continue
if hasattr(formfields, "filename") and formfields.filename:
# We have found our file
filename = formfields.filename
formfile = formfields.file
break
can_upload_file_p = False
if not form['type'] in allowed_types:
# Is the type sent through the form ok?
msg = 'You are not allowed to upload a file of this type'
else:
# Is user allowed to upload such file extension?
basedir, name, extension = decompose_file(filename)
extension = extension[1:] # strip leading dot
if extension in allowed_extensions.get(form['type'], []):
can_upload_file_p = True
if not can_upload_file_p:
msg = 'You are not allowed to upload a file of this type'
elif filename and formfile:
## Before saving the file to disk, wash the filename (in particular
## washing away UNIX and Windows (e.g. DFS) paths):
filename = os.path.basename(filename.split('\\')[-1])
# Remove \ / | : ? *
filename = re.sub ( '\\\\|\\/|\\||\\:|\\?|\\*|"|<|>|[\x00-\x1f\x7f-\x9f]/', '_', filename)
filename = filename.strip()
if filename != "":
# Check that file does not already exist
n = 1
while os.path.exists(os.path.join(user_files_absolute_path, filename)):
basedir, name, extension = decompose_file(filename)
new_name = propose_next_docname(name)
filename = new_name + extension
# This may be dangerous if the file size is bigger than the available memory
fp = open(os.path.join(user_files_absolute_path, filename), "w")
fp.write(formfile.read())
fp.close()
uploaded_file_path = os.path.join(user_files_absolute_path, filename)
uploaded_file_name = filename
return (msg, uploaded_file_path, filename, user_files_path, form['CKEditorFuncNum'])
def send_response(req, msg, fileurl, callback_function):
"""
Send a response to the CKEdtior after a file upload.
@param req: the request object
@param msg: the message to send to the user
@param fileurl: the URL where the newly uploaded file can be found, if any
@param callback_function: a value returned when calling C{process_CKEditor_upload()}
"""
req.content_type = 'text/html'
req.send_http_header()
req.write('''<html><body><script type="text/javascript">window.parent.CKEDITOR.tools.callFunction(%(function_number)s, '%(url)s', '%(msg)s')</script></body></html>''' % \
{'function_number': callback_function,
'url': fileurl,
'msg': msg.replace("'", "\\'")})
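# Hedged usage sketch (paths and ids are illustrative): a legacy web handler
# would combine the two helpers above roughly like this:
#
#   msg, filepath, filename, files_url, func_num = process_CKEditor_upload(
#       form, uid, '/record/123/files/', '/opt/invenio/var/data/comments/123/1/',
#       recid=123)
#   send_response(req, msg, files_url + '/' + filename, func_num)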
| gpl-2.0 |
Lcaracol/ideasbox.lan | ideasbox/library/models.py | 1 | 3341 | from django.conf import settings
from django.core.urlresolvers import reverse
from django.db import models
from django.utils.translation import ugettext_lazy as _
from taggit.managers import TaggableManager
from ideasbox.models import TimeStampedModel
from ideasbox.search.models import SearchableQuerySet, SearchMixin
class BookQuerySet(SearchableQuerySet, models.QuerySet):
def available(self):
return self.filter(specimens__isnull=False).distinct()
class Book(SearchMixin, TimeStampedModel):
OTHER = 99
SECTION_CHOICES = (
(1, _('digital')),
(2, _('children - cartoons')),
(3, _('children - novels')),
(10, _('children - poetry')),
(11, _('children - theatre')),
(4, _('children - documentary')),
(5, _('children - comics')),
(6, _('adults - novels')),
(12, _('adults - poetry')),
(13, _('adults - theatre')),
(7, _('adults - documentary')),
(8, _('adults - comics')),
(9, _('game')),
(OTHER, _('other')),
)
# We allow ISBN to be null, but when it is set it needs to be unique.
isbn = models.CharField(max_length=40, unique=True, null=True, blank=True)
authors = models.CharField(_('authors'), max_length=300, blank=True)
serie = models.CharField(_('serie'), max_length=300, blank=True)
title = models.CharField(_('title'), max_length=300)
subtitle = models.CharField(_('subtitle'), max_length=300, blank=True)
summary = models.TextField(_('summary'), blank=True)
publisher = models.CharField(_('publisher'), max_length=100, blank=True)
section = models.PositiveSmallIntegerField(_('section'),
choices=SECTION_CHOICES)
lang = models.CharField(_('Language'), max_length=10,
choices=settings.LANGUAGES)
cover = models.ImageField(_('cover'), upload_to='library/cover',
blank=True)
objects = BookQuerySet.as_manager()
tags = TaggableManager(blank=True)
class Meta:
ordering = ['title']
def __unicode__(self):
return self.title
def get_absolute_url(self):
return reverse('library:book_detail', kwargs={'pk': self.pk})
@property
def index_strings(self):
return (self.title, self.isbn, self.authors, self.subtitle,
self.summary, self.serie, u' '.join(self.tags.names()))
class BookSpecimen(TimeStampedModel):
book = models.ForeignKey(Book, related_name='specimens')
serial = models.CharField(_('serial'), max_length=40, unique=True,
blank=True, null=True)
location = models.CharField(_('location'), max_length=300, blank=True)
remarks = models.TextField(_('remarks'), blank=True)
file = models.FileField(_('digital file'), upload_to='library/digital',
blank=True)
@property
def is_digital(self):
return bool(self.file)
def __unicode__(self):
if self.is_digital:
# serial is null for digital specimens.
return u'Digital specimen of "{0}"'.format(self.book)
return u'Specimen {0} of "{1}"'.format(self.serial, self.book)
def get_absolute_url(self):
return reverse('library:book_detail', kwargs={'pk': self.book.pk})
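# Hedged usage sketch (field values are illustrative):
#
#   book = Book.objects.create(title="Example", section=Book.OTHER, lang="en")
#   BookSpecimen.objects.create(book=book, serial="A-0001")
#   Book.objects.available()  # only books that have at least one specimen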
| mit |
nvoron23/socialite | test/test_attrs.py | 3 | 1423 | """
Testing attribute/function access in a query.
"""
import unittest
class TestAttrs(unittest.TestCase):
def __init__(self, methodName='runTest'):
unittest.TestCase.__init__(self, methodName)
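        # Backquoted strings are SociaLite queries; these declare the Foo and
        # Bar tables that the tests below populate and read back.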
`Foo(int i, Avg avg) groupby(1).
Bar(int i).`
def setUp(self):
`clear Foo.
clear Bar.`
def test_field(self):
`Foo(i, $avg(n)) :- i=$range(0, 10), n=$range(1, 4).
Bar(i) :- Foo(0, avg), i=(int)avg.value.`
a = list(`Bar(a)`)[0]
self.assertTrue(a==2)
def test_func(self):
`Bar(i) :- i=(int)$Math.ceil(4.2).`
a = list(`Bar(a)`)[0]
self.assertTrue(a==5)
def test_str(self):
`Qux(String w) indexby w.
Qux(w) :- (w, unused)=$Str.split("qux unused trailing strs.. ", " ").`
w = list(`Qux(w)`)[0]
self.assertTrue(w=='qux')
def test_exception1(self):
try:
`Foo(i, $avg(n)) :- i=$range(0, 10), n=$range(1, 4).
Bar(i) :- Foo(0, avg), i=(int)avg.XXX .`
except SociaLiteException:
pass
else:
self.fail("Expected exception is not raised")
def test_exception2(self):
try:
`Bar(i) :- i=(int)$Math.XXX(4.2).`
except SociaLiteException:
pass
else:
self.fail("Expected exception is not raised")
if __name__ == '__main__':
unittest.main()
| apache-2.0 |
joshwalawender/POCS | pocs/scheduler/field.py | 2 | 1797 | from astroplan import FixedTarget
from astropy.coordinates import SkyCoord
from pocs import PanBase
class Field(FixedTarget, PanBase):
def __init__(self, name, position, equinox='J2000', **kwargs):
""" An object representing an area to be observed
A `Field` corresponds to an `~astroplan.ObservingBlock` and contains information
about the center of the field (represented by an `astroplan.FixedTarget`).
Arguments:
name {str} -- Name of the field, typically the name of object at center `position`
position {str} -- Center of field, can be anything accepted by `~astropy.coordinates.SkyCoord`
**kwargs {dict} -- Additional keywords to be passed to `astroplan.ObservingBlock`
"""
PanBase.__init__(self)
super().__init__(SkyCoord(position, equinox=equinox, frame='icrs'), name=name, **kwargs)
self._field_name = self.name.title().replace(' ', '').replace('-', '')
##################################################################################################
# Properties
##################################################################################################
@property
def field_name(self):
""" Flattened field name appropriate for paths """
return self._field_name
##################################################################################################
# Methods
##################################################################################################
##################################################################################################
# Private Methods
##################################################################################################
def __str__(self):
return self.name
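# Hedged usage sketch (coordinates are illustrative; assumes a POCS config is
# available, since PanBase.__init__ is called):
#
#   field = Field('Wasp 33', '02h26m51.0582s +37d33m01.733s')
#   field.field_name  # 'Wasp33', flattened for use in paths
#   field.coord       # SkyCoord from the underlying astroplan.FixedTarget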
| mit |
rdblue/Impala | testdata/bin/wait-for-hiveserver2.py | 14 | 3445 | #!/usr/bin/env impala-python
# Copyright (c) 2012 Cloudera, Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# This script waits for the Hive HiveServer2 service to become available by attempting
# to create a new session until the session creation succeeds, or a timeout is reached.
# TODO: Consider combining this with wait-for-metastore.py. A TCLIService client
# can perhaps also talk to the metastore.
import time
import getpass
from optparse import OptionParser
from tests.util.thrift_util import create_transport
# Imports required for HiveServer2 Client
from cli_service import LegacyTCLIService
from thrift.transport import TTransport, TSocket
from thrift.protocol import TBinaryProtocol
parser = OptionParser()
parser.add_option("--hs2_hostport", dest="hs2_hostport",
default="localhost:11050", help="HiveServer2 hostport to wait for.")
parser.add_option("--transport", dest="transport", default="buffered",
help="Transport to use for connecting to HiveServer2. Valid values: "
"'buffered', 'kerberos', 'plain_sasl'.")
options, args = parser.parse_args()
hs2_host, hs2_port = options.hs2_hostport.split(':')
if options.transport == "plain_sasl":
# Here we supply a bogus username of "foo" and a bogus password of "bar".
# We just have to supply *something*, else HS2 will block waiting for user
# input. Any bogus username and password are accepted.
hs2_transport = create_transport(hs2_host, hs2_port, "hive", options.transport,
"foo", "bar")
else:
hs2_transport = create_transport(hs2_host, hs2_port, "hive", options.transport)
protocol = TBinaryProtocol.TBinaryProtocol(hs2_transport)
hs2_client = LegacyTCLIService.Client(protocol)
# Try to connect to the HiveServer2 service and create a session
now = time.time()
TIMEOUT_SECONDS = 30.0
while time.time() - now < TIMEOUT_SECONDS:
try:
hs2_transport.open()
open_session_req = LegacyTCLIService.TOpenSessionReq()
open_session_req.username = getpass.getuser()
resp = hs2_client.OpenSession(open_session_req)
if resp.status.statusCode == LegacyTCLIService.TStatusCode.SUCCESS_STATUS:
close_session_req = LegacyTCLIService.TCloseSessionReq()
close_session_req.sessionHandle = resp.sessionHandle
hs2_client.CloseSession(close_session_req)
print "HiveServer2 service is up at %s." % options.hs2_hostport
exit(0)
except Exception as e:
if "SASL" in e.message: # Bail out on SASL failures
print "SASL failure when attempting connection:"
raise
if "GSS" in e.message: # Other GSSAPI failures
print "GSS failure when attempting connection:"
raise
print "Waiting for HiveServer2 at %s..." % options.hs2_hostport
print e
finally:
hs2_transport.close()
time.sleep(0.5)
print "HiveServer2 service failed to start within %s seconds." % TIMEOUT_SECONDS
exit(1)
| apache-2.0 |
emfcamp/micropython | tests/basics/int1.py | 44 | 1559 | print(int(False))
print(int(True))
print(int(0))
print(int(1))
print(int(+1))
print(int(-1))
print(int('0'))
print(int('+0'))
print(int('-0'))
print(int('1'))
print(int('+1'))
print(int('-1'))
print(int('01'))
print(int('9'))
print(int('10'))
print(int('+10'))
print(int('-10'))
print(int('12'))
print(int('-12'))
print(int('99'))
print(int('100'))
print(int('314'))
print(int(' 314'))
print(int('314 '))
print(int(' \t\t 314 \t\t '))
print(int(' 1 '))
print(int(' -3 '))
print(int('0', 10))
print(int('1', 10))
print(int(' \t 1 \t ', 10))
print(int('11', 10))
print(int('11', 16))
print(int('11', 8))
print(int('11', 2))
print(int('11', 36))
print(int('0o123', 0))
print(int('8388607'))
print(int('0x123', 16))
print(int('0X123', 16))
print(int('0A', 16))
print(int('0o123', 8))
print(int('0O123', 8))
print(int('0123', 8))
print(int('0b100', 2))
print(int('0B100', 2))
print(int('0100', 2))
print(int(' \t 0o12', 8))
print(int('0o12 \t ', 8))
print(int(b"12", 10))
print(int(b"12"))
def test(value, base):
try:
print(int(value, base))
except ValueError:
print('ValueError')
test('x', 0)
test('1x', 0)
test(' 1x', 0)
test(' 1' + chr(2) + ' ', 0)
test('', 0)
test(' ', 0)
test(' \t\t ', 0)
test('0x', 16)
test('0x', 0)
test('0o', 8)
test('0o', 0)
test('0b', 2)
test('0b', 0)
test('0b2', 2)
test('0o8', 8)
test('0xg', 16)
test('1 1', 16)
test('123', 37)
# check that we don't parse this as a floating point number
print(0x1e+1)
# can't convert list to int
try:
int([])
except TypeError:
print("TypeError")
| mit |
skuda/client-python | kubernetes/client/models/apps_v1beta1_deployment_strategy.py | 1 | 4125 | # coding: utf-8
"""
Kubernetes
No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen)
OpenAPI spec version: v1.6.1
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from pprint import pformat
from six import iteritems
import re
class AppsV1beta1DeploymentStrategy(object):
"""
NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
"""
def __init__(self, rolling_update=None, type=None):
"""
AppsV1beta1DeploymentStrategy - a model defined in Swagger
:param dict swaggerTypes: The key is attribute name
and the value is attribute type.
:param dict attributeMap: The key is attribute name
and the value is json key in definition.
"""
self.swagger_types = {
'rolling_update': 'AppsV1beta1RollingUpdateDeployment',
'type': 'str'
}
self.attribute_map = {
'rolling_update': 'rollingUpdate',
'type': 'type'
}
self._rolling_update = rolling_update
self._type = type
@property
def rolling_update(self):
"""
Gets the rolling_update of this AppsV1beta1DeploymentStrategy.
Rolling update config params. Present only if DeploymentStrategyType = RollingUpdate.
:return: The rolling_update of this AppsV1beta1DeploymentStrategy.
:rtype: AppsV1beta1RollingUpdateDeployment
"""
return self._rolling_update
@rolling_update.setter
def rolling_update(self, rolling_update):
"""
Sets the rolling_update of this AppsV1beta1DeploymentStrategy.
Rolling update config params. Present only if DeploymentStrategyType = RollingUpdate.
:param rolling_update: The rolling_update of this AppsV1beta1DeploymentStrategy.
:type: AppsV1beta1RollingUpdateDeployment
"""
self._rolling_update = rolling_update
@property
def type(self):
"""
Gets the type of this AppsV1beta1DeploymentStrategy.
Type of deployment. Can be \"Recreate\" or \"RollingUpdate\". Default is RollingUpdate.
:return: The type of this AppsV1beta1DeploymentStrategy.
:rtype: str
"""
return self._type
@type.setter
def type(self, type):
"""
Sets the type of this AppsV1beta1DeploymentStrategy.
Type of deployment. Can be \"Recreate\" or \"RollingUpdate\". Default is RollingUpdate.
:param type: The type of this AppsV1beta1DeploymentStrategy.
:type: str
"""
self._type = type
def to_dict(self):
"""
Returns the model properties as a dict
"""
result = {}
for attr, _ in iteritems(self.swagger_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
return result
def to_str(self):
"""
Returns the string representation of the model
"""
return pformat(self.to_dict())
def __repr__(self):
"""
For `print` and `pprint`
"""
return self.to_str()
def __eq__(self, other):
"""
Returns true if both objects are equal
"""
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""
Returns true if both objects are not equal
"""
return not self == other
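# Illustrative use of the generated model class (field values are made up):
#   strategy = AppsV1beta1DeploymentStrategy(type="RollingUpdate")
#   strategy.to_dict()  # -> {'rolling_update': None, 'type': 'RollingUpdate'}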
| apache-2.0 |
kennedyshead/home-assistant | homeassistant/components/vultr/sensor.py | 5 | 3249 | """Support for monitoring the state of Vultr Subscriptions."""
import logging
import voluptuous as vol
from homeassistant.components.sensor import PLATFORM_SCHEMA, SensorEntity
from homeassistant.const import CONF_MONITORED_CONDITIONS, CONF_NAME, DATA_GIGABYTES
import homeassistant.helpers.config_validation as cv
from . import (
ATTR_CURRENT_BANDWIDTH_USED,
ATTR_PENDING_CHARGES,
CONF_SUBSCRIPTION,
DATA_VULTR,
)
_LOGGER = logging.getLogger(__name__)
DEFAULT_NAME = "Vultr {} {}"
MONITORED_CONDITIONS = {
ATTR_CURRENT_BANDWIDTH_USED: [
"Current Bandwidth Used",
DATA_GIGABYTES,
"mdi:chart-histogram",
],
ATTR_PENDING_CHARGES: ["Pending Charges", "US$", "mdi:currency-usd"],
}
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{
vol.Required(CONF_SUBSCRIPTION): cv.string,
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
vol.Optional(
CONF_MONITORED_CONDITIONS, default=list(MONITORED_CONDITIONS)
): vol.All(cv.ensure_list, [vol.In(MONITORED_CONDITIONS)]),
}
)
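# Illustrative configuration.yaml entry accepted by the schema above (the
# subscription id is made up; monitored_conditions entries must be keys of
# MONITORED_CONDITIONS):
#   sensor:
#     - platform: vultr
#       subscription: "576965"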
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up the Vultr subscription (server) sensor."""
vultr = hass.data[DATA_VULTR]
subscription = config.get(CONF_SUBSCRIPTION)
name = config.get(CONF_NAME)
monitored_conditions = config.get(CONF_MONITORED_CONDITIONS)
if subscription not in vultr.data:
_LOGGER.error("Subscription %s not found", subscription)
return
sensors = []
for condition in monitored_conditions:
sensors.append(VultrSensor(vultr, subscription, condition, name))
add_entities(sensors, True)
class VultrSensor(SensorEntity):
"""Representation of a Vultr subscription sensor."""
def __init__(self, vultr, subscription, condition, name):
"""Initialize a new Vultr sensor."""
self._vultr = vultr
self._condition = condition
self._name = name
self.subscription = subscription
self.data = None
condition_info = MONITORED_CONDITIONS[condition]
self._condition_name = condition_info[0]
self._units = condition_info[1]
self._icon = condition_info[2]
@property
def name(self):
"""Return the name of the sensor."""
try:
return self._name.format(self._condition_name)
except IndexError:
try:
return self._name.format(self.data["label"], self._condition_name)
except (KeyError, TypeError):
return self._name
@property
def icon(self):
"""Return the icon used in the frontend if any."""
return self._icon
@property
def unit_of_measurement(self):
"""Return the unit of measurement to present the value in."""
return self._units
@property
def state(self):
"""Return the value of this given sensor type."""
try:
return round(float(self.data.get(self._condition)), 2)
except (TypeError, ValueError):
return self.data.get(self._condition)
def update(self):
"""Update state of sensor."""
self._vultr.update()
self.data = self._vultr.data[self.subscription]
| apache-2.0 |
sf-wind/caffe2 | caffe2/python/caffe_translator.py | 3 | 35395 | # Copyright (c) 2016-present, Facebook, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
##############################################################################
## @package caffe_translator
# Module caffe2.python.caffe_translator
#!/usr/bin/env python2
import argparse
import copy
import logging
import re
import numpy as np # noqa
from caffe2.proto import caffe2_pb2, caffe2_legacy_pb2
from caffe.proto import caffe_pb2
from caffe2.python import core, utils, workspace
from google.protobuf import text_format
logging.basicConfig()
log = logging.getLogger("caffe_translator")
log.setLevel(logging.INFO)
def _StateMeetsRule(state, rule):
"""A function that reproduces Caffe's StateMeetsRule functionality."""
if rule.HasField('phase') and rule.phase != state.phase:
return False
if rule.HasField('min_level') and state.level < rule.min_level:
return False
if rule.HasField('max_level') and state.level > rule.max_level:
return False
curr_stages = set(list(state.stage))
# all stages in rule.stages should be in, otherwise it's not a match.
if len(rule.stage) and any([s not in curr_stages for s in rule.stage]):
return False
# none of the stage in rule.stages should be in, otherwise it's not a match.
if len(rule.not_stage) and any([s in curr_stages for s in rule.not_stage]):
return False
# If none of the nonmatch happens, return True.
return True
def _ShouldInclude(net_state, layer):
"""A function that reproduces Caffe's inclusion and exclusion rule."""
ret = (len(layer.include) == 0)
# check exclude rules: if any exclusion is met, we shouldn't include.
ret &= not any([_StateMeetsRule(net_state, rule) for rule in layer.exclude])
if len(layer.include):
# check include rules: if any inclusion is met, we should include.
ret |= any([_StateMeetsRule(net_state, rule) for rule in layer.include])
return ret
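# For example, a layer declared with include { phase: TEST } in the prototxt is
# kept only when the NetState passed to TranslateModel has phase == TEST.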
def _GetLegacyDims(net, net_params, dummy_input, legacy_pad_ops):
dim_map = {}
ws = workspace.C.Workspace()
for param in net_params.protos:
ws.create_blob(param.name) \
.feed(utils.Caffe2TensorToNumpyArray(param))
external_input = net.op[0].input[0]
ws.create_blob(external_input).feed(dummy_input)
# Get dimensions with legacy pad
for i in range(len(net.op)):
op_def = net.op[i]
ws._run_operator(op_def.SerializeToString())
if i in legacy_pad_ops:
output = op_def.output[0]
blob_legacy = ws.fetch_blob(output)
dim_map[i] = blob_legacy.shape
return dim_map
def _GetLegacyPadArgs(op_def, arg_map):
pads = {}
keys = ['pad_l', 'pad_t', 'pad_r', 'pad_b']
is_pad = 'pad' in arg_map
if is_pad:
for k in keys:
pads[k] = arg_map['pad'].i
else:
pads = {x: arg_map[x].i for x in keys}
return pads
def _AdjustDims(op_def, arg_map, pads, dim1, dim2):
n1, c1, h1, w1 = dim1
n2, c2, h2, w2 = dim2
assert(n1 == n2)
assert(c1 == c2)
is_pad = 'pad' in arg_map
if h1 != h2 or w1 != w2:
if h1 == h2 + 1:
pads['pad_b'] += 1
elif h1 != h2:
raise Exception("Unexpected dimensions for height:", h1, h2)
if w1 == w2 + 1:
pads['pad_r'] += 1
elif w1 != w2:
raise Exception("Unexpected dimensions for width:", w1, w2)
if is_pad:
op_def.arg.remove(arg_map['pad'])
args = []
for name in pads.keys():
arg = caffe2_pb2.Argument()
arg.name = name
arg.i = pads[name]
args.append(arg)
op_def.arg.extend(args)
else:
for name in pads.keys():
arg_map[name].i = pads[name]
def _RemoveLegacyPad(net, net_params, input_dims):
legacy_pad_ops = []
for i in range(len(net.op)):
op_def = net.op[i]
if re.match(r'^(Conv|ConvTranspose|MaxPool|AveragePool)(\dD)?$',
op_def.type):
for arg in op_def.arg:
if arg.name == 'legacy_pad':
legacy_pad_ops.append(i)
break
if legacy_pad_ops:
n, c, h, w = input_dims
dummy_input = np.random.randn(n, c, h, w).astype(np.float32)
dim_map = _GetLegacyDims(net, net_params, dummy_input, legacy_pad_ops)
# Running with the legacy pad argument removed
# compare the dimensions and adjust pad argument when necessary
ws = workspace.C.Workspace()
external_input = net.op[0].input[0]
ws.create_blob(external_input).feed_blob(dummy_input)
for param in net_params.protos:
ws.create_blob(param.name) \
.feed_blob(utils.Caffe2TensorToNumpyArray(param))
for i in range(len(net.op)):
op_def = net.op[i]
if i in legacy_pad_ops:
arg_map = {}
for arg in op_def.arg:
arg_map[arg.name] = arg
pads = _GetLegacyPadArgs(op_def, arg_map)
# remove legacy pad arg
for j in range(len(op_def.arg)):
arg = op_def.arg[j]
if arg.name == 'legacy_pad':
del op_def.arg[j]
break
output = op_def.output[0]
# use a new name to avoid the interference with inplace
nonlegacy_output = output + '_nonlegacy'
op_def.output[0] = nonlegacy_output
ws._run_operator(op_def.SerializeToString())
blob_nonlegacy = ws.fetch_blob(nonlegacy_output)
# reset output name
op_def.output[0] = output
dim1 = dim_map[i]
dim2 = blob_nonlegacy.shape
_AdjustDims(op_def, arg_map, pads, dim1, dim2)
ws._run_operator(op_def.SerializeToString())
return net
def _GetBlobDimMap(net, net_params, dummy_input):
dim_map = {}
ws = workspace.C.Workspace()
for param in net_params.protos:
ws.create_blob(param.name) \
.feed(utils.Caffe2TensorToNumpyArray(param))
external_input = net.op[0].input[0]
ws.create_blob(external_input).feed(dummy_input)
# Get dimensions with legacy pad
for i in range(len(net.op)):
op_def = net.op[i]
ws._run_operator(op_def.SerializeToString())
for output in op_def.output:
blob = ws.fetch_blob(output)
dim_map[output] = blob.shape
return dim_map
def _GetInputDims(caffe_net):
input_dims = []
if caffe_net.input_dim:
input_dims = caffe_net.input_dim
elif caffe_net.input_shape:
input_dims = caffe_net.input_shape[0].dim
elif caffe_net.layer[0].input_param.shape:
# getting input dimension from first layer
input_dims = caffe_net.layer[0].input_param.shape[0].dim
return input_dims
class TranslatorRegistry(object):
registry_ = {}
@classmethod
def Register(cls, op_name):
"""A decorator for registering gradient mappings."""
def Wrapper(func):
cls.registry_[op_name] = func
return func
return Wrapper
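    # Illustrative registration of a translator for a hypothetical layer type
    # ("MyLayer" and "MyCaffe2Op" are made-up names, not real Caffe/Caffe2 ops):
    #   @TranslatorRegistry.Register("MyLayer")
    #   def TranslateMyLayer(layer, pretrained_blobs, is_test, **kwargs):
    #       return BaseTranslate(layer, "MyCaffe2Op"), []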
@classmethod
def TranslateLayer(cls, layer, pretrained_blobs, is_test, **kwargs):
try:
caffe_ops, params = cls.registry_[layer.type](
layer, pretrained_blobs, is_test, **kwargs)
except KeyError:
raise KeyError('No translator registered for layer: %s yet.' %
str(layer))
if caffe_ops is None:
caffe_ops = []
if type(caffe_ops) is not list:
caffe_ops = [caffe_ops]
return caffe_ops, params
@classmethod
def TranslateModel(
cls,
caffe_net,
pretrained_net,
is_test=False,
net_state=None,
remove_legacy_pad=False,
input_dims=None
):
net_state = caffe_pb2.NetState() if net_state is None else net_state
net = caffe2_pb2.NetDef()
net.name = caffe_net.name
net_params = caffe2_pb2.TensorProtos()
if len(caffe_net.layers) > 0:
raise ValueError(
'I think something is wrong. This translation script '
'only accepts new style layers that are stored in the '
'layer field.'
)
if not input_dims:
input_dims = _GetInputDims(caffe_net)
for layer in caffe_net.layer:
if not _ShouldInclude(net_state, layer):
log.info('Current net state does not need layer {}'
.format(layer.name))
continue
log.info('Translate layer {}'.format(layer.name))
# Get pretrained one
        pretrained_layers = (
            [l for l in pretrained_net.layer if l.name == layer.name] +
            [l for l in pretrained_net.layers if l.name == layer.name]
        )
if len(pretrained_layers) > 1:
raise ValueError(
'huh? more than one pretrained layer of one name?')
elif len(pretrained_layers) == 1:
pretrained_blobs = [
utils.CaffeBlobToNumpyArray(blob)
for blob in pretrained_layers[0].blobs
]
else:
# No pretrained layer for the given layer name. We'll just pass
# no parameter blobs.
# print 'No pretrained layer for layer', layer.name
pretrained_blobs = []
operators, params = cls.TranslateLayer(
layer, pretrained_blobs, is_test, net=net,
net_params=net_params, input_dims=input_dims)
net.op.extend(operators)
net_params.protos.extend(params)
if remove_legacy_pad:
assert input_dims, \
'Please specify input_dims to remove legacy_pad'
net = _RemoveLegacyPad(net, net_params, input_dims)
return net, net_params
def TranslateModel(*args, **kwargs):
return TranslatorRegistry.TranslateModel(*args, **kwargs)
def ConvertTensorProtosToInitNet(net_params, input_name):
"""Takes the net_params returned from TranslateModel, and wrap it as an
init net that contain GivenTensorFill.
This is a very simple feature that only works with float tensors, and is
only intended to be used in an environment where you want a single
initialization file - for more complex cases, use a db to store the
parameters.
"""
init_net = caffe2_pb2.NetDef()
for tensor in net_params.protos:
if len(tensor.float_data) == 0:
raise RuntimeError(
"Only float tensors are supported in this util.")
op = core.CreateOperator(
"GivenTensorFill", [], [tensor.name],
arg=[
utils.MakeArgument("shape", list(tensor.dims)),
utils.MakeArgument("values", tensor.float_data)])
init_net.op.extend([op])
init_net.op.extend([core.CreateOperator("ConstantFill", [], [input_name], shape=[1])])
return init_net
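# Rough usage sketch for the two nets this module produces (assumes the
# standard Caffe2 workspace API; `predict_net` is the NetDef returned by
# TranslateModel and `input_data` a numpy array of the right shape):
#   workspace.RunNetOnce(init_net)             # materialize parameter blobs
#   workspace.FeedBlob(input_name, input_data) # provide the real input
#   workspace.RunNetOnce(predict_net)          # run the translated model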
def BaseTranslate(layer, caffe2_type):
"""A simple translate interface that maps the layer input and output."""
caffe2_op = caffe2_pb2.OperatorDef()
caffe2_op.type = caffe2_type
caffe2_op.input.extend(layer.bottom)
caffe2_op.output.extend(layer.top)
return caffe2_op
def AddArgument(op, key, value):
"""Makes an argument based on the value type."""
op.arg.extend([utils.MakeArgument(key, value)])
################################################################################
# Common translators for layers.
################################################################################
@TranslatorRegistry.Register("Input")
def TranslateInput(layer, pretrained_blobs, is_test, **kwargs):
return [], []
@TranslatorRegistry.Register("VideoData")
def TranslateVideoData(layer, pretrained_blobs, is_test, **kwargs):
return [], []
@TranslatorRegistry.Register("Data")
def TranslateData(layer, pretrained_blobs, is_test, **kwargs):
return [], []
# A function used in convolution, pooling and deconvolution to deal with
# conv pool specific parameters.
def _TranslateStridePadKernelHelper(param, caffe_op):
try:
if (len(param.stride) > 1 or len(param.kernel_size) > 1 or
len(param.pad) > 1):
raise NotImplementedError(
"Translator currently does not support non-conventional "
"pad/kernel/stride settings."
)
stride = param.stride[0] if len(param.stride) else 1
pad = param.pad[0] if len(param.pad) else 0
kernel = param.kernel_size[0] if len(param.kernel_size) else 0
except TypeError:
# This catches the case of a PoolingParameter, in which case we are
# having non-repeating pad, stride and kernel.
stride = param.stride
pad = param.pad
kernel = param.kernel_size
# Get stride
if param.HasField("stride_h") or param.HasField("stride_w"):
AddArgument(caffe_op, "stride_h", param.stride_h)
AddArgument(caffe_op, "stride_w", param.stride_w)
else:
AddArgument(caffe_op, "stride", stride)
# Get pad
if param.HasField("pad_h") or param.HasField("pad_w"):
if param.pad_h == param.pad_w:
AddArgument(caffe_op, "pad", param.pad_h)
else:
AddArgument(caffe_op, "pad_t", param.pad_h)
AddArgument(caffe_op, "pad_b", param.pad_h)
AddArgument(caffe_op, "pad_l", param.pad_w)
AddArgument(caffe_op, "pad_r", param.pad_w)
else:
AddArgument(caffe_op, "pad", pad)
# Get kernel
if param.HasField("kernel_h") or param.HasField("kernel_w"):
AddArgument(caffe_op, "kernel_h", param.kernel_h)
AddArgument(caffe_op, "kernel_w", param.kernel_w)
else:
AddArgument(caffe_op, "kernel", kernel)
@TranslatorRegistry.Register("Convolution3D")
def TranslateConvNd(layer, pretrained_blobs, is_test, **kwargs):
param = layer.convolution3d_param
caffe_op = BaseTranslate(layer, "Conv")
output = caffe_op.output[0]
caffe_op.input.append(output + '_w')
AddArgument(
caffe_op,
"kernels",
[param.kernel_depth, param.kernel_size, param.kernel_size])
AddArgument(
caffe_op,
"strides",
[param.temporal_stride, param.stride, param.stride])
temporal_pad = 0
spatial_pad = 0
if hasattr(param, 'temporal_pad'):
temporal_pad = param.temporal_pad
if hasattr(param, 'pad'):
spatial_pad = param.pad
AddArgument(caffe_op, "pads", [temporal_pad, spatial_pad, spatial_pad] * 2)
# weight
params = [
utils.NumpyArrayToCaffe2Tensor(pretrained_blobs[0], output + '_w')]
# bias
if len(pretrained_blobs) == 2:
caffe_op.input.append(output + '_b')
params.append(
utils.NumpyArrayToCaffe2Tensor(
pretrained_blobs[1].flatten(), output + '_b'))
return caffe_op, params
@TranslatorRegistry.Register("Convolution")
def TranslateConv(layer, pretrained_blobs, is_test, **kwargs):
param = layer.convolution_param
caffe_op = BaseTranslate(layer, "Conv")
output = caffe_op.output[0]
caffe_op.input.append(output + '_w')
_TranslateStridePadKernelHelper(param, caffe_op)
# weight
params = [
utils.NumpyArrayToCaffe2Tensor(pretrained_blobs[0], output + '_w')]
# bias
if len(pretrained_blobs) == 2:
caffe_op.input.append(output + '_b')
params.append(
utils.NumpyArrayToCaffe2Tensor(
pretrained_blobs[1].flatten(), output + '_b'))
# Group convolution option
if param.group != 1:
AddArgument(caffe_op, "group", param.group)
# Get dilation - not tested. If you have a model and this checks out,
# please provide a test and uncomment this.
if len(param.dilation) > 0:
if len(param.dilation) == 1:
AddArgument(caffe_op, "dilation", param.dilation[0])
elif len(param.dilation) == 2:
AddArgument(caffe_op, "dilation_h", param.dilation[0])
AddArgument(caffe_op, "dilation_w", param.dilation[1])
return caffe_op, params
@TranslatorRegistry.Register("Deconvolution")
def TranslateDeconv(layer, pretrained_blobs, is_test, **kwargs):
param = layer.convolution_param
if param.group > 1:
raise NotImplementedError(
"Translator currently does not support group deconvolution."
)
caffe_op = BaseTranslate(layer, "ConvTranspose")
output = caffe_op.output[0]
_TranslateStridePadKernelHelper(param, caffe_op)
caffe_op.input.extend([output + '_w'])
AddArgument(caffe_op, "order", "NCHW")
weight = utils.NumpyArrayToCaffe2Tensor(pretrained_blobs[0], output + '_w')
if param.bias_term:
bias = utils.NumpyArrayToCaffe2Tensor(
pretrained_blobs[1].flatten(), output + '_b'
)
caffe_op.input.extend([output + '_b'])
return caffe_op, [weight, bias]
else:
return caffe_op, [weight]
@TranslatorRegistry.Register("Crop")
def TranslateCrop(layer, pretrained_blobs, is_test, **kwargs):
    net, net_params, input_dims = (
        kwargs['net'], kwargs['net_params'], kwargs['input_dims'])
n, c, h, w = input_dims
dummy_input = np.random.randn(n, c, h, w).astype(np.float32)
dim_map = _GetBlobDimMap(net, net_params, dummy_input)
param = layer.crop_param
axis, offsets = param.axis, param.offset
caffe_op = BaseTranslate(layer, "Slice")
input_1 = caffe_op.input[1]
input_1_dim = dim_map[input_1]
starts, ends = [], []
dims = len(dim_map[input_1])
assert len(offsets) == 1, 'Caffe Translator for Crop only works for offset \
of 1 for now'
for _ in range(axis):
starts.append(0)
ends.append(-1)
end_offset = [int(offsets[0] + input_1_dim[i]) for i in range(axis, dims)]
ends.extend(end_offset)
starts.extend([offsets[0]] * len(end_offset))
op = caffe2_pb2.OperatorDef()
op.input.extend([caffe_op.input[0]])
op.output.extend(caffe_op.output)
op.arg.extend(caffe_op.arg)
op.type = caffe_op.type
AddArgument(op, "starts", starts)
AddArgument(op, "ends", ends)
return op, []
@TranslatorRegistry.Register("ReLU")
def TranslateRelu(layer, pretrained_blobs, is_test, **kwargs):
return BaseTranslate(layer, "Relu"), []
@TranslatorRegistry.Register("Pooling")
def TranslatePool(layer, pretrained_blobs, is_test, **kwargs):
param = layer.pooling_param
if param.pool == caffe_pb2.PoolingParameter.MAX:
caffe_op = BaseTranslate(layer, "MaxPool")
elif param.pool == caffe_pb2.PoolingParameter.AVE:
caffe_op = BaseTranslate(layer, "AveragePool")
_TranslateStridePadKernelHelper(param, caffe_op)
AddArgument(caffe_op, "order", "NCHW")
try:
# In the Facebook port of Caffe, a torch_pooling field was added to
# map the pooling computation of Torch. Essentially, it uses
# floor((height + 2 * padding - kernel) / stride) + 1
# instead of
# ceil((height + 2 * padding - kernel) / stride) + 1
# which is Caffe's version.
# Torch pooling is actually the same as Caffe2 pooling, so we don't
# need to do anything.
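        # As an illustration (made-up sizes): height=6, pad=0, kernel=3,
        # stride=2 gives floor(1.5) + 1 = 2 output rows under the Torch/Caffe2
        # rule but ceil(1.5) + 1 = 3 under Caffe's rule, which is why
        # legacy_pad is added below when torch_pooling is not set.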
is_torch_pooling = param.torch_pooling
except AttributeError:
is_torch_pooling = False
if not is_torch_pooling:
AddArgument(caffe_op, "legacy_pad",
caffe2_legacy_pb2.CAFFE_LEGACY_POOLING)
if param.global_pooling:
AddArgument(caffe_op, "global_pooling", 1)
return caffe_op, []
@TranslatorRegistry.Register("Pooling3D")
def TranslatePool3D(layer, pretrained_blobs, is_test, **kwargs):
param = layer.pooling3d_param
if param.pool == caffe_pb2.Pooling3DParameter.MAX:
caffe_op = BaseTranslate(layer, "MaxPool")
elif param.pool == caffe_pb2.Pooling3DParameter.AVE:
caffe_op = BaseTranslate(layer, "AveragePool")
AddArgument(caffe_op, "order", "NCHW")
AddArgument(
caffe_op,
"kernels",
[param.kernel_depth, param.kernel_size, param.kernel_size])
AddArgument(
caffe_op,
"strides",
[param.temporal_stride, param.stride, param.stride])
temporal_pad = 0
spatial_pad = 0
if hasattr(param, 'temporal_pad'):
temporal_pad = param.temporal_pad
if hasattr(param, 'pad'):
spatial_pad = param.pad
AddArgument(caffe_op, "pads", [temporal_pad, spatial_pad, spatial_pad] * 2)
return caffe_op, []
@TranslatorRegistry.Register("LRN")
def TranslateLRN(layer, pretrained_blobs, is_test, **kwargs):
caffe_op = BaseTranslate(layer, "LRN")
caffe_op.output.extend(['_' + caffe_op.output[0] + '_scale'])
param = layer.lrn_param
if param.norm_region != caffe_pb2.LRNParameter.ACROSS_CHANNELS:
raise ValueError(
"Does not support norm region other than across channels.")
AddArgument(caffe_op, "size", int(param.local_size))
AddArgument(caffe_op, "alpha", float(param.alpha))
AddArgument(caffe_op, "beta", float(param.beta))
AddArgument(caffe_op, "bias", float(param.k))
AddArgument(caffe_op, "order", "NCHW")
return caffe_op, []
@TranslatorRegistry.Register("InnerProduct")
def TranslateInnerProduct(layer, pretrained_blobs, is_test, **kwargs):
param = layer.inner_product_param
try:
if param.axis != 1 or param.transpose:
raise ValueError(
"We don't have testing case for non-default axis and transpose "
"cases yet so we are disabling it for now. If you have a model "
"with this, please do send us your model for us to update this "
"support, and you are more than welcome to send a PR for this.")
except AttributeError:
# We might be using an historic Caffe protobuf that does not have axis
# and transpose arguments, so we will silently pass.
pass
caffe_op = BaseTranslate(layer, "FC")
output = caffe_op.output[0]
caffe_op.input.extend([output + '_w', output + '_b'])
# To provide the old-style 4-dimensional blob (1, 1, dim_output, dim_input)
# case, we always explicitly reshape the pretrained blob.
if pretrained_blobs[0].ndim not in [2, 4]:
raise ValueError("Unexpected weight ndim.")
if (pretrained_blobs[0].ndim == 4 and
list(pretrained_blobs[0].shape[:2]) != [1, 1]):
raise ValueError(
"If pretrained blob has 4 dims (old-style Caffe), the first two "
"should be of value 1, but I got " + str(pretrained_blobs[0].shape))
weight = utils.NumpyArrayToCaffe2Tensor(
pretrained_blobs[0].reshape(-1, pretrained_blobs[0].shape[-1]),
output + '_w'
)
bias = utils.NumpyArrayToCaffe2Tensor(
pretrained_blobs[1].flatten(), output + '_b'
)
return caffe_op, [weight, bias]
@TranslatorRegistry.Register("Dropout")
def TranslateDropout(layer, pretrained_blobs, is_test, **kwargs):
caffe_op = BaseTranslate(layer, "Dropout")
caffe_op.output.extend(['_' + caffe_op.output[0] + '_mask'])
param = layer.dropout_param
AddArgument(caffe_op, "ratio", param.dropout_ratio)
if (is_test):
AddArgument(caffe_op, "is_test", 1)
return caffe_op, []
@TranslatorRegistry.Register("Softmax")
def TranslateSoftmax(layer, pretrained_blobs, is_test, **kwargs):
caffe_op = BaseTranslate(layer, "Softmax")
return caffe_op, []
@TranslatorRegistry.Register("SoftmaxWithLoss")
def TranslateSoftmaxWithLoss(layer, pretrained_blobs, is_test, **kwargs):
softmax_op = core.CreateOperator(
"Softmax", [layer.bottom[0]],
layer.bottom[0] + "_translator_autogen_softmax")
xent_op = core.CreateOperator(
"LabelCrossEntropy",
[softmax_op.output[0], layer.bottom[1]],
layer.bottom[0] + "_translator_autogen_xent")
loss_op = core.CreateOperator(
"AveragedLoss",
xent_op.output[0],
layer.top[0])
return [softmax_op, xent_op, loss_op], []
@TranslatorRegistry.Register("Accuracy")
def TranslateAccuracy(layer, pretrained_blobs, is_test, **kwargs):
caffe_op = BaseTranslate(layer, "Accuracy")
if layer.accuracy_param.top_k != 1:
AddArgument(caffe_op, "top_k", layer.accuracy_param.top_k)
return caffe_op, []
@TranslatorRegistry.Register("Concat")
def TranslateConcat(layer, pretrained_blobs, is_test, **kwargs):
caffe_op = BaseTranslate(layer, "Concat")
caffe_op.output.extend(['_' + caffe_op.output[0] + '_dims'])
AddArgument(caffe_op, "order", "NCHW")
return caffe_op, []
@TranslatorRegistry.Register("TanH")
def TranslateTanH(layer, pretrained_blobs, is_test, **kwargs):
caffe_op = BaseTranslate(layer, "Tanh")
return caffe_op, []
@TranslatorRegistry.Register("InstanceNorm")
def TranslateInstanceNorm(layer, pretrained_blobs, is_test, **kwargs):
caffe_op = BaseTranslate(layer, "InstanceNorm")
output = caffe_op.output[0]
weight = utils.NumpyArrayToCaffe2Tensor(
pretrained_blobs[0].flatten(), output + '_w')
bias = utils.NumpyArrayToCaffe2Tensor(
pretrained_blobs[1].flatten(), output + '_b')
caffe_op.input.extend([output + '_w', output + '_b'])
AddArgument(caffe_op, "order", "NCHW")
return caffe_op, [weight, bias]
@TranslatorRegistry.Register("BatchNorm")
def TranslateBatchNorm(layer, pretrained_blobs, is_test, **kwargs):
caffe_op = BaseTranslate(layer, "SpatialBN")
output = caffe_op.output[0]
param = layer.batch_norm_param
AddArgument(caffe_op, "is_test", is_test)
AddArgument(caffe_op, "epsilon", param.eps)
AddArgument(caffe_op, "order", "NCHW")
caffe_op.input.extend(
[output + "_scale",
output + "_bias",
output + "_mean",
output + "_var"])
if not is_test:
caffe_op.output.extend(
[output + "_mean",
output + "_var",
output + "_saved_mean",
output + "_saved_var"])
n_channels = pretrained_blobs[0].shape[0]
if pretrained_blobs[2][0] != 0:
mean = utils.NumpyArrayToCaffe2Tensor(
(1. / pretrained_blobs[2][0]) * pretrained_blobs[0],
output + '_mean')
var = utils.NumpyArrayToCaffe2Tensor(
(1. / pretrained_blobs[2][0]) * pretrained_blobs[1],
output + '_var')
else:
raise RuntimeError("scalar is zero.")
pretrained_blobs[2][0] = 1
pretrained_blobs[2] = np.tile(pretrained_blobs[2], (n_channels, ))
scale = utils.NumpyArrayToCaffe2Tensor(
pretrained_blobs[2],
output + '_scale')
bias = utils.NumpyArrayToCaffe2Tensor(
np.zeros_like(pretrained_blobs[2]),
output + '_bias')
return caffe_op, [scale, bias, mean, var]
@TranslatorRegistry.Register("Eltwise")
def TranslateElementWise(layer, pretrained_blobs, is_test, **kwargs):
param = layer.eltwise_param
# TODO(jiayq): if we have a protobuf that uses this, lift this constraint
# and verify that we can correctly translate.
if len(param.coeff) or param.operation != 1:
raise RuntimeError("This eltwise layer is not yet supported.")
caffe_op = BaseTranslate(layer, "Sum")
return caffe_op, []
@TranslatorRegistry.Register("Scale")
def TranslateScale(layer, pretrained_blobs, is_test, **kwargs):
mul_op = BaseTranslate(layer, "Mul")
scale_param = layer.scale_param
AddArgument(mul_op, "axis", scale_param.axis)
AddArgument(mul_op, "broadcast", True)
if len(mul_op.input) == 1:
# the scale parameter is in pretrained blobs
if scale_param.num_axes != 1:
raise RuntimeError("This path has not been verified yet.")
output = mul_op.output[0]
mul_op_param = output + '_w'
mul_op.input.append(mul_op_param)
weights = []
weights.append(utils.NumpyArrayToCaffe2Tensor(
pretrained_blobs[0].flatten(), mul_op_param))
add_op = None
if len(pretrained_blobs) == 1:
# No bias-term in Scale layer
pass
elif len(pretrained_blobs) == 2:
# Caffe Scale layer supports a bias term such that it computes
# (scale_param * X + bias), whereas Caffe2 Mul op doesn't.
# Include a separate Add op for the bias followed by Mul.
add_op = copy.deepcopy(mul_op)
add_op.type = "Add"
add_op_param = output + '_b'
internal_blob = output + "_internal"
del mul_op.output[:]
mul_op.output.append(internal_blob)
del add_op.input[:]
add_op.input.append(internal_blob)
add_op.input.append(add_op_param)
weights.append(utils.NumpyArrayToCaffe2Tensor(
pretrained_blobs[1].flatten(), add_op_param))
else:
raise RuntimeError("Unexpected number of pretrained blobs in Scale")
caffe_ops = [mul_op]
if add_op:
caffe_ops.append(add_op)
assert len(caffe_ops) == len(weights)
return caffe_ops, weights
elif len(mul_op.input) == 2:
# TODO(jiayq): find a protobuf that uses this and verify.
raise RuntimeError("This path has not been verified yet.")
else:
raise RuntimeError("Unexpected number of inputs.")
@TranslatorRegistry.Register("Reshape")
def TranslateReshape(layer, pretrained_blobs, is_test, **kwargs):
caffe_op = BaseTranslate(layer, "Reshape")
caffe_op.output.append("_" + caffe_op.input[0] + "_dims")
reshape_param = layer.reshape_param
AddArgument(caffe_op, 'shape', reshape_param.shape.dim)
return caffe_op, []
@TranslatorRegistry.Register("Flatten")
def TranslateFlatten(layer, pretrained_blobs, is_test, **kwargs):
param = layer.flatten_param
if param.end_axis != -1:
raise NotImplementedError("flatten_param.end_axis not supported yet.")
if param.axis == 0:
caffe_op = BaseTranslate(layer, "FlattenToVec")
elif param.axis == 1:
caffe_op = BaseTranslate(layer, "Flatten")
else:
# This could be a Reshape op, but dim size is not known here.
raise NotImplementedError(
"Not supported yet for flatten_param.axis {}.".format(param.axis))
return caffe_op, []
@TranslatorRegistry.Register("Sigmoid")
def TranslateSigmoid(layer, pretrained_blobs, is_test, **kwargs):
caffe_op = BaseTranslate(layer, "Sigmoid")
return caffe_op, []
@TranslatorRegistry.Register("ROIPooling")
def TranslateROIPooling(layer, pretrained_blobs, is_test, **kwargs):
caffe_op = BaseTranslate(layer, "RoIPool")
AddArgument(caffe_op, "order", "NCHW")
if is_test:
AddArgument(caffe_op, "is_test", is_test)
else:
# Only used for gradient computation
caffe_op.output.append(caffe_op.output[0] + '_argmaxes')
param = layer.roi_pooling_param
if param.HasField('pooled_h'):
AddArgument(caffe_op, 'pooled_h', param.pooled_h)
if param.HasField('pooled_w'):
AddArgument(caffe_op, 'pooled_w', param.pooled_w)
if param.HasField('spatial_scale'):
AddArgument(caffe_op, 'spatial_scale', param.spatial_scale)
return caffe_op, []
@TranslatorRegistry.Register("PReLU")
def TranslatePRelu(layer, pretrained_blobs, is_test, **kwargs):
caffe_op = BaseTranslate(layer, "PRelu")
output = caffe_op.output[0]
caffe_op.input.extend([output + '_Slope'])
slope = utils.NumpyArrayToCaffe2Tensor(pretrained_blobs[0], output + '_Slope')
return caffe_op, [slope]
@TranslatorRegistry.Register("Reduction")
def TranslateReduction(layer, pretrained_blobs, is_test, **kwargs):
param = layer.reduction_param
if param.operation == caffe_pb2.ReductionParameter.SUM:
caffe_op = BaseTranslate(layer, "ReduceBackSum")
elif param.operation == caffe_pb2.ReductionParameter.MEAN:
caffe_op = BaseTranslate(layer, "ReduceBackMean")
else:
raise NotImplementedError("Not yet supported")
if param.axis > 0:
# We can't figure out the number of dims to reduce from positive axis
# for back reduction since the shape info is not known here.
raise NotImplementedError("Not yet supported")
num_reduce_dim = -param.axis
AddArgument(caffe_op, "num_reduce_dim", num_reduce_dim)
return caffe_op, []
if __name__ == '__main__':
parser = argparse.ArgumentParser(
description="Utilitity to convert pretrained caffe models to Caffe2 models.")
parser.add_argument("prototext", help="Caffe prototext.")
parser.add_argument("caffemodel", help="Caffe trained model.")
parser.add_argument("--init_net", help="Caffe2 initialization net.",
default="init_net.pb")
parser.add_argument("--predict_net", help="Caffe2 prediction net.",
default="predict_net.pb")
parser.add_argument("--remove_legacy_pad", help="Remove legacy pad \
(Only works for nets with one input blob)",
action="store_true",
default=False)
parser.add_argument("--input_dims", help="Dimension of input blob", nargs='+',
type=int, default=[])
args = parser.parse_args()
caffenet = caffe_pb2.NetParameter()
caffenet_pretrained = caffe_pb2.NetParameter()
input_proto = args.prototext
input_caffemodel = args.caffemodel
output_init_net = args.init_net
output_predict_net = args.predict_net
text_format.Merge(
open(input_proto, 'r').read(), caffenet
)
caffenet_pretrained.ParseFromString(
open(input_caffemodel, 'rb').read()
)
net, pretrained_params = TranslateModel(
caffenet, caffenet_pretrained, is_test=True,
remove_legacy_pad=args.remove_legacy_pad,
input_dims=args.input_dims
)
# Assume there is one input and one output
external_input = net.op[0].input[0]
external_output = net.op[-1].output[0]
net.external_input.extend([external_input])
net.external_input.extend([param.name for param in pretrained_params.protos])
net.external_output.extend([external_output])
init_net = ConvertTensorProtosToInitNet(pretrained_params, external_input)
with open(output_predict_net, 'wb') as f:
f.write(net.SerializeToString())
with open(output_predict_net + 'txt', 'w') as f:
f.write(str(net))
with open(output_init_net, 'wb') as f:
f.write(init_net.SerializeToString())
| apache-2.0 |
linked67/p2pool-kryptonite | p2pool/bitcoin/sha256.py | 285 | 3084 | from __future__ import division
import struct
k = [
0x428a2f98, 0x71374491, 0xb5c0fbcf, 0xe9b5dba5, 0x3956c25b, 0x59f111f1, 0x923f82a4, 0xab1c5ed5,
0xd807aa98, 0x12835b01, 0x243185be, 0x550c7dc3, 0x72be5d74, 0x80deb1fe, 0x9bdc06a7, 0xc19bf174,
0xe49b69c1, 0xefbe4786, 0x0fc19dc6, 0x240ca1cc, 0x2de92c6f, 0x4a7484aa, 0x5cb0a9dc, 0x76f988da,
0x983e5152, 0xa831c66d, 0xb00327c8, 0xbf597fc7, 0xc6e00bf3, 0xd5a79147, 0x06ca6351, 0x14292967,
0x27b70a85, 0x2e1b2138, 0x4d2c6dfc, 0x53380d13, 0x650a7354, 0x766a0abb, 0x81c2c92e, 0x92722c85,
0xa2bfe8a1, 0xa81a664b, 0xc24b8b70, 0xc76c51a3, 0xd192e819, 0xd6990624, 0xf40e3585, 0x106aa070,
0x19a4c116, 0x1e376c08, 0x2748774c, 0x34b0bcb5, 0x391c0cb3, 0x4ed8aa4a, 0x5b9cca4f, 0x682e6ff3,
0x748f82ee, 0x78a5636f, 0x84c87814, 0x8cc70208, 0x90befffa, 0xa4506ceb, 0xbef9a3f7, 0xc67178f2,
]
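# The 64 round constants above are the standard SHA-256 values: the first
# 32 bits of the fractional parts of the cube roots of the first 64 primes.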
def process(state, chunk):
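    # One application of the SHA-256 compression function: mixes a single
    # 64-byte chunk into the eight big-endian 32-bit words of the state.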
def rightrotate(x, n):
return (x >> n) | (x << 32 - n) % 2**32
w = list(struct.unpack('>16I', chunk))
for i in xrange(16, 64):
s0 = rightrotate(w[i-15], 7) ^ rightrotate(w[i-15], 18) ^ (w[i-15] >> 3)
s1 = rightrotate(w[i-2], 17) ^ rightrotate(w[i-2], 19) ^ (w[i-2] >> 10)
w.append((w[i-16] + s0 + w[i-7] + s1) % 2**32)
a, b, c, d, e, f, g, h = start_state = struct.unpack('>8I', state)
for k_i, w_i in zip(k, w):
t1 = (h + (rightrotate(e, 6) ^ rightrotate(e, 11) ^ rightrotate(e, 25)) + ((e & f) ^ (~e & g)) + k_i + w_i) % 2**32
a, b, c, d, e, f, g, h = (
(t1 + (rightrotate(a, 2) ^ rightrotate(a, 13) ^ rightrotate(a, 22)) + ((a & b) ^ (a & c) ^ (b & c))) % 2**32,
a, b, c, (d + t1) % 2**32, e, f, g,
)
return struct.pack('>8I', *((x + y) % 2**32 for x, y in zip(start_state, [a, b, c, d, e, f, g, h])))
initial_state = struct.pack('>8I', 0x6a09e667, 0xbb67ae85, 0x3c6ef372, 0xa54ff53a, 0x510e527f, 0x9b05688c, 0x1f83d9ab, 0x5be0cd19)
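# Standard SHA-256 initial hash values: the first 32 bits of the fractional
# parts of the square roots of the first 8 primes.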
class sha256(object):
digest_size = 256//8
block_size = 512//8
def __init__(self, data='', _=(initial_state, '', 0)):
self.state, self.buf, self.length = _
self.update(data)
def update(self, data):
state = self.state
buf = self.buf + data
chunks = [buf[i:i + self.block_size] for i in xrange(0, len(buf) + 1, self.block_size)]
for chunk in chunks[:-1]:
state = process(state, chunk)
self.state = state
self.buf = chunks[-1]
self.length += 8*len(data)
def copy(self, data=''):
return self.__class__(data, (self.state, self.buf, self.length))
def digest(self):
state = self.state
buf = self.buf + '\x80' + '\x00'*((self.block_size - 9 - len(self.buf)) % self.block_size) + struct.pack('>Q', self.length)
for chunk in [buf[i:i + self.block_size] for i in xrange(0, len(buf), self.block_size)]:
state = process(state, chunk)
return state
def hexdigest(self):
return self.digest().encode('hex')
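# Minimal usage sketch (Python 2, matching the xrange/str handling above),
# checked against the standard 'abc' test vector:
#   assert sha256('abc').hexdigest() == \
#       'ba7816bf8f01cfea414140de5dae2223b00361a396177a9cb410ff61f20015ad'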
| gpl-3.0 |
Ormod/Diamond | src/collectors/interrupt/soft.py | 54 | 2024 | # coding=utf-8
"""
The SoftInterruptCollector collects metrics on software interrupts from
/proc/stat
#### Dependencies
* /proc/stat
"""
import platform
import os
import diamond.collector
# Detect the architecture of the system
# and set the counters for MAX_VALUES
# appropriately. Otherwise, rolling over
# counters will cause incorrect or
# negative values.
if platform.architecture()[0] == '64bit':
counter = (2 ** 64) - 1
else:
counter = (2 ** 32) - 1
class SoftInterruptCollector(diamond.collector.Collector):
PROC = '/proc/stat'
def get_default_config_help(self):
config_help = super(SoftInterruptCollector,
self).get_default_config_help()
config_help.update({
})
return config_help
def get_default_config(self):
"""
Returns the default collector settings
"""
config = super(SoftInterruptCollector, self).get_default_config()
config.update({
'path': 'softirq'
})
return config
def collect(self):
"""
Collect interrupt data
"""
if not os.access(self.PROC, os.R_OK):
return False
# Open PROC file
file = open(self.PROC, 'r')
# Get data
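        # The softirq line in /proc/stat looks like (counts are illustrative):
        #   softirq 229979997 0 90625519 63 14975121 ...
        # Field 1 is the aggregate count; the remaining fields are per-type
        # counters, which the loop below publishes as metrics '0', '1', ...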
for line in file:
if not line.startswith('softirq'):
continue
data = line.split()
metric_name = 'total'
metric_value = int(data[1])
metric_value = int(self.derivative(
metric_name,
long(metric_value), counter))
self.publish(metric_name, metric_value)
for i in range(2, len(data)):
metric_name = str(i - 2)
metric_value = int(data[i])
metric_value = int(self.derivative(
metric_name,
long(metric_value), counter))
self.publish(metric_name, metric_value)
# Close file
file.close()
| mit |
louietsai/python-for-android | python3-alpha/python3-src/Lib/idlelib/configHandler.py | 48 | 28730 | """Provides access to stored IDLE configuration information.
Refer to the comments at the beginning of config-main.def for a description of
the available configuration files and the design implemented to update user
configuration information. In particular, user configuration choices which
duplicate the defaults will be removed from the user's configuration files,
and if a file becomes empty, it will be deleted.
The contents of the user files may be altered using the Options/Configure IDLE
menu to access the configuration GUI (configDialog.py), or manually.
Throughout this module there is an emphasis on returning useable defaults
when a problem occurs in returning a requested configuration value back to
idle. This is to allow IDLE to continue to function in spite of errors in
the retrieval of config information. When a default is returned instead of
a requested config value, a message is printed to stderr to aid in
configuration problem notification and resolution.
"""
import os
import sys
from idlelib import macosxSupport
from configparser import ConfigParser, NoOptionError, NoSectionError
class InvalidConfigType(Exception): pass
class InvalidConfigSet(Exception): pass
class InvalidFgBg(Exception): pass
class InvalidTheme(Exception): pass
class IdleConfParser(ConfigParser):
"""
A ConfigParser specialised for idle configuration file handling
"""
def __init__(self, cfgFile, cfgDefaults=None):
"""
cfgFile - string, fully specified configuration file name
"""
self.file=cfgFile
ConfigParser.__init__(self,defaults=cfgDefaults)
def Get(self, section, option, type=None, default=None, raw=False):
"""
Get an option value for given section/option or return default.
If type is specified, return as type.
"""
if not self.has_option(section, option):
return default
if type=='bool':
return self.getboolean(section, option)
elif type=='int':
return self.getint(section, option)
else:
return self.get(section, option, raw=raw)
def GetOptionList(self,section):
"""
Get an option list for given section
"""
if self.has_section(section):
return self.options(section)
else: #return a default value
return []
def Load(self):
"""
Load the configuration file from disk
"""
self.read(self.file)
class IdleUserConfParser(IdleConfParser):
"""
IdleConfigParser specialised for user configuration handling.
"""
def AddSection(self,section):
"""
if section doesn't exist, add it
"""
if not self.has_section(section):
self.add_section(section)
def RemoveEmptySections(self):
"""
remove any sections that have no options
"""
for section in self.sections():
if not self.GetOptionList(section):
self.remove_section(section)
def IsEmpty(self):
"""
Remove empty sections and then return 1 if parser has no sections
left, else return 0.
"""
self.RemoveEmptySections()
if self.sections():
return 0
else:
return 1
def RemoveOption(self,section,option):
"""
If section/option exists, remove it.
Returns 1 if option was removed, 0 otherwise.
"""
if self.has_section(section):
return self.remove_option(section,option)
def SetOption(self,section,option,value):
"""
Sets option to value, adding section if required.
Returns 1 if option was added or changed, otherwise 0.
"""
if self.has_option(section,option):
if self.get(section,option)==value:
return 0
else:
self.set(section,option,value)
return 1
else:
if not self.has_section(section):
self.add_section(section)
self.set(section,option,value)
return 1
def RemoveFile(self):
"""
Removes the user config file from disk if it exists.
"""
if os.path.exists(self.file):
os.remove(self.file)
def Save(self):
"""Update user configuration file.
Remove empty sections. If resulting config isn't empty, write the file
to disk. If config is empty, remove the file from disk if it exists.
"""
if not self.IsEmpty():
fname = self.file
try:
cfgFile = open(fname, 'w')
except IOError:
os.unlink(fname)
cfgFile = open(fname, 'w')
self.write(cfgFile)
else:
self.RemoveFile()
class IdleConf:
"""
holds config parsers for all idle config files:
default config files
(idle install dir)/config-main.def
(idle install dir)/config-extensions.def
(idle install dir)/config-highlight.def
(idle install dir)/config-keys.def
user config files
(user home dir)/.idlerc/config-main.cfg
(user home dir)/.idlerc/config-extensions.cfg
(user home dir)/.idlerc/config-highlight.cfg
(user home dir)/.idlerc/config-keys.cfg
"""
def __init__(self):
self.defaultCfg={}
self.userCfg={}
self.cfg={}
self.CreateConfigHandlers()
self.LoadCfgFiles()
#self.LoadCfg()
def CreateConfigHandlers(self):
"""
set up a dictionary of config parsers for default and user
configurations respectively
"""
#build idle install path
if __name__ != '__main__': # we were imported
idleDir=os.path.dirname(__file__)
else: # we were exec'ed (for testing only)
idleDir=os.path.abspath(sys.path[0])
userDir=self.GetUserCfgDir()
configTypes=('main','extensions','highlight','keys')
defCfgFiles={}
usrCfgFiles={}
for cfgType in configTypes: #build config file names
defCfgFiles[cfgType]=os.path.join(idleDir,'config-'+cfgType+'.def')
usrCfgFiles[cfgType]=os.path.join(userDir,'config-'+cfgType+'.cfg')
for cfgType in configTypes: #create config parsers
self.defaultCfg[cfgType]=IdleConfParser(defCfgFiles[cfgType])
self.userCfg[cfgType]=IdleUserConfParser(usrCfgFiles[cfgType])
def GetUserCfgDir(self):
"""
Creates (if required) and returns a filesystem directory for storing
user config files.
"""
cfgDir = '.idlerc'
userDir = os.path.expanduser('~')
if userDir != '~': # expanduser() found user home dir
if not os.path.exists(userDir):
warn = ('\n Warning: os.path.expanduser("~") points to\n '+
userDir+',\n but the path does not exist.\n')
try:
sys.stderr.write(warn)
except IOError:
pass
userDir = '~'
if userDir == "~": # still no path to home!
# traditionally IDLE has defaulted to os.getcwd(), is this adequate?
userDir = os.getcwd()
userDir = os.path.join(userDir, cfgDir)
if not os.path.exists(userDir):
try:
os.mkdir(userDir)
except (OSError, IOError):
warn = ('\n Warning: unable to create user config directory\n'+
userDir+'\n Check path and permissions.\n Exiting!\n\n')
sys.stderr.write(warn)
raise SystemExit
return userDir
def GetOption(self, configType, section, option, default=None, type=None,
warn_on_default=True, raw=False):
"""
Get an option value for given config type and given general
configuration section/option or return a default. If type is specified,
return as type. Firstly the user configuration is checked, with a
fallback to the default configuration, and a final 'catch all'
fallback to a useable passed-in default if the option isn't present in
either the user or the default configuration.
configType must be one of ('main','extensions','highlight','keys')
If a default is returned, and warn_on_default is True, a warning is
printed to stderr.
"""
if self.userCfg[configType].has_option(section,option):
return self.userCfg[configType].Get(section, option,
type=type, raw=raw)
elif self.defaultCfg[configType].has_option(section,option):
return self.defaultCfg[configType].Get(section, option,
type=type, raw=raw)
else: #returning default, print warning
if warn_on_default:
warning = ('\n Warning: configHandler.py - IdleConf.GetOption -\n'
' problem retrieving configuration option %r\n'
' from section %r.\n'
' returning default value: %r\n' %
(option, section, default))
try:
sys.stderr.write(warning)
except IOError:
pass
return default
def SetOption(self, configType, section, option, value):
"""In user's config file, set section's option to value.
"""
self.userCfg[configType].SetOption(section, option, value)
def GetSectionList(self, configSet, configType):
"""
Get a list of sections from either the user or default config for
the given config type.
configSet must be either 'user' or 'default'
configType must be one of ('main','extensions','highlight','keys')
"""
if not (configType in ('main','extensions','highlight','keys')):
raise InvalidConfigType('Invalid configType specified')
if configSet == 'user':
cfgParser=self.userCfg[configType]
elif configSet == 'default':
cfgParser=self.defaultCfg[configType]
else:
raise InvalidConfigSet('Invalid configSet specified')
return cfgParser.sections()
def GetHighlight(self, theme, element, fgBg=None):
"""
return individual highlighting theme elements.
fgBg - string ('fg'or'bg') or None, if None return a dictionary
containing fg and bg colours (appropriate for passing to Tkinter in,
e.g., a tag_config call), otherwise fg or bg colour only as specified.
"""
if self.defaultCfg['highlight'].has_section(theme):
themeDict=self.GetThemeDict('default',theme)
else:
themeDict=self.GetThemeDict('user',theme)
fore=themeDict[element+'-foreground']
if element=='cursor': #there is no config value for cursor bg
back=themeDict['normal-background']
else:
back=themeDict[element+'-background']
highlight={"foreground": fore,"background": back}
if not fgBg: #return dict of both colours
return highlight
else: #return specified colour only
if fgBg == 'fg':
return highlight["foreground"]
if fgBg == 'bg':
return highlight["background"]
else:
raise InvalidFgBg('Invalid fgBg specified')
def GetThemeDict(self,type,themeName):
"""
type - string, 'default' or 'user' theme type
themeName - string, theme name
Returns a dictionary which holds {option:value} for each element
in the specified theme. Values are loaded over a set of ultimate last
fallback defaults to guarantee that all theme elements are present in
a newly created theme.
"""
if type == 'user':
cfgParser=self.userCfg['highlight']
elif type == 'default':
cfgParser=self.defaultCfg['highlight']
else:
raise InvalidTheme('Invalid theme type specified')
        #foreground and background values are provided for each theme element
#(apart from cursor) even though all these values are not yet used
#by idle, to allow for their use in the future. Default values are
#generally black and white.
theme={ 'normal-foreground':'#000000',
'normal-background':'#ffffff',
'keyword-foreground':'#000000',
'keyword-background':'#ffffff',
'builtin-foreground':'#000000',
'builtin-background':'#ffffff',
'comment-foreground':'#000000',
'comment-background':'#ffffff',
'string-foreground':'#000000',
'string-background':'#ffffff',
'definition-foreground':'#000000',
'definition-background':'#ffffff',
'hilite-foreground':'#000000',
'hilite-background':'gray',
'break-foreground':'#ffffff',
'break-background':'#000000',
'hit-foreground':'#ffffff',
'hit-background':'#000000',
'error-foreground':'#ffffff',
'error-background':'#000000',
#cursor (only foreground can be set)
'cursor-foreground':'#000000',
#shell window
'stdout-foreground':'#000000',
'stdout-background':'#ffffff',
'stderr-foreground':'#000000',
'stderr-background':'#ffffff',
'console-foreground':'#000000',
'console-background':'#ffffff' }
for element in theme:
if not cfgParser.has_option(themeName,element):
#we are going to return a default, print warning
warning=('\n Warning: configHandler.py - IdleConf.GetThemeDict'
' -\n problem retrieving theme element %r'
'\n from theme %r.\n'
' returning default value: %r\n' %
(element, themeName, theme[element]))
try:
sys.stderr.write(warning)
except IOError:
pass
colour=cfgParser.Get(themeName,element,default=theme[element])
theme[element]=colour
return theme
def CurrentTheme(self):
"""
Returns the name of the currently active theme
"""
return self.GetOption('main','Theme','name',default='')
def CurrentKeys(self):
"""
Returns the name of the currently active key set
"""
return self.GetOption('main','Keys','name',default='')
def GetExtensions(self, active_only=True, editor_only=False, shell_only=False):
"""
Gets a list of all idle extensions declared in the config files.
active_only - boolean, if true only return active (enabled) extensions
"""
extns=self.RemoveKeyBindNames(
self.GetSectionList('default','extensions'))
userExtns=self.RemoveKeyBindNames(
self.GetSectionList('user','extensions'))
for extn in userExtns:
if extn not in extns: #user has added own extension
extns.append(extn)
if active_only:
activeExtns=[]
for extn in extns:
if self.GetOption('extensions', extn, 'enable', default=True,
type='bool'):
#the extension is enabled
if editor_only or shell_only:
if editor_only:
option = "enable_editor"
else:
option = "enable_shell"
if self.GetOption('extensions', extn,option,
default=True, type='bool',
warn_on_default=False):
activeExtns.append(extn)
else:
activeExtns.append(extn)
return activeExtns
else:
return extns
def RemoveKeyBindNames(self,extnNameList):
#get rid of keybinding section names
names=extnNameList
kbNameIndicies=[]
for name in names:
if name.endswith(('_bindings', '_cfgBindings')):
kbNameIndicies.append(names.index(name))
kbNameIndicies.sort()
kbNameIndicies.reverse()
for index in kbNameIndicies: #delete each keybinding section name
del(names[index])
return names
def GetExtnNameForEvent(self,virtualEvent):
"""
Returns the name of the extension that virtualEvent is bound in, or
None if not bound in any extension.
virtualEvent - string, name of the virtual event to test for, without
the enclosing '<< >>'
"""
extName=None
vEvent='<<'+virtualEvent+'>>'
for extn in self.GetExtensions(active_only=0):
for event in self.GetExtensionKeys(extn):
if event == vEvent:
extName=extn
return extName
def GetExtensionKeys(self,extensionName):
"""
returns a dictionary of the configurable keybindings for a particular
extension,as they exist in the dictionary returned by GetCurrentKeySet;
that is, where previously used bindings are disabled.
"""
keysName=extensionName+'_cfgBindings'
activeKeys=self.GetCurrentKeySet()
extKeys={}
if self.defaultCfg['extensions'].has_section(keysName):
eventNames=self.defaultCfg['extensions'].GetOptionList(keysName)
for eventName in eventNames:
event='<<'+eventName+'>>'
binding=activeKeys[event]
extKeys[event]=binding
return extKeys
def __GetRawExtensionKeys(self,extensionName):
"""
returns a dictionary of the configurable keybindings for a particular
extension, as defined in the configuration files, or an empty dictionary
if no bindings are found
"""
keysName=extensionName+'_cfgBindings'
extKeys={}
if self.defaultCfg['extensions'].has_section(keysName):
eventNames=self.defaultCfg['extensions'].GetOptionList(keysName)
for eventName in eventNames:
binding=self.GetOption('extensions',keysName,
eventName,default='').split()
event='<<'+eventName+'>>'
extKeys[event]=binding
return extKeys
def GetExtensionBindings(self,extensionName):
"""
Returns a dictionary of all the event bindings for a particular
extension. The configurable keybindings are returned as they exist in
the dictionary returned by GetCurrentKeySet; that is, where re-used
keybindings are disabled.
"""
bindsName=extensionName+'_bindings'
extBinds=self.GetExtensionKeys(extensionName)
#add the non-configurable bindings
if self.defaultCfg['extensions'].has_section(bindsName):
eventNames=self.defaultCfg['extensions'].GetOptionList(bindsName)
for eventName in eventNames:
binding=self.GetOption('extensions',bindsName,
eventName,default='').split()
event='<<'+eventName+'>>'
extBinds[event]=binding
return extBinds
def GetKeyBinding(self, keySetName, eventStr):
"""
returns the keybinding for a specific event.
keySetName - string, name of key binding set
eventStr - string, the virtual event we want the binding for,
represented as a string, eg. '<<event>>'
"""
eventName=eventStr[2:-2] #trim off the angle brackets
binding=self.GetOption('keys',keySetName,eventName,default='').split()
return binding
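    # Illustrative sketch, not part of the original module: assuming the
    # selected key set defines an option such as
    #
    #     copy = <Control-Key-c> <Control-Key-C>
    #
    # in its keys configuration section, the lookup behaves roughly like
    #
    #     idleConf.GetKeyBinding(idleConf.CurrentKeys(), '<<copy>>')
    #     # -> ['<Control-Key-c>', '<Control-Key-C>']
    #
    # An unknown event falls back to the default '' and therefore returns [].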
def GetCurrentKeySet(self):
result = self.GetKeySet(self.CurrentKeys())
if macosxSupport.runningAsOSXApp():
            # We're using AquaTk; replace all keybindings that use the
            # Alt key with ones that use the Option key, because the
            # former don't work reliably.
for k, v in result.items():
v2 = [ x.replace('<Alt-', '<Option-') for x in v ]
if v != v2:
result[k] = v2
return result
def GetKeySet(self,keySetName):
"""
Returns a dictionary of: all requested core keybindings, plus the
keybindings for all currently active extensions. If a binding defined
in an extension is already in use, that binding is disabled.
"""
keySet=self.GetCoreKeys(keySetName)
activeExtns=self.GetExtensions(active_only=1)
for extn in activeExtns:
extKeys=self.__GetRawExtensionKeys(extn)
if extKeys: #the extension defines keybindings
for event in extKeys:
if extKeys[event] in keySet.values():
#the binding is already in use
extKeys[event]='' #disable this binding
keySet[event]=extKeys[event] #add binding
return keySet
def IsCoreBinding(self,virtualEvent):
"""
returns true if the virtual event is bound in the core idle keybindings.
virtualEvent - string, name of the virtual event to test for, without
the enclosing '<< >>'
"""
return ('<<'+virtualEvent+'>>') in self.GetCoreKeys()
def GetCoreKeys(self, keySetName=None):
"""
returns the requested set of core keybindings, with fallbacks if
required.
Keybindings loaded from the config file(s) are loaded _over_ these
defaults, so if there is a problem getting any core binding there will
be an 'ultimate last resort fallback' to the CUA-ish bindings
defined here.
"""
keyBindings={
'<<copy>>': ['<Control-c>', '<Control-C>'],
'<<cut>>': ['<Control-x>', '<Control-X>'],
'<<paste>>': ['<Control-v>', '<Control-V>'],
'<<beginning-of-line>>': ['<Control-a>', '<Home>'],
'<<center-insert>>': ['<Control-l>'],
'<<close-all-windows>>': ['<Control-q>'],
'<<close-window>>': ['<Alt-F4>'],
'<<do-nothing>>': ['<Control-x>'],
'<<end-of-file>>': ['<Control-d>'],
'<<python-docs>>': ['<F1>'],
'<<python-context-help>>': ['<Shift-F1>'],
'<<history-next>>': ['<Alt-n>'],
'<<history-previous>>': ['<Alt-p>'],
'<<interrupt-execution>>': ['<Control-c>'],
'<<view-restart>>': ['<F6>'],
'<<restart-shell>>': ['<Control-F6>'],
'<<open-class-browser>>': ['<Alt-c>'],
'<<open-module>>': ['<Alt-m>'],
'<<open-new-window>>': ['<Control-n>'],
'<<open-window-from-file>>': ['<Control-o>'],
'<<plain-newline-and-indent>>': ['<Control-j>'],
'<<print-window>>': ['<Control-p>'],
'<<redo>>': ['<Control-y>'],
'<<remove-selection>>': ['<Escape>'],
'<<save-copy-of-window-as-file>>': ['<Alt-Shift-S>'],
'<<save-window-as-file>>': ['<Alt-s>'],
'<<save-window>>': ['<Control-s>'],
'<<select-all>>': ['<Alt-a>'],
'<<toggle-auto-coloring>>': ['<Control-slash>'],
'<<undo>>': ['<Control-z>'],
'<<find-again>>': ['<Control-g>', '<F3>'],
'<<find-in-files>>': ['<Alt-F3>'],
'<<find-selection>>': ['<Control-F3>'],
'<<find>>': ['<Control-f>'],
'<<replace>>': ['<Control-h>'],
'<<goto-line>>': ['<Alt-g>'],
'<<smart-backspace>>': ['<Key-BackSpace>'],
'<<newline-and-indent>>': ['<Key-Return> <Key-KP_Enter>'],
'<<smart-indent>>': ['<Key-Tab>'],
'<<indent-region>>': ['<Control-Key-bracketright>'],
'<<dedent-region>>': ['<Control-Key-bracketleft>'],
'<<comment-region>>': ['<Alt-Key-3>'],
'<<uncomment-region>>': ['<Alt-Key-4>'],
'<<tabify-region>>': ['<Alt-Key-5>'],
'<<untabify-region>>': ['<Alt-Key-6>'],
'<<toggle-tabs>>': ['<Alt-Key-t>'],
'<<change-indentwidth>>': ['<Alt-Key-u>'],
'<<del-word-left>>': ['<Control-Key-BackSpace>'],
'<<del-word-right>>': ['<Control-Key-Delete>']
}
if keySetName:
for event in keyBindings:
binding=self.GetKeyBinding(keySetName,event)
if binding:
keyBindings[event]=binding
else: #we are going to return a default, print warning
warning=('\n Warning: configHandler.py - IdleConf.GetCoreKeys'
' -\n problem retrieving key binding for event %r'
'\n from key set %r.\n'
' returning default value: %r\n' %
(event, keySetName, keyBindings[event]))
try:
sys.stderr.write(warning)
except IOError:
pass
return keyBindings
def GetExtraHelpSourceList(self,configSet):
"""Fetch list of extra help sources from a given configSet.
Valid configSets are 'user' or 'default'. Return a list of tuples of
the form (menu_item , path_to_help_file , option), or return the empty
list. 'option' is the sequence number of the help resource. 'option'
values determine the position of the menu items on the Help menu,
therefore the returned list must be sorted by 'option'.
"""
helpSources=[]
if configSet=='user':
cfgParser=self.userCfg['main']
elif configSet=='default':
cfgParser=self.defaultCfg['main']
else:
raise InvalidConfigSet('Invalid configSet specified')
options=cfgParser.GetOptionList('HelpFiles')
for option in options:
value=cfgParser.Get('HelpFiles',option,default=';')
if value.find(';')==-1: #malformed config entry with no ';'
menuItem='' #make these empty
helpPath='' #so value won't be added to list
else: #config entry contains ';' as expected
value=value.split(';')
menuItem=value[0].strip()
helpPath=value[1].strip()
if menuItem and helpPath: #neither are empty strings
helpSources.append( (menuItem,helpPath,option) )
helpSources.sort(key=lambda x: x[2])
return helpSources
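    # Illustrative sketch, not part of the original module: a user-level
    # [HelpFiles] section entry of the (assumed) form
    #
    #     1 = Python Library Reference;/usr/share/doc/python/index.html
    #
    # would be returned by GetExtraHelpSourceList('user') as the tuple
    # ('Python Library Reference', '/usr/share/doc/python/index.html', '1').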
def GetAllExtraHelpSourcesList(self):
"""
Returns a list of tuples containing the details of all additional help
sources configured, or an empty list if there are none. Tuples are of
the format returned by GetExtraHelpSourceList.
"""
allHelpSources=( self.GetExtraHelpSourceList('default')+
self.GetExtraHelpSourceList('user') )
return allHelpSources
def LoadCfgFiles(self):
"""
load all configuration files.
"""
for key in self.defaultCfg:
self.defaultCfg[key].Load()
self.userCfg[key].Load() #same keys
def SaveUserCfgFiles(self):
"""
write all loaded user configuration files back to disk
"""
for key in self.userCfg:
self.userCfg[key].Save()
idleConf=IdleConf()
### module test
if __name__ == '__main__':
def dumpCfg(cfg):
print('\n',cfg,'\n')
for key in cfg:
sections=cfg[key].sections()
print(key)
print(sections)
for section in sections:
options=cfg[key].options(section)
print(section)
print(options)
for option in options:
print(option, '=', cfg[key].Get(section,option))
dumpCfg(idleConf.defaultCfg)
dumpCfg(idleConf.userCfg)
print(idleConf.userCfg['main'].Get('Theme','name'))
#print idleConf.userCfg['highlight'].GetDefHighlight('Foo','normal')
| apache-2.0 |
ASlave2Audio/Restaurant-App | mingw/bin/lib/lib2to3/pytree.py | 325 | 29039 | # Copyright 2006 Google, Inc. All Rights Reserved.
# Licensed to PSF under a Contributor Agreement.
"""
Python parse tree definitions.
This is a very concrete parse tree; we need to keep every token and
even the comments and whitespace between tokens.
There's also a pattern matching implementation here.
"""
__author__ = "Guido van Rossum <[email protected]>"
import sys
import warnings
from StringIO import StringIO
HUGE = 0x7FFFFFFF # maximum repeat count, default max
_type_reprs = {}
def type_repr(type_num):
global _type_reprs
if not _type_reprs:
from .pygram import python_symbols
# printing tokens is possible but not as useful
# from .pgen2 import token // token.__dict__.items():
for name, val in python_symbols.__dict__.items():
if type(val) == int: _type_reprs[val] = name
return _type_reprs.setdefault(type_num, type_num)
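# Illustrative note, not part of the original module: for a grammar symbol
# number such as pygram.python_symbols.funcdef, type_repr() returns the
# symbol's name ('funcdef'); any number it cannot resolve (e.g. a plain token
# number) is returned unchanged.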
class Base(object):
"""
Abstract base class for Node and Leaf.
This provides some default functionality and boilerplate using the
template pattern.
A node may be a subnode of at most one parent.
"""
# Default values for instance variables
type = None # int: token number (< 256) or symbol number (>= 256)
parent = None # Parent node pointer, or None
children = () # Tuple of subnodes
was_changed = False
was_checked = False
def __new__(cls, *args, **kwds):
"""Constructor that prevents Base from being instantiated."""
assert cls is not Base, "Cannot instantiate Base"
return object.__new__(cls)
def __eq__(self, other):
"""
Compare two nodes for equality.
This calls the method _eq().
"""
if self.__class__ is not other.__class__:
return NotImplemented
return self._eq(other)
__hash__ = None # For Py3 compatibility.
def __ne__(self, other):
"""
Compare two nodes for inequality.
This calls the method _eq().
"""
if self.__class__ is not other.__class__:
return NotImplemented
return not self._eq(other)
def _eq(self, other):
"""
Compare two nodes for equality.
This is called by __eq__ and __ne__. It is only called if the two nodes
have the same type. This must be implemented by the concrete subclass.
Nodes should be considered equal if they have the same structure,
ignoring the prefix string and other context information.
"""
raise NotImplementedError
def clone(self):
"""
Return a cloned (deep) copy of self.
This must be implemented by the concrete subclass.
"""
raise NotImplementedError
def post_order(self):
"""
Return a post-order iterator for the tree.
This must be implemented by the concrete subclass.
"""
raise NotImplementedError
def pre_order(self):
"""
Return a pre-order iterator for the tree.
This must be implemented by the concrete subclass.
"""
raise NotImplementedError
def set_prefix(self, prefix):
"""
Set the prefix for the node (see Leaf class).
DEPRECATED; use the prefix property directly.
"""
warnings.warn("set_prefix() is deprecated; use the prefix property",
DeprecationWarning, stacklevel=2)
self.prefix = prefix
def get_prefix(self):
"""
Return the prefix for the node (see Leaf class).
DEPRECATED; use the prefix property directly.
"""
warnings.warn("get_prefix() is deprecated; use the prefix property",
DeprecationWarning, stacklevel=2)
return self.prefix
def replace(self, new):
"""Replace this node with a new one in the parent."""
assert self.parent is not None, str(self)
assert new is not None
if not isinstance(new, list):
new = [new]
l_children = []
found = False
for ch in self.parent.children:
if ch is self:
assert not found, (self.parent.children, self, new)
if new is not None:
l_children.extend(new)
found = True
else:
l_children.append(ch)
assert found, (self.children, self, new)
self.parent.changed()
self.parent.children = l_children
for x in new:
x.parent = self.parent
self.parent = None
def get_lineno(self):
"""Return the line number which generated the invocant node."""
node = self
while not isinstance(node, Leaf):
if not node.children:
return
node = node.children[0]
return node.lineno
def changed(self):
if self.parent:
self.parent.changed()
self.was_changed = True
def remove(self):
"""
Remove the node from the tree. Returns the position of the node in its
parent's children before it was removed.
"""
if self.parent:
for i, node in enumerate(self.parent.children):
if node is self:
self.parent.changed()
del self.parent.children[i]
self.parent = None
return i
@property
def next_sibling(self):
"""
The node immediately following the invocant in their parent's children
list. If the invocant does not have a next sibling, it is None
"""
if self.parent is None:
return None
# Can't use index(); we need to test by identity
for i, child in enumerate(self.parent.children):
if child is self:
try:
return self.parent.children[i+1]
except IndexError:
return None
@property
def prev_sibling(self):
"""
The node immediately preceding the invocant in their parent's children
list. If the invocant does not have a previous sibling, it is None.
"""
if self.parent is None:
return None
# Can't use index(); we need to test by identity
for i, child in enumerate(self.parent.children):
if child is self:
if i == 0:
return None
return self.parent.children[i-1]
def leaves(self):
for child in self.children:
for x in child.leaves():
yield x
def depth(self):
if self.parent is None:
return 0
return 1 + self.parent.depth()
def get_suffix(self):
"""
Return the string immediately following the invocant node. This is
effectively equivalent to node.next_sibling.prefix
"""
next_sib = self.next_sibling
if next_sib is None:
return u""
return next_sib.prefix
if sys.version_info < (3, 0):
def __str__(self):
return unicode(self).encode("ascii")
class Node(Base):
"""Concrete implementation for interior nodes."""
def __init__(self,type, children,
context=None,
prefix=None,
fixers_applied=None):
"""
Initializer.
Takes a type constant (a symbol number >= 256), a sequence of
child nodes, and an optional context keyword argument.
As a side effect, the parent pointers of the children are updated.
"""
assert type >= 256, type
self.type = type
self.children = list(children)
for ch in self.children:
assert ch.parent is None, repr(ch)
ch.parent = self
if prefix is not None:
self.prefix = prefix
if fixers_applied:
self.fixers_applied = fixers_applied[:]
else:
self.fixers_applied = None
def __repr__(self):
"""Return a canonical string representation."""
return "%s(%s, %r)" % (self.__class__.__name__,
type_repr(self.type),
self.children)
def __unicode__(self):
"""
Return a pretty string representation.
This reproduces the input source exactly.
"""
return u"".join(map(unicode, self.children))
if sys.version_info > (3, 0):
__str__ = __unicode__
def _eq(self, other):
"""Compare two nodes for equality."""
return (self.type, self.children) == (other.type, other.children)
def clone(self):
"""Return a cloned (deep) copy of self."""
return Node(self.type, [ch.clone() for ch in self.children],
fixers_applied=self.fixers_applied)
def post_order(self):
"""Return a post-order iterator for the tree."""
for child in self.children:
for node in child.post_order():
yield node
yield self
def pre_order(self):
"""Return a pre-order iterator for the tree."""
yield self
for child in self.children:
for node in child.pre_order():
yield node
def _prefix_getter(self):
"""
The whitespace and comments preceding this node in the input.
"""
if not self.children:
return ""
return self.children[0].prefix
def _prefix_setter(self, prefix):
if self.children:
self.children[0].prefix = prefix
prefix = property(_prefix_getter, _prefix_setter)
def set_child(self, i, child):
"""
Equivalent to 'node.children[i] = child'. This method also sets the
child's parent attribute appropriately.
"""
child.parent = self
self.children[i].parent = None
self.children[i] = child
self.changed()
def insert_child(self, i, child):
"""
Equivalent to 'node.children.insert(i, child)'. This method also sets
the child's parent attribute appropriately.
"""
child.parent = self
self.children.insert(i, child)
self.changed()
def append_child(self, child):
"""
Equivalent to 'node.children.append(child)'. This method also sets the
child's parent attribute appropriately.
"""
child.parent = self
self.children.append(child)
self.changed()
class Leaf(Base):
"""Concrete implementation for leaf nodes."""
# Default values for instance variables
_prefix = "" # Whitespace and comments preceding this token in the input
lineno = 0 # Line where this token starts in the input
    column = 0  # Column where this token starts in the input
def __init__(self, type, value,
context=None,
prefix=None,
fixers_applied=[]):
"""
Initializer.
Takes a type constant (a token number < 256), a string value, and an
optional context keyword argument.
"""
assert 0 <= type < 256, type
if context is not None:
self._prefix, (self.lineno, self.column) = context
self.type = type
self.value = value
if prefix is not None:
self._prefix = prefix
self.fixers_applied = fixers_applied[:]
def __repr__(self):
"""Return a canonical string representation."""
return "%s(%r, %r)" % (self.__class__.__name__,
self.type,
self.value)
def __unicode__(self):
"""
Return a pretty string representation.
This reproduces the input source exactly.
"""
return self.prefix + unicode(self.value)
if sys.version_info > (3, 0):
__str__ = __unicode__
def _eq(self, other):
"""Compare two nodes for equality."""
return (self.type, self.value) == (other.type, other.value)
def clone(self):
"""Return a cloned (deep) copy of self."""
return Leaf(self.type, self.value,
(self.prefix, (self.lineno, self.column)),
fixers_applied=self.fixers_applied)
def leaves(self):
yield self
def post_order(self):
"""Return a post-order iterator for the tree."""
yield self
def pre_order(self):
"""Return a pre-order iterator for the tree."""
yield self
def _prefix_getter(self):
"""
The whitespace and comments preceding this token in the input.
"""
return self._prefix
def _prefix_setter(self, prefix):
self.changed()
self._prefix = prefix
prefix = property(_prefix_getter, _prefix_setter)
def convert(gr, raw_node):
"""
Convert raw node information to a Node or Leaf instance.
This is passed to the parser driver which calls it whenever a reduction of a
    grammar rule produces a new complete node, so that the tree is built
strictly bottom-up.
"""
type, value, context, children = raw_node
if children or type in gr.number2symbol:
# If there's exactly one child, return that child instead of
# creating a new node.
if len(children) == 1:
return children[0]
return Node(type, children, context=context)
else:
return Leaf(type, value, context=context)
class BasePattern(object):
"""
A pattern is a tree matching pattern.
It looks for a specific node type (token or symbol), and
optionally for a specific content.
This is an abstract base class. There are three concrete
subclasses:
- LeafPattern matches a single leaf node;
- NodePattern matches a single node (usually non-leaf);
- WildcardPattern matches a sequence of nodes of variable length.
"""
# Defaults for instance variables
type = None # Node type (token if < 256, symbol if >= 256)
content = None # Optional content matching pattern
name = None # Optional name used to store match in results dict
def __new__(cls, *args, **kwds):
"""Constructor that prevents BasePattern from being instantiated."""
assert cls is not BasePattern, "Cannot instantiate BasePattern"
return object.__new__(cls)
def __repr__(self):
args = [type_repr(self.type), self.content, self.name]
while args and args[-1] is None:
del args[-1]
return "%s(%s)" % (self.__class__.__name__, ", ".join(map(repr, args)))
def optimize(self):
"""
A subclass can define this as a hook for optimizations.
Returns either self or another node with the same effect.
"""
return self
def match(self, node, results=None):
"""
Does this pattern exactly match a node?
Returns True if it matches, False if not.
If results is not None, it must be a dict which will be
updated with the nodes matching named subpatterns.
Default implementation for non-wildcard patterns.
"""
if self.type is not None and node.type != self.type:
return False
if self.content is not None:
r = None
if results is not None:
r = {}
if not self._submatch(node, r):
return False
if r:
results.update(r)
if results is not None and self.name:
results[self.name] = node
return True
def match_seq(self, nodes, results=None):
"""
Does this pattern exactly match a sequence of nodes?
Default implementation for non-wildcard patterns.
"""
if len(nodes) != 1:
return False
return self.match(nodes[0], results)
def generate_matches(self, nodes):
"""
Generator yielding all matches for this pattern.
Default implementation for non-wildcard patterns.
"""
r = {}
if nodes and self.match(nodes[0], r):
yield 1, r
class LeafPattern(BasePattern):
def __init__(self, type=None, content=None, name=None):
"""
Initializer. Takes optional type, content, and name.
        The type, if given, must be a token type (< 256). If not given,
this matches any *leaf* node; the content may still be required.
The content, if given, must be a string.
If a name is given, the matching node is stored in the results
dict under that key.
"""
if type is not None:
assert 0 <= type < 256, type
if content is not None:
assert isinstance(content, basestring), repr(content)
self.type = type
self.content = content
self.name = name
def match(self, node, results=None):
"""Override match() to insist on a leaf node."""
if not isinstance(node, Leaf):
return False
return BasePattern.match(self, node, results)
def _submatch(self, node, results=None):
"""
Match the pattern's content to the node's children.
This assumes the node type matches and self.content is not None.
Returns True if it matches, False if not.
If results is not None, it must be a dict which will be
updated with the nodes matching named subpatterns.
When returning False, the results dict may still be updated.
"""
return self.content == node.value
class NodePattern(BasePattern):
wildcards = False
def __init__(self, type=None, content=None, name=None):
"""
Initializer. Takes optional type, content, and name.
The type, if given, must be a symbol type (>= 256). If the
type is None this matches *any* single node (leaf or not),
        except if content is not None, in which case it only matches
non-leaf nodes that also match the content pattern.
The content, if not None, must be a sequence of Patterns that
must match the node's children exactly. If the content is
given, the type must not be None.
If a name is given, the matching node is stored in the results
dict under that key.
"""
if type is not None:
assert type >= 256, type
if content is not None:
assert not isinstance(content, basestring), repr(content)
content = list(content)
for i, item in enumerate(content):
assert isinstance(item, BasePattern), (i, item)
if isinstance(item, WildcardPattern):
self.wildcards = True
self.type = type
self.content = content
self.name = name
def _submatch(self, node, results=None):
"""
Match the pattern's content to the node's children.
This assumes the node type matches and self.content is not None.
Returns True if it matches, False if not.
If results is not None, it must be a dict which will be
updated with the nodes matching named subpatterns.
When returning False, the results dict may still be updated.
"""
if self.wildcards:
for c, r in generate_matches(self.content, node.children):
if c == len(node.children):
if results is not None:
results.update(r)
return True
return False
if len(self.content) != len(node.children):
return False
for subpattern, child in zip(self.content, node.children):
if not subpattern.match(child, results):
return False
return True
class WildcardPattern(BasePattern):
"""
A wildcard pattern can match zero or more nodes.
This has all the flexibility needed to implement patterns like:
.* .+ .? .{m,n}
(a b c | d e | f)
(...)* (...)+ (...)? (...){m,n}
except it always uses non-greedy matching.
"""
def __init__(self, content=None, min=0, max=HUGE, name=None):
"""
Initializer.
Args:
content: optional sequence of subsequences of patterns;
if absent, matches one node;
if present, each subsequence is an alternative [*]
min: optional minimum number of times to match, default 0
max: optional maximum number of times to match, default HUGE
name: optional name assigned to this match
[*] Thus, if content is [[a, b, c], [d, e], [f, g, h]] this is
equivalent to (a b c | d e | f g h); if content is None,
this is equivalent to '.' in regular expression terms.
The min and max parameters work as follows:
min=0, max=maxint: .*
min=1, max=maxint: .+
min=0, max=1: .?
min=1, max=1: .
If content is not None, replace the dot with the parenthesized
list of alternatives, e.g. (a b c | d e | f g h)*
"""
assert 0 <= min <= max <= HUGE, (min, max)
if content is not None:
content = tuple(map(tuple, content)) # Protect against alterations
# Check sanity of alternatives
assert len(content), repr(content) # Can't have zero alternatives
for alt in content:
                assert len(alt), repr(alt) # Can't have empty alternatives
self.content = content
self.min = min
self.max = max
self.name = name
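    # Illustrative sketch, not part of the original module: with a, b and c
    # assumed to be LeafPattern/NodePattern instances, the regex-like form
    # (a b | c){1,3} could be expressed as
    #
    #     WildcardPattern(content=[[a, b], [c]], min=1, max=3, name="seq")
    #
    # while WildcardPattern(min=1, max=1) behaves like '.' (any single node).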
def optimize(self):
"""Optimize certain stacked wildcard patterns."""
subpattern = None
if (self.content is not None and
len(self.content) == 1 and len(self.content[0]) == 1):
subpattern = self.content[0][0]
if self.min == 1 and self.max == 1:
if self.content is None:
return NodePattern(name=self.name)
if subpattern is not None and self.name == subpattern.name:
return subpattern.optimize()
if (self.min <= 1 and isinstance(subpattern, WildcardPattern) and
subpattern.min <= 1 and self.name == subpattern.name):
return WildcardPattern(subpattern.content,
self.min*subpattern.min,
self.max*subpattern.max,
subpattern.name)
return self
def match(self, node, results=None):
"""Does this pattern exactly match a node?"""
return self.match_seq([node], results)
def match_seq(self, nodes, results=None):
"""Does this pattern exactly match a sequence of nodes?"""
for c, r in self.generate_matches(nodes):
if c == len(nodes):
if results is not None:
results.update(r)
if self.name:
results[self.name] = list(nodes)
return True
return False
def generate_matches(self, nodes):
"""
Generator yielding matches for a sequence of nodes.
Args:
nodes: sequence of nodes
Yields:
(count, results) tuples where:
count: the match comprises nodes[:count];
results: dict containing named submatches.
"""
if self.content is None:
# Shortcut for special case (see __init__.__doc__)
for count in xrange(self.min, 1 + min(len(nodes), self.max)):
r = {}
if self.name:
r[self.name] = nodes[:count]
yield count, r
elif self.name == "bare_name":
yield self._bare_name_matches(nodes)
else:
            # We temporarily redirect stderr below because hitting the
            # recursion limit usually results in some ugly messages about how
            # RuntimeErrors are being ignored. We don't do this on non-CPython
            # implementations because they don't have this problem.
if hasattr(sys, "getrefcount"):
save_stderr = sys.stderr
sys.stderr = StringIO()
try:
for count, r in self._recursive_matches(nodes, 0):
if self.name:
r[self.name] = nodes[:count]
yield count, r
except RuntimeError:
# We fall back to the iterative pattern matching scheme if the recursive
# scheme hits the recursion limit.
for count, r in self._iterative_matches(nodes):
if self.name:
r[self.name] = nodes[:count]
yield count, r
finally:
if hasattr(sys, "getrefcount"):
sys.stderr = save_stderr
def _iterative_matches(self, nodes):
"""Helper to iteratively yield the matches."""
nodelen = len(nodes)
if 0 >= self.min:
yield 0, {}
results = []
# generate matches that use just one alt from self.content
for alt in self.content:
for c, r in generate_matches(alt, nodes):
yield c, r
results.append((c, r))
# for each match, iterate down the nodes
while results:
new_results = []
for c0, r0 in results:
# stop if the entire set of nodes has been matched
if c0 < nodelen and c0 <= self.max:
for alt in self.content:
for c1, r1 in generate_matches(alt, nodes[c0:]):
if c1 > 0:
r = {}
r.update(r0)
r.update(r1)
yield c0 + c1, r
new_results.append((c0 + c1, r))
results = new_results
def _bare_name_matches(self, nodes):
"""Special optimized matcher for bare_name."""
count = 0
r = {}
done = False
max = len(nodes)
while not done and count < max:
done = True
for leaf in self.content:
if leaf[0].match(nodes[count], r):
count += 1
done = False
break
r[self.name] = nodes[:count]
return count, r
def _recursive_matches(self, nodes, count):
"""Helper to recursively yield the matches."""
assert self.content is not None
if count >= self.min:
yield 0, {}
if count < self.max:
for alt in self.content:
for c0, r0 in generate_matches(alt, nodes):
for c1, r1 in self._recursive_matches(nodes[c0:], count+1):
r = {}
r.update(r0)
r.update(r1)
yield c0 + c1, r
class NegatedPattern(BasePattern):
def __init__(self, content=None):
"""
Initializer.
The argument is either a pattern or None. If it is None, this
only matches an empty sequence (effectively '$' in regex
lingo). If it is not None, this matches whenever the argument
pattern doesn't have any matches.
"""
if content is not None:
assert isinstance(content, BasePattern), repr(content)
self.content = content
def match(self, node):
# We never match a node in its entirety
return False
def match_seq(self, nodes):
# We only match an empty sequence of nodes in its entirety
return len(nodes) == 0
def generate_matches(self, nodes):
if self.content is None:
# Return a match if there is an empty sequence
if len(nodes) == 0:
yield 0, {}
else:
# Return a match if the argument pattern has no matches
for c, r in self.content.generate_matches(nodes):
return
yield 0, {}
def generate_matches(patterns, nodes):
"""
Generator yielding matches for a sequence of patterns and nodes.
Args:
patterns: a sequence of patterns
nodes: a sequence of nodes
Yields:
(count, results) tuples where:
count: the entire sequence of patterns matches nodes[:count];
results: dict containing named submatches.
"""
if not patterns:
yield 0, {}
else:
p, rest = patterns[0], patterns[1:]
for c0, r0 in p.generate_matches(nodes):
if not rest:
yield c0, r0
else:
for c1, r1 in generate_matches(rest, nodes[c0:]):
r = {}
r.update(r0)
r.update(r1)
yield c0 + c1, r
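# Illustrative note, not part of the original module: for patterns [p1, p2]
# and nodes [n1, n2, n3], a yielded pair (2, {'name': n1}) means that p1 and
# p2 together matched the prefix [n1, n2] and that some named sub-pattern
# stored n1 under the key 'name'; the remaining nodes are left for the caller.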
| mit |
ryfeus/lambda-packs | Tensorflow_Pandas_Numpy/source3.6/tensorflow/contrib/opt/python/training/nadam_optimizer.py | 57 | 4017 | # Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Nadam for TensorFlow."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow.python.framework import ops
from tensorflow.python.ops import control_flow_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import state_ops
from tensorflow.python.training import adam
from tensorflow.python.training import training_ops
class NadamOptimizer(adam.AdamOptimizer):
"""Optimizer that implements the Nadam algorithm.
See [Dozat, T., 2015](http://cs229.stanford.edu/proj2015/054_report.pdf).
"""
def _apply_dense(self, grad, var):
m = self.get_slot(var, "m")
v = self.get_slot(var, "v")
return training_ops.apply_adam(
var,
m,
v,
math_ops.cast(self._beta1_power, var.dtype.base_dtype),
math_ops.cast(self._beta2_power, var.dtype.base_dtype),
math_ops.cast(self._lr_t, var.dtype.base_dtype),
math_ops.cast(self._beta1_t, var.dtype.base_dtype),
math_ops.cast(self._beta2_t, var.dtype.base_dtype),
math_ops.cast(self._epsilon_t, var.dtype.base_dtype),
grad,
use_locking=self._use_locking,
use_nesterov=True).op
def _resource_apply_dense(self, grad, var):
m = self.get_slot(var, "m")
v = self.get_slot(var, "v")
return training_ops.resource_apply_adam(
var.handle,
m.handle,
v.handle,
math_ops.cast(self._beta1_power, grad.dtype.base_dtype),
math_ops.cast(self._beta2_power, grad.dtype.base_dtype),
math_ops.cast(self._lr_t, grad.dtype.base_dtype),
math_ops.cast(self._beta1_t, grad.dtype.base_dtype),
math_ops.cast(self._beta2_t, grad.dtype.base_dtype),
math_ops.cast(self._epsilon_t, grad.dtype.base_dtype),
grad,
use_locking=self._use_locking,
use_nesterov=True)
def _apply_sparse_shared(self, grad, var, indices, scatter_add):
beta1_power = math_ops.cast(self._beta1_power, var.dtype.base_dtype)
beta2_power = math_ops.cast(self._beta2_power, var.dtype.base_dtype)
lr_t = math_ops.cast(self._lr_t, var.dtype.base_dtype)
beta1_t = math_ops.cast(self._beta1_t, var.dtype.base_dtype)
beta2_t = math_ops.cast(self._beta2_t, var.dtype.base_dtype)
epsilon_t = math_ops.cast(self._epsilon_t, var.dtype.base_dtype)
lr = (lr_t * math_ops.sqrt(1 - beta2_power) / (1 - beta1_power))
# m_t = beta1 * m + (1 - beta1) * g_t
m = self.get_slot(var, "m")
m_scaled_g_values = grad * (1 - beta1_t)
m_t = state_ops.assign(m, m * beta1_t, use_locking=self._use_locking)
with ops.control_dependencies([m_t]):
m_t = scatter_add(m, indices, m_scaled_g_values)
# m_bar = (1 - beta1) * g_t + beta1 * m_t
m_bar = m_scaled_g_values + beta1_t * m_t
# v_t = beta2 * v + (1 - beta2) * (g_t * g_t)
v = self.get_slot(var, "v")
v_scaled_g_values = (grad * grad) * (1 - beta2_t)
v_t = state_ops.assign(v, v * beta2_t, use_locking=self._use_locking)
with ops.control_dependencies([v_t]):
v_t = scatter_add(v, indices, v_scaled_g_values)
v_sqrt = math_ops.sqrt(v_t)
var_update = state_ops.assign_sub(
var, lr * m_bar / (v_sqrt + epsilon_t), use_locking=self._use_locking)
return control_flow_ops.group(*[var_update, m_bar, v_t])
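# Usage sketch (illustrative, not part of the original file): the class is a
# drop-in replacement for tf.train.AdamOptimizer, e.g.
#
#   opt = NadamOptimizer(learning_rate=0.001)
#   train_op = opt.minimize(loss)   # `loss` is assumed to be a scalar tensor
#
# the behavioural difference being the Nesterov-style momentum update
# (use_nesterov=True / the m_bar term above).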
| mit |
partofthething/home-assistant | homeassistant/components/hlk_sw16/switch.py | 16 | 1200 | """Support for HLK-SW16 switches."""
from homeassistant.components.switch import ToggleEntity
from . import DATA_DEVICE_REGISTER, SW16Device
from .const import DOMAIN
PARALLEL_UPDATES = 0
def devices_from_entities(hass, entry):
"""Parse configuration and add HLK-SW16 switch devices."""
device_client = hass.data[DOMAIN][entry.entry_id][DATA_DEVICE_REGISTER]
devices = []
for i in range(16):
device_port = f"{i:01x}"
device = SW16Switch(device_port, entry.entry_id, device_client)
devices.append(device)
return devices
async def async_setup_entry(hass, entry, async_add_entities):
"""Set up the HLK-SW16 platform."""
async_add_entities(devices_from_entities(hass, entry))
class SW16Switch(SW16Device, ToggleEntity):
"""Representation of a HLK-SW16 switch."""
@property
def is_on(self):
"""Return true if device is on."""
return self._is_on
async def async_turn_on(self, **kwargs):
"""Turn the device on."""
await self._client.turn_on(self._device_port)
async def async_turn_off(self, **kwargs):
"""Turn the device off."""
await self._client.turn_off(self._device_port)
| mit |
srom/chessbot | estimator/train/parse_logs/__main__.py | 1 | 1947 | from __future__ import unicode_literals
import argparse
import logging
import re
logger = logging.getLogger(__name__)
def main(log_path):
for log_line in yield_train_log_line(log_path):
print log_line
break
class TrainLogLine(object):
__slots__ = ('iteration', 'elapsed', 'test_loss', 'train_loss', 'best', 'best_iteration')
def __init__(self, **kwargs):
for key, value in kwargs.iteritems():
setattr(self, key, value)
def __unicode__(self):
return (
'Training batch {iteration}; ' +
'Elapsed {elapsed}; ' +
'loss: {test_loss} (train: {train_loss}); ' +
'best: {best} ({best_iteration})'
).format(**self.to_dict())
def __repr__(self):
return self.__unicode__()
def to_dict(self):
return {
key: getattr(self, key)
for key in self.__slots__
if hasattr(self, key)
}
def yield_train_log_line(log_path):
with open(log_path, 'r') as f:
for line in f:
if is_train_log_line(line):
yield parse_line(line)
def parse_line(line):
r = r'^.*Training batch ([0-9]+); Elapsed ([0-9]+)s; loss: ([0-9\.]+) \(train: ([0-9\.]+)\); best: ([0-9\.]+) \(([0-9]+)\).*$'
m = re.match(r, line)
if m is None:
raise ValueError('No match for line {}'.format(line))
return TrainLogLine(
iteration=m.group(1),
elapsed=m.group(2),
test_loss=m.group(3),
train_loss=m.group(4),
best=m.group(5),
best_iteration=m.group(6),
)
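# Illustrative example (hypothetical log line, not taken from the project):
#
#   2017-01-01 10:00:00 (INFO) Training batch 1200; Elapsed 340s; loss: 0.4312 (train: 0.4120); best: 0.4298 (1150)
#
# parse_line() would return a TrainLogLine with iteration='1200',
# elapsed='340', test_loss='0.4312', train_loss='0.4120', best='0.4298' and
# best_iteration='1150' (all captured values remain strings).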
def is_train_log_line(line):
return re.search('Training batch', line) is not None
if __name__ == '__main__':
logging.basicConfig(level=logging.INFO, format="%(asctime)s (%(levelname)s) %(message)s")
parser = argparse.ArgumentParser()
parser.add_argument('log_path')
args = parser.parse_args()
main(args.log_path)
| mit |
feigames/Odoo | openerp/models.py | 3 | 277182 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2009 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
"""
Object Relational Mapping module:
* Hierarchical structure
* Constraints consistency and validation
* Object metadata depends on its status
* Optimised processing by complex query (multiple actions at once)
* Default field values
* Permissions optimisation
    * Persistent objects: PostgreSQL DB
* Data conversion
* Multi-level caching system
* Two different inheritance mechanisms
* Rich set of field types:
- classical (varchar, integer, boolean, ...)
- relational (one2many, many2one, many2many)
- functional
"""
import datetime
import functools
import itertools
import logging
import operator
import pickle
import pytz
import re
import time
from collections import defaultdict, MutableMapping
from inspect import getmembers
import babel.dates
import dateutil.relativedelta
import psycopg2
from lxml import etree
import openerp
from . import SUPERUSER_ID
from . import api
from . import tools
from .api import Environment
from .exceptions import except_orm, AccessError, MissingError, ValidationError
from .osv import fields
from .osv.query import Query
from .tools import lazy_property, ormcache
from .tools.config import config
from .tools.misc import CountingStream, DEFAULT_SERVER_DATETIME_FORMAT, DEFAULT_SERVER_DATE_FORMAT
from .tools.safe_eval import safe_eval as eval
from .tools.translate import _
_logger = logging.getLogger(__name__)
_schema = logging.getLogger(__name__ + '.schema')
regex_order = re.compile('^( *([a-z0-9:_]+|"[a-z0-9:_]+")( *desc| *asc)?( *, *|))+$', re.I)
regex_object_name = re.compile(r'^[a-z0-9_.]+$')
onchange_v7 = re.compile(r"^(\w+)\((.*)\)$")
AUTOINIT_RECALCULATE_STORED_FIELDS = 1000
def check_object_name(name):
""" Check if the given name is a valid openerp object name.
        The _name attribute in osv and osv_memory objects is subject to
        some restrictions. This function returns True if the given name
        is allowed, and False otherwise.
        TODO: this is an approximation. The goal in this approximation
        is to disallow uppercase characters (in some places, we quote
        table/column names and in others not, which leads to this kind
        of error:
psycopg2.ProgrammingError: relation "xxx" does not exist).
The same restriction should apply to both osv and osv_memory
objects for consistency.
"""
if regex_object_name.match(name) is None:
return False
return True
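# Illustrative examples, not part of the original module:
#
#   check_object_name('res.partner')   # -> True
#   check_object_name('Res.Partner')   # -> False (uppercase is rejected)
#   check_object_name('sale order')    # -> False (spaces are rejected)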
def raise_on_invalid_object_name(name):
if not check_object_name(name):
msg = "The _name attribute %s is not valid." % name
_logger.error(msg)
raise except_orm('ValueError', msg)
POSTGRES_CONFDELTYPES = {
'RESTRICT': 'r',
'NO ACTION': 'a',
'CASCADE': 'c',
'SET NULL': 'n',
'SET DEFAULT': 'd',
}
def intersect(la, lb):
return filter(lambda x: x in lb, la)
def same_name(f, g):
""" Test whether functions `f` and `g` are identical or have the same name """
return f == g or getattr(f, '__name__', 0) == getattr(g, '__name__', 1)
def fix_import_export_id_paths(fieldname):
"""
Fixes the id fields in import and exports, and splits field paths
on '/'.
:param str fieldname: name of the field to import/export
:return: split field name
:rtype: list of str
"""
fixed_db_id = re.sub(r'([^/])\.id', r'\1/.id', fieldname)
fixed_external_id = re.sub(r'([^/]):id', r'\1/id', fixed_db_id)
return fixed_external_id.split('/')
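# Illustrative examples, not part of the original module:
#
#   fix_import_export_id_paths('partner_id.id')   # -> ['partner_id', '.id']
#   fix_import_export_id_paths('country_id:id')   # -> ['country_id', 'id']
#   fix_import_export_id_paths('name')            # -> ['name']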
def pg_varchar(size=0):
""" Returns the VARCHAR declaration for the provided size:
* If no size (or an empty or negative size is provided) return an
'infinite' VARCHAR
* Otherwise return a VARCHAR(n)
:type int size: varchar size, optional
:rtype: str
"""
if size:
if not isinstance(size, int):
raise TypeError("VARCHAR parameter should be an int, got %s"
% type(size))
if size > 0:
return 'VARCHAR(%d)' % size
return 'VARCHAR'
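# Illustrative examples, not part of the original module:
#
#   pg_varchar(16)   # -> 'VARCHAR(16)'
#   pg_varchar()     # -> 'VARCHAR' (unlimited)
#   pg_varchar('x')  # raises TypeError, the size must be an int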
FIELDS_TO_PGTYPES = {
fields.boolean: 'bool',
fields.integer: 'int4',
fields.text: 'text',
fields.html: 'text',
fields.date: 'date',
fields.datetime: 'timestamp',
fields.binary: 'bytea',
fields.many2one: 'int4',
fields.serialized: 'text',
}
def get_pg_type(f, type_override=None):
"""
:param fields._column f: field to get a Postgres type for
:param type type_override: use the provided type for dispatching instead of the field's own type
:returns: (postgres_identification_type, postgres_type_specification)
:rtype: (str, str)
"""
field_type = type_override or type(f)
if field_type in FIELDS_TO_PGTYPES:
pg_type = (FIELDS_TO_PGTYPES[field_type], FIELDS_TO_PGTYPES[field_type])
elif issubclass(field_type, fields.float):
if f.digits:
pg_type = ('numeric', 'NUMERIC')
else:
pg_type = ('float8', 'DOUBLE PRECISION')
elif issubclass(field_type, (fields.char, fields.reference)):
pg_type = ('varchar', pg_varchar(f.size))
elif issubclass(field_type, fields.selection):
if (isinstance(f.selection, list) and isinstance(f.selection[0][0], int))\
or getattr(f, 'size', None) == -1:
pg_type = ('int4', 'INTEGER')
else:
pg_type = ('varchar', pg_varchar(getattr(f, 'size', None)))
elif issubclass(field_type, fields.function):
if f._type == 'selection':
pg_type = ('varchar', pg_varchar())
else:
pg_type = get_pg_type(f, getattr(fields, f._type))
else:
_logger.warning('%s type not supported!', field_type)
pg_type = None
return pg_type
class MetaModel(api.Meta):
""" Metaclass for the models.
This class is used as the metaclass for the class :class:`BaseModel` to
    discover the models defined in a module (without instantiating them).
If the automatic discovery is not needed, it is possible to set the model's
``_register`` attribute to False.
"""
module_to_models = {}
def __init__(self, name, bases, attrs):
if not self._register:
self._register = True
super(MetaModel, self).__init__(name, bases, attrs)
return
if not hasattr(self, '_module'):
# The (OpenERP) module name can be in the `openerp.addons` namespace
# or not. For instance, module `sale` can be imported as
# `openerp.addons.sale` (the right way) or `sale` (for backward
# compatibility).
module_parts = self.__module__.split('.')
if len(module_parts) > 2 and module_parts[:2] == ['openerp', 'addons']:
module_name = self.__module__.split('.')[2]
else:
module_name = self.__module__.split('.')[0]
self._module = module_name
        # Remember which models to instantiate for this module.
if not self._custom:
self.module_to_models.setdefault(self._module, []).append(self)
# transform columns into new-style fields (enables field inheritance)
for name, column in self._columns.iteritems():
if not hasattr(self, name):
setattr(self, name, column.to_field())
class NewId(object):
""" Pseudo-ids for new records. """
def __nonzero__(self):
return False
IdType = (int, long, basestring, NewId)
# maximum number of prefetched records
PREFETCH_MAX = 200
# special columns automatically created by the ORM
LOG_ACCESS_COLUMNS = ['create_uid', 'create_date', 'write_uid', 'write_date']
MAGIC_COLUMNS = ['id'] + LOG_ACCESS_COLUMNS
class BaseModel(object):
""" Base class for OpenERP models.
OpenERP models are created by inheriting from this class' subclasses:
* :class:`Model` for regular database-persisted models
* :class:`TransientModel` for temporary data, stored in the database but
      automatically vacuumed every so often
    * :class:`AbstractModel` for abstract super classes meant to be shared by
      multiple inheriting models
The system automatically instantiates every model once per database. Those
instances represent the available models on each database, and depend on
which modules are installed on that database. The actual class of each
instance is built from the Python classes that create and inherit from the
corresponding model.
Every model instance is a "recordset", i.e., an ordered collection of
records of the model. Recordsets are returned by methods like
:meth:`~.browse`, :meth:`~.search`, or field accesses. Records have no
explicit representation: a record is represented as a recordset of one
record.
To create a class that should not be instantiated, the _register class
attribute may be set to False.
"""
__metaclass__ = MetaModel
_auto = True # create database backend
_register = False # Set to false if the model shouldn't be automatically discovered.
_name = None
_columns = {}
_constraints = []
_custom = False
_defaults = {}
_rec_name = None
_parent_name = 'parent_id'
_parent_store = False
_parent_order = False
_date_name = 'date'
_order = 'id'
_sequence = None
_description = None
_needaction = False
_translate = True # set to False to disable translations export for this model
# dict of {field:method}, with method returning the (name_get of records, {id: fold})
# to include in the _read_group, if grouped on this field
_group_by_full = {}
# Transience
_transient = False # True in a TransientModel
# structure:
# { 'parent_model': 'm2o_field', ... }
_inherits = {}
    # Mapping from inherits'd field name to 4-tuple (m, r, f, n) where m is the
    # model from which it is inherits'd, r is the (local) field towards m, f
    # is the _column object itself, and n is the original (i.e. top-most)
    # parent model.
    # Example:
    #  { 'field_name': ('parent_model', 'm2o_field_to_reach_parent',
    #                    field_column_obj, original_parent_model), ... }
_inherit_fields = {}
# Mapping field name/column_info object
# This is similar to _inherit_fields but:
# 1. includes self fields,
# 2. uses column_info instead of a triple.
_all_columns = {}
_table = None
_log_create = False
_sql_constraints = []
# model dependencies, for models backed up by sql views:
# {model_name: field_names, ...}
_depends = {}
CONCURRENCY_CHECK_FIELD = '__last_update'
def log(self, cr, uid, id, message, secondary=False, context=None):
return _logger.warning("log() is deprecated. Please use OpenChatter notification system instead of the res.log mechanism.")
def view_init(self, cr, uid, fields_list, context=None):
"""Override this method to do specific things when a view on the object is opened."""
pass
def _field_create(self, cr, context=None):
""" Create entries in ir_model_fields for all the model's fields.
If necessary, also create an entry in ir_model, and if called from the
modules loading scheme (by receiving 'module' in the context), also
create entries in ir_model_data (for the model and the fields).
- create an entry in ir_model (if there is not already one),
- create an entry in ir_model_data (if there is not already one, and if
'module' is in the context),
- update ir_model_fields with the fields found in _columns
(TODO there is some redundancy as _columns is updated from
ir_model_fields in __init__).
"""
if context is None:
context = {}
cr.execute("SELECT id FROM ir_model WHERE model=%s", (self._name,))
if not cr.rowcount:
cr.execute('SELECT nextval(%s)', ('ir_model_id_seq',))
model_id = cr.fetchone()[0]
cr.execute("INSERT INTO ir_model (id,model, name, info,state) VALUES (%s, %s, %s, %s, %s)", (model_id, self._name, self._description, self.__doc__, 'base'))
else:
model_id = cr.fetchone()[0]
if 'module' in context:
name_id = 'model_'+self._name.replace('.', '_')
cr.execute('select * from ir_model_data where name=%s and module=%s', (name_id, context['module']))
if not cr.rowcount:
cr.execute("INSERT INTO ir_model_data (name,date_init,date_update,module,model,res_id) VALUES (%s, (now() at time zone 'UTC'), (now() at time zone 'UTC'), %s, %s, %s)", \
(name_id, context['module'], 'ir.model', model_id)
)
cr.execute("SELECT * FROM ir_model_fields WHERE model=%s", (self._name,))
cols = {}
for rec in cr.dictfetchall():
cols[rec['name']] = rec
ir_model_fields_obj = self.pool.get('ir.model.fields')
        # sparse fields should be created at the end, as they depend on their serialized field already existing
model_fields = sorted(self._columns.items(), key=lambda x: 1 if x[1]._type == 'sparse' else 0)
for (k, f) in model_fields:
vals = {
'model_id': model_id,
'model': self._name,
'name': k,
'field_description': f.string,
'ttype': f._type,
'relation': f._obj or '',
'select_level': tools.ustr(int(f.select)),
'readonly': (f.readonly and 1) or 0,
'required': (f.required and 1) or 0,
'selectable': (f.selectable and 1) or 0,
'translate': (f.translate and 1) or 0,
'relation_field': f._fields_id if isinstance(f, fields.one2many) else '',
'serialization_field_id': None,
}
if getattr(f, 'serialization_field', None):
# resolve link to serialization_field if specified by name
serialization_field_id = ir_model_fields_obj.search(cr, SUPERUSER_ID, [('model','=',vals['model']), ('name', '=', f.serialization_field)])
if not serialization_field_id:
raise except_orm(_('Error'), _("Serialization field `%s` not found for sparse field `%s`!") % (f.serialization_field, k))
vals['serialization_field_id'] = serialization_field_id[0]
            # When it's a custom field, it does not contain f.select
if context.get('field_state', 'base') == 'manual':
if context.get('field_name', '') == k:
vals['select_level'] = context.get('select', '0')
                    # set the value so that the problem does NOT occur next time
elif k in cols:
vals['select_level'] = cols[k]['select_level']
if k not in cols:
cr.execute('select nextval(%s)', ('ir_model_fields_id_seq',))
id = cr.fetchone()[0]
vals['id'] = id
cr.execute("""INSERT INTO ir_model_fields (
id, model_id, model, name, field_description, ttype,
relation,state,select_level,relation_field, translate, serialization_field_id
) VALUES (
%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s
)""", (
id, vals['model_id'], vals['model'], vals['name'], vals['field_description'], vals['ttype'],
vals['relation'], 'base',
vals['select_level'], vals['relation_field'], bool(vals['translate']), vals['serialization_field_id']
))
if 'module' in context:
name1 = 'field_' + self._table + '_' + k
cr.execute("select name from ir_model_data where name=%s", (name1,))
if cr.fetchone():
name1 = name1 + "_" + str(id)
cr.execute("INSERT INTO ir_model_data (name,date_init,date_update,module,model,res_id) VALUES (%s, (now() at time zone 'UTC'), (now() at time zone 'UTC'), %s, %s, %s)", \
(name1, context['module'], 'ir.model.fields', id)
)
else:
for key, val in vals.items():
if cols[k][key] != vals[key]:
cr.execute('update ir_model_fields set field_description=%s where model=%s and name=%s', (vals['field_description'], vals['model'], vals['name']))
cr.execute("""UPDATE ir_model_fields SET
model_id=%s, field_description=%s, ttype=%s, relation=%s,
select_level=%s, readonly=%s ,required=%s, selectable=%s, relation_field=%s, translate=%s, serialization_field_id=%s
WHERE
model=%s AND name=%s""", (
vals['model_id'], vals['field_description'], vals['ttype'],
vals['relation'],
vals['select_level'], bool(vals['readonly']), bool(vals['required']), bool(vals['selectable']), vals['relation_field'], bool(vals['translate']), vals['serialization_field_id'], vals['model'], vals['name']
))
break
self.invalidate_cache(cr, SUPERUSER_ID)
@classmethod
def _add_field(cls, name, field):
""" Add the given `field` under the given `name` in the class """
field.set_class_name(cls, name)
# add field in _fields (for reflection)
cls._fields[name] = field
# add field as an attribute, unless another kind of value already exists
if isinstance(getattr(cls, name, field), Field):
setattr(cls, name, field)
else:
_logger.warning("In model %r, member %r is not a field", cls._name, name)
if field.store:
cls._columns[name] = field.to_column()
else:
# remove potential column that may be overridden by field
cls._columns.pop(name, None)
@classmethod
def _pop_field(cls, name):
""" Remove the field with the given `name` from the model.
This method should only be used for manual fields.
"""
field = cls._fields.pop(name)
cls._columns.pop(name, None)
cls._all_columns.pop(name, None)
if hasattr(cls, name):
delattr(cls, name)
return field
@classmethod
def _add_magic_fields(cls):
""" Introduce magic fields on the current class
* id is a "normal" field (with a specific getter)
* create_uid, create_date, write_uid and write_date have become
"normal" fields
* $CONCURRENCY_CHECK_FIELD is a computed field with its computing
method defined dynamically. Uses ``str(datetime.datetime.utcnow())``
to get the same structure as the previous
``(now() at time zone 'UTC')::timestamp``::
# select (now() at time zone 'UTC')::timestamp;
timezone
----------------------------
2013-06-18 08:30:37.292809
>>> str(datetime.datetime.utcnow())
'2013-06-18 08:31:32.821177'
"""
def add(name, field):
""" add `field` with the given `name` if it does not exist yet """
if name not in cls._columns and name not in cls._fields:
cls._add_field(name, field)
# cyclic import
from . import fields
# this field 'id' must override any other column or field
cls._add_field('id', fields.Id(automatic=True))
add('display_name', fields.Char(string='Display Name', automatic=True,
compute='_compute_display_name'))
if cls._log_access:
add('create_uid', fields.Many2one('res.users', string='Created by', automatic=True))
add('create_date', fields.Datetime(string='Created on', automatic=True))
add('write_uid', fields.Many2one('res.users', string='Last Updated by', automatic=True))
add('write_date', fields.Datetime(string='Last Updated on', automatic=True))
last_modified_name = 'compute_concurrency_field_with_access'
else:
last_modified_name = 'compute_concurrency_field'
# this field must override any other column or field
cls._add_field(cls.CONCURRENCY_CHECK_FIELD, fields.Datetime(
string='Last Modified on', compute=last_modified_name, automatic=True))
@api.one
def compute_concurrency_field(self):
self[self.CONCURRENCY_CHECK_FIELD] = \
datetime.datetime.utcnow().strftime(DEFAULT_SERVER_DATETIME_FORMAT)
@api.one
@api.depends('create_date', 'write_date')
def compute_concurrency_field_with_access(self):
self[self.CONCURRENCY_CHECK_FIELD] = \
self.write_date or self.create_date or \
datetime.datetime.utcnow().strftime(DEFAULT_SERVER_DATETIME_FORMAT)
#
# Goal: try to apply inheritance at the instanciation level and
# put objects in the pool var
#
@classmethod
def _build_model(cls, pool, cr):
""" Instanciate a given model.
This class method instanciates the class of some model (i.e. a class
deriving from osv or osv_memory). The class might be the class passed
in argument or, if it inherits from another class, a class constructed
by combining the two classes.
"""
# IMPORTANT: the registry contains an instance for each model. The class
# of each model carries inferred metadata that is shared among the
# model's instances for this registry, but not among registries. Hence
# we cannot use that "registry class" for combining model classes by
# inheritance, since it confuses the metadata inference process.
# Keep links to non-inherited constraints in cls; this is useful for
# instance when exporting translations
cls._local_constraints = cls.__dict__.get('_constraints', [])
cls._local_sql_constraints = cls.__dict__.get('_sql_constraints', [])
# determine inherited models
parents = getattr(cls, '_inherit', [])
parents = [parents] if isinstance(parents, basestring) else (parents or [])
# determine the model's name
name = cls._name or (len(parents) == 1 and parents[0]) or cls.__name__
# determine the module that introduced the model
original_module = pool[name]._original_module if name in parents else cls._module
# build the class hierarchy for the model
for parent in parents:
if parent not in pool:
                raise TypeError('The model "%s" specifies a nonexistent parent class "%s"\n'
'You may need to add a dependency on the parent class\' module.' % (name, parent))
parent_model = pool[parent]
# do no use the class of parent_model, since that class contains
# inferred metadata; use its ancestor instead
parent_class = type(parent_model).__base__
# don't inherit custom fields
columns = dict((key, val)
for key, val in parent_class._columns.iteritems()
if not val.manual
)
columns.update(cls._columns)
defaults = dict(parent_class._defaults)
defaults.update(cls._defaults)
inherits = dict(parent_class._inherits)
inherits.update(cls._inherits)
depends = dict(parent_class._depends)
for m, fs in cls._depends.iteritems():
depends[m] = depends.get(m, []) + fs
old_constraints = parent_class._constraints
new_constraints = cls._constraints
# filter out from old_constraints the ones overridden by a
# constraint with the same function name in new_constraints
constraints = new_constraints + [oldc
for oldc in old_constraints
if not any(newc[2] == oldc[2] and same_name(newc[0], oldc[0])
for newc in new_constraints)
]
sql_constraints = cls._sql_constraints + \
parent_class._sql_constraints
attrs = {
'_name': name,
'_register': False,
'_columns': columns,
'_defaults': defaults,
'_inherits': inherits,
'_depends': depends,
'_constraints': constraints,
'_sql_constraints': sql_constraints,
}
cls = type(name, (cls, parent_class), attrs)
# introduce the "registry class" of the model;
# duplicate some attributes so that the ORM can modify them
attrs = {
'_name': name,
'_register': False,
'_columns': dict(cls._columns),
'_defaults': dict(cls._defaults),
'_inherits': dict(cls._inherits),
'_depends': dict(cls._depends),
'_constraints': list(cls._constraints),
'_sql_constraints': list(cls._sql_constraints),
'_original_module': original_module,
}
cls = type(cls._name, (cls,), attrs)
# instantiate the model, and initialize it
model = object.__new__(cls)
model.__init__(pool, cr)
return model
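# Illustrative sketch (hypothetical model, not part of this module): given an
# extension module that declares
#
#     class res_partner_ext(osv.osv):
#         _inherit = 'res.partner'
#         _columns = {'x_nickname': fields.char('Nickname')}
#
# _build_model() combines res_partner_ext with the ancestor class of the
# registry's 'res.partner', merging _columns, _defaults, _inherits, _depends,
# _constraints and _sql_constraints as done above, then derives a fresh
# "registry class" and instantiates it for this registry only.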
@classmethod
def _init_function_fields(cls, pool, cr):
# initialize the list of non-stored function fields for this model
pool._pure_function_fields[cls._name] = []
# process store of low-level function fields
for fname, column in cls._columns.iteritems():
if hasattr(column, 'digits_change'):
column.digits_change(cr)
# filter out any existing store entry for this field
pool._store_function[cls._name] = [
stored
for stored in pool._store_function.get(cls._name, [])
if (stored[0], stored[1]) != (cls._name, fname)
]
if not isinstance(column, fields.function):
continue
if not column.store:
# register it on the pool for invalidation
pool._pure_function_fields[cls._name].append(fname)
continue
# process store parameter
store = column.store
if store is True:
get_ids = lambda self, cr, uid, ids, c={}: ids
store = {cls._name: (get_ids, None, column.priority, None)}
for model, spec in store.iteritems():
if len(spec) == 4:
(fnct, fields2, order, length) = spec
elif len(spec) == 3:
(fnct, fields2, order) = spec
length = None
else:
raise except_orm('Error',
('Invalid function definition %s in object %s !\nYou must use the definition: store={object:(fnct, fields, priority, time length)}.' % (fname, cls._name)))
pool._store_function.setdefault(model, [])
t = (cls._name, fname, fnct, tuple(fields2) if fields2 else None, order, length)
if t not in pool._store_function[model]:
pool._store_function[model].append(t)
pool._store_function[model].sort(key=lambda x: x[4])
@classmethod
def _init_manual_fields(cls, pool, cr):
# Check whether the query is already done
if pool.fields_by_model is not None:
manual_fields = pool.fields_by_model.get(cls._name, [])
else:
cr.execute('SELECT * FROM ir_model_fields WHERE model=%s AND state=%s', (cls._name, 'manual'))
manual_fields = cr.dictfetchall()
for field in manual_fields:
if field['name'] in cls._columns:
continue
attrs = {
'string': field['field_description'],
'required': bool(field['required']),
'readonly': bool(field['readonly']),
'domain': eval(field['domain']) if field['domain'] else None,
'size': field['size'] or None,
'ondelete': field['on_delete'],
'translate': (field['translate']),
'manual': True,
'_prefetch': False,
#'select': int(field['select_level'])
}
if field['serialization_field_id']:
cr.execute('SELECT name FROM ir_model_fields WHERE id=%s', (field['serialization_field_id'],))
attrs.update({'serialization_field': cr.fetchone()[0], 'type': field['ttype']})
if field['ttype'] in ['many2one', 'one2many', 'many2many']:
attrs.update({'relation': field['relation']})
cls._columns[field['name']] = fields.sparse(**attrs)
elif field['ttype'] == 'selection':
cls._columns[field['name']] = fields.selection(eval(field['selection']), **attrs)
elif field['ttype'] == 'reference':
cls._columns[field['name']] = fields.reference(selection=eval(field['selection']), **attrs)
elif field['ttype'] == 'many2one':
cls._columns[field['name']] = fields.many2one(field['relation'], **attrs)
elif field['ttype'] == 'one2many':
cls._columns[field['name']] = fields.one2many(field['relation'], field['relation_field'], **attrs)
elif field['ttype'] == 'many2many':
_rel1 = field['relation'].replace('.', '_')
_rel2 = field['model'].replace('.', '_')
_rel_name = 'x_%s_%s_%s_rel' % (_rel1, _rel2, field['name'])
cls._columns[field['name']] = fields.many2many(field['relation'], _rel_name, 'id1', 'id2', **attrs)
else:
cls._columns[field['name']] = getattr(fields, field['ttype'])(**attrs)
@classmethod
def _init_constraints_onchanges(cls):
# store sql constraint error messages
for (key, _, msg) in cls._sql_constraints:
cls.pool._sql_error[cls._table + '_' + key] = msg
# collect constraint and onchange methods
cls._constraint_methods = []
cls._onchange_methods = defaultdict(list)
for attr, func in getmembers(cls, callable):
if hasattr(func, '_constrains'):
if not all(name in cls._fields for name in func._constrains):
_logger.warning("@constrains%r parameters must be field names", func._constrains)
cls._constraint_methods.append(func)
if hasattr(func, '_onchange'):
if not all(name in cls._fields for name in func._onchange):
_logger.warning("@onchange%r parameters must be field names", func._onchange)
for name in func._onchange:
cls._onchange_methods[name].append(func)
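# Illustrative sketch (hypothetical field names): model methods decorated with
#
#     @api.constrains('date_start', 'date_end')
#     def _check_dates(self):
#         ...
#
#     @api.onchange('partner_id')
#     def _onchange_partner(self):
#         ...
#
# carry a `_constrains` / `_onchange` attribute and are collected here into
# cls._constraint_methods and cls._onchange_methods['partner_id'] respectively.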
def __new__(cls):
# In the past, this method was registering the model class in the server.
# This job is now done entirely by the metaclass MetaModel.
#
# Do not create an instance here. Model instances are created by method
# _build_model().
return None
def __init__(self, pool, cr):
""" Initialize a model and make it part of the given registry.
- copy the stored fields' functions in the registry,
- retrieve custom fields and add them in the model,
- ensure there is a many2one for each _inherits'd parent,
- update the children's _columns,
- give a chance to each field to initialize itself.
"""
cls = type(self)
# link the class to the registry, and update the registry
cls.pool = pool
cls._model = self # backward compatibility
pool.add(cls._name, self)
# determine description, table, sequence and log_access
if not cls._description:
cls._description = cls._name
if not cls._table:
cls._table = cls._name.replace('.', '_')
if not cls._sequence:
cls._sequence = cls._table + '_id_seq'
if not hasattr(cls, '_log_access'):
# If _log_access is not specified, it is the same value as _auto.
cls._log_access = cls._auto
# Transience
if cls.is_transient():
cls._transient_check_count = 0
cls._transient_max_count = config.get('osv_memory_count_limit')
cls._transient_max_hours = config.get('osv_memory_age_limit')
assert cls._log_access, \
"TransientModels must have log_access turned on, " \
"in order to implement their access rights policy"
# retrieve new-style fields and duplicate them (to avoid clashes with
# inheritance between different models)
cls._fields = {}
for attr, field in getmembers(cls, Field.__instancecheck__):
if not field.inherited:
cls._add_field(attr, field.copy())
# introduce magic fields
cls._add_magic_fields()
# register stuff about low-level function fields and custom fields
cls._init_function_fields(pool, cr)
cls._init_manual_fields(pool, cr)
# process _inherits
cls._inherits_check()
cls._inherits_reload()
# register constraints and onchange methods
cls._init_constraints_onchanges()
# check defaults
for k in cls._defaults:
assert k in cls._fields, \
"Model %s has a default for nonexiting field %s" % (cls._name, k)
# restart columns
for column in cls._columns.itervalues():
column.restart()
# validate rec_name
if cls._rec_name:
assert cls._rec_name in cls._fields, \
"Invalid rec_name %s for model %s" % (cls._rec_name, cls._name)
elif 'name' in cls._fields:
cls._rec_name = 'name'
# prepare ormcache, which must be shared by all instances of the model
cls._ormcache = {}
@api.model
@ormcache()
def _is_an_ordinary_table(self):
self.env.cr.execute("""\
SELECT 1
FROM pg_class
WHERE relname = %s
AND relkind = %s""", [self._table, 'r'])
return bool(self.env.cr.fetchone())
def __export_xml_id(self):
""" Return a valid xml_id for the record `self`. """
if not self._is_an_ordinary_table():
raise Exception(
"You can not export the column ID of model %s, because the "
"table %s is not an ordinary table."
% (self._name, self._table))
ir_model_data = self.sudo().env['ir.model.data']
data = ir_model_data.search([('model', '=', self._name), ('res_id', '=', self.id)])
if data:
if data[0].module:
return '%s.%s' % (data[0].module, data[0].name)
else:
return data[0].name
else:
postfix = 0
name = '%s_%s' % (self._table, self.id)
while ir_model_data.search([('module', '=', '__export__'), ('name', '=', name)]):
postfix += 1
name = '%s_%s_%s' % (self._table, self.id, postfix)
ir_model_data.create({
'model': self._name,
'res_id': self.id,
'module': '__export__',
'name': name,
})
return '__export__.' + name
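# Illustrative example (hypothetical ids): for a res.partner record with
# database id 42 and no pre-existing external id, the generated reference is
# '__export__.res_partner_42' (suffixed with '_1', '_2', ... if that name is
# already taken), and a matching ir.model.data row is created on the fly.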
@api.multi
def __export_rows(self, fields):
""" Export fields of the records in `self`.
:param fields: list of lists of fields to traverse
:return: list of lists of corresponding values
"""
lines = []
for record in self:
# main line of record, initially empty
current = [''] * len(fields)
lines.append(current)
# list of primary fields followed by secondary field(s)
primary_done = []
# process column by column
for i, path in enumerate(fields):
if not path:
continue
name = path[0]
if name in primary_done:
continue
if name == '.id':
current[i] = str(record.id)
elif name == 'id':
current[i] = record.__export_xml_id()
else:
field = record._fields[name]
value = record[name]
# this part could be simpler, but it has to be done this way
# in order to reproduce the former behavior
if not isinstance(value, BaseModel):
current[i] = field.convert_to_export(value, self.env)
else:
primary_done.append(name)
# This is a special case, its strange behavior is intended!
if field.type == 'many2many' and len(path) > 1 and path[1] == 'id':
xml_ids = [r.__export_xml_id() for r in value]
current[i] = ','.join(xml_ids) or False
continue
# recursively export the fields that follow name
fields2 = [(p[1:] if p and p[0] == name else []) for p in fields]
lines2 = value.__export_rows(fields2)
if lines2:
# merge first line with record's main line
for j, val in enumerate(lines2[0]):
if val:
current[j] = val
# check value of current field
if not current[i]:
# assign xml_ids, and forget about remaining lines
xml_ids = [item[1] for item in value.name_get()]
current[i] = ','.join(xml_ids)
else:
# append the other lines at the end
lines += lines2[1:]
else:
current[i] = False
return lines
@api.multi
def export_data(self, fields_to_export, raw_data=False):
""" Export fields for selected objects
:param fields_to_export: list of fields
:param raw_data: True to return value in native Python type
:rtype: dictionary with a *datas* matrix
This method is used when exporting data via client menu
"""
fields_to_export = map(fix_import_export_id_paths, fields_to_export)
if raw_data:
self = self.with_context(export_raw_data=True)
return {'datas': self.__export_rows(fields_to_export)}
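# Illustrative usage (a sketch; `records` and the field paths are examples):
#
#     result = records.export_data(['id', 'partner_id/id', 'order_line/name'])
#     rows = result['datas']   # row-major matrix of export values
#
# Relational sub-fields use the same path syntax documented for import_data()
# below.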
def import_data(self, cr, uid, fields, datas, mode='init', current_module='', noupdate=False, context=None, filename=None):
"""
.. deprecated:: 7.0
Use :meth:`~load` instead
Import given data in given module
This method is used when importing data via client menu.
Example of fields to import for a sale.order::
.id, (=database_id)
partner_id, (=name_search)
order_line/.id, (=database_id)
order_line/name,
order_line/product_id/id, (=xml id)
order_line/price_unit,
order_line/product_uom_qty,
order_line/product_uom/id (=xml_id)
This method returns a 4-tuple with the following structure::
(return_code, errored_resource, error_message, unused)
* The first item is a return code, it is ``-1`` in case of
import error, or the last imported row number in case of success
* The second item contains the record data dict that failed to import
in case of error, otherwise it's 0
* The third item contains an error message string in case of error,
otherwise it's 0
* The last item is currently unused, with no specific semantics
:param fields: list of fields to import
:param datas: data to import
:param mode: 'init' or 'update' for record creation
:param current_module: module name
:param noupdate: flag for record creation
:param filename: optional file to store partial import state for recovery
:returns: 4-tuple in the form (return_code, errored_resource, error_message, unused)
:rtype: (int, dict or 0, str or 0, str or 0)
"""
context = dict(context) if context is not None else {}
context['_import_current_module'] = current_module
fields = map(fix_import_export_id_paths, fields)
ir_model_data_obj = self.pool.get('ir.model.data')
def log(m):
if m['type'] == 'error':
raise Exception(m['message'])
if config.get('import_partial') and filename:
with open(config.get('import_partial'), 'rb') as partial_import_file:
data = pickle.load(partial_import_file)
position = data.get(filename, 0)
position = 0
try:
for res_id, xml_id, res, info in self._convert_records(cr, uid,
self._extract_records(cr, uid, fields, datas,
context=context, log=log),
context=context, log=log):
ir_model_data_obj._update(cr, uid, self._name,
current_module, res, mode=mode, xml_id=xml_id,
noupdate=noupdate, res_id=res_id, context=context)
position = info.get('rows', {}).get('to', 0) + 1
if config.get('import_partial') and filename and (not (position%100)):
with open(config.get('import_partial'), 'rb') as partial_import:
data = pickle.load(partial_import)
data[filename] = position
with open(config.get('import_partial'), 'wb') as partial_import:
pickle.dump(data, partial_import)
if context.get('defer_parent_store_computation'):
self._parent_store_compute(cr)
cr.commit()
except Exception, e:
cr.rollback()
return -1, {}, 'Line %d : %s' % (position + 1, tools.ustr(e)), ''
if context.get('defer_parent_store_computation'):
self._parent_store_compute(cr)
return position, 0, 0, 0
def load(self, cr, uid, fields, data, context=None):
"""
Attempts to load the data matrix, and returns a list of ids (or
``False`` if there was an error and no id could be generated) and a
list of messages.
The ids are those of the records created and saved (in database), in
the same order they were extracted from the file. They can be passed
directly to :meth:`~read`
:param fields: list of fields to import, at the same index as the corresponding data
:type fields: list(str)
:param data: row-major matrix of data to import
:type data: list(list(str))
:param dict context:
:returns: {ids: list(int)|False, messages: [Message]}
"""
cr.execute('SAVEPOINT model_load')
messages = []
fields = map(fix_import_export_id_paths, fields)
ModelData = self.pool['ir.model.data'].clear_caches()
fg = self.fields_get(cr, uid, context=context)
mode = 'init'
current_module = ''
noupdate = False
ids = []
for id, xid, record, info in self._convert_records(cr, uid,
self._extract_records(cr, uid, fields, data,
context=context, log=messages.append),
context=context, log=messages.append):
try:
cr.execute('SAVEPOINT model_load_save')
except psycopg2.InternalError, e:
# broken transaction, exit and hope the source error was
# already logged
if not any(message['type'] == 'error' for message in messages):
messages.append(dict(info, type='error',message=
u"Unknown database error: '%s'" % e))
break
try:
ids.append(ModelData._update(cr, uid, self._name,
current_module, record, mode=mode, xml_id=xid,
noupdate=noupdate, res_id=id, context=context))
cr.execute('RELEASE SAVEPOINT model_load_save')
except psycopg2.Warning, e:
messages.append(dict(info, type='warning', message=str(e)))
cr.execute('ROLLBACK TO SAVEPOINT model_load_save')
except psycopg2.Error, e:
messages.append(dict(
info, type='error',
**PGERROR_TO_OE[e.pgcode](self, fg, info, e)))
# Failed to write, log to messages, rollback savepoint (to
# avoid broken transaction) and keep going
cr.execute('ROLLBACK TO SAVEPOINT model_load_save')
except Exception, e:
message = (_('Unknown error during import:') +
' %s: %s' % (type(e), unicode(e)))
moreinfo = _('Resolve other errors first')
messages.append(dict(info, type='error',
message=message,
moreinfo=moreinfo))
# Failed for some reason, perhaps due to invalid data supplied,
# rollback savepoint and keep going
cr.execute('ROLLBACK TO SAVEPOINT model_load_save')
if any(message['type'] == 'error' for message in messages):
cr.execute('ROLLBACK TO SAVEPOINT model_load')
ids = False
return {'ids': ids, 'messages': messages}
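# Illustrative usage (a sketch; model, external ids and values are examples):
#
#     fields = ['id', 'name', 'order_line/product_id/id', 'order_line/price_unit']
#     data = [
#         ['__export__.sale_order_1', 'SO001', 'base.some_product_xmlid', '450.0'],
#         ['',                        '',      'base.other_product_xmlid', '75.0'],
#     ]
#     result = model.load(cr, uid, fields, data, context=context)
#     # result['ids']      -> list of created/updated database ids, or False on error
#     # result['messages'] -> list of {'type': ..., 'message': ..., ...} dicts
#
# The second row carries only relational cells, so it is folded into the first
# record as an additional order_line entry (see _extract_records below).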
def _extract_records(self, cr, uid, fields_, data,
context=None, log=lambda a: None):
""" Generates record dicts from the data sequence.
The result is a generator of dicts mapping field names to raw
(unconverted, unvalidated) values.
For relational fields, if sub-fields were provided the value will be
a list of sub-records
The following sub-fields may be set on the record (by key):
* None is the name_get for the record (to use with name_create/name_search)
* "id" is the External ID for the record
* ".id" is the Database ID for the record
"""
columns = dict((k, v.column) for k, v in self._all_columns.iteritems())
# Fake columns to avoid special cases in extractor
columns[None] = fields.char('rec_name')
columns['id'] = fields.char('External ID')
columns['.id'] = fields.integer('Database ID')
# m2o fields can't be on multiple lines so exclude them from the
# is_relational field rows filter, but special-case it later on to
# be handled with relational fields (as it can have subfields)
is_relational = lambda field: columns[field]._type in ('one2many', 'many2many', 'many2one')
get_o2m_values = itemgetter_tuple(
[index for index, field in enumerate(fields_)
if columns[field[0]]._type == 'one2many'])
get_nono2m_values = itemgetter_tuple(
[index for index, field in enumerate(fields_)
if columns[field[0]]._type != 'one2many'])
# Checks if the provided row has any non-empty non-relational field
def only_o2m_values(row, f=get_nono2m_values, g=get_o2m_values):
return any(g(row)) and not any(f(row))
index = 0
while True:
if index >= len(data): return
row = data[index]
# copy non-relational fields to record dict
record = dict((field[0], value)
for field, value in itertools.izip(fields_, row)
if not is_relational(field[0]))
# Get all following rows which have relational values attached to
# the current record (no non-relational values)
record_span = itertools.takewhile(
only_o2m_values, itertools.islice(data, index + 1, None))
# stitch record row back on for relational fields
record_span = list(itertools.chain([row], record_span))
for relfield in set(
field[0] for field in fields_
if is_relational(field[0])):
column = columns[relfield]
# FIXME: how to not use _obj without relying on fields_get?
Model = self.pool[column._obj]
# get only cells for this sub-field, should be strictly
# non-empty, field path [None] is for name_get column
indices, subfields = zip(*((index, field[1:] or [None])
for index, field in enumerate(fields_)
if field[0] == relfield))
# return all rows which have at least one value for the
# subfields of relfield
relfield_data = filter(any, map(itemgetter_tuple(indices), record_span))
record[relfield] = [subrecord
for subrecord, _subinfo in Model._extract_records(
cr, uid, subfields, relfield_data,
context=context, log=log)]
yield record, {'rows': {
'from': index,
'to': index + len(record_span) - 1
}}
index += len(record_span)
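# Illustrative walk-through (hypothetical field names): with
#     fields_ = [['name'], ['line_ids', 'value']]
#     data    = [['rec1', '1'],
#                ['',     '2']]
# the generator yields a single dict
#     {'name': 'rec1', 'line_ids': [{'value': '1'}, {'value': '2'}]}
# together with {'rows': {'from': 0, 'to': 1}}, because the second row only
# contains relational (one2many) cells and is absorbed into the first record.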
def _convert_records(self, cr, uid, records,
context=None, log=lambda a: None):
""" Converts records from the source iterable (recursive dicts of
strings) into forms which can be written to the database (via
self.create or (ir.model.data)._update)
:returns: a list of triplets of (id, xid, record)
:rtype: list((int|None, str|None, dict))
"""
if context is None: context = {}
Converter = self.pool['ir.fields.converter']
columns = dict((k, v.column) for k, v in self._all_columns.iteritems())
Translation = self.pool['ir.translation']
field_names = dict(
(f, (Translation._get_source(cr, uid, self._name + ',' + f, 'field',
context.get('lang'))
or column.string))
for f, column in columns.iteritems())
convert = Converter.for_model(cr, uid, self, context=context)
def _log(base, field, exception):
type = 'warning' if isinstance(exception, Warning) else 'error'
# logs the logical (not human-readable) field name for automated
# processing of response, but injects human readable in message
record = dict(base, type=type, field=field,
message=unicode(exception.args[0]) % base)
if len(exception.args) > 1 and exception.args[1]:
record.update(exception.args[1])
log(record)
stream = CountingStream(records)
for record, extras in stream:
dbid = False
xid = False
# name_get/name_create
if None in record: pass
# xid
if 'id' in record:
xid = record['id']
# dbid
if '.id' in record:
try:
dbid = int(record['.id'])
except ValueError:
# in case of overridden id column
dbid = record['.id']
if not self.search(cr, uid, [('id', '=', dbid)], context=context):
log(dict(extras,
type='error',
record=stream.index,
field='.id',
message=_(u"Unknown database identifier '%s'") % dbid))
dbid = False
converted = convert(record, lambda field, err:\
_log(dict(extras, record=stream.index, field=field_names[field]), field, err))
yield dbid, xid, converted, dict(extras, record=stream.index)
@api.multi
def _validate_fields(self, field_names):
field_names = set(field_names)
# old-style constraint methods
trans = self.env['ir.translation']
cr, uid, context = self.env.args
ids = self.ids
errors = []
for fun, msg, names in self._constraints:
try:
# validation must be context-independent; call `fun` without context
valid = not (set(names) & field_names) or fun(self._model, cr, uid, ids)
extra_error = None
except Exception, e:
_logger.debug('Exception while validating constraint', exc_info=True)
valid = False
extra_error = tools.ustr(e)
if not valid:
if callable(msg):
res_msg = msg(self._model, cr, uid, ids, context=context)
if isinstance(res_msg, tuple):
template, params = res_msg
res_msg = template % params
else:
res_msg = trans._get_source(self._name, 'constraint', self.env.lang, msg)
if extra_error:
res_msg += "\n\n%s\n%s" % (_('Error details:'), extra_error)
errors.append(
_("Field(s) `%s` failed against a constraint: %s") %
(', '.join(names), res_msg)
)
if errors:
raise ValidationError('\n'.join(errors))
# new-style constraint methods
for check in self._constraint_methods:
if set(check._constrains) & field_names:
try:
check(self)
except ValidationError, e:
raise
except Exception, e:
raise ValidationError("Error while validating constraint\n\n%s" % tools.ustr(e))
def default_get(self, cr, uid, fields_list, context=None):
""" default_get(fields) -> default_values
Return default values for the fields in `fields_list`. Default
values are determined by the context, user defaults, and the model
itself.
:param fields_list: a list of field names
:return: a dictionary mapping each field name to its corresponding
default value; the keys of the dictionary are the fields in
`fields_list` that have a default value different from ``False``.
This method should not be overridden. In order to change the
mechanism for determining default values, you should override method
:meth:`add_default_value` instead.
"""
# trigger view init hook
self.view_init(cr, uid, fields_list, context)
# use a new record to determine default values; evaluate fields on the
# new record and put default values in result
record = self.new(cr, uid, {}, context=context)
result = {}
for name in fields_list:
if name in self._fields:
value = record[name]
if name in record._cache:
result[name] = value # it really is a default value
# convert default values to the expected format
result = self._convert_to_write(result)
return result
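# Illustrative usage (a sketch; 'res.partner' and the field names are only
# examples): the returned dict is limited to the requested fields that
# actually received a non-False default, e.g.
#
#     defaults = self.pool['res.partner'].default_get(cr, uid, ['lang', 'name'], context)
#     # -> {'lang': 'en_US'}   ('name' is omitted because its default is False)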
def add_default_value(self, field):
""" Set the default value of `field` to the new record `self`.
The value must be assigned to `self`.
"""
assert not self.id, "Expected new record: %s" % self
cr, uid, context = self.env.args
name = field.name
# 1. look up context
key = 'default_' + name
if key in context:
self[name] = context[key]
return
# 2. look up ir_values
# Note: performance is good, because get_defaults_dict is cached!
ir_values_dict = self.env['ir.values'].get_defaults_dict(self._name)
if name in ir_values_dict:
self[name] = ir_values_dict[name]
return
# 3. look up property fields
# TODO: get rid of this one
column = self._columns.get(name)
if isinstance(column, fields.property):
self[name] = self.env['ir.property'].get(name, self._name)
return
# 4. look up _defaults
if name in self._defaults:
value = self._defaults[name]
if callable(value):
value = value(self._model, cr, uid, context)
self[name] = value
return
# 5. delegate to field
field.determine_default(self)
def fields_get_keys(self, cr, user, context=None):
res = self._columns.keys()
# TODO I believe this loop can be replaced by
# res.extend(self._inherit_fields.keys())
for parent in self._inherits:
res.extend(self.pool[parent].fields_get_keys(cr, user, context))
return res
def _rec_name_fallback(self, cr, uid, context=None):
rec_name = self._rec_name
if rec_name not in self._columns:
rec_name = self._columns.keys()[0] if len(self._columns.keys()) > 0 else "id"
return rec_name
#
# Overload this method if you need a window title which depends on the context
#
def view_header_get(self, cr, user, view_id=None, view_type='form', context=None):
return False
def user_has_groups(self, cr, uid, groups, context=None):
"""Return true if the user is at least member of one of the groups
in groups_str. Typically used to resolve `groups` attribute
in view and model definitions.
:param str groups: comma-separated list of fully-qualified group
external IDs, e.g.: ``base.group_user,base.group_system``
:return: True if the current user is a member of one of the
given groups
"""
return any(self.pool['res.users'].has_group(cr, uid, group_ext_id)
for group_ext_id in groups.split(','))
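# Illustrative usage: the docstring example above translates to
#
#     if self.user_has_groups(cr, uid, 'base.group_user,base.group_system'):
#         ...
#
# which is True as soon as the current user belongs to at least one of the
# two listed groups.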
def _get_default_form_view(self, cr, user, context=None):
""" Generates a default single-line form view using all fields
of the current model except the m2m and o2m ones.
:param cr: database cursor
:param int user: user id
:param dict context: connection context
:returns: a form view as an lxml document
:rtype: etree._Element
"""
view = etree.Element('form', string=self._description)
group = etree.SubElement(view, 'group', col="4")
for fname, field in self._fields.iteritems():
if field.automatic or field.type in ('one2many', 'many2many'):
continue
etree.SubElement(group, 'field', name=fname)
if field.type == 'text':
etree.SubElement(group, 'newline')
return view
def _get_default_search_view(self, cr, user, context=None):
""" Generates a single-field search view, based on _rec_name.
:param cr: database cursor
:param int user: user id
:param dict context: connection context
:returns: a search view as an lxml document
:rtype: etree._Element
"""
view = etree.Element('search', string=self._description)
etree.SubElement(view, 'field', name=self._rec_name_fallback(cr, user, context))
return view
def _get_default_tree_view(self, cr, user, context=None):
""" Generates a single-field tree view, based on _rec_name.
:param cr: database cursor
:param int user: user id
:param dict context: connection context
:returns: a tree view as an lxml document
:rtype: etree._Element
"""
view = etree.Element('tree', string=self._description)
etree.SubElement(view, 'field', name=self._rec_name_fallback(cr, user, context))
return view
def _get_default_calendar_view(self, cr, user, context=None):
""" Generates a default calendar view by trying to infer
calendar fields from a number of pre-set attribute names
:param cr: database cursor
:param int user: user id
:param dict context: connection context
:returns: a calendar view
:rtype: etree._Element
"""
def set_first_of(seq, in_, to):
"""Sets the first value of `seq` also found in `in_` to
the `to` attribute of the view being closed over.
Returns whether it found a suitable value (and set it on
the attribute) or not
"""
for item in seq:
if item in in_:
view.set(to, item)
return True
return False
view = etree.Element('calendar', string=self._description)
etree.SubElement(view, 'field', name=self._rec_name_fallback(cr, user, context))
if self._date_name not in self._columns:
date_found = False
for dt in ['date', 'date_start', 'x_date', 'x_date_start']:
if dt in self._columns:
self._date_name = dt
date_found = True
break
if not date_found:
raise except_orm(_('Invalid Object Architecture!'), _("Insufficient fields for Calendar View!"))
view.set('date_start', self._date_name)
set_first_of(["user_id", "partner_id", "x_user_id", "x_partner_id"],
self._columns, 'color')
if not set_first_of(["date_stop", "date_end", "x_date_stop", "x_date_end"],
self._columns, 'date_stop'):
if not set_first_of(["date_delay", "planned_hours", "x_date_delay", "x_planned_hours"],
self._columns, 'date_delay'):
raise except_orm(
_('Invalid Object Architecture!'),
_("Insufficient fields to generate a Calendar View for %s, missing a date_stop or a date_delay" % self._name))
return view
def fields_view_get(self, cr, uid, view_id=None, view_type='form', context=None, toolbar=False, submenu=False):
""" fields_view_get([view_id | view_type='form'])
Get the detailed composition of the requested view like fields, model, view architecture
:param view_id: id of the view or None
:param view_type: type of the view to return if view_id is None ('form', 'tree', ...)
:param toolbar: true to include contextual actions
:param submenu: deprecated
:return: dictionary describing the composition of the requested view (including inherited views and extensions)
:raise AttributeError:
* if the inherited view has unknown position to work with other than 'before', 'after', 'inside', 'replace'
* if some tag other than 'position' is found in parent view
:raise Invalid ArchitectureError: if there is a view type other than form, tree, calendar, search etc. defined in the structure
"""
if context is None:
context = {}
View = self.pool['ir.ui.view']
result = {
'model': self._name,
'field_parent': False,
}
# try to find a view_id if none provided
if not view_id:
# <view_type>_view_ref in context can be used to override the default view
view_ref_key = view_type + '_view_ref'
view_ref = context.get(view_ref_key)
if view_ref:
if '.' in view_ref:
module, view_ref = view_ref.split('.', 1)
cr.execute("SELECT res_id FROM ir_model_data WHERE model='ir.ui.view' AND module=%s AND name=%s", (module, view_ref))
view_ref_res = cr.fetchone()
if view_ref_res:
view_id = view_ref_res[0]
else:
_logger.warning('%r requires a fully-qualified external id (got: %r for model %s). '
'Please use the complete `module.view_id` form instead.', view_ref_key, view_ref,
self._name)
if not view_id:
# otherwise try to find the lowest priority matching ir.ui.view
view_id = View.default_view(cr, uid, self._name, view_type, context=context)
# context for post-processing might be overridden
ctx = context
if view_id:
# read the view with inherited views applied
root_view = View.read_combined(cr, uid, view_id, fields=['id', 'name', 'field_parent', 'type', 'model', 'arch'], context=context)
result['arch'] = root_view['arch']
result['name'] = root_view['name']
result['type'] = root_view['type']
result['view_id'] = root_view['id']
result['field_parent'] = root_view['field_parent']
# override context for postprocessing
if root_view.get('model') != self._name:
ctx = dict(context, base_model_name=root_view.get('model'))
else:
# fallback on default views methods if no ir.ui.view could be found
try:
get_func = getattr(self, '_get_default_%s_view' % view_type)
arch_etree = get_func(cr, uid, context)
result['arch'] = etree.tostring(arch_etree, encoding='utf-8')
result['type'] = view_type
result['name'] = 'default'
except AttributeError:
raise except_orm(_('Invalid Architecture!'), _("No default view of type '%s' could be found !") % view_type)
# Apply post processing, groups and modifiers etc...
xarch, xfields = View.postprocess_and_fields(cr, uid, self._name, etree.fromstring(result['arch']), view_id, context=ctx)
result['arch'] = xarch
result['fields'] = xfields
# Add related action information if asked
if toolbar:
toclean = ('report_sxw_content', 'report_rml_content', 'report_sxw', 'report_rml', 'report_sxw_content_data', 'report_rml_content_data')
def clean(x):
x = x[2]
for key in toclean:
x.pop(key, None)
return x
ir_values_obj = self.pool.get('ir.values')
resprint = ir_values_obj.get(cr, uid, 'action', 'client_print_multi', [(self._name, False)], False, context)
resaction = ir_values_obj.get(cr, uid, 'action', 'client_action_multi', [(self._name, False)], False, context)
resrelate = ir_values_obj.get(cr, uid, 'action', 'client_action_relate', [(self._name, False)], False, context)
resaction = [clean(action) for action in resaction if view_type == 'tree' or not action[2].get('multi')]
resprint = [clean(print_) for print_ in resprint if view_type == 'tree' or not print_[2].get('multi')]
#When multi="True" set it will display only in More of the list view
resrelate = [clean(action) for action in resrelate
if (action[2].get('multi') and view_type == 'tree') or (not action[2].get('multi') and view_type == 'form')]
for x in itertools.chain(resprint, resaction, resrelate):
x['string'] = x['name']
result['toolbar'] = {
'print': resprint,
'action': resaction,
'relate': resrelate
}
return result
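# Illustrative usage (a sketch; the view reference is a hypothetical example):
# a specific form view can be forced through the context key built above,
#
#     ctx = dict(context, form_view_ref='my_module.view_partner_simple_form')
#     res = self.fields_view_get(cr, uid, view_type='form', context=ctx, toolbar=True)
#     # res['arch'], res['fields'], and (with toolbar=True) res['toolbar']
#
# falling back to the default view of the requested type when the reference
# cannot be resolved.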
def get_formview_id(self, cr, uid, id, context=None):
""" Return an view id to open the document with. This method is meant to be
overridden in addons that want to give specific view ids for example.
:param int id: id of the document to open
"""
return False
def get_formview_action(self, cr, uid, id, context=None):
""" Return an action to open the document. This method is meant to be
overridden in addons that want to give specific view ids for example.
:param int id: id of the document to open
"""
view_id = self.get_formview_id(cr, uid, id, context=context)
return {
'type': 'ir.actions.act_window',
'res_model': self._name,
'view_type': 'form',
'view_mode': 'form',
'views': [(view_id, 'form')],
'target': 'current',
'res_id': id,
}
def get_access_action(self, cr, uid, id, context=None):
""" Return an action to open the document. This method is meant to be
overridden in addons that want to give specific access to the document.
By default it opens the formview of the document.
:param int id: id of the document to open
"""
return self.get_formview_action(cr, uid, id, context=context)
def _view_look_dom_arch(self, cr, uid, node, view_id, context=None):
return self.pool['ir.ui.view'].postprocess_and_fields(
cr, uid, self._name, node, view_id, context=context)
def search_count(self, cr, user, args, context=None):
""" search_count(args) -> int
Returns the number of records in the current model matching :ref:`the
provided domain <reference/orm/domains>`.
"""
res = self.search(cr, user, args, context=context, count=True)
if isinstance(res, list):
return len(res)
return res
@api.returns('self')
def search(self, cr, user, args, offset=0, limit=None, order=None, context=None, count=False):
""" search(args[, offset=0][, limit=None][, order=None][, count=False])
Searches for records based on the ``args``
:ref:`search domain <reference/orm/domains>`.
:param args: :ref:`A search domain <reference/orm/domains>`. Use an empty
list to match all records.
:param int offset: number of results to ignore (default: none)
:param int limit: maximum number of records to return (default: all)
:param str order: sort string
:param bool count: if ``True``, the call should return the number of
records matching ``args`` rather than the records
themselves.
:returns: at most ``limit`` records matching the search criteria
:raise AccessError: * if user tries to bypass access rules for read on the requested object.
"""
return self._search(cr, user, args, offset=offset, limit=limit, order=order, context=context, count=count)
#
# display_name, name_get, name_create, name_search
#
@api.depends(lambda self: (self._rec_name,) if self._rec_name else ())
def _compute_display_name(self):
names = dict(self.name_get())
for record in self:
record.display_name = names.get(record.id, False)
@api.multi
def name_get(self):
""" name_get() -> [(id, name), ...]
Returns a textual representation for the records in ``self``.
By default this is the value of the ``display_name`` field.
:return: list of pairs ``(id, text_repr)`` for each record
:rtype: list(tuple)
"""
result = []
name = self._rec_name
if name in self._fields:
convert = self._fields[name].convert_to_display_name
for record in self:
result.append((record.id, convert(record[name])))
else:
for record in self:
result.append((record.id, "%s,%s" % (record._name, record.id)))
return result
@api.model
def name_create(self, name):
""" name_create(name) -> record
Create a new record by calling :meth:`~.create` with only one value
provided: the display name of the new record.
The new record will be initialized with any default values
applicable to this model, or provided through the context. The usual
behavior of :meth:`~.create` applies.
:param name: display name of the record to create
:rtype: tuple
:return: the :meth:`~.name_get` pair value of the created record
"""
if self._rec_name:
record = self.create({self._rec_name: name})
return record.name_get()[0]
else:
_logger.warning("Cannot execute name_create, no _rec_name defined on %s", self._name)
return False
@api.model
def name_search(self, name='', args=None, operator='ilike', limit=100):
""" name_search(name='', args=None, operator='ilike', limit=100) -> records
Search for records that have a display name matching the given
`name` pattern when compared with the given `operator`, while also
matching the optional search domain (`args`).
This is used for example to provide suggestions based on a partial
value for a relational field. It can sometimes be seen as the inverse
function of :meth:`~.name_get`, but it is not guaranteed to be one.
This method is equivalent to calling :meth:`~.search` with a search
domain based on ``display_name`` and then :meth:`~.name_get` on the
result of the search.
:param str name: the name pattern to match
:param list args: optional search domain (see :meth:`~.search` for
syntax), specifying further restrictions
:param str operator: domain operator for matching `name`, such as
``'like'`` or ``'='``.
:param int limit: optional max number of records to return
:rtype: list
:return: list of pairs ``(id, text_repr)`` for all matching records.
"""
return self._name_search(name, args, operator, limit=limit)
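# Illustrative usage (a sketch; model and domain are examples): completion on
# a partner field typically boils down to
#
#     self.pool['res.partner'].name_search(cr, uid, 'agro',
#         args=[('customer', '=', True)], operator='ilike', limit=8, context=context)
#     # -> [(id, display_name), ...] pairs, at most 8 of them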
def _name_search(self, cr, user, name='', args=None, operator='ilike', context=None, limit=100, name_get_uid=None):
# private implementation of name_search, allows passing a dedicated user
# for the name_get part to solve some access rights issues
args = list(args or [])
# optimize out the default criterion of ``ilike ''`` that matches everything
if not self._rec_name:
_logger.warning("Cannot execute name_search, no _rec_name defined on %s", self._name)
elif not (name == '' and operator == 'ilike'):
args += [(self._rec_name, operator, name)]
access_rights_uid = name_get_uid or user
ids = self._search(cr, user, args, limit=limit, context=context, access_rights_uid=access_rights_uid)
res = self.name_get(cr, access_rights_uid, ids, context)
return res
def read_string(self, cr, uid, id, langs, fields=None, context=None):
res = {}
res2 = {}
self.pool.get('ir.translation').check_access_rights(cr, uid, 'read')
if not fields:
fields = self._columns.keys() + self._inherit_fields.keys()
#FIXME: collect all calls to _get_source into one SQL call.
for lang in langs:
res[lang] = {'code': lang}
for f in fields:
if f in self._columns:
res_trans = self.pool.get('ir.translation')._get_source(cr, uid, self._name+','+f, 'field', lang)
if res_trans:
res[lang][f] = res_trans
else:
res[lang][f] = self._columns[f].string
for table in self._inherits:
cols = intersect(self._inherit_fields.keys(), fields)
res2 = self.pool[table].read_string(cr, uid, id, langs, cols, context)
for lang in res2:
if lang in res:
res[lang]['code'] = lang
for f in res2[lang]:
res[lang][f] = res2[lang][f]
return res
def write_string(self, cr, uid, id, langs, vals, context=None):
self.pool.get('ir.translation').check_access_rights(cr, uid, 'write')
#FIXME: try to only call the translation in one SQL
for lang in langs:
for field in vals:
if field in self._columns:
src = self._columns[field].string
self.pool.get('ir.translation')._set_ids(cr, uid, self._name+','+field, 'field', lang, [0], vals[field], src)
for table in self._inherits:
cols = intersect(self._inherit_fields.keys(), vals)
if cols:
self.pool[table].write_string(cr, uid, id, langs, vals, context)
return True
def _add_missing_default_values(self, cr, uid, values, context=None):
# avoid overriding inherited values when parent is set
avoid_tables = []
for tables, parent_field in self._inherits.items():
if parent_field in values:
avoid_tables.append(tables)
# compute missing fields
missing_defaults = set()
for field in self._columns.keys():
if not field in values:
missing_defaults.add(field)
for field in self._inherit_fields.keys():
if (field not in values) and (self._inherit_fields[field][0] not in avoid_tables):
missing_defaults.add(field)
# discard magic fields
missing_defaults -= set(MAGIC_COLUMNS)
if missing_defaults:
# override defaults with the provided values, never allow the other way around
defaults = self.default_get(cr, uid, list(missing_defaults), context)
for dv in defaults:
if ((dv in self._columns and self._columns[dv]._type == 'many2many') \
or (dv in self._inherit_fields and self._inherit_fields[dv][2]._type == 'many2many')) \
and defaults[dv] and isinstance(defaults[dv][0], (int, long)):
defaults[dv] = [(6, 0, defaults[dv])]
if (dv in self._columns and self._columns[dv]._type == 'one2many' \
or (dv in self._inherit_fields and self._inherit_fields[dv][2]._type == 'one2many')) \
and isinstance(defaults[dv], (list, tuple)) and defaults[dv] and isinstance(defaults[dv][0], dict):
defaults[dv] = [(0, 0, x) for x in defaults[dv]]
defaults.update(values)
values = defaults
return values
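# Illustrative example (hypothetical field names): a many2many default of
# [3, 7] coming from default_get() is normalized here to the command form
# [(6, 0, [3, 7])], and a one2many default given as a list of value dicts
# becomes [(0, 0, vals)] commands, so create() always receives x2many
# defaults in command notation.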
def clear_caches(self):
""" Clear the caches
This clears the caches associated to methods decorated with
``tools.ormcache`` or ``tools.ormcache_multi``.
"""
try:
self._ormcache.clear()
self.pool._any_cache_cleared = True
except AttributeError:
pass
def _read_group_fill_results(self, cr, uid, domain, groupby, remaining_groupbys,
aggregated_fields, count_field,
read_group_result, read_group_order=None, context=None):
"""Helper method for filling in empty groups for all possible values of
the field being grouped by"""
# self._group_by_full should map groupable fields to a method that returns
# a list of all aggregated values that we want to display for this field,
# in the form of a m2o-like pair (key,label).
# This is useful to implement kanban views for instance, where all columns
# should be displayed even if they don't contain any record.
# Grab the list of all groups that should be displayed, including all present groups
present_group_ids = [x[groupby][0] for x in read_group_result if x[groupby]]
all_groups,folded = self._group_by_full[groupby](self, cr, uid, present_group_ids, domain,
read_group_order=read_group_order,
access_rights_uid=openerp.SUPERUSER_ID,
context=context)
result_template = dict.fromkeys(aggregated_fields, False)
result_template[groupby + '_count'] = 0
if remaining_groupbys:
result_template['__context'] = {'group_by': remaining_groupbys}
# Merge the left_side (current results as dicts) with the right_side (all
# possible values as m2o pairs). Both lists are supposed to be using the
# same ordering, and can be merged in one pass.
result = []
known_values = {}
def append_left(left_side):
grouped_value = left_side[groupby] and left_side[groupby][0]
if not grouped_value in known_values:
result.append(left_side)
known_values[grouped_value] = left_side
else:
known_values[grouped_value].update({count_field: left_side[count_field]})
def append_right(right_side):
grouped_value = right_side[0]
if not grouped_value in known_values:
line = dict(result_template)
line[groupby] = right_side
line['__domain'] = [(groupby,'=',grouped_value)] + domain
result.append(line)
known_values[grouped_value] = line
while read_group_result or all_groups:
left_side = read_group_result[0] if read_group_result else None
right_side = all_groups[0] if all_groups else None
assert left_side is None or left_side[groupby] is False \
or isinstance(left_side[groupby], (tuple,list)), \
'M2O-like pair expected, got %r' % left_side[groupby]
assert right_side is None or isinstance(right_side, (tuple,list)), \
'M2O-like pair expected, got %r' % right_side
if left_side is None:
append_right(all_groups.pop(0))
elif right_side is None:
append_left(read_group_result.pop(0))
elif left_side[groupby] == right_side:
append_left(read_group_result.pop(0))
all_groups.pop(0) # discard right_side
elif not left_side[groupby] or not left_side[groupby][0]:
# left side == "Undefined" entry, not present on right_side
append_left(read_group_result.pop(0))
else:
append_right(all_groups.pop(0))
if folded:
for r in result:
r['__fold'] = folded.get(r[groupby] and r[groupby][0], False)
return result
def _read_group_prepare(self, orderby, aggregated_fields, annotated_groupbys, query):
"""
Prepares the GROUP BY and ORDER BY terms for the read_group method. Adds the missing JOIN clause
to the query if the ordering must be computed against an m2o field.
:param orderby: the orderby definition in the form "%(field)s %(order)s"
:param aggregated_fields: list of aggregated fields in the query
:param annotated_groupbys: list of dictionaries returned by _read_group_process_groupby
These dictionaries contain the qualified name of each groupby
(fully qualified SQL name for the corresponding field),
and the (non raw) field name.
:param osv.Query query: the query under construction
:return: (groupby_terms, orderby_terms)
"""
orderby_terms = []
groupby_terms = [gb['qualified_field'] for gb in annotated_groupbys]
groupby_fields = [gb['groupby'] for gb in annotated_groupbys]
if not orderby:
return groupby_terms, orderby_terms
self._check_qorder(orderby)
for order_part in orderby.split(','):
order_split = order_part.split()
order_field = order_split[0]
if order_field in groupby_fields:
if self._all_columns[order_field.split(':')[0]].column._type == 'many2one':
order_clause = self._generate_order_by(order_part, query).replace('ORDER BY ', '')
if order_clause:
orderby_terms.append(order_clause)
groupby_terms += [order_term.split()[0] for order_term in order_clause.split(',')]
else:
order = '"%s" %s' % (order_field, '' if len(order_split) == 1 else order_split[1])
orderby_terms.append(order)
elif order_field in aggregated_fields:
orderby_terms.append(order_part)
else:
# Cannot order by a field that will not appear in the results (needs to be grouped or aggregated)
_logger.warn('%s: read_group order by `%s` ignored, cannot sort on empty columns (not grouped/aggregated)',
self._name, order_part)
return groupby_terms, orderby_terms
def _read_group_process_groupby(self, gb, query, context):
"""
Helper method to collect important information about groupbys: raw
field name, type, time information, qualified name, ...
"""
split = gb.split(':')
field_type = self._all_columns[split[0]].column._type
gb_function = split[1] if len(split) == 2 else None
temporal = field_type in ('date', 'datetime')
tz_convert = field_type == 'datetime' and context.get('tz') in pytz.all_timezones
qualified_field = self._inherits_join_calc(split[0], query)
if temporal:
display_formats = {
'day': 'dd MMM YYYY',
'week': "'W'w YYYY",
'month': 'MMMM YYYY',
'quarter': 'QQQ YYYY',
'year': 'YYYY'
}
time_intervals = {
'day': dateutil.relativedelta.relativedelta(days=1),
'week': datetime.timedelta(days=7),
'month': dateutil.relativedelta.relativedelta(months=1),
'quarter': dateutil.relativedelta.relativedelta(months=3),
'year': dateutil.relativedelta.relativedelta(years=1)
}
if tz_convert:
qualified_field = "timezone('%s', timezone('UTC',%s))" % (context.get('tz', 'UTC'), qualified_field)
qualified_field = "date_trunc('%s', %s)" % (gb_function or 'month', qualified_field)
if field_type == 'boolean':
qualified_field = "coalesce(%s,false)" % qualified_field
return {
'field': split[0],
'groupby': gb,
'type': field_type,
'display_format': display_formats[gb_function or 'month'] if temporal else None,
'interval': time_intervals[gb_function or 'month'] if temporal else None,
'tz_convert': tz_convert,
'qualified_field': qualified_field
}
def _read_group_prepare_data(self, key, value, groupby_dict, context):
"""
Helper method to sanitize the data received by read_group. The None
values are converted to False, and the date/datetime values are formatted
and corrected according to the timezones.
"""
value = False if value is None else value
gb = groupby_dict.get(key)
if gb and gb['type'] in ('date', 'datetime') and value:
if isinstance(value, basestring):
dt_format = DEFAULT_SERVER_DATETIME_FORMAT if gb['type'] == 'datetime' else DEFAULT_SERVER_DATE_FORMAT
value = datetime.datetime.strptime(value, dt_format)
if gb['tz_convert']:
value = pytz.timezone(context['tz']).localize(value)
return value
def _read_group_get_domain(self, groupby, value):
"""
Helper method to construct the domain corresponding to a groupby and
a given value. This is mostly relevant for date/datetime.
"""
if groupby['type'] in ('date', 'datetime') and value:
dt_format = DEFAULT_SERVER_DATETIME_FORMAT if groupby['type'] == 'datetime' else DEFAULT_SERVER_DATE_FORMAT
domain_dt_begin = value
domain_dt_end = value + groupby['interval']
if groupby['tz_convert']:
domain_dt_begin = domain_dt_begin.astimezone(pytz.utc)
domain_dt_end = domain_dt_end.astimezone(pytz.utc)
return [(groupby['field'], '>=', domain_dt_begin.strftime(dt_format)),
(groupby['field'], '<', domain_dt_end.strftime(dt_format))]
if groupby['type'] == 'many2one' and value:
value = value[0]
return [(groupby['field'], '=', value)]
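# Illustrative example (hypothetical field): for a groupby entry built from
# 'order_date:month' with value datetime(2014, 1, 1), the returned domain is
# roughly
#     [('order_date', '>=', '2014-01-01 ...'), ('order_date', '<', '2014-02-01 ...')]
# i.e. a half-open one-month range, shifted back to UTC when tz_convert is set.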
def _read_group_format_result(self, data, annotated_groupbys, groupby, groupby_dict, domain, context):
"""
Helper method to format the data contained in the dictionary data by
adding the domain corresponding to its values, the groupbys in the
context and by properly formatting the date/datetime values.
"""
domain_group = [dom for gb in annotated_groupbys for dom in self._read_group_get_domain(gb, data[gb['groupby']])]
for k,v in data.iteritems():
gb = groupby_dict.get(k)
if gb and gb['type'] in ('date', 'datetime') and v:
data[k] = babel.dates.format_date(v, format=gb['display_format'], locale=context.get('lang', 'en_US'))
data['__domain'] = domain_group + domain
if len(groupby) - len(annotated_groupbys) >= 1:
data['__context'] = { 'group_by': groupby[len(annotated_groupbys):]}
del data['id']
return data
def read_group(self, cr, uid, domain, fields, groupby, offset=0, limit=None, context=None, orderby=False, lazy=True):
"""
Get the list of records in list view grouped by the given ``groupby`` fields
:param cr: database cursor
:param uid: current user id
:param domain: list specifying search criteria [['field_name', 'operator', 'value'], ...]
:param list fields: list of fields present in the list view specified on the object
:param list groupby: list of groupby descriptions by which the records will be grouped.
A groupby description is either a field (then it will be grouped by that field)
or a string 'field:groupby_function'. Right now, the only functions supported
are 'day', 'week', 'month', 'quarter' or 'year', and they only make sense for
date/datetime fields.
:param int offset: optional number of records to skip
:param int limit: optional max number of records to return
:param dict context: context arguments, like lang, time zone.
:param list orderby: optional ``order by`` specification, for
overriding the natural sort ordering of the
groups, see also :py:meth:`~osv.osv.osv.search`
(supported only for many2one fields currently)
:param bool lazy: if true, the results are only grouped by the first groupby and the
remaining groupbys are put in the __context key. If false, all the groupbys are
done in one call.
:return: list of dictionaries(one dictionary for each record) containing:
* the values of fields grouped by the fields in ``groupby`` argument
* __domain: list of tuples specifying the search criteria
* __context: dictionary with argument like ``groupby``
:rtype: [{'field_name_1': value, ...}, ...]
:raise AccessError: * if user has no read rights on the requested object
* if user tries to bypass access rules for read on the requested object
"""
if context is None:
context = {}
self.check_access_rights(cr, uid, 'read')
query = self._where_calc(cr, uid, domain, context=context)
fields = fields or self._columns.keys()
groupby = [groupby] if isinstance(groupby, basestring) else groupby
groupby_list = groupby[:1] if lazy else groupby
annotated_groupbys = [self._read_group_process_groupby(gb, query, context)
for gb in groupby_list]
groupby_fields = [g['field'] for g in annotated_groupbys]
order = orderby or ','.join([g for g in groupby_list])
groupby_dict = {gb['groupby']: gb for gb in annotated_groupbys}
self._apply_ir_rules(cr, uid, query, 'read', context=context)
for gb in groupby_fields:
assert gb in fields, "Fields in 'groupby' must appear in the list of fields to read (perhaps it's missing in the list view?)"
groupby_def = self._columns.get(gb) or (self._inherit_fields.get(gb) and self._inherit_fields.get(gb)[2])
assert groupby_def and groupby_def._classic_write, "Fields in 'groupby' must be regular database-persisted fields (no function or related fields), or function fields with store=True"
if not (gb in self._all_columns):
# Don't allow arbitrary values, as this would be a SQL injection vector!
raise except_orm(_('Invalid group_by'),
_('Invalid group_by specification: "%s".\nA group_by specification must be a list of valid fields.')%(gb,))
aggregated_fields = [
f for f in fields
if f not in ('id', 'sequence')
if f not in groupby_fields
if f in self._all_columns
if self._all_columns[f].column._type in ('integer', 'float')
if getattr(self._all_columns[f].column, '_classic_write')]
field_formatter = lambda f: (self._all_columns[f].column.group_operator or 'sum', self._inherits_join_calc(f, query), f)
select_terms = ["%s(%s) AS %s" % field_formatter(f) for f in aggregated_fields]
for gb in annotated_groupbys:
select_terms.append('%s as "%s" ' % (gb['qualified_field'], gb['groupby']))
groupby_terms, orderby_terms = self._read_group_prepare(order, aggregated_fields, annotated_groupbys, query)
from_clause, where_clause, where_clause_params = query.get_sql()
if lazy and (len(groupby_fields) >= 2 or not context.get('group_by_no_leaf')):
count_field = groupby_fields[0] if len(groupby_fields) >= 1 else '_'
else:
count_field = '_'
count_field += '_count'
prefix_terms = lambda prefix, terms: (prefix + " " + ",".join(terms)) if terms else ''
prefix_term = lambda prefix, term: ('%s %s' % (prefix, term)) if term else ''
query = """
SELECT min(%(table)s.id) AS id, count(%(table)s.id) AS %(count_field)s %(extra_fields)s
FROM %(from)s
%(where)s
%(groupby)s
%(orderby)s
%(limit)s
%(offset)s
""" % {
'table': self._table,
'count_field': count_field,
'extra_fields': prefix_terms(',', select_terms),
'from': from_clause,
'where': prefix_term('WHERE', where_clause),
'groupby': prefix_terms('GROUP BY', groupby_terms),
'orderby': prefix_terms('ORDER BY', orderby_terms),
'limit': prefix_term('LIMIT', int(limit) if limit else None),
'offset': prefix_term('OFFSET', int(offset) if offset else None),
}
cr.execute(query, where_clause_params)
fetched_data = cr.dictfetchall()
if not groupby_fields:
return fetched_data
many2onefields = [gb['field'] for gb in annotated_groupbys if gb['type'] == 'many2one']
if many2onefields:
data_ids = [r['id'] for r in fetched_data]
many2onefields = list(set(many2onefields))
data_dict = {d['id']: d for d in self.read(cr, uid, data_ids, many2onefields, context=context)}
for d in fetched_data:
d.update(data_dict[d['id']])
data = map(lambda r: {k: self._read_group_prepare_data(k,v, groupby_dict, context) for k,v in r.iteritems()}, fetched_data)
result = [self._read_group_format_result(d, annotated_groupbys, groupby, groupby_dict, domain, context) for d in data]
if lazy and groupby_fields[0] in self._group_by_full:
# Right now, read_group only fills results in lazy mode (by default).
# If you need the empty groups in 'eager' mode, the method
# _read_group_fill_results needs to be completely reimplemented
# in a sane way
result = self._read_group_fill_results(cr, uid, domain, groupby_fields[0], groupby[len(annotated_groupbys):],
aggregated_fields, count_field, result, read_group_order=order,
context=context)
return result
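# Illustrative usage (a sketch; model and field names are examples):
#
#     groups = self.pool['sale.order'].read_group(cr, uid,
#         domain=[('state', '!=', 'cancel')],
#         fields=['partner_id', 'amount_total'],
#         groupby=['partner_id'], context=context)
#     # each dict looks roughly like {'partner_id': (id, 'Name'),
#     #                               'partner_id_count': 3,
#     #                               'amount_total': 1250.0,
#     #                               '__domain': [('partner_id', '=', id), ...]}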
def _inherits_join_add(self, current_model, parent_model_name, query):
"""
Add missing table SELECT and JOIN clause to ``query`` for reaching the parent table (no duplicates)
:param current_model: current model object
:param parent_model_name: name of the parent model for which the clauses should be added
:param query: query object on which the JOIN should be added
"""
inherits_field = current_model._inherits[parent_model_name]
parent_model = self.pool[parent_model_name]
parent_alias, parent_alias_statement = query.add_join((current_model._table, parent_model._table, inherits_field, 'id', inherits_field), implicit=True)
return parent_alias
def _inherits_join_calc(self, field, query):
"""
Adds missing table select and join clause(s) to ``query`` for reaching
the field coming from an '_inherits' parent table (no duplicates).
:param field: name of inherited field to reach
:param query: query object on which the JOIN should be added
:return: qualified name of field, to be used in SELECT clause
"""
current_table = self
parent_alias = '"%s"' % current_table._table
while field in current_table._inherit_fields and not field in current_table._columns:
parent_model_name = current_table._inherit_fields[field][0]
parent_table = self.pool[parent_model_name]
parent_alias = self._inherits_join_add(current_table, parent_model_name, query)
current_table = parent_table
return '%s."%s"' % (parent_alias, field)
def _parent_store_compute(self, cr):
if not self._parent_store:
return
_logger.info('Computing parent left and right for table %s...', self._table)
def browse_rec(root, pos=0):
# TODO: set order
where = self._parent_name+'='+str(root)
if not root:
where = self._parent_name+' IS NULL'
if self._parent_order:
where += ' order by '+self._parent_order
cr.execute('SELECT id FROM '+self._table+' WHERE '+where)
pos2 = pos + 1
for id in cr.fetchall():
pos2 = browse_rec(id[0], pos2)
cr.execute('update '+self._table+' set parent_left=%s, parent_right=%s where id=%s', (pos, pos2, root))
return pos2 + 1
query = 'SELECT id FROM '+self._table+' WHERE '+self._parent_name+' IS NULL'
if self._parent_order:
query += ' order by ' + self._parent_order
pos = 0
cr.execute(query)
for (root,) in cr.fetchall():
pos = browse_rec(root, pos)
self.invalidate_cache(cr, SUPERUSER_ID, ['parent_left', 'parent_right'])
return True
def _update_store(self, cr, f, k):
_logger.info("storing computed values of fields.function '%s'", k)
ss = self._columns[k]._symbol_set
update_query = 'UPDATE "%s" SET "%s"=%s WHERE id=%%s' % (self._table, k, ss[0])
cr.execute('select id from '+self._table)
ids_lst = map(lambda x: x[0], cr.fetchall())
while ids_lst:
iids = ids_lst[:AUTOINIT_RECALCULATE_STORED_FIELDS]
ids_lst = ids_lst[AUTOINIT_RECALCULATE_STORED_FIELDS:]
res = f.get(cr, self, iids, k, SUPERUSER_ID, {})
for key, val in res.items():
if f._multi:
val = val[k]
# if val is a many2one, just write the ID
if type(val) == tuple:
val = val[0]
if val is not False:
cr.execute(update_query, (ss[1](val), key))
def _check_selection_field_value(self, cr, uid, field, value, context=None):
"""Raise except_orm if value is not among the valid values for the selection field"""
if self._columns[field]._type == 'reference':
val_model, val_id_str = value.split(',', 1)
val_id = False
try:
val_id = long(val_id_str)
except ValueError:
pass
if not val_id:
raise except_orm(_('ValidateError'),
_('Invalid value for reference field "%s.%s" (last part must be a non-zero integer): "%s"') % (self._table, field, value))
val = val_model
else:
val = value
if isinstance(self._columns[field].selection, (tuple, list)):
if val in dict(self._columns[field].selection):
return
elif val in dict(self._columns[field].selection(self, cr, uid, context=context)):
return
raise except_orm(_('ValidateError'),
_('The value "%s" for the field "%s.%s" is not in the selection') % (value, self._name, field))
def _check_removed_columns(self, cr, log=False):
# iterate on the database columns to drop the NOT NULL constraints
# of fields which were required but have been removed (or will be added by another module)
columns = [c for c in self._columns if not (isinstance(self._columns[c], fields.function) and not self._columns[c].store)]
columns += MAGIC_COLUMNS
cr.execute("SELECT a.attname, a.attnotnull"
" FROM pg_class c, pg_attribute a"
" WHERE c.relname=%s"
" AND c.oid=a.attrelid"
" AND a.attisdropped=%s"
" AND pg_catalog.format_type(a.atttypid, a.atttypmod) NOT IN ('cid', 'tid', 'oid', 'xid')"
" AND a.attname NOT IN %s", (self._table, False, tuple(columns))),
for column in cr.dictfetchall():
if log:
_logger.debug("column %s is in the table %s but not in the corresponding object %s",
column['attname'], self._table, self._name)
if column['attnotnull']:
cr.execute('ALTER TABLE "%s" ALTER COLUMN "%s" DROP NOT NULL' % (self._table, column['attname']))
_schema.debug("Table '%s': column '%s': dropped NOT NULL constraint",
self._table, column['attname'])
def _save_constraint(self, cr, constraint_name, type):
"""
Record the creation of a constraint for this model, to make it possible
        to delete it later when the module is uninstalled. ``type`` can be
        either 'f' (foreign key) or 'u' (any other constraint, e.g. unique).
"""
if not self._module:
# no need to save constraints for custom models as they're not part
# of any module
return
assert type in ('f', 'u')
cr.execute("""
SELECT 1 FROM ir_model_constraint, ir_module_module
WHERE ir_model_constraint.module=ir_module_module.id
AND ir_model_constraint.name=%s
AND ir_module_module.name=%s
""", (constraint_name, self._module))
if not cr.rowcount:
cr.execute("""
INSERT INTO ir_model_constraint
(name, date_init, date_update, module, model, type)
VALUES (%s, now() AT TIME ZONE 'UTC', now() AT TIME ZONE 'UTC',
(SELECT id FROM ir_module_module WHERE name=%s),
(SELECT id FROM ir_model WHERE model=%s), %s)""",
(constraint_name, self._module, self._name, type))
def _save_relation_table(self, cr, relation_table):
"""
        Record the creation of a many2many relation table for this model, to make it possible
to delete it later when the module is uninstalled.
"""
cr.execute("""
SELECT 1 FROM ir_model_relation, ir_module_module
WHERE ir_model_relation.module=ir_module_module.id
AND ir_model_relation.name=%s
AND ir_module_module.name=%s
""", (relation_table, self._module))
if not cr.rowcount:
cr.execute("""INSERT INTO ir_model_relation (name, date_init, date_update, module, model)
VALUES (%s, now() AT TIME ZONE 'UTC', now() AT TIME ZONE 'UTC',
(SELECT id FROM ir_module_module WHERE name=%s),
(SELECT id FROM ir_model WHERE model=%s))""",
(relation_table, self._module, self._name))
self.invalidate_cache(cr, SUPERUSER_ID)
# checked version: for direct m2o starting from `self`
def _m2o_add_foreign_key_checked(self, source_field, dest_model, ondelete):
assert self.is_transient() or not dest_model.is_transient(), \
'Many2One relationships from non-transient Model to TransientModel are forbidden'
if self.is_transient() and not dest_model.is_transient():
            # TransientModel relationships to regular Models are annoying,
            # usually because they could block deletion due to the FKs.
# So unless stated otherwise we default them to ondelete=cascade.
ondelete = ondelete or 'cascade'
fk_def = (self._table, source_field, dest_model._table, ondelete or 'set null')
self._foreign_keys.add(fk_def)
_schema.debug("Table '%s': added foreign key '%s' with definition=REFERENCES \"%s\" ON DELETE %s", *fk_def)
# unchecked version: for custom cases, such as m2m relationships
def _m2o_add_foreign_key_unchecked(self, source_table, source_field, dest_model, ondelete):
fk_def = (source_table, source_field, dest_model._table, ondelete or 'set null')
self._foreign_keys.add(fk_def)
_schema.debug("Table '%s': added foreign key '%s' with definition=REFERENCES \"%s\" ON DELETE %s", *fk_def)
def _drop_constraint(self, cr, source_table, constraint_name):
cr.execute("ALTER TABLE %s DROP CONSTRAINT %s" % (source_table,constraint_name))
def _m2o_fix_foreign_key(self, cr, source_table, source_field, dest_model, ondelete):
# Find FK constraint(s) currently established for the m2o field,
# and see whether they are stale or not
cr.execute("""SELECT confdeltype as ondelete_rule, conname as constraint_name,
cl2.relname as foreign_table
FROM pg_constraint as con, pg_class as cl1, pg_class as cl2,
pg_attribute as att1, pg_attribute as att2
WHERE con.conrelid = cl1.oid
AND cl1.relname = %s
AND con.confrelid = cl2.oid
AND array_lower(con.conkey, 1) = 1
AND con.conkey[1] = att1.attnum
AND att1.attrelid = cl1.oid
AND att1.attname = %s
AND array_lower(con.confkey, 1) = 1
AND con.confkey[1] = att2.attnum
AND att2.attrelid = cl2.oid
AND att2.attname = %s
AND con.contype = 'f'""", (source_table, source_field, 'id'))
constraints = cr.dictfetchall()
if constraints:
if len(constraints) == 1:
# Is it the right constraint?
cons, = constraints
if cons['ondelete_rule'] != POSTGRES_CONFDELTYPES.get((ondelete or 'set null').upper(), 'a')\
or cons['foreign_table'] != dest_model._table:
# Wrong FK: drop it and recreate
_schema.debug("Table '%s': dropping obsolete FK constraint: '%s'",
source_table, cons['constraint_name'])
self._drop_constraint(cr, source_table, cons['constraint_name'])
else:
# it's all good, nothing to do!
return
else:
# Multiple FKs found for the same field, drop them all, and re-create
for cons in constraints:
_schema.debug("Table '%s': dropping duplicate FK constraints: '%s'",
source_table, cons['constraint_name'])
self._drop_constraint(cr, source_table, cons['constraint_name'])
# (re-)create the FK
self._m2o_add_foreign_key_checked(source_field, dest_model, ondelete)
def _set_default_value_on_column(self, cr, column_name, context=None):
# ideally should use add_default_value but fails
# due to ir.values not being ready
# get old-style default
default = self._defaults.get(column_name)
if callable(default):
default = default(self, cr, SUPERUSER_ID, context)
# get new_style default if no old-style
if default is None:
record = self.new(cr, SUPERUSER_ID, context=context)
field = self._fields[column_name]
field.determine_default(record)
defaults = dict(record._cache)
if column_name in defaults:
default = field.convert_to_write(defaults[column_name])
column = self._columns[column_name]
ss = column._symbol_set
db_default = ss[1](default)
# Write default if non-NULL, except for booleans for which False means
# the same as NULL - this saves us an expensive query on large tables.
write_default = (db_default is not None if column._type != 'boolean'
else db_default)
if write_default:
_logger.debug("Table '%s': setting default value of new column %s to %r",
self._table, column_name, default)
query = 'UPDATE "%s" SET "%s"=%s WHERE "%s" is NULL' % (
self._table, column_name, ss[0], column_name)
cr.execute(query, (db_default,))
# this is a disgrace
cr.commit()
def _auto_init(self, cr, context=None):
"""
Call _field_create and, unless _auto is False:
- create the corresponding table in database for the model,
- possibly add the parent columns in database,
- possibly add the columns 'create_uid', 'create_date', 'write_uid',
'write_date' in database if _log_access is True (the default),
        - report database columns that no longer exist in _columns,
        - drop NOT NULL constraints of columns that have been removed,
- alter existing database columns to match _columns,
- create database tables to match _columns,
- add database indices to match _columns,
        - save in self._foreign_keys a list of foreign keys to create (see
_auto_end).
"""
self._foreign_keys = set()
raise_on_invalid_object_name(self._name)
if context is None:
context = {}
store_compute = False
stored_fields = [] # new-style stored fields with compute
todo_end = []
update_custom_fields = context.get('update_custom_fields', False)
self._field_create(cr, context=context)
create = not self._table_exist(cr)
if self._auto:
if create:
self._create_table(cr)
has_rows = False
else:
cr.execute('SELECT 1 FROM "%s" LIMIT 1' % self._table)
has_rows = cr.rowcount
cr.commit()
if self._parent_store:
if not self._parent_columns_exist(cr):
self._create_parent_columns(cr)
store_compute = True
self._check_removed_columns(cr, log=False)
# iterate on the "object columns"
column_data = self._select_column_data(cr)
for k, f in self._columns.iteritems():
if k == 'id': # FIXME: maybe id should be a regular column?
continue
# Don't update custom (also called manual) fields
if f.manual and not update_custom_fields:
continue
if isinstance(f, fields.one2many):
self._o2m_raise_on_missing_reference(cr, f)
elif isinstance(f, fields.many2many):
self._m2m_raise_or_create_relation(cr, f)
else:
res = column_data.get(k)
                    # The field is not found as-is in the database; check
                    # whether it exists under an old name.
if not res and hasattr(f, 'oldname'):
res = column_data.get(f.oldname)
if res:
cr.execute('ALTER TABLE "%s" RENAME "%s" TO "%s"' % (self._table, f.oldname, k))
res['attname'] = k
column_data[k] = res
_schema.debug("Table '%s': renamed column '%s' to '%s'",
self._table, f.oldname, k)
# The field already exists in database. Possibly
# change its type, rename it, drop it or change its
# constraints.
if res:
f_pg_type = res['typname']
f_pg_size = res['size']
f_pg_notnull = res['attnotnull']
if isinstance(f, fields.function) and not f.store and\
not getattr(f, 'nodrop', False):
_logger.info('column %s (%s) converted to a function, removed from table %s',
k, f.string, self._table)
cr.execute('ALTER TABLE "%s" DROP COLUMN "%s" CASCADE' % (self._table, k))
cr.commit()
_schema.debug("Table '%s': dropped column '%s' with cascade",
self._table, k)
f_obj_type = None
else:
f_obj_type = get_pg_type(f) and get_pg_type(f)[0]
if f_obj_type:
ok = False
casts = [
('text', 'char', pg_varchar(f.size), '::%s' % pg_varchar(f.size)),
('varchar', 'text', 'TEXT', ''),
('int4', 'float', get_pg_type(f)[1], '::'+get_pg_type(f)[1]),
('date', 'datetime', 'TIMESTAMP', '::TIMESTAMP'),
('timestamp', 'date', 'date', '::date'),
('numeric', 'float', get_pg_type(f)[1], '::'+get_pg_type(f)[1]),
('float8', 'float', get_pg_type(f)[1], '::'+get_pg_type(f)[1]),
]
if f_pg_type == 'varchar' and f._type == 'char' and f_pg_size and (f.size is None or f_pg_size < f.size):
try:
with cr.savepoint():
cr.execute('ALTER TABLE "%s" ALTER COLUMN "%s" TYPE %s' % (self._table, k, pg_varchar(f.size)))
except psycopg2.NotSupportedError:
                                    # An in-place ALTER TABLE cannot be done because a view depends on this field.
                                    # Do a manual copy; this will drop the view (it will be recreated later).
cr.execute('ALTER TABLE "%s" RENAME COLUMN "%s" TO temp_change_size' % (self._table, k))
cr.execute('ALTER TABLE "%s" ADD COLUMN "%s" %s' % (self._table, k, pg_varchar(f.size)))
cr.execute('UPDATE "%s" SET "%s"=temp_change_size::%s' % (self._table, k, pg_varchar(f.size)))
cr.execute('ALTER TABLE "%s" DROP COLUMN temp_change_size CASCADE' % (self._table,))
cr.commit()
_schema.debug("Table '%s': column '%s' (type varchar) changed size from %s to %s",
self._table, k, f_pg_size or 'unlimited', f.size or 'unlimited')
for c in casts:
if (f_pg_type==c[0]) and (f._type==c[1]):
if f_pg_type != f_obj_type:
ok = True
cr.execute('ALTER TABLE "%s" RENAME COLUMN "%s" TO __temp_type_cast' % (self._table, k))
cr.execute('ALTER TABLE "%s" ADD COLUMN "%s" %s' % (self._table, k, c[2]))
cr.execute(('UPDATE "%s" SET "%s"= __temp_type_cast'+c[3]) % (self._table, k))
cr.execute('ALTER TABLE "%s" DROP COLUMN __temp_type_cast CASCADE' % (self._table,))
cr.commit()
_schema.debug("Table '%s': column '%s' changed type from %s to %s",
self._table, k, c[0], c[1])
break
if f_pg_type != f_obj_type:
if not ok:
i = 0
while True:
newname = k + '_moved' + str(i)
cr.execute("SELECT count(1) FROM pg_class c,pg_attribute a " \
"WHERE c.relname=%s " \
"AND a.attname=%s " \
"AND c.oid=a.attrelid ", (self._table, newname))
if not cr.fetchone()[0]:
break
i += 1
if f_pg_notnull:
cr.execute('ALTER TABLE "%s" ALTER COLUMN "%s" DROP NOT NULL' % (self._table, k))
cr.execute('ALTER TABLE "%s" RENAME COLUMN "%s" TO "%s"' % (self._table, k, newname))
cr.execute('ALTER TABLE "%s" ADD COLUMN "%s" %s' % (self._table, k, get_pg_type(f)[1]))
cr.execute("COMMENT ON COLUMN %s.\"%s\" IS %%s" % (self._table, k), (f.string,))
_schema.debug("Table '%s': column '%s' has changed type (DB=%s, def=%s), data moved to column %s !",
self._table, k, f_pg_type, f._type, newname)
# if the field is required and hasn't got a NOT NULL constraint
if f.required and f_pg_notnull == 0:
if has_rows:
self._set_default_value_on_column(cr, k, context=context)
# add the NOT NULL constraint
try:
cr.execute('ALTER TABLE "%s" ALTER COLUMN "%s" SET NOT NULL' % (self._table, k), log_exceptions=False)
cr.commit()
_schema.debug("Table '%s': column '%s': added NOT NULL constraint",
self._table, k)
except Exception:
msg = "Table '%s': unable to set a NOT NULL constraint on column '%s' !\n"\
"If you want to have it, you should update the records and execute manually:\n"\
"ALTER TABLE %s ALTER COLUMN %s SET NOT NULL"
_schema.warning(msg, self._table, k, self._table, k)
cr.commit()
elif not f.required and f_pg_notnull == 1:
cr.execute('ALTER TABLE "%s" ALTER COLUMN "%s" DROP NOT NULL' % (self._table, k))
cr.commit()
_schema.debug("Table '%s': column '%s': dropped NOT NULL constraint",
self._table, k)
# Verify index
indexname = '%s_%s_index' % (self._table, k)
cr.execute("SELECT indexname FROM pg_indexes WHERE indexname = %s and tablename = %s", (indexname, self._table))
res2 = cr.dictfetchall()
if not res2 and f.select:
cr.execute('CREATE INDEX "%s_%s_index" ON "%s" ("%s")' % (self._table, k, self._table, k))
cr.commit()
if f._type == 'text':
# FIXME: for fields.text columns we should try creating GIN indexes instead (seems most suitable for an ERP context)
msg = "Table '%s': Adding (b-tree) index for %s column '%s'."\
"This is probably useless (does not work for fulltext search) and prevents INSERTs of long texts"\
" because there is a length limit for indexable btree values!\n"\
"Use a search view instead if you simply want to make the field searchable."
_schema.warning(msg, self._table, f._type, k)
if res2 and not f.select:
cr.execute('DROP INDEX "%s_%s_index"' % (self._table, k))
cr.commit()
msg = "Table '%s': dropping index for column '%s' of type '%s' as it is not required anymore"
_schema.debug(msg, self._table, k, f._type)
if isinstance(f, fields.many2one) or (isinstance(f, fields.function) and f._type == 'many2one' and f.store):
dest_model = self.pool[f._obj]
if dest_model._auto and dest_model._table != 'ir_actions':
self._m2o_fix_foreign_key(cr, self._table, k, dest_model, f.ondelete)
# The field doesn't exist in database. Create it if necessary.
else:
if not isinstance(f, fields.function) or f.store:
# add the missing field
cr.execute('ALTER TABLE "%s" ADD COLUMN "%s" %s' % (self._table, k, get_pg_type(f)[1]))
cr.execute("COMMENT ON COLUMN %s.\"%s\" IS %%s" % (self._table, k), (f.string,))
_schema.debug("Table '%s': added column '%s' with definition=%s",
self._table, k, get_pg_type(f)[1])
# initialize it
if has_rows:
self._set_default_value_on_column(cr, k, context=context)
# remember the functions to call for the stored fields
if isinstance(f, fields.function):
order = 10
if f.store is not True: # i.e. if f.store is a dict
order = f.store[f.store.keys()[0]][2]
todo_end.append((order, self._update_store, (f, k)))
# remember new-style stored fields with compute method
if k in self._fields and self._fields[k].depends:
stored_fields.append(self._fields[k])
# and add constraints if needed
if isinstance(f, fields.many2one) or (isinstance(f, fields.function) and f._type == 'many2one' and f.store):
if f._obj not in self.pool:
raise except_orm('Programming Error', 'There is no reference available for %s' % (f._obj,))
dest_model = self.pool[f._obj]
ref = dest_model._table
# ir_actions is inherited so foreign key doesn't work on it
if dest_model._auto and ref != 'ir_actions':
self._m2o_add_foreign_key_checked(k, dest_model, f.ondelete)
if f.select:
cr.execute('CREATE INDEX "%s_%s_index" ON "%s" ("%s")' % (self._table, k, self._table, k))
if f.required:
try:
cr.commit()
cr.execute('ALTER TABLE "%s" ALTER COLUMN "%s" SET NOT NULL' % (self._table, k))
_schema.debug("Table '%s': column '%s': added a NOT NULL constraint",
self._table, k)
except Exception:
msg = "WARNING: unable to set column %s of table %s not null !\n"\
"Try to re-run: openerp-server --update=module\n"\
"If it doesn't work, update records and execute manually:\n"\
"ALTER TABLE %s ALTER COLUMN %s SET NOT NULL"
_logger.warning(msg, k, self._table, self._table, k, exc_info=True)
cr.commit()
else:
cr.execute("SELECT relname FROM pg_class WHERE relkind IN ('r','v') AND relname=%s", (self._table,))
create = not bool(cr.fetchone())
cr.commit() # start a new transaction
if self._auto:
self._add_sql_constraints(cr)
if create:
self._execute_sql(cr)
if store_compute:
self._parent_store_compute(cr)
cr.commit()
if stored_fields:
# trigger computation of new-style stored fields with a compute
def func(cr):
_logger.info("Storing computed values of %s fields %s",
self._name, ', '.join(sorted(f.name for f in stored_fields)))
recs = self.browse(cr, SUPERUSER_ID, [], {'active_test': False})
recs = recs.search([])
if recs:
map(recs._recompute_todo, stored_fields)
recs.recompute()
todo_end.append((1000, func, ()))
return todo_end
def _auto_end(self, cr, context=None):
""" Create the foreign keys recorded by _auto_init. """
for t, k, r, d in self._foreign_keys:
cr.execute('ALTER TABLE "%s" ADD FOREIGN KEY ("%s") REFERENCES "%s" ON DELETE %s' % (t, k, r, d))
self._save_constraint(cr, "%s_%s_fkey" % (t, k), 'f')
cr.commit()
del self._foreign_keys
def _table_exist(self, cr):
cr.execute("SELECT relname FROM pg_class WHERE relkind IN ('r','v') AND relname=%s", (self._table,))
return cr.rowcount
def _create_table(self, cr):
cr.execute('CREATE TABLE "%s" (id SERIAL NOT NULL, PRIMARY KEY(id))' % (self._table,))
cr.execute(("COMMENT ON TABLE \"%s\" IS %%s" % self._table), (self._description,))
_schema.debug("Table '%s': created", self._table)
def _parent_columns_exist(self, cr):
cr.execute("""SELECT c.relname
FROM pg_class c, pg_attribute a
WHERE c.relname=%s AND a.attname=%s AND c.oid=a.attrelid
""", (self._table, 'parent_left'))
return cr.rowcount
def _create_parent_columns(self, cr):
cr.execute('ALTER TABLE "%s" ADD COLUMN "parent_left" INTEGER' % (self._table,))
cr.execute('ALTER TABLE "%s" ADD COLUMN "parent_right" INTEGER' % (self._table,))
if 'parent_left' not in self._columns:
_logger.error('create a column parent_left on object %s: fields.integer(\'Left Parent\', select=1)',
self._table)
_schema.debug("Table '%s': added column '%s' with definition=%s",
self._table, 'parent_left', 'INTEGER')
elif not self._columns['parent_left'].select:
            _logger.error('parent_left column on object %s must be indexed! Add select=1 to the field definition.',
self._table)
if 'parent_right' not in self._columns:
_logger.error('create a column parent_right on object %s: fields.integer(\'Right Parent\', select=1)',
self._table)
_schema.debug("Table '%s': added column '%s' with definition=%s",
self._table, 'parent_right', 'INTEGER')
elif not self._columns['parent_right'].select:
            _logger.error('parent_right column on object %s must be indexed! Add select=1 to the field definition.',
self._table)
if self._columns[self._parent_name].ondelete not in ('cascade', 'restrict'):
_logger.error("The column %s on object %s must be set as ondelete='cascade' or 'restrict'",
self._parent_name, self._name)
cr.commit()
def _select_column_data(self, cr):
# attlen is the number of bytes necessary to represent the type when
# the type has a fixed size. If the type has a varying size attlen is
# -1 and atttypmod is the size limit + 4, or -1 if there is no limit.
cr.execute("SELECT c.relname,a.attname,a.attlen,a.atttypmod,a.attnotnull,a.atthasdef,t.typname,CASE WHEN a.attlen=-1 THEN (CASE WHEN a.atttypmod=-1 THEN 0 ELSE a.atttypmod-4 END) ELSE a.attlen END as size " \
"FROM pg_class c,pg_attribute a,pg_type t " \
"WHERE c.relname=%s " \
"AND c.oid=a.attrelid " \
"AND a.atttypid=t.oid", (self._table,))
return dict(map(lambda x: (x['attname'], x),cr.dictfetchall()))
def _o2m_raise_on_missing_reference(self, cr, f):
# TODO this check should be a method on fields.one2many.
if f._obj in self.pool:
other = self.pool[f._obj]
# TODO the condition could use fields_get_keys().
if f._fields_id not in other._columns.keys():
if f._fields_id not in other._inherit_fields.keys():
raise except_orm('Programming Error', "There is no reference field '%s' found for '%s'" % (f._fields_id, f._obj,))
def _m2m_raise_or_create_relation(self, cr, f):
m2m_tbl, col1, col2 = f._sql_names(self)
# do not create relations for custom fields as they do not belong to a module
# they will be automatically removed when dropping the corresponding ir.model.field
        # table names for custom relations all start with x_, see __init__
if not m2m_tbl.startswith('x_'):
self._save_relation_table(cr, m2m_tbl)
cr.execute("SELECT relname FROM pg_class WHERE relkind IN ('r','v') AND relname=%s", (m2m_tbl,))
if not cr.dictfetchall():
if f._obj not in self.pool:
raise except_orm('Programming Error', 'Many2Many destination model does not exist: `%s`' % (f._obj,))
dest_model = self.pool[f._obj]
ref = dest_model._table
cr.execute('CREATE TABLE "%s" ("%s" INTEGER NOT NULL, "%s" INTEGER NOT NULL, UNIQUE("%s","%s"))' % (m2m_tbl, col1, col2, col1, col2))
# create foreign key references with ondelete=cascade, unless the targets are SQL views
cr.execute("SELECT relkind FROM pg_class WHERE relkind IN ('v') AND relname=%s", (ref,))
if not cr.fetchall():
self._m2o_add_foreign_key_unchecked(m2m_tbl, col2, dest_model, 'cascade')
cr.execute("SELECT relkind FROM pg_class WHERE relkind IN ('v') AND relname=%s", (self._table,))
if not cr.fetchall():
self._m2o_add_foreign_key_unchecked(m2m_tbl, col1, self, 'cascade')
cr.execute('CREATE INDEX "%s_%s_index" ON "%s" ("%s")' % (m2m_tbl, col1, m2m_tbl, col1))
cr.execute('CREATE INDEX "%s_%s_index" ON "%s" ("%s")' % (m2m_tbl, col2, m2m_tbl, col2))
cr.execute("COMMENT ON TABLE \"%s\" IS 'RELATION BETWEEN %s AND %s'" % (m2m_tbl, self._table, ref))
cr.commit()
_schema.debug("Create table '%s': m2m relation between '%s' and '%s'", m2m_tbl, self._table, ref)
def _add_sql_constraints(self, cr):
"""
        Modify this model's database table constraints so they match the ones
        defined in _sql_constraints.
"""
def unify_cons_text(txt):
return txt.lower().replace(', ',',').replace(' (','(')
for (key, con, _) in self._sql_constraints:
conname = '%s_%s' % (self._table, key)
self._save_constraint(cr, conname, 'u')
cr.execute("SELECT conname, pg_catalog.pg_get_constraintdef(oid, true) as condef FROM pg_constraint where conname=%s", (conname,))
existing_constraints = cr.dictfetchall()
sql_actions = {
'drop': {
'execute': False,
'query': 'ALTER TABLE "%s" DROP CONSTRAINT "%s"' % (self._table, conname, ),
'msg_ok': "Table '%s': dropped constraint '%s'. Reason: its definition changed from '%%s' to '%s'" % (
self._table, conname, con),
'msg_err': "Table '%s': unable to drop \'%s\' constraint !" % (self._table, con),
'order': 1,
},
'add': {
'execute': False,
'query': 'ALTER TABLE "%s" ADD CONSTRAINT "%s" %s' % (self._table, conname, con,),
'msg_ok': "Table '%s': added constraint '%s' with definition=%s" % (self._table, conname, con),
'msg_err': "Table '%s': unable to add \'%s\' constraint !\n If you want to have it, you should update the records and execute manually:\n%%s" % (
self._table, con),
'order': 2,
},
}
if not existing_constraints:
                # constraint does not exist yet:
sql_actions['add']['execute'] = True
sql_actions['add']['msg_err'] = sql_actions['add']['msg_err'] % (sql_actions['add']['query'], )
elif unify_cons_text(con) not in [unify_cons_text(item['condef']) for item in existing_constraints]:
# constraint exists but its definition has changed:
sql_actions['drop']['execute'] = True
sql_actions['drop']['msg_ok'] = sql_actions['drop']['msg_ok'] % (existing_constraints[0]['condef'].lower(), )
sql_actions['add']['execute'] = True
sql_actions['add']['msg_err'] = sql_actions['add']['msg_err'] % (sql_actions['add']['query'], )
            # execute the SQL actions flagged above, in order (drop before add):
sql_actions = [item for item in sql_actions.values()]
sql_actions.sort(key=lambda x: x['order'])
for sql_action in [action for action in sql_actions if action['execute']]:
try:
cr.execute(sql_action['query'])
cr.commit()
_schema.debug(sql_action['msg_ok'])
except:
_schema.warning(sql_action['msg_err'])
cr.rollback()
def _execute_sql(self, cr):
""" Execute the SQL code from the _sql attribute (if any)."""
if hasattr(self, "_sql"):
for line in self._sql.split(';'):
line2 = line.replace('\n', '').strip()
if line2:
cr.execute(line2)
cr.commit()
#
    # Update objects that use this one to update their _inherits fields
#
@classmethod
def _inherits_reload_src(cls):
""" Recompute the _inherit_fields mapping on each _inherits'd child model."""
for model in cls.pool.values():
if cls._name in model._inherits:
model._inherits_reload()
@classmethod
def _inherits_reload(cls):
""" Recompute the _inherit_fields mapping.
This will also call itself on each inherits'd child model.
"""
res = {}
for table in cls._inherits:
other = cls.pool[table]
for col in other._columns.keys():
res[col] = (table, cls._inherits[table], other._columns[col], table)
for col in other._inherit_fields.keys():
res[col] = (table, cls._inherits[table], other._inherit_fields[col][2], other._inherit_fields[col][3])
cls._inherit_fields = res
cls._all_columns = cls._get_column_infos()
# interface columns with new-style fields
for attr, column in cls._columns.items():
if attr not in cls._fields:
cls._add_field(attr, column.to_field())
# interface inherited fields with new-style fields (note that the
# reverse order is for being consistent with _all_columns above)
for parent_model, parent_field in reversed(cls._inherits.items()):
for attr, field in cls.pool[parent_model]._fields.iteritems():
if attr not in cls._fields:
cls._add_field(attr, field.copy(
inherited=True,
related=(parent_field, attr),
related_sudo=False,
))
cls._inherits_reload_src()
@classmethod
def _get_column_infos(cls):
"""Returns a dict mapping all fields names (direct fields and
inherited field via _inherits) to a ``column_info`` struct
giving detailed columns """
result = {}
        # do not invert the for loops, since local fields may hide inherited ones!
for k, (parent, m2o, col, original_parent) in cls._inherit_fields.iteritems():
result[k] = fields.column_info(k, col, parent, m2o, original_parent)
for k, col in cls._columns.iteritems():
result[k] = fields.column_info(k, col)
return result
@classmethod
def _inherits_check(cls):
for table, field_name in cls._inherits.items():
if field_name not in cls._columns:
_logger.info('Missing many2one field definition for _inherits reference "%s" in "%s", using default one.', field_name, cls._name)
cls._columns[field_name] = fields.many2one(table, string="Automatically created field to link to parent %s" % table,
required=True, ondelete="cascade")
elif not cls._columns[field_name].required or cls._columns[field_name].ondelete.lower() not in ("cascade", "restrict"):
_logger.warning('Field definition for _inherits reference "%s" in "%s" must be marked as "required" with ondelete="cascade" or "restrict", forcing it to required + cascade.', field_name, cls._name)
cls._columns[field_name].required = True
cls._columns[field_name].ondelete = "cascade"
# reflect fields with delegate=True in dictionary cls._inherits
for field in cls._fields.itervalues():
if field.type == 'many2one' and not field.related and field.delegate:
if not field.required:
_logger.warning("Field %s with delegate=True must be required.", field)
field.required = True
if field.ondelete.lower() not in ('cascade', 'restrict'):
field.ondelete = 'cascade'
cls._inherits[field.comodel_name] = field.name
@api.model
def _prepare_setup_fields(self):
""" Prepare the setup of fields once the models have been loaded. """
for field in self._fields.itervalues():
field.reset()
@api.model
def _setup_fields(self, partial=False):
""" Setup the fields (dependency triggers, etc). """
for field in self._fields.itervalues():
if partial and field.manual and \
field.relational and \
(field.comodel_name not in self.pool or \
(field.type == 'one2many' and field.inverse_name not in self.pool[field.comodel_name]._fields)):
# do not set up manual fields that refer to unknown models
continue
field.setup(self.env)
# group fields by compute to determine field.computed_fields
fields_by_compute = defaultdict(list)
for field in self._fields.itervalues():
if field.compute:
field.computed_fields = fields_by_compute[field.compute]
field.computed_fields.append(field)
else:
field.computed_fields = []
def fields_get(self, cr, user, allfields=None, context=None, write_access=True):
""" fields_get([fields])
Return the definition of each field.
        The returned value is a dictionary (indexed by field name) of
dictionaries. The _inherits'd fields are included. The string, help,
and selection (if present) attributes are translated.
:param cr: database cursor
:param user: current user id
:param allfields: list of fields
:param context: context arguments, like lang, time zone
:return: dictionary of field dictionaries, each one describing a field of the business object
:raise AccessError: * if user has no create/write rights on the requested object
"""
recs = self.browse(cr, user, [], context)
res = {}
for fname, field in self._fields.iteritems():
if allfields and fname not in allfields:
continue
if not field.setup_done:
continue
if field.groups and not recs.user_has_groups(field.groups):
continue
res[fname] = field.get_description(recs.env)
# if user cannot create or modify records, make all fields readonly
has_access = functools.partial(recs.check_access_rights, raise_exception=False)
if not (has_access('write') or has_access('create')):
for description in res.itervalues():
description['readonly'] = True
description['states'] = {}
return res
def get_empty_list_help(self, cr, user, help, context=None):
""" Generic method giving the help message displayed when having
no result to display in a list or kanban view. By default it returns
the help given in parameter that is generally the help message
defined in the action.
"""
return help
def check_field_access_rights(self, cr, user, operation, fields, context=None):
"""
Check the user access rights on the given fields. This raises Access
Denied if the user does not have the rights. Otherwise it returns the
        fields (as-is if ``fields`` is truthy, or the readable/writable
        fields if ``fields`` is falsy).
"""
if user == SUPERUSER_ID:
return fields or list(self._fields)
def valid(fname):
""" determine whether user has access to field `fname` """
field = self._fields.get(fname)
if field and field.groups:
return self.user_has_groups(cr, user, groups=field.groups, context=context)
else:
return True
if not fields:
fields = filter(valid, self._fields)
else:
invalid_fields = set(filter(lambda name: not valid(name), fields))
if invalid_fields:
_logger.warning('Access Denied by ACLs for operation: %s, uid: %s, model: %s, fields: %s',
operation, user, self._name, ', '.join(invalid_fields))
raise AccessError(
_('The requested operation cannot be completed due to security restrictions. '
'Please contact your system administrator.\n\n(Document type: %s, Operation: %s)') % \
(self._description, operation))
return fields
# add explicit old-style implementation to read()
@api.v7
def read(self, cr, user, ids, fields=None, context=None, load='_classic_read'):
records = self.browse(cr, user, ids, context)
result = BaseModel.read(records, fields, load=load)
return result if isinstance(ids, list) else (bool(result) and result[0])
# new-style implementation of read()
@api.v8
def read(self, fields=None, load='_classic_read'):
""" read([fields])
Reads the requested fields for the records in `self`, low-level/RPC
method. In Python code, prefer :meth:`~.browse`.
:param fields: list of field names to return (default is all fields)
:return: a list of dictionaries mapping field names to their values,
with one dictionary per record
:raise AccessError: if user has no read rights on some of the given
records
"""
# check access rights
self.check_access_rights('read')
fields = self.check_field_access_rights('read', fields)
# split fields into stored and computed fields
stored, computed = [], []
for name in fields:
if name in self._columns:
stored.append(name)
elif name in self._fields:
computed.append(name)
else:
_logger.warning("%s.read() with unknown field '%s'", self._name, name)
# fetch stored fields from the database to the cache
self._read_from_database(stored)
# retrieve results from records; this takes values from the cache and
# computes remaining fields
result = []
name_fields = [(name, self._fields[name]) for name in (stored + computed)]
use_name_get = (load == '_classic_read')
for record in self:
try:
values = {'id': record.id}
for name, field in name_fields:
values[name] = field.convert_to_read(record[name], use_name_get)
result.append(values)
except MissingError:
pass
return result
@api.multi
def _prefetch_field(self, field):
""" Read from the database in order to fetch `field` (:class:`Field`
instance) for `self` in cache.
"""
        # fetch the records of this model that do not have `field` in their cache
records = self._in_cache_without(field)
if len(records) > PREFETCH_MAX:
records = records[:PREFETCH_MAX] | self
# by default, simply fetch field
fnames = {field.name}
if self.env.in_draft:
# we may be doing an onchange, do not prefetch other fields
pass
elif self.env.field_todo(field):
# field must be recomputed, do not prefetch records to recompute
records -= self.env.field_todo(field)
elif not self._context.get('prefetch_fields', True):
# do not prefetch other fields
pass
elif self._columns[field.name]._prefetch:
# here we can optimize: prefetch all classic and many2one fields
fnames = set(fname
for fname, fcolumn in self._columns.iteritems()
if fcolumn._prefetch
if not fcolumn.groups or self.user_has_groups(fcolumn.groups)
)
# fetch records with read()
assert self in records and field.name in fnames
result = []
try:
result = records.read(list(fnames), load='_classic_write')
except AccessError:
pass
# check the cache, and update it if necessary
if not self._cache.contains(field):
for values in result:
record = self.browse(values.pop('id'))
record._cache.update(record._convert_to_cache(values, validate=False))
if not self._cache.contains(field):
e = AccessError("No value found for %s.%s" % (self, field.name))
self._cache[field] = FailedValue(e)
@api.multi
def _read_from_database(self, field_names):
""" Read the given fields of the records in `self` from the database,
and store them in cache. Access errors are also stored in cache.
"""
env = self.env
cr, user, context = env.args
# FIXME: The query construction needs to be rewritten using the internal Query
# object, as in search(), to avoid ambiguous column references when
# reading/sorting on a table that is auto_joined to another table with
# common columns (e.g. the magical columns)
# Construct a clause for the security rules.
# 'tables' holds the list of tables necessary for the SELECT, including
# the ir.rule clauses, and contains at least self._table.
rule_clause, rule_params, tables = env['ir.rule'].domain_get(self._name, 'read')
# determine the fields that are stored as columns in self._table
fields_pre = [f for f in field_names if self._columns[f]._classic_write]
# we need fully-qualified column names in case len(tables) > 1
def qualify(f):
if isinstance(self._columns.get(f), fields.binary) and \
context.get('bin_size_%s' % f, context.get('bin_size')):
# PG 9.2 introduces conflicting pg_size_pretty(numeric) -> need ::cast
return 'pg_size_pretty(length(%s."%s")::bigint) as "%s"' % (self._table, f, f)
else:
return '%s."%s"' % (self._table, f)
qual_names = map(qualify, set(fields_pre + ['id']))
query = """ SELECT %(qual_names)s FROM %(tables)s
WHERE %(table)s.id IN %%s AND (%(extra)s)
ORDER BY %(order)s
""" % {
'qual_names': ",".join(qual_names),
'tables': ",".join(tables),
'table': self._table,
'extra': " OR ".join(rule_clause) if rule_clause else "TRUE",
'order': self._parent_order or self._order,
}
result = []
for sub_ids in cr.split_for_in_conditions(self.ids):
cr.execute(query, [tuple(sub_ids)] + rule_params)
result.extend(cr.dictfetchall())
ids = [vals['id'] for vals in result]
if ids:
# translate the fields if necessary
if context.get('lang'):
ir_translation = env['ir.translation']
for f in fields_pre:
if self._columns[f].translate:
#TODO: optimize out of this loop
res_trans = ir_translation._get_ids(
'%s,%s' % (self._name, f), 'model', context['lang'], ids)
for vals in result:
vals[f] = res_trans.get(vals['id'], False) or vals[f]
# apply the symbol_get functions of the fields we just read
for f in fields_pre:
symbol_get = self._columns[f]._symbol_get
if symbol_get:
for vals in result:
vals[f] = symbol_get(vals[f])
# store result in cache for POST fields
for vals in result:
record = self.browse(vals['id'])
record._cache.update(record._convert_to_cache(vals, validate=False))
# determine the fields that must be processed now
fields_post = [f for f in field_names if not self._columns[f]._classic_write]
# Compute POST fields, grouped by multi
by_multi = defaultdict(list)
for f in fields_post:
by_multi[self._columns[f]._multi].append(f)
for multi, fs in by_multi.iteritems():
if multi:
res2 = self._columns[fs[0]].get(cr, self._model, ids, fs, user, context=context, values=result)
assert res2 is not None, \
'The function field "%s" on the "%s" model returned None\n' \
'(a dictionary was expected).' % (fs[0], self._name)
for vals in result:
                    # TOCHECK: why do we sometimes get a string instead of a dict in python2.6?
# if isinstance(res2[vals['id']], str): res2[vals['id']] = eval(res2[vals['id']])
multi_fields = res2.get(vals['id'], {})
if multi_fields:
for f in fs:
vals[f] = multi_fields.get(f, [])
else:
for f in fs:
res2 = self._columns[f].get(cr, self._model, ids, f, user, context=context, values=result)
for vals in result:
if res2:
vals[f] = res2[vals['id']]
else:
vals[f] = []
# Warn about deprecated fields now that fields_pre and fields_post are computed
for f in field_names:
column = self._columns[f]
if column.deprecated:
_logger.warning('Field %s.%s is deprecated: %s', self._name, f, column.deprecated)
# store result in cache
for vals in result:
record = self.browse(vals.pop('id'))
record._cache.update(record._convert_to_cache(vals, validate=False))
# store failed values in cache for the records that could not be read
fetched = self.browse(ids)
missing = self - fetched
if missing:
extras = fetched - self
if extras:
raise AccessError(
_("Database fetch misses ids ({}) and has extra ids ({}), may be caused by a type incoherence in a previous request").format(
', '.join(map(repr, missing._ids)),
', '.join(map(repr, extras._ids)),
))
# store an access error exception in existing records
exc = AccessError(
_('The requested operation cannot be completed due to security restrictions. Please contact your system administrator.\n\n(Document type: %s, Operation: %s)') % \
(self._name, 'read')
)
forbidden = missing.exists()
forbidden._cache.update(FailedValue(exc))
# store a missing error exception in non-existing records
exc = MissingError(
_('One of the documents you are trying to access has been deleted, please try again after refreshing.')
)
(missing - forbidden)._cache.update(FailedValue(exc))
@api.multi
def get_metadata(self):
"""
Returns some metadata about the given records.
:return: list of ownership dictionaries for each requested record
:rtype: list of dictionaries with the following keys:
* id: object id
* create_uid: user who created the record
* create_date: date when the record was created
* write_uid: last user who changed the record
* write_date: date of the last change to the record
* xmlid: XML ID to use to refer to this record (if there is one), in format ``module.name``
"""
fields = ['id']
if self._log_access:
fields += ['create_uid', 'create_date', 'write_uid', 'write_date']
quoted_table = '"%s"' % self._table
fields_str = ",".join('%s.%s' % (quoted_table, field) for field in fields)
query = '''SELECT %s, __imd.module, __imd.name
FROM %s LEFT JOIN ir_model_data __imd
ON (__imd.model = %%s and __imd.res_id = %s.id)
WHERE %s.id IN %%s''' % (fields_str, quoted_table, quoted_table, quoted_table)
self._cr.execute(query, (self._name, tuple(self.ids)))
res = self._cr.dictfetchall()
uids = set(r[k] for r in res for k in ['write_uid', 'create_uid'] if r.get(k))
names = dict(self.env['res.users'].browse(uids).name_get())
for r in res:
for key in r:
value = r[key] = r[key] or False
if key in ('write_uid', 'create_uid') and value in names:
r[key] = (value, names[value])
r['xmlid'] = ("%(module)s.%(name)s" % r) if r['name'] else False
del r['name'], r['module']
return res
def _check_concurrency(self, cr, ids, context):
if not context:
return
if not (context.get(self.CONCURRENCY_CHECK_FIELD) and self._log_access):
return
check_clause = "(id = %s AND %s < COALESCE(write_date, create_date, (now() at time zone 'UTC'))::timestamp)"
for sub_ids in cr.split_for_in_conditions(ids):
ids_to_check = []
for id in sub_ids:
id_ref = "%s,%s" % (self._name, id)
update_date = context[self.CONCURRENCY_CHECK_FIELD].pop(id_ref, None)
if update_date:
ids_to_check.extend([id, update_date])
if not ids_to_check:
continue
cr.execute("SELECT id FROM %s WHERE %s" % (self._table, " OR ".join([check_clause]*(len(ids_to_check)/2))), tuple(ids_to_check))
res = cr.fetchone()
if res:
# mention the first one only to keep the error message readable
raise except_orm('ConcurrencyException', _('A document was modified since you last viewed it (%s:%d)') % (self._description, res[0]))
def _check_record_rules_result_count(self, cr, uid, ids, result_ids, operation, context=None):
"""Verify the returned rows after applying record rules matches
the length of `ids`, and raise an appropriate exception if it does not.
"""
if context is None:
context = {}
ids, result_ids = set(ids), set(result_ids)
missing_ids = ids - result_ids
if missing_ids:
# Attempt to distinguish record rule restriction vs deleted records,
            # to provide a more specific error message - check whether the
            # missing ids still exist in the database or not.
cr.execute('SELECT id FROM ' + self._table + ' WHERE id IN %s', (tuple(missing_ids),))
forbidden_ids = [x[0] for x in cr.fetchall()]
if forbidden_ids:
# the missing ids are (at least partially) hidden by access rules
if uid == SUPERUSER_ID:
return
_logger.warning('Access Denied by record rules for operation: %s on record ids: %r, uid: %s, model: %s', operation, forbidden_ids, uid, self._name)
raise except_orm(_('Access Denied'),
_('The requested operation cannot be completed due to security restrictions. Please contact your system administrator.\n\n(Document type: %s, Operation: %s)') % \
(self._description, operation))
else:
# If we get here, the missing_ids are not in the database
if operation in ('read','unlink'):
# No need to warn about deleting an already deleted record.
# And no error when reading a record that was deleted, to prevent spurious
# errors for non-transactional search/read sequences coming from clients
return
_logger.warning('Failed operation on deleted record(s): %s, uid: %s, model: %s', operation, uid, self._name)
raise except_orm(_('Missing document(s)'),
_('One of the documents you are trying to access has been deleted, please try again after refreshing.'))
def check_access_rights(self, cr, uid, operation, raise_exception=True): # no context on purpose.
"""Verifies that the operation given by ``operation`` is allowed for the user
according to the access rights."""
return self.pool.get('ir.model.access').check(cr, uid, self._name, operation, raise_exception)
def check_access_rule(self, cr, uid, ids, operation, context=None):
"""Verifies that the operation given by ``operation`` is allowed for the user
according to ir.rules.
:param operation: one of ``write``, ``unlink``
:raise except_orm: * if current ir.rules do not permit this operation.
:return: None if the operation is allowed
"""
if uid == SUPERUSER_ID:
return
if self.is_transient():
# Only one single implicit access rule for transient models: owner only!
# This is ok to hardcode because we assert that TransientModels always
# have log_access enabled so that the create_uid column is always there.
# And even with _inherits, these fields are always present in the local
# table too, so no need for JOINs.
cr.execute("""SELECT distinct create_uid
FROM %s
WHERE id IN %%s""" % self._table, (tuple(ids),))
uids = [x[0] for x in cr.fetchall()]
if len(uids) != 1 or uids[0] != uid:
raise except_orm(_('Access Denied'),
_('For this kind of document, you may only access records you created yourself.\n\n(Document type: %s)') % (self._description,))
else:
where_clause, where_params, tables = self.pool.get('ir.rule').domain_get(cr, uid, self._name, operation, context=context)
if where_clause:
where_clause = ' and ' + ' and '.join(where_clause)
for sub_ids in cr.split_for_in_conditions(ids):
cr.execute('SELECT ' + self._table + '.id FROM ' + ','.join(tables) +
' WHERE ' + self._table + '.id IN %s' + where_clause,
[sub_ids] + where_params)
returned_ids = [x['id'] for x in cr.dictfetchall()]
self._check_record_rules_result_count(cr, uid, sub_ids, returned_ids, operation, context=context)
def create_workflow(self, cr, uid, ids, context=None):
"""Create a workflow instance for each given record IDs."""
from openerp import workflow
for res_id in ids:
workflow.trg_create(uid, self._name, res_id, cr)
# self.invalidate_cache(cr, uid, context=context) ?
return True
def delete_workflow(self, cr, uid, ids, context=None):
"""Delete the workflow instances bound to the given record IDs."""
from openerp import workflow
for res_id in ids:
workflow.trg_delete(uid, self._name, res_id, cr)
self.invalidate_cache(cr, uid, context=context)
return True
def step_workflow(self, cr, uid, ids, context=None):
"""Reevaluate the workflow instances of the given record IDs."""
from openerp import workflow
for res_id in ids:
workflow.trg_write(uid, self._name, res_id, cr)
# self.invalidate_cache(cr, uid, context=context) ?
return True
def signal_workflow(self, cr, uid, ids, signal, context=None):
"""Send given workflow signal and return a dict mapping ids to workflow results"""
from openerp import workflow
result = {}
for res_id in ids:
result[res_id] = workflow.trg_validate(uid, self._name, res_id, signal, cr)
# self.invalidate_cache(cr, uid, context=context) ?
return result
def redirect_workflow(self, cr, uid, old_new_ids, context=None):
""" Rebind the workflow instance bound to the given 'old' record IDs to
            the given 'new' IDs (``old_new_ids`` is a list of pairs ``(old, new)``).
"""
from openerp import workflow
for old_id, new_id in old_new_ids:
workflow.trg_redirect(uid, self._name, old_id, new_id, cr)
self.invalidate_cache(cr, uid, context=context)
return True
def unlink(self, cr, uid, ids, context=None):
""" unlink()
Deletes the records of the current set
:raise AccessError: * if user has no unlink rights on the requested object
* if user tries to bypass access rules for unlink on the requested object
        :raise UserError: if the record is a default property for other records
"""
if not ids:
return True
if isinstance(ids, (int, long)):
ids = [ids]
result_store = self._store_get_values(cr, uid, ids, self._all_columns.keys(), context)
# for recomputing new-style fields
recs = self.browse(cr, uid, ids, context)
recs.modified(self._fields)
self._check_concurrency(cr, ids, context)
self.check_access_rights(cr, uid, 'unlink')
ir_property = self.pool.get('ir.property')
# Check if the records are used as default properties.
domain = [('res_id', '=', False),
('value_reference', 'in', ['%s,%s' % (self._name, i) for i in ids]),
]
if ir_property.search(cr, uid, domain, context=context):
raise except_orm(_('Error'), _('Unable to delete this document because it is used as a default property'))
# Delete the records' properties.
property_ids = ir_property.search(cr, uid, [('res_id', 'in', ['%s,%s' % (self._name, i) for i in ids])], context=context)
ir_property.unlink(cr, uid, property_ids, context=context)
self.delete_workflow(cr, uid, ids, context=context)
self.check_access_rule(cr, uid, ids, 'unlink', context=context)
pool_model_data = self.pool.get('ir.model.data')
ir_values_obj = self.pool.get('ir.values')
ir_attachment_obj = self.pool.get('ir.attachment')
for sub_ids in cr.split_for_in_conditions(ids):
cr.execute('delete from ' + self._table + ' ' \
'where id IN %s', (sub_ids,))
# Removing the ir_model_data reference if the record being deleted is a record created by xml/csv file,
# as these are not connected with real database foreign keys, and would be dangling references.
# Note: following steps performed as admin to avoid access rights restrictions, and with no context
# to avoid possible side-effects during admin calls.
# Step 1. Calling unlink of ir_model_data only for the affected IDS
reference_ids = pool_model_data.search(cr, SUPERUSER_ID, [('res_id','in',list(sub_ids)),('model','=',self._name)])
            # Step 2. Perform the actual deletion of the ir.model.data records found in Step 1
if reference_ids:
pool_model_data.unlink(cr, SUPERUSER_ID, reference_ids)
# For the same reason, removing the record relevant to ir_values
ir_value_ids = ir_values_obj.search(cr, uid,
['|',('value','in',['%s,%s' % (self._name, sid) for sid in sub_ids]),'&',('res_id','in',list(sub_ids)),('model','=',self._name)],
context=context)
if ir_value_ids:
ir_values_obj.unlink(cr, uid, ir_value_ids, context=context)
# For the same reason, removing the record relevant to ir_attachment
# The search is performed with sql as the search method of ir_attachment is overridden to hide attachments of deleted records
cr.execute('select id from ir_attachment where res_model = %s and res_id in %s', (self._name, sub_ids))
ir_attachment_ids = [ir_attachment[0] for ir_attachment in cr.fetchall()]
if ir_attachment_ids:
ir_attachment_obj.unlink(cr, uid, ir_attachment_ids, context=context)
# invalidate the *whole* cache, since the orm does not handle all
# changes made in the database, like cascading delete!
recs.invalidate_cache()
for order, obj_name, store_ids, fields in result_store:
if obj_name == self._name:
effective_store_ids = set(store_ids) - set(ids)
else:
effective_store_ids = store_ids
if effective_store_ids:
obj = self.pool[obj_name]
cr.execute('select id from '+obj._table+' where id IN %s', (tuple(effective_store_ids),))
rids = map(lambda x: x[0], cr.fetchall())
if rids:
obj._store_set_values(cr, uid, rids, fields, context)
# recompute new-style fields
recs.recompute()
return True
#
# TODO: Validate
#
@api.multi
def write(self, vals):
""" write(vals)
Updates all records in the current set with the provided values.
:param dict vals: fields to update and the value to set on them e.g::
{'foo': 1, 'bar': "Qux"}
will set the field ``foo`` to ``1`` and the field ``bar`` to
``"Qux"`` if those are valid (otherwise it will trigger an error).
:raise AccessError: * if user has no write rights on the requested object
* if user tries to bypass access rules for write on the requested object
:raise ValidateError: if user tries to enter invalid value for a field that is not in selection
        :raise UserError: if a loop would be created in a hierarchy of objects as a result of the operation (such as setting an object as its own parent)
.. _openerp/models/relationals/format:
.. note:: Relational fields use a special "commands" format to manipulate their values
This format is a list of command triplets executed sequentially,
possible command triplets are:
``(0, _, values: dict)``
links to a new record created from the provided values
``(1, id, values: dict)``
updates the already-linked record of id ``id`` with the
provided ``values``
``(2, id, _)``
unlinks and deletes the linked record of id ``id``
``(3, id, _)``
unlinks the linked record of id ``id`` without deleting it
``(4, id, _)``
links to an existing record of id ``id``
``(5, _, _)``
unlinks all records in the relation, equivalent to using
the command ``3`` on every linked record
``(6, _, ids)``
                    replaces the existing list of linked records by the provided
                    ones, equivalent to using ``5`` then ``4`` for each id in
                    ``ids``
(in command triplets, ``_`` values are ignored and can be
anything, generally ``0`` or ``False``)
Any command can be used on :class:`~openerp.fields.Many2many`,
only ``0``, ``1`` and ``2`` can be used on
:class:`~openerp.fields.One2many`.
"""
if not self:
return True
self._check_concurrency(self._ids)
self.check_access_rights('write')
# No user-driven update of these columns
for field in itertools.chain(MAGIC_COLUMNS, ('parent_left', 'parent_right')):
vals.pop(field, None)
# split up fields into old-style and pure new-style ones
old_vals, new_vals, unknown = {}, {}, []
for key, val in vals.iteritems():
if key in self._columns:
old_vals[key] = val
elif key in self._fields:
new_vals[key] = val
else:
unknown.append(key)
if unknown:
_logger.warning("%s.write() with unknown fields: %s", self._name, ', '.join(sorted(unknown)))
# write old-style fields with (low-level) method _write
if old_vals:
self._write(old_vals)
# put the values of pure new-style fields into cache, and inverse them
if new_vals:
for record in self:
record._cache.update(record._convert_to_cache(new_vals, update=True))
for key in new_vals:
self._fields[key].determine_inverse(self)
return True
def _write(self, cr, user, ids, vals, context=None):
# low-level implementation of write()
if not context:
context = {}
readonly = None
self.check_field_access_rights(cr, user, 'write', vals.keys())
deleted_related = defaultdict(list)
for field in vals.keys():
fobj = None
if field in self._columns:
fobj = self._columns[field]
elif field in self._inherit_fields:
fobj = self._inherit_fields[field][2]
if not fobj:
continue
if fobj._type in ['one2many', 'many2many'] and vals[field]:
for wtuple in vals[field]:
if isinstance(wtuple, (tuple, list)) and wtuple[0] == 2:
deleted_related[fobj._obj].append(wtuple[1])
groups = fobj.write
if groups:
edit = False
for group in groups:
module = group.split(".")[0]
grp = group.split(".")[1]
cr.execute("select count(*) from res_groups_users_rel where gid IN (select res_id from ir_model_data where name=%s and module=%s and model=%s) and uid=%s", \
(grp, module, 'res.groups', user))
readonly = cr.fetchall()
if readonly[0][0] >= 1:
edit = True
break
if not edit:
vals.pop(field)
result = self._store_get_values(cr, user, ids, vals.keys(), context) or []
# for recomputing new-style fields
recs = self.browse(cr, user, ids, context)
modified_fields = list(vals)
if self._log_access:
modified_fields += ['write_date', 'write_uid']
recs.modified(modified_fields)
parents_changed = []
parent_order = self._parent_order or self._order
if self._parent_store and (self._parent_name in vals) and not context.get('defer_parent_store_computation'):
# The parent_left/right computation may take up to
# 5 seconds. No need to recompute the values if the
# parent is the same.
# Note: to respect parent_order, nodes must be processed in
# order, so ``parents_changed`` must be ordered properly.
parent_val = vals[self._parent_name]
if parent_val:
query = "SELECT id FROM %s WHERE id IN %%s AND (%s != %%s OR %s IS NULL) ORDER BY %s" % \
(self._table, self._parent_name, self._parent_name, parent_order)
cr.execute(query, (tuple(ids), parent_val))
else:
query = "SELECT id FROM %s WHERE id IN %%s AND (%s IS NOT NULL) ORDER BY %s" % \
(self._table, self._parent_name, parent_order)
cr.execute(query, (tuple(ids),))
parents_changed = map(operator.itemgetter(0), cr.fetchall())
upd0 = []
upd1 = []
upd_todo = []
updend = []
direct = []
totranslate = context.get('lang', False) and (context['lang'] != 'en_US')
for field in vals:
field_column = self._all_columns.get(field) and self._all_columns.get(field).column
if field_column and field_column.deprecated:
_logger.warning('Field %s.%s is deprecated: %s', self._name, field, field_column.deprecated)
if field in self._columns:
if self._columns[field]._classic_write and not (hasattr(self._columns[field], '_fnct_inv')):
if (not totranslate) or not self._columns[field].translate:
upd0.append('"'+field+'"='+self._columns[field]._symbol_set[0])
upd1.append(self._columns[field]._symbol_set[1](vals[field]))
direct.append(field)
else:
upd_todo.append(field)
else:
updend.append(field)
if field in self._columns \
and hasattr(self._columns[field], 'selection') \
and vals[field]:
self._check_selection_field_value(cr, user, field, vals[field], context=context)
if self._log_access:
upd0.append('write_uid=%s')
upd0.append("write_date=(now() at time zone 'UTC')")
upd1.append(user)
if len(upd0):
self.check_access_rule(cr, user, ids, 'write', context=context)
for sub_ids in cr.split_for_in_conditions(ids):
cr.execute('update ' + self._table + ' set ' + ','.join(upd0) + ' ' \
'where id IN %s', upd1 + [sub_ids])
if cr.rowcount != len(sub_ids):
raise MissingError(_('One of the records you are trying to modify has already been deleted (Document type: %s).') % self._description)
if totranslate:
# TODO: optimize
for f in direct:
if self._columns[f].translate:
src_trans = self.pool[self._name].read(cr, user, ids, [f])[0][f]
if not src_trans:
src_trans = vals[f]
# Inserting value to DB
context_wo_lang = dict(context, lang=None)
self.write(cr, user, ids, {f: vals[f]}, context=context_wo_lang)
self.pool.get('ir.translation')._set_ids(cr, user, self._name+','+f, 'model', context['lang'], ids, vals[f], src_trans)
# call the 'set' method of fields which are not classic_write
upd_todo.sort(lambda x, y: self._columns[x].priority-self._columns[y].priority)
        # default elements in context must be removed when calling a one2many or many2many
rel_context = context.copy()
for c in context.items():
if c[0].startswith('default_'):
del rel_context[c[0]]
for field in upd_todo:
for id in ids:
result += self._columns[field].set(cr, self, id, field, vals[field], user, context=rel_context) or []
unknown_fields = updend[:]
for table in self._inherits:
col = self._inherits[table]
nids = []
for sub_ids in cr.split_for_in_conditions(ids):
cr.execute('select distinct "'+col+'" from "'+self._table+'" ' \
'where id IN %s', (sub_ids,))
nids.extend([x[0] for x in cr.fetchall()])
v = {}
for val in updend:
if self._inherit_fields[val][0] == table:
v[val] = vals[val]
unknown_fields.remove(val)
if v:
self.pool[table].write(cr, user, nids, v, context)
if unknown_fields:
_logger.warning(
'No such field(s) in model %s: %s.',
self._name, ', '.join(unknown_fields))
# check Python constraints
recs._validate_fields(vals)
# TODO: use _order to set dest at the right position and not first node of parent
# We can't defer parent_store computation because the stored function
        # fields that are computed may refer (directly or indirectly) to
# parent_left/right (via a child_of domain)
if parents_changed:
if self.pool._init:
self.pool._init_parent[self._name] = True
else:
order = self._parent_order or self._order
parent_val = vals[self._parent_name]
if parent_val:
clause, params = '%s=%%s' % (self._parent_name,), (parent_val,)
else:
clause, params = '%s IS NULL' % (self._parent_name,), ()
for id in parents_changed:
cr.execute('SELECT parent_left, parent_right FROM %s WHERE id=%%s' % (self._table,), (id,))
pleft, pright = cr.fetchone()
distance = pright - pleft + 1
# Positions of current siblings, to locate proper insertion point;
# this can _not_ be fetched outside the loop, as it needs to be refreshed
# after each update, in case several nodes are sequentially inserted one
                    # next to the other (i.e. computed incrementally)
cr.execute('SELECT parent_right, id FROM %s WHERE %s ORDER BY %s' % (self._table, clause, parent_order), params)
parents = cr.fetchall()
# Find Position of the element
position = None
for (parent_pright, parent_id) in parents:
if parent_id == id:
break
position = parent_pright and parent_pright + 1 or 1
# It's the first node of the parent
if not position:
if not parent_val:
position = 1
else:
cr.execute('select parent_left from '+self._table+' where id=%s', (parent_val,))
position = cr.fetchone()[0] + 1
if pleft < position <= pright:
raise except_orm(_('UserError'), _('Recursivity Detected.'))
if pleft < position:
cr.execute('update '+self._table+' set parent_left=parent_left+%s where parent_left>=%s', (distance, position))
cr.execute('update '+self._table+' set parent_right=parent_right+%s where parent_right>=%s', (distance, position))
cr.execute('update '+self._table+' set parent_left=parent_left+%s, parent_right=parent_right+%s where parent_left>=%s and parent_left<%s', (position-pleft, position-pleft, pleft, pright))
else:
cr.execute('update '+self._table+' set parent_left=parent_left+%s where parent_left>=%s', (distance, position))
cr.execute('update '+self._table+' set parent_right=parent_right+%s where parent_right>=%s', (distance, position))
cr.execute('update '+self._table+' set parent_left=parent_left-%s, parent_right=parent_right-%s where parent_left>=%s and parent_left<%s', (pleft-position+distance, pleft-position+distance, pleft+distance, pright+distance))
recs.invalidate_cache(['parent_left', 'parent_right'])
result += self._store_get_values(cr, user, ids, vals.keys(), context)
result.sort()
# for recomputing new-style fields
recs.modified(modified_fields)
done = {}
for order, model_name, ids_to_update, fields_to_recompute in result:
key = (model_name, tuple(fields_to_recompute))
done.setdefault(key, {})
            # avoid doing the same computation several times
todo = []
for id in ids_to_update:
if id not in done[key]:
done[key][id] = True
if id not in deleted_related[model_name]:
todo.append(id)
self.pool[model_name]._store_set_values(cr, user, todo, fields_to_recompute, context)
# recompute new-style fields
if context.get('recompute', True):
recs.recompute()
self.step_workflow(cr, user, ids, context=context)
return True
#
# TODO: Should set perm to user.xxx
#
@api.model
@api.returns('self', lambda value: value.id)
def create(self, vals):
""" create(vals) -> record
Creates a new record for the model.
The new record is initialized using the values from ``vals`` and
if necessary those from :meth:`~.default_get`.
:param dict vals:
values for the model's fields, as a dictionary::
{'field_name': field_value, ...}
see :meth:`~.write` for details
:return: new record created
:raise AccessError: * if user has no create rights on the requested object
* if user tries to bypass access rules for create on the requested object
        :raise ValidateError: if user tries to enter an invalid value for a field that is not in the selection
        :raise UserError: if a loop would be created in a hierarchy of objects as a result of the operation (such as setting an object as its own parent)
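        A minimal, hypothetical example (model and field names are illustrative
        only)::

            partner = self.env['res.partner'].create({'name': 'New Partner'})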
"""
self.check_access_rights('create')
# add missing defaults, and drop fields that may not be set by user
vals = self._add_missing_default_values(vals)
for field in itertools.chain(MAGIC_COLUMNS, ('parent_left', 'parent_right')):
vals.pop(field, None)
# split up fields into old-style and pure new-style ones
old_vals, new_vals, unknown = {}, {}, []
for key, val in vals.iteritems():
if key in self._all_columns:
old_vals[key] = val
elif key in self._fields:
new_vals[key] = val
else:
unknown.append(key)
if unknown:
_logger.warning("%s.create() with unknown fields: %s", self._name, ', '.join(sorted(unknown)))
# create record with old-style fields
record = self.browse(self._create(old_vals))
# put the values of pure new-style fields into cache, and inverse them
record._cache.update(record._convert_to_cache(new_vals))
for key in new_vals:
self._fields[key].determine_inverse(record)
return record
def _create(self, cr, user, vals, context=None):
# low-level implementation of create()
if not context:
context = {}
if self.is_transient():
self._transient_vacuum(cr, user)
tocreate = {}
for v in self._inherits:
if self._inherits[v] not in vals:
tocreate[v] = {}
else:
tocreate[v] = {'id': vals[self._inherits[v]]}
updates = [
# list of column assignments defined as tuples like:
# (column_name, format_string, column_value)
# (column_name, sql_formula)
# Those tuples will be used by the string formatting for the INSERT
# statement below.
('id', "nextval('%s')" % self._sequence),
]
upd_todo = []
unknown_fields = []
for v in vals.keys():
if v in self._inherit_fields and v not in self._columns:
(table, col, col_detail, original_parent) = self._inherit_fields[v]
tocreate[table][v] = vals[v]
del vals[v]
else:
if (v not in self._inherit_fields) and (v not in self._columns):
del vals[v]
unknown_fields.append(v)
if unknown_fields:
_logger.warning(
'No such field(s) in model %s: %s.',
self._name, ', '.join(unknown_fields))
for table in tocreate:
if self._inherits[table] in vals:
del vals[self._inherits[table]]
record_id = tocreate[table].pop('id', None)
            if not record_id:
record_id = self.pool[table].create(cr, user, tocreate[table], context=context)
else:
self.pool[table].write(cr, user, [record_id], tocreate[table], context=context)
updates.append((self._inherits[table], '%s', record_id))
        # Start: set bool fields to False if they are not touched (to make search more powerful)
bool_fields = [x for x in self._columns.keys() if self._columns[x]._type=='boolean']
for bool_field in bool_fields:
if bool_field not in vals:
vals[bool_field] = False
#End
for field in vals.keys():
fobj = None
if field in self._columns:
fobj = self._columns[field]
else:
fobj = self._inherit_fields[field][2]
if not fobj:
continue
groups = fobj.write
if groups:
edit = False
for group in groups:
module = group.split(".")[0]
grp = group.split(".")[1]
                    cr.execute("select count(*) from res_groups_users_rel where gid IN (select res_id from ir_model_data where name=%s and module=%s and model=%s) and uid=%s", \
                        (grp, module, 'res.groups', user))
readonly = cr.fetchall()
if readonly[0][0] >= 1:
edit = True
break
                    else:
                        edit = False
if not edit:
vals.pop(field)
for field in vals:
current_field = self._columns[field]
if current_field._classic_write:
updates.append((field, '%s', current_field._symbol_set[1](vals[field])))
                # for the function fields that receive a value, we set them directly in the database
                # (they may be required), but we also need to trigger the _fnct_inv()
if (hasattr(current_field, '_fnct_inv')) and not isinstance(current_field, fields.related):
                    # TODO: this way of special-casing related fields is really creepy, but it shouldn't
                    # be changed one week before the release candidate. It seems the only good way to
                    # handle this correctly is to add an attribute to make a field 'really readonly' and
                    # thus totally ignored by create()... otherwise if, for example, the related field has
                    # a default value (for usability) then the _fnct_inv is called and it may raise an
                    # access rights error. Changing this is too big a change for now, and is thus postponed
                    # until after the release; but, definitely, the behavior shouldn't differ between
                    # related and function fields.
upd_todo.append(field)
else:
                # TODO: this 'if' statement should be removed because there is no good reason to
                # special-case related fields. See the TODO comment above for further explanation.
if not isinstance(current_field, fields.related):
upd_todo.append(field)
if field in self._columns \
and hasattr(current_field, 'selection') \
and vals[field]:
self._check_selection_field_value(cr, user, field, vals[field], context=context)
if self._log_access:
updates.append(('create_uid', '%s', user))
updates.append(('write_uid', '%s', user))
updates.append(('create_date', "(now() at time zone 'UTC')"))
updates.append(('write_date', "(now() at time zone 'UTC')"))
# the list of tuples used in this formatting corresponds to
# tuple(field_name, format, value)
        # In some cases, for example (id, create_date, write_date), we do not
# need to read the third value of the tuple, because the real value is
# encoded in the second value (the format).
cr.execute(
"""INSERT INTO "%s" (%s) VALUES(%s) RETURNING id""" % (
self._table,
', '.join('"%s"' % u[0] for u in updates),
', '.join(u[1] for u in updates)
),
tuple([u[2] for u in updates if len(u) > 2])
)
id_new, = cr.fetchone()
recs = self.browse(cr, user, id_new, context)
upd_todo.sort(lambda x, y: self._columns[x].priority-self._columns[y].priority)
if self._parent_store and not context.get('defer_parent_store_computation'):
if self.pool._init:
self.pool._init_parent[self._name] = True
else:
parent = vals.get(self._parent_name, False)
if parent:
cr.execute('select parent_right from '+self._table+' where '+self._parent_name+'=%s order by '+(self._parent_order or self._order), (parent,))
pleft_old = None
result_p = cr.fetchall()
for (pleft,) in result_p:
if not pleft:
break
pleft_old = pleft
if not pleft_old:
cr.execute('select parent_left from '+self._table+' where id=%s', (parent,))
pleft_old = cr.fetchone()[0]
pleft = pleft_old
else:
cr.execute('select max(parent_right) from '+self._table)
pleft = cr.fetchone()[0] or 0
cr.execute('update '+self._table+' set parent_left=parent_left+2 where parent_left>%s', (pleft,))
cr.execute('update '+self._table+' set parent_right=parent_right+2 where parent_right>%s', (pleft,))
cr.execute('update '+self._table+' set parent_left=%s,parent_right=%s where id=%s', (pleft+1, pleft+2, id_new))
recs.invalidate_cache(['parent_left', 'parent_right'])
        # default elements in context must be removed when calling a one2many or many2many
rel_context = context.copy()
for c in context.items():
if c[0].startswith('default_'):
del rel_context[c[0]]
result = []
for field in upd_todo:
result += self._columns[field].set(cr, self, id_new, field, vals[field], user, rel_context) or []
# check Python constraints
recs._validate_fields(vals)
# invalidate and mark new-style fields to recompute
modified_fields = list(vals)
if self._log_access:
modified_fields += ['create_uid', 'create_date', 'write_uid', 'write_date']
recs.modified(modified_fields)
if context.get('recompute', True):
result += self._store_get_values(cr, user, [id_new],
list(set(vals.keys() + self._inherits.values())),
context)
result.sort()
done = []
for order, model_name, ids, fields2 in result:
if not (model_name, ids, fields2) in done:
self.pool[model_name]._store_set_values(cr, user, ids, fields2, context)
done.append((model_name, ids, fields2))
# recompute new-style fields
recs.recompute()
if self._log_create and context.get('recompute', True):
message = self._description + \
" '" + \
self.name_get(cr, user, [id_new], context=context)[0][1] + \
"' " + _("created.")
self.log(cr, user, id_new, message, True, context=context)
self.check_access_rule(cr, user, [id_new], 'create', context=context)
self.create_workflow(cr, user, [id_new], context=context)
return id_new
def _store_get_values(self, cr, uid, ids, fields, context):
"""Returns an ordered list of fields.function to call due to
an update operation on ``fields`` of records with ``ids``,
obtained by calling the 'store' triggers of these fields,
        as set up by their 'store' attribute.
:return: [(priority, model_name, [record_ids,], [function_fields,])]
"""
if fields is None: fields = []
stored_functions = self.pool._store_function.get(self._name, [])
# use indexed names for the details of the stored_functions:
model_name_, func_field_to_compute_, target_ids_func_, trigger_fields_, priority_ = range(5)
# only keep store triggers that should be triggered for the ``fields``
# being written to.
triggers_to_compute = (
f for f in stored_functions
if not f[trigger_fields_] or set(fields).intersection(f[trigger_fields_])
)
to_compute_map = {}
target_id_results = {}
for store_trigger in triggers_to_compute:
target_func_id_ = id(store_trigger[target_ids_func_])
if target_func_id_ not in target_id_results:
# use admin user for accessing objects having rules defined on store fields
target_id_results[target_func_id_] = [i for i in store_trigger[target_ids_func_](self, cr, SUPERUSER_ID, ids, context) if i]
target_ids = target_id_results[target_func_id_]
# the compound key must consider the priority and model name
key = (store_trigger[priority_], store_trigger[model_name_])
for target_id in target_ids:
to_compute_map.setdefault(key, {}).setdefault(target_id,set()).add(tuple(store_trigger))
# Here to_compute_map looks like:
# { (10, 'model_a') : { target_id1: [ (trigger_1_tuple, trigger_2_tuple) ], ... }
# (20, 'model_a') : { target_id2: [ (trigger_3_tuple, trigger_4_tuple) ], ... }
# (99, 'model_a') : { target_id1: [ (trigger_5_tuple, trigger_6_tuple) ], ... }
# }
# Now we need to generate the batch function calls list
# call_map =
# { (10, 'model_a') : [(10, 'model_a', [record_ids,], [function_fields,])] }
call_map = {}
for ((priority,model), id_map) in to_compute_map.iteritems():
trigger_ids_maps = {}
# function_ids_maps =
# { (function_1_tuple, function_2_tuple) : [target_id1, target_id2, ..] }
for target_id, triggers in id_map.iteritems():
trigger_ids_maps.setdefault(tuple(triggers), []).append(target_id)
for triggers, target_ids in trigger_ids_maps.iteritems():
call_map.setdefault((priority,model),[]).append((priority, model, target_ids,
[t[func_field_to_compute_] for t in triggers]))
result = []
if call_map:
result = reduce(operator.add, (call_map[k] for k in sorted(call_map)))
return result
def _store_set_values(self, cr, uid, ids, fields, context):
"""Calls the fields.function's "implementation function" for all ``fields``, on records with ``ids`` (taking care of
respecting ``multi`` attributes), and stores the resulting values in the database directly."""
if not ids:
return True
field_flag = False
field_dict = {}
if self._log_access:
cr.execute('select id,write_date from '+self._table+' where id IN %s', (tuple(ids),))
res = cr.fetchall()
for r in res:
if r[1]:
field_dict.setdefault(r[0], [])
res_date = time.strptime((r[1])[:19], '%Y-%m-%d %H:%M:%S')
write_date = datetime.datetime.fromtimestamp(time.mktime(res_date))
for i in self.pool._store_function.get(self._name, []):
if i[5]:
up_write_date = write_date + datetime.timedelta(hours=i[5])
if datetime.datetime.now() < up_write_date:
if i[1] in fields:
field_dict[r[0]].append(i[1])
if not field_flag:
field_flag = True
todo = {}
keys = []
for f in fields:
if self._columns[f]._multi not in keys:
keys.append(self._columns[f]._multi)
todo.setdefault(self._columns[f]._multi, [])
todo[self._columns[f]._multi].append(f)
for key in keys:
val = todo[key]
if key:
# use admin user for accessing objects having rules defined on store fields
result = self._columns[val[0]].get(cr, self, ids, val, SUPERUSER_ID, context=context)
for id, value in result.items():
if field_flag:
for f in value.keys():
if f in field_dict[id]:
value.pop(f)
upd0 = []
upd1 = []
for v in value:
if v not in val:
continue
if self._columns[v]._type == 'many2one':
try:
value[v] = value[v][0]
except:
pass
upd0.append('"'+v+'"='+self._columns[v]._symbol_set[0])
upd1.append(self._columns[v]._symbol_set[1](value[v]))
upd1.append(id)
if upd0 and upd1:
cr.execute('update "' + self._table + '" set ' + \
','.join(upd0) + ' where id = %s', upd1)
else:
for f in val:
# use admin user for accessing objects having rules defined on store fields
result = self._columns[f].get(cr, self, ids, f, SUPERUSER_ID, context=context)
for r in result.keys():
if field_flag:
if r in field_dict.keys():
if f in field_dict[r]:
result.pop(r)
for id, value in result.items():
if self._columns[f]._type == 'many2one':
try:
value = value[0]
except:
pass
cr.execute('update "' + self._table + '" set ' + \
'"'+f+'"='+self._columns[f]._symbol_set[0] + ' where id = %s', (self._columns[f]._symbol_set[1](value), id))
# invalidate and mark new-style fields to recompute
self.browse(cr, uid, ids, context).modified(fields)
return True
    # TODO: improve handling of NULL values
def _where_calc(self, cr, user, domain, active_test=True, context=None):
"""Computes the WHERE clause needed to implement an OpenERP domain.
:param domain: the domain to compute
:type domain: list
:param active_test: whether the default filtering of records with ``active``
field set to ``False`` should be applied.
:return: the query expressing the given domain as provided in domain
:rtype: osv.query.Query
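        Illustrative sketch only (the domain below is hypothetical); the
        resulting query can then be rendered to SQL::

            query = self._where_calc(cr, uid, [('name', 'ilike', 'foo')])
            from_clause, where_clause, where_params = query.get_sql()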
"""
if not context:
context = {}
domain = domain[:]
# if the object has a field named 'active', filter out all inactive
        # records unless they were explicitly asked for
if 'active' in self._all_columns and (active_test and context.get('active_test', True)):
if domain:
# the item[0] trick below works for domain items and '&'/'|'/'!'
# operators too
if not any(item[0] == 'active' for item in domain):
domain.insert(0, ('active', '=', 1))
else:
domain = [('active', '=', 1)]
if domain:
e = expression.expression(cr, user, domain, self, context)
tables = e.get_tables()
where_clause, where_params = e.to_sql()
where_clause = where_clause and [where_clause] or []
else:
where_clause, where_params, tables = [], [], ['"%s"' % self._table]
return Query(tables, where_clause, where_params)
def _check_qorder(self, word):
if not regex_order.match(word):
raise except_orm(_('AccessError'), _('Invalid "order" specified. A valid "order" specification is a comma-separated list of valid field names (optionally followed by asc/desc for the direction)'))
return True
def _apply_ir_rules(self, cr, uid, query, mode='read', context=None):
"""Add what's missing in ``query`` to implement all appropriate ir.rules
          (i.e. the rules of the current model, plus those of its ``_inherits`` parents)
:param query: the current query object
"""
if uid == SUPERUSER_ID:
return
def apply_rule(added_clause, added_params, added_tables, parent_model=None):
""" :param parent_model: name of the parent model, if the added
clause comes from a parent model
"""
if added_clause:
if parent_model:
# as inherited rules are being applied, we need to add the missing JOIN
# to reach the parent table (if it was not JOINed yet in the query)
parent_alias = self._inherits_join_add(self, parent_model, query)
# inherited rules are applied on the external table -> need to get the alias and replace
parent_table = self.pool[parent_model]._table
added_clause = [clause.replace('"%s"' % parent_table, '"%s"' % parent_alias) for clause in added_clause]
# change references to parent_table to parent_alias, because we now use the alias to refer to the table
new_tables = []
for table in added_tables:
# table is just a table name -> switch to the full alias
if table == '"%s"' % parent_table:
new_tables.append('"%s" as "%s"' % (parent_table, parent_alias))
# table is already a full statement -> replace reference to the table to its alias, is correct with the way aliases are generated
else:
new_tables.append(table.replace('"%s"' % parent_table, '"%s"' % parent_alias))
added_tables = new_tables
query.where_clause += added_clause
query.where_clause_params += added_params
for table in added_tables:
if table not in query.tables:
query.tables.append(table)
return True
return False
# apply main rules on the object
rule_obj = self.pool.get('ir.rule')
rule_where_clause, rule_where_clause_params, rule_tables = rule_obj.domain_get(cr, uid, self._name, mode, context=context)
apply_rule(rule_where_clause, rule_where_clause_params, rule_tables)
# apply ir.rules from the parents (through _inherits)
for inherited_model in self._inherits:
rule_where_clause, rule_where_clause_params, rule_tables = rule_obj.domain_get(cr, uid, inherited_model, mode, context=context)
apply_rule(rule_where_clause, rule_where_clause_params, rule_tables,
parent_model=inherited_model)
def _generate_m2o_order_by(self, order_field, query):
"""
Add possibly missing JOIN to ``query`` and generate the ORDER BY clause for m2o fields,
either native m2o fields or function/related fields that are stored, including
intermediate JOINs for inheritance if required.
:return: the qualified field name to use in an ORDER BY clause to sort by ``order_field``
"""
if order_field not in self._columns and order_field in self._inherit_fields:
# also add missing joins for reaching the table containing the m2o field
qualified_field = self._inherits_join_calc(order_field, query)
order_field_column = self._inherit_fields[order_field][2]
else:
qualified_field = '"%s"."%s"' % (self._table, order_field)
order_field_column = self._columns[order_field]
assert order_field_column._type == 'many2one', 'Invalid field passed to _generate_m2o_order_by()'
if not order_field_column._classic_write and not getattr(order_field_column, 'store', False):
_logger.debug("Many2one function/related fields must be stored " \
"to be used as ordering fields! Ignoring sorting for %s.%s",
self._name, order_field)
return
# figure out the applicable order_by for the m2o
dest_model = self.pool[order_field_column._obj]
m2o_order = dest_model._order
if not regex_order.match(m2o_order):
# _order is complex, can't use it here, so we default to _rec_name
m2o_order = dest_model._rec_name
else:
# extract the field names, to be able to qualify them and add desc/asc
m2o_order_list = []
for order_part in m2o_order.split(","):
m2o_order_list.append(order_part.strip().split(" ", 1)[0].strip())
m2o_order = m2o_order_list
# Join the dest m2o table if it's not joined yet. We use [LEFT] OUTER join here
# as we don't want to exclude results that have NULL values for the m2o
src_table, src_field = qualified_field.replace('"', '').split('.', 1)
dst_alias, dst_alias_statement = query.add_join((src_table, dest_model._table, src_field, 'id', src_field), implicit=False, outer=True)
qualify = lambda field: '"%s"."%s"' % (dst_alias, field)
return map(qualify, m2o_order) if isinstance(m2o_order, list) else qualify(m2o_order)
def _generate_order_by(self, order_spec, query):
"""
        Attempt to construct an appropriate ORDER BY clause based on order_spec, which must be
a comma-separated list of valid field names, optionally followed by an ASC or DESC direction.
        :raise: except_orm in case order_spec is malformed
"""
order_by_clause = ''
order_spec = order_spec or self._order
if order_spec:
order_by_elements = []
self._check_qorder(order_spec)
for order_part in order_spec.split(','):
order_split = order_part.strip().split(' ')
order_field = order_split[0].strip()
order_direction = order_split[1].strip() if len(order_split) == 2 else ''
order_column = None
inner_clause = None
if order_field == 'id':
order_by_elements.append('"%s"."%s" %s' % (self._table, order_field, order_direction))
elif order_field in self._columns:
order_column = self._columns[order_field]
if order_column._classic_read:
inner_clause = '"%s"."%s"' % (self._table, order_field)
elif order_column._type == 'many2one':
inner_clause = self._generate_m2o_order_by(order_field, query)
else:
continue # ignore non-readable or "non-joinable" fields
elif order_field in self._inherit_fields:
parent_obj = self.pool[self._inherit_fields[order_field][3]]
order_column = parent_obj._columns[order_field]
if order_column._classic_read:
inner_clause = self._inherits_join_calc(order_field, query)
elif order_column._type == 'many2one':
inner_clause = self._generate_m2o_order_by(order_field, query)
else:
continue # ignore non-readable or "non-joinable" fields
else:
                    raise ValueError(_("Sorting field %s not found on model %s") % (order_field, self._name))
if order_column and order_column._type == 'boolean':
inner_clause = "COALESCE(%s, false)" % inner_clause
if inner_clause:
if isinstance(inner_clause, list):
for clause in inner_clause:
order_by_elements.append("%s %s" % (clause, order_direction))
else:
order_by_elements.append("%s %s" % (inner_clause, order_direction))
if order_by_elements:
order_by_clause = ",".join(order_by_elements)
return order_by_clause and (' ORDER BY %s ' % order_by_clause) or ''
def _search(self, cr, user, args, offset=0, limit=None, order=None, context=None, count=False, access_rights_uid=None):
"""
Private implementation of search() method, allowing specifying the uid to use for the access right check.
This is useful for example when filling in the selection list for a drop-down and avoiding access rights errors,
by specifying ``access_rights_uid=1`` to bypass access rights check, but not ir.rules!
This is ok at the security level because this method is private and not callable through XML-RPC.
:param access_rights_uid: optional user ID to use when checking access rights
(not for ir.rules, this is only for ir.model.access)
"""
if context is None:
context = {}
self.check_access_rights(cr, access_rights_uid or user, 'read')
        # For transient models, restrict access to the current user, except for the super-user
if self.is_transient() and self._log_access and user != SUPERUSER_ID:
args = expression.AND(([('create_uid', '=', user)], args or []))
query = self._where_calc(cr, user, args, context=context)
self._apply_ir_rules(cr, user, query, 'read', context=context)
order_by = self._generate_order_by(order, query)
from_clause, where_clause, where_clause_params = query.get_sql()
where_str = where_clause and (" WHERE %s" % where_clause) or ''
if count:
# Ignore order, limit and offset when just counting, they don't make sense and could
# hurt performance
query_str = 'SELECT count(1) FROM ' + from_clause + where_str
cr.execute(query_str, where_clause_params)
res = cr.fetchone()
return res[0]
limit_str = limit and ' limit %d' % limit or ''
offset_str = offset and ' offset %d' % offset or ''
query_str = 'SELECT "%s".id FROM ' % self._table + from_clause + where_str + order_by + limit_str + offset_str
cr.execute(query_str, where_clause_params)
res = cr.fetchall()
# TDE note: with auto_join, we could have several lines about the same result
# i.e. a lead with several unread messages; we uniquify the result using
# a fast way to do it while preserving order (http://www.peterbe.com/plog/uniqifiers-benchmark)
def _uniquify_list(seq):
seen = set()
return [x for x in seq if x not in seen and not seen.add(x)]
return _uniquify_list([x[0] for x in res])
# returns the different values ever entered for one field
# this is used, for example, in the client when the user hits enter on
# a char field
def distinct_field_get(self, cr, uid, field, value, args=None, offset=0, limit=None):
if not args:
args = []
if field in self._inherit_fields:
return self.pool[self._inherit_fields[field][0]].distinct_field_get(cr, uid, field, value, args, offset, limit)
else:
return self._columns[field].search(cr, self, args, field, value, offset, limit, uid)
def copy_data(self, cr, uid, id, default=None, context=None):
"""
Copy given record's data with all its fields values
:param cr: database cursor
:param uid: current user id
:param id: id of the record to copy
:param default: field values to override in the original values of the copied record
:type default: dictionary
:param context: context arguments, like lang, time zone
:type context: dictionary
:return: dictionary containing all the field values
"""
if context is None:
context = {}
# avoid recursion through already copied records in case of circular relationship
seen_map = context.setdefault('__copy_data_seen', {})
if id in seen_map.setdefault(self._name, []):
return
seen_map[self._name].append(id)
if default is None:
default = {}
if 'state' not in default:
if 'state' in self._defaults:
if callable(self._defaults['state']):
default['state'] = self._defaults['state'](self, cr, uid, context)
else:
default['state'] = self._defaults['state']
# build a black list of fields that should not be copied
blacklist = set(MAGIC_COLUMNS + ['parent_left', 'parent_right'])
def blacklist_given_fields(obj):
# blacklist the fields that are given by inheritance
for other, field_to_other in obj._inherits.items():
blacklist.add(field_to_other)
if field_to_other in default:
# all the fields of 'other' are given by the record: default[field_to_other],
# except the ones redefined in self
blacklist.update(set(self.pool[other]._all_columns) - set(self._columns))
else:
blacklist_given_fields(self.pool[other])
# blacklist deprecated fields
for name, field in obj._columns.items():
if field.deprecated:
blacklist.add(name)
blacklist_given_fields(self)
fields_to_copy = dict((f,fi) for f, fi in self._all_columns.iteritems()
if fi.column.copy
if f not in default
if f not in blacklist)
data = self.read(cr, uid, [id], fields_to_copy.keys(), context=context)
if data:
data = data[0]
else:
            raise IndexError(_("Record #%d of %s not found, cannot copy!") % (id, self._name))
res = dict(default)
for f, colinfo in fields_to_copy.iteritems():
field = colinfo.column
if field._type == 'many2one':
res[f] = data[f] and data[f][0]
elif field._type == 'one2many':
other = self.pool[field._obj]
# duplicate following the order of the ids because we'll rely on
# it later for copying translations in copy_translation()!
lines = [other.copy_data(cr, uid, line_id, context=context) for line_id in sorted(data[f])]
# the lines are duplicated using the wrong (old) parent, but then
# are reassigned to the correct one thanks to the (0, 0, ...)
res[f] = [(0, 0, line) for line in lines if line]
elif field._type == 'many2many':
res[f] = [(6, 0, data[f])]
else:
res[f] = data[f]
return res
def copy_translations(self, cr, uid, old_id, new_id, context=None):
if context is None:
context = {}
# avoid recursion through already copied records in case of circular relationship
seen_map = context.setdefault('__copy_translations_seen',{})
if old_id in seen_map.setdefault(self._name,[]):
return
seen_map[self._name].append(old_id)
trans_obj = self.pool.get('ir.translation')
# TODO it seems fields_get can be replaced by _all_columns (no need for translation)
fields = self.fields_get(cr, uid, context=context)
for field_name, field_def in fields.items():
# removing the lang to compare untranslated values
context_wo_lang = dict(context, lang=None)
old_record, new_record = self.browse(cr, uid, [old_id, new_id], context=context_wo_lang)
# we must recursively copy the translations for o2o and o2m
if field_def['type'] == 'one2many':
target_obj = self.pool[field_def['relation']]
# here we rely on the order of the ids to match the translations
# as foreseen in copy_data()
old_children = sorted(r.id for r in old_record[field_name])
new_children = sorted(r.id for r in new_record[field_name])
for (old_child, new_child) in zip(old_children, new_children):
target_obj.copy_translations(cr, uid, old_child, new_child, context=context)
# and for translatable fields we keep them for copy
elif field_def.get('translate'):
if field_name in self._columns:
trans_name = self._name + "," + field_name
target_id = new_id
source_id = old_id
elif field_name in self._inherit_fields:
trans_name = self._inherit_fields[field_name][0] + "," + field_name
# get the id of the parent record to set the translation
inherit_field_name = self._inherit_fields[field_name][1]
target_id = new_record[inherit_field_name].id
source_id = old_record[inherit_field_name].id
else:
continue
trans_ids = trans_obj.search(cr, uid, [
('name', '=', trans_name),
('res_id', '=', source_id)
])
user_lang = context.get('lang')
for record in trans_obj.read(cr, uid, trans_ids, context=context):
del record['id']
# remove source to avoid triggering _set_src
del record['source']
record.update({'res_id': target_id})
if user_lang and user_lang == record['lang']:
# 'source' to force the call to _set_src
# 'value' needed if value is changed in copy(), want to see the new_value
record['source'] = old_record[field_name]
record['value'] = new_record[field_name]
trans_obj.create(cr, uid, record, context=context)
@api.returns('self', lambda value: value.id)
def copy(self, cr, uid, id, default=None, context=None):
""" copy(default=None)
Duplicate record with given id updating it with default values
:param dict default: dictionary of field values to override in the
               original values of the copied record, e.g.: ``{'field_name': overridden_value, ...}``
:returns: new record
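        Hypothetical usage in the old-API style (the id and field name below are
        illustrative only)::

            new_id = model.copy(cr, uid, 7, default={'name': 'Copy of record 7'})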
"""
if context is None:
context = {}
context = context.copy()
data = self.copy_data(cr, uid, id, default, context)
new_id = self.create(cr, uid, data, context)
self.copy_translations(cr, uid, id, new_id, context)
return new_id
@api.multi
@api.returns('self')
def exists(self):
""" exists() -> records
Returns the subset of records in `self` that exist, and marks deleted
records as such in cache. It can be used as a test on records::
if record.exists():
...
By convention, new records are returned as existing.
"""
ids = filter(None, self._ids) # ids to check in database
if not ids:
return self
query = """SELECT id FROM "%s" WHERE id IN %%s""" % self._table
self._cr.execute(query, (ids,))
ids = ([r[0] for r in self._cr.fetchall()] + # ids in database
[id for id in self._ids if not id]) # new ids
existing = self.browse(ids)
if len(existing) < len(self):
# mark missing records in cache with a failed value
exc = MissingError(_("Record does not exist or has been deleted."))
(self - existing)._cache.update(FailedValue(exc))
return existing
def check_recursion(self, cr, uid, ids, context=None, parent=None):
        _logger.warning("You are using the deprecated %s.check_recursion(). Please use '_check_recursion()' instead!" % \
            self._name)
assert parent is None or parent in self._columns or parent in self._inherit_fields,\
"The 'parent' parameter passed to check_recursion() must be None or a valid field name"
return self._check_recursion(cr, uid, ids, context, parent)
def _check_recursion(self, cr, uid, ids, context=None, parent=None):
"""
Verifies that there is no loop in a hierarchical structure of records,
by following the parent relationship using the **parent** field until a loop
is detected or until a top-level record is found.
:param cr: database cursor
:param uid: current user id
:param ids: list of ids of records to check
:param parent: optional parent field name (default: ``self._parent_name = parent_id``)
:return: **True** if the operation can proceed safely, or **False** if an infinite loop is detected.
"""
if not parent:
parent = self._parent_name
# must ignore 'active' flag, ir.rules, etc. => direct SQL query
query = 'SELECT "%s" FROM "%s" WHERE id = %%s' % (parent, self._table)
for id in ids:
current_id = id
while current_id is not None:
cr.execute(query, (current_id,))
result = cr.fetchone()
current_id = result[0] if result else None
if current_id == id:
return False
return True
def _check_m2m_recursion(self, cr, uid, ids, field_name):
"""
        Verifies that there is no loop in a hierarchical structure of records,
        by following the many2many relationship given by **field_name** until a loop
        is detected or until a top-level record is found.
:param cr: database cursor
:param uid: current user id
:param ids: list of ids of records to check
:param field_name: field to check
:return: **True** if the operation can proceed safely, or **False** if an infinite loop is detected.
"""
field = self._all_columns.get(field_name)
field = field.column if field else None
if not field or field._type != 'many2many' or field._obj != self._name:
# field must be a many2many on itself
raise ValueError('invalid field_name: %r' % (field_name,))
query = 'SELECT distinct "%s" FROM "%s" WHERE "%s" IN %%s' % (field._id2, field._rel, field._id1)
ids_parent = ids[:]
while ids_parent:
ids_parent2 = []
for i in range(0, len(ids_parent), cr.IN_MAX):
j = i + cr.IN_MAX
sub_ids_parent = ids_parent[i:j]
cr.execute(query, (tuple(sub_ids_parent),))
ids_parent2.extend(filter(None, map(lambda x: x[0], cr.fetchall())))
ids_parent = ids_parent2
for i in ids_parent:
if i in ids:
return False
return True
def _get_external_ids(self, cr, uid, ids, *args, **kwargs):
"""Retrieve the External ID(s) of any database record.
**Synopsis**: ``_get_xml_ids(cr, uid, ids) -> { 'id': ['module.xml_id'] }``
:return: map of ids to the list of their fully qualified External IDs
in the form ``module.key``, or an empty list when there's no External
ID for a record, e.g.::
{ 'id': ['module.ext_id', 'module.ext_id_bis'],
'id2': [] }
"""
ir_model_data = self.pool.get('ir.model.data')
data_ids = ir_model_data.search(cr, uid, [('model', '=', self._name), ('res_id', 'in', ids)])
data_results = ir_model_data.read(cr, uid, data_ids, ['module', 'name', 'res_id'])
result = {}
for id in ids:
# can't use dict.fromkeys() as the list would be shared!
result[id] = []
for record in data_results:
result[record['res_id']].append('%(module)s.%(name)s' % record)
return result
def get_external_id(self, cr, uid, ids, *args, **kwargs):
"""Retrieve the External ID of any database record, if there
is one. This method works as a possible implementation
for a function field, to be able to add it to any
model object easily, referencing it as ``Model.get_external_id``.
When multiple External IDs exist for a record, only one
of them is returned (randomly).
:return: map of ids to their fully qualified XML ID,
defaulting to an empty string when there's none
(to be usable as a function field),
e.g.::
{ 'id': 'module.ext_id',
'id2': '' }
"""
results = self._get_xml_ids(cr, uid, ids)
for k, v in results.iteritems():
if results[k]:
results[k] = v[0]
else:
results[k] = ''
return results
# backwards compatibility
get_xml_id = get_external_id
_get_xml_ids = _get_external_ids
def print_report(self, cr, uid, ids, name, data, context=None):
"""
Render the report `name` for the given IDs. The report must be defined
for this model, not another.
"""
report = self.pool['ir.actions.report.xml']._lookup_report(cr, name)
assert self._name == report.table
return report.create(cr, uid, ids, data, context)
# Transience
@classmethod
def is_transient(cls):
""" Return whether the model is transient.
See :class:`TransientModel`.
"""
return cls._transient
def _transient_clean_rows_older_than(self, cr, seconds):
assert self._transient, "Model %s is not transient, it cannot be vacuumed!" % self._name
# Never delete rows used in last 5 minutes
seconds = max(seconds, 300)
query = ("SELECT id FROM " + self._table + " WHERE"
" COALESCE(write_date, create_date, (now() at time zone 'UTC'))::timestamp"
" < ((now() at time zone 'UTC') - interval %s)")
cr.execute(query, ("%s seconds" % seconds,))
ids = [x[0] for x in cr.fetchall()]
self.unlink(cr, SUPERUSER_ID, ids)
def _transient_clean_old_rows(self, cr, max_count):
# Check how many rows we have in the table
cr.execute("SELECT count(*) AS row_count FROM " + self._table)
res = cr.fetchall()
if res[0][0] <= max_count:
return # max not reached, nothing to do
self._transient_clean_rows_older_than(cr, 300)
def _transient_vacuum(self, cr, uid, force=False):
"""Clean the transient records.
        This unlinks old records from the transient model tables whenever the
        "_transient_max_count" or "_transient_max_hours" conditions (if any) are reached.
Actual cleaning will happen only once every "_transient_check_time" calls.
        This means this method can be called frequently (e.g. whenever
a new record is created).
Example with both max_hours and max_count active:
Suppose max_hours = 0.2 (e.g. 12 minutes), max_count = 20, there are 55 rows in the
table, 10 created/changed in the last 5 minutes, an additional 12 created/changed between
        5 and 10 minutes ago, the rest created/changed more than 12 minutes ago.
- age based vacuum will leave the 22 rows created/changed in the last 12 minutes
- count based vacuum will wipe out another 12 rows. Not just 2, otherwise each addition
would immediately cause the maximum to be reached again.
- the 10 rows that have been created/changed the last 5 minutes will NOT be deleted
"""
assert self._transient, "Model %s is not transient, it cannot be vacuumed!" % self._name
_transient_check_time = 20 # arbitrary limit on vacuum executions
self._transient_check_count += 1
if not force and (self._transient_check_count < _transient_check_time):
return True # no vacuum cleaning this time
self._transient_check_count = 0
# Age-based expiration
if self._transient_max_hours:
self._transient_clean_rows_older_than(cr, self._transient_max_hours * 60 * 60)
# Count-based expiration
if self._transient_max_count:
self._transient_clean_old_rows(cr, self._transient_max_count)
return True
def resolve_2many_commands(self, cr, uid, field_name, commands, fields=None, context=None):
""" Serializes one2many and many2many commands into record dictionaries
(as if all the records came from the database via a read()). This
method is aimed at onchange methods on one2many and many2many fields.
Because commands might be creation commands, not all record dicts
will contain an ``id`` field. Commands matching an existing record
will have an ``id``.
:param field_name: name of the one2many or many2many field matching the commands
:type field_name: str
:param commands: one2many or many2many commands to execute on ``field_name``
:type commands: list((int|False, int|False, dict|False))
:param fields: list of fields to read from the database, when applicable
:type fields: list(str)
:returns: records in a shape similar to that returned by ``read()``
(except records may be missing the ``id`` field if they don't exist in db)
:rtype: list(dict)
"""
result = [] # result (list of dict)
record_ids = [] # ids of records to read
updates = {} # {id: dict} of updates on particular records
for command in commands or []:
if not isinstance(command, (list, tuple)):
record_ids.append(command)
elif command[0] == 0:
result.append(command[2])
elif command[0] == 1:
record_ids.append(command[1])
updates.setdefault(command[1], {}).update(command[2])
elif command[0] in (2, 3):
record_ids = [id for id in record_ids if id != command[1]]
elif command[0] == 4:
record_ids.append(command[1])
elif command[0] == 5:
result, record_ids = [], []
elif command[0] == 6:
result, record_ids = [], list(command[2])
# read the records and apply the updates
other_model = self.pool[self._all_columns[field_name].column._obj]
for record in other_model.read(cr, uid, record_ids, fields=fields, context=context):
record.update(updates.get(record['id'], {}))
result.append(record)
return result
# for backward compatibility
resolve_o2m_commands_to_record_dicts = resolve_2many_commands
def search_read(self, cr, uid, domain=None, fields=None, offset=0, limit=None, order=None, context=None):
"""
Performs a ``search()`` followed by a ``read()``.
:param cr: database cursor
:param user: current user id
:param domain: Search domain, see ``args`` parameter in ``search()``. Defaults to an empty domain that will match all records.
:param fields: List of fields to read, see ``fields`` parameter in ``read()``. Defaults to all fields.
:param offset: Number of records to skip, see ``offset`` parameter in ``search()``. Defaults to 0.
:param limit: Maximum number of records to return, see ``limit`` parameter in ``search()``. Defaults to no limit.
:param order: Columns to sort result, see ``order`` parameter in ``search()``. Defaults to no sort.
:param context: context arguments.
:return: List of dictionaries containing the asked fields.
:rtype: List of dictionaries.
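        A hypothetical example (domain and field names are illustrative only)::

            results = model.search_read(cr, uid, [('active', '=', True)],
                                        ['name'], limit=10)
            # -> [{'id': 1, 'name': 'Foo'}, ...]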
"""
record_ids = self.search(cr, uid, domain or [], offset=offset, limit=limit, order=order, context=context)
if not record_ids:
return []
if fields and fields == ['id']:
# shortcut read if we only want the ids
return [{'id': id} for id in record_ids]
# read() ignores active_test, but it would forward it to any downstream search call
# (e.g. for x2m or function fields), and this is not the desired behavior, the flag
# was presumably only meant for the main search().
# TODO: Move this to read() directly?
read_ctx = dict(context or {})
read_ctx.pop('active_test', None)
result = self.read(cr, uid, record_ids, fields, context=read_ctx)
if len(result) <= 1:
return result
# reorder read
index = dict((r['id'], r) for r in result)
return [index[x] for x in record_ids if x in index]
def _register_hook(self, cr):
""" stuff to do right after the registry is built """
pass
@classmethod
def _patch_method(cls, name, method):
""" Monkey-patch a method for all instances of this model. This replaces
the method called `name` by `method` in the given class.
The original method is then accessible via ``method.origin``, and it
can be restored with :meth:`~._revert_method`.
Example::
@api.multi
def do_write(self, values):
# do stuff, and call the original method
return do_write.origin(self, values)
# patch method write of model
model._patch_method('write', do_write)
# this will call do_write
records = model.search([...])
records.write(...)
# restore the original method
model._revert_method('write')
"""
origin = getattr(cls, name)
method.origin = origin
# propagate decorators from origin to method, and apply api decorator
wrapped = api.guess(api.propagate(origin, method))
wrapped.origin = origin
setattr(cls, name, wrapped)
@classmethod
def _revert_method(cls, name):
""" Revert the original method called `name` in the given class.
See :meth:`~._patch_method`.
"""
method = getattr(cls, name)
setattr(cls, name, method.origin)
#
# Instance creation
#
# An instance represents an ordered collection of records in a given
# execution environment. The instance object refers to the environment, and
# the records themselves are represented by their cache dictionary. The 'id'
# of each record is found in its corresponding cache dictionary.
#
# This design has the following advantages:
# - cache access is direct and thus fast;
# - one can consider records without an 'id' (see new records);
# - the global cache is only an index to "resolve" a record 'id'.
#
@classmethod
def _browse(cls, env, ids):
""" Create an instance attached to `env`; `ids` is a tuple of record
ids.
"""
records = object.__new__(cls)
records.env = env
records._ids = ids
env.prefetch[cls._name].update(ids)
return records
@api.v7
def browse(self, cr, uid, arg=None, context=None):
ids = _normalize_ids(arg)
#assert all(isinstance(id, IdType) for id in ids), "Browsing invalid ids: %s" % ids
return self._browse(Environment(cr, uid, context or {}), ids)
@api.v8
def browse(self, arg=None):
""" browse([ids]) -> records
Returns a recordset for the ids provided as parameter in the current
environment.
Can take no ids, a single id or a sequence of ids.
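        For instance (the ids below are hypothetical)::

            records = self.env['res.partner'].browse([7, 18, 12])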
"""
ids = _normalize_ids(arg)
#assert all(isinstance(id, IdType) for id in ids), "Browsing invalid ids: %s" % ids
return self._browse(self.env, ids)
#
# Internal properties, for manipulating the instance's implementation
#
@property
def ids(self):
""" List of actual record ids in this recordset (ignores placeholder
ids for records to create)
"""
return filter(None, list(self._ids))
# backward-compatibility with former browse records
_cr = property(lambda self: self.env.cr)
_uid = property(lambda self: self.env.uid)
_context = property(lambda self: self.env.context)
#
# Conversion methods
#
    def ensure_one(self):
        """ Verifies that the current recordset holds a single record. Raises
an exception otherwise.
"""
if len(self) == 1:
return self
raise except_orm("ValueError", "Expected singleton: %s" % self)
def with_env(self, env):
""" Returns a new version of this recordset attached to the provided
environment
:type env: :class:`~openerp.api.Environment`
"""
return self._browse(env, self._ids)
def sudo(self, user=SUPERUSER_ID):
""" sudo([user=SUPERUSER])
Returns a new version of this recordset attached to the provided
user.
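        A short illustration (``user_id`` is an assumed variable holding a user id)::

            as_admin = records.sudo()         # reopen the recordset as the superuser
            as_other = records.sudo(user_id)  # reopen it as the given user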
"""
return self.with_env(self.env(user=user))
def with_context(self, *args, **kwargs):
""" with_context([context][, **overrides]) -> records
Returns a new version of this recordset attached to an extended
context.
The extended context is either the provided ``context`` in which
``overrides`` are merged or the *current* context in which
``overrides`` are merged e.g.::
# current context is {'key1': True}
r2 = records.with_context({}, key2=True)
# -> r2._context is {'key2': True}
r2 = records.with_context(key2=True)
# -> r2._context is {'key1': True, 'key2': True}
"""
context = dict(args[0] if args else self._context, **kwargs)
return self.with_env(self.env(context=context))
def _convert_to_cache(self, values, update=False, validate=True):
""" Convert the `values` dictionary into cached values.
:param update: whether the conversion is made for updating `self`;
this is necessary for interpreting the commands of *2many fields
:param validate: whether values must be checked
"""
fields = self._fields
target = self if update else self.browse()
return {
name: fields[name].convert_to_cache(value, target, validate=validate)
for name, value in values.iteritems()
if name in fields
}
def _convert_to_write(self, values):
""" Convert the `values` dictionary into the format of :meth:`write`. """
fields = self._fields
result = {}
for name, value in values.iteritems():
if name in fields:
value = fields[name].convert_to_write(value)
if not isinstance(value, NewId):
result[name] = value
return result
#
# Record traversal and update
#
def _mapped_func(self, func):
""" Apply function `func` on all records in `self`, and return the
            result as a list or a recordset (if `func` returns recordsets).
"""
vals = [func(rec) for rec in self]
val0 = vals[0] if vals else func(self)
if isinstance(val0, BaseModel):
return reduce(operator.or_, vals, val0)
return vals
def mapped(self, func):
""" Apply `func` on all records in `self`, and return the result as a
            list or a recordset (if `func` returns recordsets). In the latter
            case, the order of the returned recordset is arbitrary.
:param func: a function or a dot-separated sequence of field names
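            A hypothetical illustration (assumes the records have a ``partner_id``
            many2one with a ``name`` field)::

                # list of partner names, one per record in self
                names = records.mapped('partner_id.name')
                # same traversal with an explicit function
                names = records.mapped(lambda rec: rec.partner_id.name)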
"""
if isinstance(func, basestring):
recs = self
for name in func.split('.'):
recs = recs._mapped_func(operator.itemgetter(name))
return recs
else:
return self._mapped_func(func)
def _mapped_cache(self, name_seq):
""" Same as `~.mapped`, but `name_seq` is a dot-separated sequence of
field names, and only cached values are used.
"""
recs = self
for name in name_seq.split('.'):
field = recs._fields[name]
null = field.null(self.env)
recs = recs.mapped(lambda rec: rec._cache.get(field, null))
return recs
def filtered(self, func):
""" Select the records in `self` such that `func(rec)` is true, and
return them as a recordset.
:param func: a function or a dot-separated sequence of field names
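            A hypothetical illustration (assumes an ``active`` boolean field)::

                # keep only the records whose 'active' field is set
                active_recs = records.filtered('active')
                # equivalent form with an explicit predicate
                active_recs = records.filtered(lambda rec: rec.active)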
"""
if isinstance(func, basestring):
name = func
func = lambda rec: filter(None, rec.mapped(name))
return self.browse([rec.id for rec in self if func(rec)])
def sorted(self, key=None):
""" Return the recordset `self` ordered by `key` """
if key is None:
return self.search([('id', 'in', self.ids)])
else:
return self.browse(map(int, sorted(self, key=key)))
def update(self, values):
""" Update record `self[0]` with `values`. """
for name, value in values.iteritems():
self[name] = value
#
# New records - represent records that do not exist in the database yet;
# they are used to compute default values and perform onchanges.
#
@api.model
def new(self, values={}):
""" new([values]) -> record
Return a new record instance attached to the current environment and
        initialized with the provided ``values``. The record is *not* created
in database, it only exists in memory.
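        Illustrative sketch only (model and field names are assumed)::

            draft = self.env['res.partner'].new({'name': 'Draft partner'})
            # draft.id is a NewId placeholder; nothing is written to the database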
"""
record = self.browse([NewId()])
record._cache.update(record._convert_to_cache(values, update=True))
if record.env.in_onchange:
# The cache update does not set inverse fields, so do it manually.
# This is useful for computing a function field on secondary
# records, if that field depends on the main record.
for name in values:
field = self._fields.get(name)
if field:
for invf in field.inverse_fields:
invf._update(record[name], record)
return record
#
# Dirty flag, to mark records modified (in draft mode)
#
@property
def _dirty(self):
""" Return whether any record in `self` is dirty. """
dirty = self.env.dirty
return any(record in dirty for record in self)
@_dirty.setter
def _dirty(self, value):
""" Mark the records in `self` as dirty. """
if value:
map(self.env.dirty.add, self)
else:
map(self.env.dirty.discard, self)
#
# "Dunder" methods
#
def __nonzero__(self):
""" Test whether `self` is nonempty. """
return bool(getattr(self, '_ids', True))
def __len__(self):
""" Return the size of `self`. """
return len(self._ids)
def __iter__(self):
""" Return an iterator over `self`. """
for id in self._ids:
yield self._browse(self.env, (id,))
def __contains__(self, item):
""" Test whether `item` (record or field name) is an element of `self`.
In the first case, the test is fully equivalent to::
any(item == record for record in self)
"""
if isinstance(item, BaseModel) and self._name == item._name:
return len(item) == 1 and item.id in self._ids
elif isinstance(item, basestring):
return item in self._fields
else:
raise except_orm("ValueError", "Mixing apples and oranges: %s in %s" % (item, self))
def __add__(self, other):
""" Return the concatenation of two recordsets. """
if not isinstance(other, BaseModel) or self._name != other._name:
raise except_orm("ValueError", "Mixing apples and oranges: %s + %s" % (self, other))
return self.browse(self._ids + other._ids)
def __sub__(self, other):
""" Return the recordset of all the records in `self` that are not in `other`. """
if not isinstance(other, BaseModel) or self._name != other._name:
raise except_orm("ValueError", "Mixing apples and oranges: %s - %s" % (self, other))
other_ids = set(other._ids)
return self.browse([id for id in self._ids if id not in other_ids])
def __and__(self, other):
""" Return the intersection of two recordsets.
Note that recordset order is not preserved.
"""
if not isinstance(other, BaseModel) or self._name != other._name:
raise except_orm("ValueError", "Mixing apples and oranges: %s & %s" % (self, other))
return self.browse(set(self._ids) & set(other._ids))
def __or__(self, other):
""" Return the union of two recordsets.
Note that recordset order is not preserved.
"""
if not isinstance(other, BaseModel) or self._name != other._name:
raise except_orm("ValueError", "Mixing apples and oranges: %s | %s" % (self, other))
return self.browse(set(self._ids) | set(other._ids))
def __eq__(self, other):
""" Test whether two recordsets are equivalent (up to reordering). """
if not isinstance(other, BaseModel):
if other:
_logger.warning("Comparing apples and oranges: %s == %s", self, other)
return False
return self._name == other._name and set(self._ids) == set(other._ids)
def __ne__(self, other):
return not self == other
def __lt__(self, other):
if not isinstance(other, BaseModel) or self._name != other._name:
raise except_orm("ValueError", "Mixing apples and oranges: %s < %s" % (self, other))
return set(self._ids) < set(other._ids)
def __le__(self, other):
if not isinstance(other, BaseModel) or self._name != other._name:
raise except_orm("ValueError", "Mixing apples and oranges: %s <= %s" % (self, other))
return set(self._ids) <= set(other._ids)
def __gt__(self, other):
if not isinstance(other, BaseModel) or self._name != other._name:
raise except_orm("ValueError", "Mixing apples and oranges: %s > %s" % (self, other))
return set(self._ids) > set(other._ids)
def __ge__(self, other):
if not isinstance(other, BaseModel) or self._name != other._name:
raise except_orm("ValueError", "Mixing apples and oranges: %s >= %s" % (self, other))
return set(self._ids) >= set(other._ids)
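    # Illustrative sketch (editor's comment): the dunder methods above give
    # recordsets container and set-like semantics, e.g. with hypothetical ids:
    #
    #     a = model.browse([1, 2, 3])
    #     b = model.browse([3, 4])
    #     a + b          # concatenation, duplicates kept: ids (1, 2, 3, 3, 4)
    #     a - b          # records of `a` not in `b`, order of `a` kept: (1, 2)
    #     a & b, a | b   # intersection / union, order not preserved
    #     b <= a         # subset test on the underlying id sets -> False
    #     model.browse(2) in a                              # membership test -> True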
def __int__(self):
return self.id
def __str__(self):
return "%s%s" % (self._name, getattr(self, '_ids', ""))
def __unicode__(self):
return unicode(str(self))
__repr__ = __str__
def __hash__(self):
if hasattr(self, '_ids'):
return hash((self._name, frozenset(self._ids)))
else:
return hash(self._name)
def __getitem__(self, key):
""" If `key` is an integer or a slice, return the corresponding record
selection as an instance (attached to `self.env`).
Otherwise read the field `key` of the first record in `self`.
Examples::
inst = model.search(dom) # inst is a recordset
r4 = inst[3] # fourth record in inst
rs = inst[10:20] # subset of inst
nm = rs['name'] # name of first record in inst
"""
if isinstance(key, basestring):
# important: one must call the field's getter
return self._fields[key].__get__(self, type(self))
elif isinstance(key, slice):
return self._browse(self.env, self._ids[key])
else:
return self._browse(self.env, (self._ids[key],))
def __setitem__(self, key, value):
""" Assign the field `key` to `value` in record `self`. """
# important: one must call the field's setter
return self._fields[key].__set__(self, value)
#
# Cache and recomputation management
#
@lazy_property
def _cache(self):
""" Return the cache of `self`, mapping field names to values. """
return RecordCache(self)
@api.model
def _in_cache_without(self, field):
""" Make sure `self` is present in cache (for prefetching), and return
the records of model `self` in cache that have no value for `field`
(:class:`Field` instance).
"""
env = self.env
prefetch_ids = env.prefetch[self._name]
prefetch_ids.update(self._ids)
ids = filter(None, prefetch_ids - set(env.cache[field]))
return self.browse(ids)
@api.model
def refresh(self):
""" Clear the records cache.
.. deprecated:: 8.0
The record cache is automatically invalidated.
"""
self.invalidate_cache()
@api.model
def invalidate_cache(self, fnames=None, ids=None):
""" Invalidate the record caches after some records have been modified.
If both `fnames` and `ids` are ``None``, the whole cache is cleared.
:param fnames: the list of modified fields, or ``None`` for all fields
:param ids: the list of modified record ids, or ``None`` for all
"""
if fnames is None:
if ids is None:
return self.env.invalidate_all()
fields = self._fields.values()
else:
fields = map(self._fields.__getitem__, fnames)
# invalidate fields and inverse fields, too
spec = [(f, ids) for f in fields] + \
[(invf, None) for f in fields for invf in f.inverse_fields]
self.env.invalidate(spec)
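    # Illustrative sketch (editor's comment): a typical use is to drop cached
    # values after bypassing the ORM, e.g. after a raw SQL update on a
    # hypothetical `state` column:
    #
    #     self.env.cr.execute("UPDATE some_table SET state = 'done' WHERE id IN %s",
    #                         [tuple(records.ids)])
    #     records.invalidate_cache(fnames=['state'], ids=records.ids)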
@api.multi
def modified(self, fnames):
""" Notify that fields have been modified on `self`. This invalidates
the cache, and prepares the recomputation of stored function fields
(new-style fields only).
:param fnames: iterable of field names that have been modified on
records `self`
"""
# each field knows what to invalidate and recompute
spec = []
for fname in fnames:
spec += self._fields[fname].modified(self)
cached_fields = {
field
for env in self.env.all
for field in env.cache
}
# invalidate non-stored fields.function which are currently cached
spec += [(f, None) for f in self.pool.pure_function_fields
if f in cached_fields]
self.env.invalidate(spec)
def _recompute_check(self, field):
""" If `field` must be recomputed on some record in `self`, return the
corresponding records that must be recomputed.
"""
return self.env.check_todo(field, self)
def _recompute_todo(self, field):
""" Mark `field` to be recomputed. """
self.env.add_todo(field, self)
def _recompute_done(self, field):
""" Mark `field` as recomputed. """
self.env.remove_todo(field, self)
@api.model
def recompute(self):
""" Recompute stored function fields. The fields and records to
recompute have been determined by method :meth:`modified`.
"""
while self.env.has_todo():
field, recs = self.env.get_todo()
# evaluate the fields to recompute, and save them to database
for rec, rec1 in zip(recs, recs.with_context(recompute=False)):
try:
values = rec._convert_to_write({
f.name: rec[f.name] for f in field.computed_fields
})
rec1._write(values)
except MissingError:
pass
# mark the computed fields as done
map(recs._recompute_done, field.computed_fields)
#
# Generic onchange method
#
def _has_onchange(self, field, other_fields):
""" Return whether `field` should trigger an onchange event in the
presence of `other_fields`.
"""
# test whether self has an onchange method for field, or field is a
# dependency of any field in other_fields
return field.name in self._onchange_methods or \
any(dep in other_fields for dep in field.dependents)
@api.model
def _onchange_spec(self, view_info=None):
""" Return the onchange spec from a view description; if not given, the
result of ``self.fields_view_get()`` is used.
"""
result = {}
# for traversing the XML arch and populating result
def process(node, info, prefix):
if node.tag == 'field':
name = node.attrib['name']
names = "%s.%s" % (prefix, name) if prefix else name
if not result.get(names):
result[names] = node.attrib.get('on_change')
# traverse the subviews included in relational fields
for subinfo in info['fields'][name].get('views', {}).itervalues():
process(etree.fromstring(subinfo['arch']), subinfo, names)
else:
for child in node:
process(child, info, prefix)
if view_info is None:
view_info = self.fields_view_get()
process(etree.fromstring(view_info['arch']), view_info, '')
return result
def _onchange_eval(self, field_name, onchange, result):
""" Apply onchange method(s) for field `field_name` with spec `onchange`
on record `self`. Value assignments are applied on `self`, while
domain and warning messages are put in dictionary `result`.
"""
onchange = onchange.strip()
# onchange V8
if onchange in ("1", "true"):
for method in self._onchange_methods.get(field_name, ()):
method_res = method(self)
if not method_res:
continue
if 'domain' in method_res:
result.setdefault('domain', {}).update(method_res['domain'])
if 'warning' in method_res:
result['warning'] = method_res['warning']
return
# onchange V7
match = onchange_v7.match(onchange)
if match:
method, params = match.groups()
# evaluate params -> tuple
global_vars = {'context': self._context, 'uid': self._uid}
if self._context.get('field_parent'):
class RawRecord(object):
def __init__(self, record):
self._record = record
def __getattr__(self, name):
field = self._record._fields[name]
value = self._record[name]
return field.convert_to_onchange(value)
record = self[self._context['field_parent']]
global_vars['parent'] = RawRecord(record)
field_vars = {
key: self._fields[key].convert_to_onchange(val)
for key, val in self._cache.iteritems()
}
params = eval("[%s]" % params, global_vars, field_vars)
# call onchange method
args = (self._cr, self._uid, self._origin.ids) + tuple(params)
method_res = getattr(self._model, method)(*args)
if not isinstance(method_res, dict):
return
if 'value' in method_res:
method_res['value'].pop('id', None)
self.update(self._convert_to_cache(method_res['value'], validate=False))
if 'domain' in method_res:
result.setdefault('domain', {}).update(method_res['domain'])
if 'warning' in method_res:
result['warning'] = method_res['warning']
@api.multi
def onchange(self, values, field_name, field_onchange):
""" Perform an onchange on the given field.
:param values: dictionary mapping field names to values, giving the
current state of modification
        :param field_name: name of the modified field
:param field_onchange: dictionary mapping field names to their
on_change attribute
"""
env = self.env
if field_name and field_name not in self._fields:
return {}
# determine subfields for field.convert_to_write() below
secondary = []
subfields = defaultdict(set)
for dotname in field_onchange:
if '.' in dotname:
secondary.append(dotname)
name, subname = dotname.split('.')
subfields[name].add(subname)
# create a new record with values, and attach `self` to it
with env.do_in_onchange():
record = self.new(values)
values = dict(record._cache)
# attach `self` with a different context (for cache consistency)
record._origin = self.with_context(__onchange=True)
            # determine which fields should trigger an onchange
todo = set([field_name]) if field_name else set(values)
done = set()
# dummy assignment: trigger invalidations on the record
for name in todo:
value = record[name]
field = self._fields[name]
if not field_name and field.type == 'many2one' and field.delegate and not value:
# do not nullify all fields of parent record for new records
continue
record[name] = value
result = {'value': {}}
while todo:
name = todo.pop()
if name in done:
continue
done.add(name)
with env.do_in_onchange():
# apply field-specific onchange methods
if field_onchange.get(name):
record._onchange_eval(name, field_onchange[name], result)
# force re-evaluation of function fields on secondary records
for field_seq in secondary:
record.mapped(field_seq)
# determine which fields have been modified
for name, oldval in values.iteritems():
field = self._fields[name]
newval = record[name]
if field.type in ('one2many', 'many2many'):
if newval != oldval or newval._dirty:
# put new value in result
result['value'][name] = field.convert_to_write(
newval, record._origin, subfields.get(name),
)
todo.add(name)
else:
# keep result: newval may have been dirty before
pass
else:
if newval != oldval:
# put new value in result
result['value'][name] = field.convert_to_write(
newval, record._origin, subfields.get(name),
)
todo.add(name)
else:
# clean up result to not return another value
result['value'].pop(name, None)
# At the moment, the client does not support updates on a *2many field
# while this one is modified by the user.
if field_name and self._fields[field_name].type in ('one2many', 'many2many'):
result['value'].pop(field_name, None)
return result
class RecordCache(MutableMapping):
""" Implements a proxy dictionary to read/update the cache of a record.
Upon iteration, it looks like a dictionary mapping field names to
values. However, fields may be used as keys as well.
"""
def __init__(self, records):
self._recs = records
def contains(self, field):
""" Return whether `records[0]` has a value for `field` in cache. """
if isinstance(field, basestring):
field = self._recs._fields[field]
return self._recs.id in self._recs.env.cache[field]
def __contains__(self, field):
""" Return whether `records[0]` has a regular value for `field` in cache. """
if isinstance(field, basestring):
field = self._recs._fields[field]
dummy = SpecialValue(None)
value = self._recs.env.cache[field].get(self._recs.id, dummy)
return not isinstance(value, SpecialValue)
def __getitem__(self, field):
""" Return the cached value of `field` for `records[0]`. """
if isinstance(field, basestring):
field = self._recs._fields[field]
value = self._recs.env.cache[field][self._recs.id]
return value.get() if isinstance(value, SpecialValue) else value
def __setitem__(self, field, value):
""" Assign the cached value of `field` for all records in `records`. """
if isinstance(field, basestring):
field = self._recs._fields[field]
values = dict.fromkeys(self._recs._ids, value)
self._recs.env.cache[field].update(values)
def update(self, *args, **kwargs):
""" Update the cache of all records in `records`. If the argument is a
`SpecialValue`, update all fields (except "magic" columns).
"""
if args and isinstance(args[0], SpecialValue):
values = dict.fromkeys(self._recs._ids, args[0])
for name, field in self._recs._fields.iteritems():
if name != 'id':
self._recs.env.cache[field].update(values)
else:
return super(RecordCache, self).update(*args, **kwargs)
def __delitem__(self, field):
""" Remove the cached value of `field` for all `records`. """
if isinstance(field, basestring):
field = self._recs._fields[field]
field_cache = self._recs.env.cache[field]
for id in self._recs._ids:
field_cache.pop(id, None)
def __iter__(self):
""" Iterate over the field names with a regular value in cache. """
cache, id = self._recs.env.cache, self._recs.id
dummy = SpecialValue(None)
for name, field in self._recs._fields.iteritems():
if name != 'id' and not isinstance(cache[field].get(id, dummy), SpecialValue):
yield name
def __len__(self):
""" Return the number of fields with a regular value in cache. """
return sum(1 for name in self)
class Model(BaseModel):
"""Main super-class for regular database-persisted OpenERP models.
OpenERP models are created by inheriting from this class::
class user(Model):
...
The system will later instantiate the class once per database (on
which the class' module is installed).
"""
_auto = True
_register = False # not visible in ORM registry, meant to be python-inherited only
_transient = False # True in a TransientModel
class TransientModel(BaseModel):
"""Model super-class for transient records, meant to be temporarily
    persisted, and regularly vacuum-cleaned.
    A TransientModel has simplified access rights management:
    all users can create new records, and may only access the
    records they created. The super-user has unrestricted access
to all TransientModel records.
"""
_auto = True
_register = False # not visible in ORM registry, meant to be python-inherited only
_transient = True
class AbstractModel(BaseModel):
"""Abstract Model super-class for creating an abstract class meant to be
inherited by regular models (Models or TransientModels) but not meant to
be usable on its own, or persisted.
Technical note: we don't want to make AbstractModel the super-class of
Model or BaseModel because it would not make sense to put the main
definition of persistence methods such as create() in it, and still we
should be able to override them within an AbstractModel.
"""
_auto = False # don't create any database backend for AbstractModels
_register = False # not visible in ORM registry, meant to be python-inherited only
_transient = False
def itemgetter_tuple(items):
""" Fixes itemgetter inconsistency (useful in some cases) of not returning
a tuple if len(items) == 1: always returns an n-tuple where n = len(items)
"""
if len(items) == 0:
return lambda a: ()
if len(items) == 1:
return lambda gettable: (gettable[items[0]],)
return operator.itemgetter(*items)
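# Illustrative sketch (editor's comment): unlike operator.itemgetter, the helper
# above always returns a tuple, whatever the number of items:
#
#     itemgetter_tuple([])({'a': 1})                    # -> ()
#     itemgetter_tuple(['a'])({'a': 1})                 # -> (1,)
#     itemgetter_tuple(['a', 'b'])({'a': 1, 'b': 2})    # -> (1, 2)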
def convert_pgerror_23502(model, fields, info, e):
m = re.match(r'^null value in column "(?P<field>\w+)" violates '
r'not-null constraint\n',
str(e))
field_name = m and m.group('field')
if not m or field_name not in fields:
return {'message': unicode(e)}
message = _(u"Missing required value for the field '%s'.") % field_name
field = fields.get(field_name)
if field:
message = _(u"Missing required value for the field '%s' (%s)") % (field['string'], field_name)
return {
'message': message,
'field': field_name,
}
def convert_pgerror_23505(model, fields, info, e):
m = re.match(r'^duplicate key (?P<field>\w+) violates unique constraint',
str(e))
field_name = m and m.group('field')
if not m or field_name not in fields:
return {'message': unicode(e)}
message = _(u"The value for the field '%s' already exists.") % field_name
field = fields.get(field_name)
if field:
message = _(u"%s This might be '%s' in the current model, or a field "
u"of the same name in an o2m.") % (message, field['string'])
return {
'message': message,
'field': field_name,
}
PGERROR_TO_OE = defaultdict(
# shape of mapped converters
lambda: (lambda model, fvg, info, pgerror: {'message': unicode(pgerror)}), {
# not_null_violation
'23502': convert_pgerror_23502,
# unique constraint error
'23505': convert_pgerror_23505,
})
def _normalize_ids(arg, atoms={int, long, str, unicode, NewId}):
""" Normalizes the ids argument for ``browse`` (v7 and v8) to a tuple.
Various implementations were tested on the corpus of all browse() calls
performed during a full crawler run (after having installed all website_*
modules) and this one was the most efficient overall.
A possible bit of correctness was sacrificed by not doing any test on
Iterable and just assuming that any non-atomic type was an iterable of
some kind.
:rtype: tuple
"""
# much of the corpus is falsy objects (empty list, tuple or set, None)
if not arg:
return ()
# `type in set` is significantly faster (because more restrictive) than
# isinstance(arg, set) or issubclass(type, set); and for new-style classes
# obj.__class__ is equivalent to but faster than type(obj). Not relevant
# (and looks much worse) in most cases, but over millions of calls it
# does have a very minor effect.
if arg.__class__ in atoms:
return arg,
return tuple(arg)
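# Illustrative sketch (editor's comment): _normalize_ids maps the various argument
# shapes accepted by browse() onto a plain tuple of ids:
#
#     _normalize_ids(None)          # -> ()
#     _normalize_ids(42)            # -> (42,)
#     _normalize_ids([7, 8, 9])     # -> (7, 8, 9)
#     _normalize_ids(x for x in l)  # -> tuple of the generator's elements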
# keep those imports here to avoid dependency cycle errors
from .osv import expression
from .fields import Field, SpecialValue, FailedValue
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
pahans/nototools | third_party/spiro/curves/polymat-bad.py | 15 | 1664 | from Numeric import *
import LinearAlgebra as la
import sys
n = 15
m = zeros(((n + 1) * 4, (n + 1) * 4), Float)
for i in range(n):
m[4 * i + 2][4 * i + 0] = .5
m[4 * i + 2][4 * i + 1] = -1./12
m[4 * i + 2][4 * i + 2] = 1./48
m[4 * i + 2][4 * i + 3] = -1./480
m[4 * i + 2][4 * i + 4] = .5
m[4 * i + 2][4 * i + 5] = 1./12
m[4 * i + 2][4 * i + 6] = 1./48
m[4 * i + 2][4 * i + 7] = 1./480
m[4 * i + 3][4 * i + 0] = 1
m[4 * i + 3][4 * i + 1] = .5
m[4 * i + 3][4 * i + 2] = .125
m[4 * i + 3][4 * i + 3] = 1./48
m[4 * i + 3][4 * i + 4] = -1
m[4 * i + 3][4 * i + 5] = .5
m[4 * i + 3][4 * i + 6] = -.125
m[4 * i + 3][4 * i + 7] = 1./48
m[4 * i + 4][4 * i + 0] = 0
m[4 * i + 4][4 * i + 1] = 1
m[4 * i + 4][4 * i + 2] = .5
m[4 * i + 4][4 * i + 3] = .125
m[4 * i + 4][4 * i + 4] = 0
m[4 * i + 4][4 * i + 5] = -1
m[4 * i + 4][4 * i + 6] = .5
m[4 * i + 4][4 * i + 7] = -.125
m[4 * i + 5][4 * i + 0] = 0
m[4 * i + 5][4 * i + 1] = 0
m[4 * i + 5][4 * i + 2] = 1
m[4 * i + 5][4 * i + 3] = .5
m[4 * i + 5][4 * i + 4] = 0
m[4 * i + 5][4 * i + 5] = 0
m[4 * i + 5][4 * i + 6] = -1
m[4 * i + 5][4 * i + 7] = .5
m[n * 4 + 2][2] = 1
m[n * 4 + 3][3] = 1
m[0][n * 4 + 2] = 1
m[1][n * 4 + 3] = 1
def printarr(m):
for j in range(n * 4 + 4):
for i in range(n * 4 + 4):
print '%6.1f' % m[j][i],
print ''
sys.output_line_width = 160
#print array2string(m, precision = 3)
mi = la.inverse(m)
#printarr(mi)
print ''
for j in range(n + 1):
for k in range(4):
print '%7.2f' % mi[j * 4 + k][(n / 2) * 4 + 2],
print ''
| apache-2.0 |
janusnic/dj-21v | unit_10/mysite/userprofiles/utils.py | 6 | 2613 | from django.core.exceptions import ImproperlyConfigured
# -*- coding: utf-8 -*-
import functools
try:
import urlparse
except ImportError:
from urllib import parse as urlparse # python3 support
from django.core.exceptions import SuspiciousOperation
def default_redirect(request, fallback_url, **kwargs):
"""
Evaluates a redirect url by consulting GET, POST and the session.
"""
redirect_field_name = kwargs.get("redirect_field_name", "next")
next = request.REQUEST.get(redirect_field_name)
if not next:
# try the session if available
if hasattr(request, "session"):
session_key_value = kwargs.get("session_key_value", "redirect_to")
next = request.session.get(session_key_value)
is_safe = functools.partial(
ensure_safe_url,
allowed_protocols=kwargs.get("allowed_protocols"),
allowed_host=request.get_host()
)
redirect_to = next if next and is_safe(next) else fallback_url
# perform one last check to ensure the URL is safe to redirect to. if it
# is not then we should bail here as it is likely developer error and
# they should be notified
is_safe(redirect_to, raise_on_fail=True)
return redirect_to
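# Illustrative sketch (editor's comment, hypothetical view code): a view can
# delegate its post-action redirect to the helper above, with a safe fallback:
#
#     def logout_view(request):
#         auth.logout(request)
#         return redirect(default_redirect(request, fallback_url='/'))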
def ensure_safe_url(url, allowed_protocols=None, allowed_host=None, raise_on_fail=False):
if allowed_protocols is None:
allowed_protocols = ["http", "https"]
parsed = urlparse.urlparse(url)
# perform security checks to ensure no malicious intent
# (i.e., an XSS attack with a data URL)
safe = True
if parsed.scheme and parsed.scheme not in allowed_protocols:
if raise_on_fail:
raise SuspiciousOperation("Unsafe redirect to URL with protocol '%s'" % parsed.scheme)
safe = False
if allowed_host and parsed.netloc and parsed.netloc != allowed_host:
if raise_on_fail:
raise SuspiciousOperation("Unsafe redirect to URL not matching host '%s'" % allowed_host)
safe = False
return safe
try:
from importlib import import_module
except ImportError:
from django.utils.importlib import import_module
def get_form_class(path):
i = path.rfind('.')
module, attr = path[:i], path[i + 1:]
try:
mod = import_module(module)
# except ImportError, e: # python 2.7
except ImportError as e: # python 3.4
raise ImproperlyConfigured( 'Error loading module %s: "%s"' % (module, e))
try:
form = getattr(mod, attr)
except AttributeError:
raise ImproperlyConfigured('Module "%s" does not define a form named "%s"' % (module, attr))
return form
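# Illustrative sketch (editor's comment, hypothetical dotted path): get_form_class
# turns a settings-style dotted path into the form class it names:
#
#     RegistrationForm = get_form_class('myapp.forms.RegistrationForm')
#     form = RegistrationForm(data=request.POST)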
| mit |
varunarya10/python-glanceclient | glanceclient/common/https.py | 3 | 11205 | # Copyright 2014 Red Hat, Inc
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import socket
import struct
import OpenSSL
from requests import adapters
try:
from requests.packages.urllib3 import connectionpool
from requests.packages.urllib3 import poolmanager
except ImportError:
from urllib3 import connectionpool
from urllib3 import poolmanager
import six
import ssl
from glanceclient.common import utils
try:
from eventlet import patcher
# Handle case where we are running in a monkey patched environment
if patcher.is_monkey_patched('socket'):
from eventlet.green.httplib import HTTPSConnection
from eventlet.green.OpenSSL.SSL import GreenConnection as Connection
from eventlet.greenio import GreenSocket
# TODO(mclaren): A getsockopt workaround: see 'getsockopt' doc string
GreenSocket.getsockopt = utils.getsockopt
else:
raise ImportError
except ImportError:
try:
from httplib import HTTPSConnection
except ImportError:
from http.client import HTTPSConnection
from OpenSSL.SSL import Connection as Connection
from glanceclient import exc
from glanceclient.openstack.common import strutils
def to_bytes(s):
if isinstance(s, six.string_types):
return six.b(s)
else:
return s
class HTTPSAdapter(adapters.HTTPAdapter):
"""
    This adapter will be used just when
    ssl compression should be disabled.
    The init method overwrites the default
    https pool with glanceclient's own.
"""
def __init__(self, *args, **kwargs):
# NOTE(flaper87): This line forces poolmanager to use
# glanceclient HTTPSConnection
classes_by_scheme = poolmanager.pool_classes_by_scheme
classes_by_scheme["glance+https"] = HTTPSConnectionPool
super(HTTPSAdapter, self).__init__(*args, **kwargs)
def request_url(self, request, proxies):
# NOTE(flaper87): Make sure the url is encoded, otherwise
# python's standard httplib will fail with a TypeError.
url = super(HTTPSAdapter, self).request_url(request, proxies)
return strutils.safe_encode(url)
def cert_verify(self, conn, url, verify, cert):
super(HTTPSAdapter, self).cert_verify(conn, url, verify, cert)
conn.ca_certs = verify[0]
conn.insecure = verify[1]
class HTTPSConnectionPool(connectionpool.HTTPSConnectionPool):
"""
HTTPSConnectionPool will be instantiated when a new
    connection is requested to the HTTPSAdapter. This
    implementation overwrites the _new_conn method and
    returns an instance of glanceclient's VerifiedHTTPSConnection
which handles no compression.
ssl_compression is hard-coded to False because this will
be used just when the user sets --no-ssl-compression.
"""
scheme = 'glance+https'
def _new_conn(self):
self.num_connections += 1
return VerifiedHTTPSConnection(host=self.host,
port=self.port,
key_file=self.key_file,
cert_file=self.cert_file,
cacert=self.ca_certs,
insecure=self.insecure,
ssl_compression=False)
class OpenSSLConnectionDelegator(object):
"""
An OpenSSL.SSL.Connection delegator.
Supplies an additional 'makefile' method which httplib requires
and is not present in OpenSSL.SSL.Connection.
    Note: Since it is not possible to inherit from OpenSSL.SSL.Connection,
    a delegator must be used.
"""
def __init__(self, *args, **kwargs):
self.connection = Connection(*args, **kwargs)
def __getattr__(self, name):
return getattr(self.connection, name)
def makefile(self, *args, **kwargs):
return socket._fileobject(self.connection, *args, **kwargs)
class VerifiedHTTPSConnection(HTTPSConnection):
"""
Extended HTTPSConnection which uses the OpenSSL library
for enhanced SSL support.
Note: Much of this functionality can eventually be replaced
with native Python 3.3 code.
"""
def __init__(self, host, port=None, key_file=None, cert_file=None,
cacert=None, timeout=None, insecure=False,
ssl_compression=True):
# List of exceptions reported by Python3 instead of
# SSLConfigurationError
if six.PY3:
excp_lst = (TypeError, FileNotFoundError, ssl.SSLError)
else:
excp_lst = ()
try:
HTTPSConnection.__init__(self, host, port,
key_file=key_file,
cert_file=cert_file)
self.key_file = key_file
self.cert_file = cert_file
self.timeout = timeout
self.insecure = insecure
# NOTE(flaper87): `is_verified` is needed for
# requests' urllib3. If insecure is True then
# the request is not `verified`, hence `not insecure`
self.is_verified = not insecure
self.ssl_compression = ssl_compression
self.cacert = None if cacert is None else str(cacert)
self.set_context()
# ssl exceptions are reported in various form in Python 3
# so to be compatible, we report the same kind as under
# Python2
except excp_lst as e:
raise exc.SSLConfigurationError(str(e))
@staticmethod
def host_matches_cert(host, x509):
"""
Verify that the x509 certificate we have received
from 'host' correctly identifies the server we are
        connecting to, i.e. that the certificate's Common Name
or a Subject Alternative Name matches 'host'.
"""
def check_match(name):
# Directly match the name
if name == host:
return True
# Support single wildcard matching
if name.startswith('*.') and host.find('.') > 0:
if name[2:] == host.split('.', 1)[1]:
return True
common_name = x509.get_subject().commonName
# First see if we can match the CN
if check_match(common_name):
return True
# Also try Subject Alternative Names for a match
san_list = None
for i in range(x509.get_extension_count()):
ext = x509.get_extension(i)
if ext.get_short_name() == b'subjectAltName':
san_list = str(ext)
for san in ''.join(san_list.split()).split(','):
if san.startswith('DNS:'):
if check_match(san.split(':', 1)[1]):
return True
# Server certificate does not match host
msg = ('Host "%s" does not match x509 certificate contents: '
'CommonName "%s"' % (host, common_name))
if san_list is not None:
msg = msg + ', subjectAltName "%s"' % san_list
raise exc.SSLCertificateError(msg)
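    # Editor's note (added comment): the single-wildcard rule in check_match()
    # above means a certificate for "*.example.com" matches "img.example.com",
    # but neither "example.com" nor "a.b.example.com".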
def verify_callback(self, connection, x509, errnum,
depth, preverify_ok):
if x509.has_expired():
msg = "SSL Certificate expired on '%s'" % x509.get_notAfter()
raise exc.SSLCertificateError(msg)
if depth == 0 and preverify_ok:
# We verify that the host matches against the last
# certificate in the chain
return self.host_matches_cert(self.host, x509)
else:
# Pass through OpenSSL's default result
return preverify_ok
def set_context(self):
"""
Set up the OpenSSL context.
"""
self.context = OpenSSL.SSL.Context(OpenSSL.SSL.SSLv23_METHOD)
if self.ssl_compression is False:
self.context.set_options(0x20000) # SSL_OP_NO_COMPRESSION
if self.insecure is not True:
self.context.set_verify(OpenSSL.SSL.VERIFY_PEER,
self.verify_callback)
else:
self.context.set_verify(OpenSSL.SSL.VERIFY_NONE,
lambda *args: True)
if self.cert_file:
try:
self.context.use_certificate_file(self.cert_file)
except Exception as e:
msg = 'Unable to load cert from "%s" %s' % (self.cert_file, e)
raise exc.SSLConfigurationError(msg)
if self.key_file is None:
# We support having key and cert in same file
try:
self.context.use_privatekey_file(self.cert_file)
except Exception as e:
msg = ('No key file specified and unable to load key '
'from "%s" %s' % (self.cert_file, e))
raise exc.SSLConfigurationError(msg)
if self.key_file:
try:
self.context.use_privatekey_file(self.key_file)
except Exception as e:
msg = 'Unable to load key from "%s" %s' % (self.key_file, e)
raise exc.SSLConfigurationError(msg)
if self.cacert:
try:
self.context.load_verify_locations(to_bytes(self.cacert))
except Exception as e:
msg = 'Unable to load CA from "%s" %s' % (self.cacert, e)
raise exc.SSLConfigurationError(msg)
else:
self.context.set_default_verify_paths()
def connect(self):
"""
Connect to an SSL port using the OpenSSL library and apply
per-connection parameters.
"""
result = socket.getaddrinfo(self.host, self.port, 0,
socket.SOCK_STREAM)
if result:
socket_family = result[0][0]
if socket_family == socket.AF_INET6:
sock = socket.socket(socket.AF_INET6, socket.SOCK_STREAM)
else:
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
else:
# If due to some reason the address lookup fails - we still connect
# to IPv4 socket. This retains the older behavior.
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
if self.timeout is not None:
# '0' microseconds
sock.setsockopt(socket.SOL_SOCKET, socket.SO_RCVTIMEO,
struct.pack('LL', self.timeout, 0))
self.sock = OpenSSLConnectionDelegator(self.context, sock)
self.sock.connect((self.host, self.port))
| apache-2.0 |
amenonsen/ansible | lib/ansible/modules/network/fortios/fortios_vpn_certificate_ocsp_server.py | 14 | 11082 | #!/usr/bin/python
from __future__ import (absolute_import, division, print_function)
# Copyright 2019 Fortinet, Inc.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
__metaclass__ = type
ANSIBLE_METADATA = {'status': ['preview'],
'supported_by': 'community',
'metadata_version': '1.1'}
DOCUMENTATION = '''
---
module: fortios_vpn_certificate_ocsp_server
short_description: OCSP server configuration in Fortinet's FortiOS and FortiGate.
description:
- This module is able to configure a FortiGate or FortiOS (FOS) device by allowing the
user to set and modify vpn_certificate feature and ocsp_server category.
Examples include all parameters and values need to be adjusted to datasources before usage.
Tested with FOS v6.0.5
version_added: "2.9"
author:
- Miguel Angel Munoz (@mamunozgonzalez)
- Nicolas Thomas (@thomnico)
notes:
- Requires fortiosapi library developed by Fortinet
- Run as a local_action in your playbook
requirements:
- fortiosapi>=0.9.8
options:
host:
description:
- FortiOS or FortiGate IP address.
type: str
required: false
username:
description:
- FortiOS or FortiGate username.
type: str
required: false
password:
description:
- FortiOS or FortiGate password.
type: str
default: ""
vdom:
description:
- Virtual domain, among those defined previously. A vdom is a
virtual instance of the FortiGate that can be configured and
used as a different unit.
type: str
default: root
https:
description:
- Indicates if the requests towards FortiGate must use HTTPS protocol.
type: bool
default: true
ssl_verify:
description:
- Ensures FortiGate certificate must be verified by a proper CA.
type: bool
default: true
state:
description:
- Indicates whether to create or remove the object.
type: str
required: true
choices:
- present
- absent
vpn_certificate_ocsp_server:
description:
- OCSP server configuration.
default: null
type: dict
suboptions:
cert:
description:
- OCSP server certificate. Source vpn.certificate.remote.name vpn.certificate.ca.name.
type: str
name:
description:
- OCSP server entry name.
required: true
type: str
secondary_cert:
description:
- Secondary OCSP server certificate. Source vpn.certificate.remote.name vpn.certificate.ca.name.
type: str
secondary_url:
description:
- Secondary OCSP server URL.
type: str
source_ip:
description:
- Source IP address for communications to the OCSP server.
type: str
unavail_action:
description:
- Action when server is unavailable (revoke the certificate or ignore the result of the check).
type: str
choices:
- revoke
- ignore
url:
description:
- OCSP server URL.
type: str
'''
EXAMPLES = '''
- hosts: localhost
vars:
host: "192.168.122.40"
username: "admin"
password: ""
vdom: "root"
ssl_verify: "False"
tasks:
- name: OCSP server configuration.
fortios_vpn_certificate_ocsp_server:
host: "{{ host }}"
username: "{{ username }}"
password: "{{ password }}"
vdom: "{{ vdom }}"
https: "False"
state: "present"
vpn_certificate_ocsp_server:
cert: "<your_own_value> (source vpn.certificate.remote.name vpn.certificate.ca.name)"
name: "default_name_4"
secondary_cert: "<your_own_value> (source vpn.certificate.remote.name vpn.certificate.ca.name)"
secondary_url: "<your_own_value>"
source_ip: "84.230.14.43"
unavail_action: "revoke"
url: "myurl.com"
'''
RETURN = '''
build:
description: Build number of the fortigate image
returned: always
type: str
sample: '1547'
http_method:
description: Last method used to provision the content into FortiGate
returned: always
type: str
sample: 'PUT'
http_status:
description: Last result given by FortiGate on last operation applied
returned: always
type: str
sample: "200"
mkey:
description: Master key (id) used in the last call to FortiGate
returned: success
type: str
sample: "id"
name:
description: Name of the table used to fulfill the request
returned: always
type: str
sample: "urlfilter"
path:
description: Path of the table used to fulfill the request
returned: always
type: str
sample: "webfilter"
revision:
description: Internal revision number
returned: always
type: str
sample: "17.0.2.10658"
serial:
description: Serial number of the unit
returned: always
type: str
sample: "FGVMEVYYQT3AB5352"
status:
description: Indication of the operation's result
returned: always
type: str
sample: "success"
vdom:
description: Virtual domain used
returned: always
type: str
sample: "root"
version:
description: Version of the FortiGate
returned: always
type: str
sample: "v5.6.3"
'''
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.connection import Connection
from ansible.module_utils.network.fortios.fortios import FortiOSHandler
from ansible.module_utils.network.fortimanager.common import FAIL_SOCKET_MSG
def login(data, fos):
host = data['host']
username = data['username']
password = data['password']
ssl_verify = data['ssl_verify']
fos.debug('on')
if 'https' in data and not data['https']:
fos.https('off')
else:
fos.https('on')
fos.login(host, username, password, verify=ssl_verify)
def filter_vpn_certificate_ocsp_server_data(json):
option_list = ['cert', 'name', 'secondary_cert',
'secondary_url', 'source_ip', 'unavail_action',
'url']
dictionary = {}
for attribute in option_list:
if attribute in json and json[attribute] is not None:
dictionary[attribute] = json[attribute]
return dictionary
def underscore_to_hyphen(data):
if isinstance(data, list):
for elem in data:
elem = underscore_to_hyphen(elem)
elif isinstance(data, dict):
new_data = {}
for k, v in data.items():
new_data[k.replace('_', '-')] = underscore_to_hyphen(v)
data = new_data
return data
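# Illustrative sketch (editor's comment): underscore_to_hyphen recursively rewrites
# Ansible-style keys into the hyphenated names expected by the FortiOS API:
#
#     underscore_to_hyphen({'secondary_url': 'u', 'unavail_action': 'revoke'})
#     # -> {'secondary-url': 'u', 'unavail-action': 'revoke'}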
def vpn_certificate_ocsp_server(data, fos):
vdom = data['vdom']
state = data['state']
vpn_certificate_ocsp_server_data = data['vpn_certificate_ocsp_server']
filtered_data = underscore_to_hyphen(filter_vpn_certificate_ocsp_server_data(vpn_certificate_ocsp_server_data))
if state == "present":
return fos.set('vpn.certificate',
'ocsp-server',
data=filtered_data,
vdom=vdom)
elif state == "absent":
return fos.delete('vpn.certificate',
'ocsp-server',
mkey=filtered_data['name'],
vdom=vdom)
def is_successful_status(status):
return status['status'] == "success" or \
status['http_method'] == "DELETE" and status['http_status'] == 404
def fortios_vpn_certificate(data, fos):
if data['vpn_certificate_ocsp_server']:
resp = vpn_certificate_ocsp_server(data, fos)
return not is_successful_status(resp), \
resp['status'] == "success", \
resp
def main():
fields = {
"host": {"required": False, "type": "str"},
"username": {"required": False, "type": "str"},
"password": {"required": False, "type": "str", "default": "", "no_log": True},
"vdom": {"required": False, "type": "str", "default": "root"},
"https": {"required": False, "type": "bool", "default": True},
"ssl_verify": {"required": False, "type": "bool", "default": True},
"state": {"required": True, "type": "str",
"choices": ["present", "absent"]},
"vpn_certificate_ocsp_server": {
"required": False, "type": "dict", "default": None,
"options": {
"cert": {"required": False, "type": "str"},
"name": {"required": True, "type": "str"},
"secondary_cert": {"required": False, "type": "str"},
"secondary_url": {"required": False, "type": "str"},
"source_ip": {"required": False, "type": "str"},
"unavail_action": {"required": False, "type": "str",
"choices": ["revoke", "ignore"]},
"url": {"required": False, "type": "str"}
}
}
}
module = AnsibleModule(argument_spec=fields,
supports_check_mode=False)
# legacy_mode refers to using fortiosapi instead of HTTPAPI
legacy_mode = 'host' in module.params and module.params['host'] is not None and \
'username' in module.params and module.params['username'] is not None and \
'password' in module.params and module.params['password'] is not None
if not legacy_mode:
if module._socket_path:
connection = Connection(module._socket_path)
fos = FortiOSHandler(connection)
is_error, has_changed, result = fortios_vpn_certificate(module.params, fos)
else:
module.fail_json(**FAIL_SOCKET_MSG)
else:
try:
from fortiosapi import FortiOSAPI
except ImportError:
module.fail_json(msg="fortiosapi module is required")
fos = FortiOSAPI()
login(module.params, fos)
is_error, has_changed, result = fortios_vpn_certificate(module.params, fos)
fos.logout()
if not is_error:
module.exit_json(changed=has_changed, meta=result)
else:
module.fail_json(msg="Error in repo", meta=result)
if __name__ == '__main__':
main()
| gpl-3.0 |
remh/dd-agent | checks.d/vsphere.py | 27 | 33304 | # stdlib
from copy import deepcopy
from datetime import datetime, timedelta
from hashlib import md5
from Queue import Empty, Queue
import re
import time
import traceback
# 3p
from pyVim import connect
from pyVmomi import vim
# project
from checks import AgentCheck
from checks.libs.thread_pool import Pool
from checks.libs.vmware.basic_metrics import BASIC_METRICS
from util import Timer
SOURCE_TYPE = 'vsphere'
REAL_TIME_INTERVAL = 20 # Default vCenter sampling interval
# The size of the ThreadPool used to process the request queue
DEFAULT_SIZE_POOL = 4
# The interval in seconds between two refresh of the entities list
REFRESH_MORLIST_INTERVAL = 3 * 60
# The interval in seconds between two refresh of metrics metadata (id<->name)
REFRESH_METRICS_METADATA_INTERVAL = 10 * 60
# The amount of jobs batched at the same time in the queue to query available metrics
BATCH_MORLIST_SIZE = 50
# Time after which we reap the jobs that clog the queue
# TODO: use it
JOB_TIMEOUT = 10
EXCLUDE_FILTERS = {
'AlarmStatusChangedEvent': [r'Gray'],
'TaskEvent': [
r'Initialize powering On',
r'Power Off virtual machine',
r'Power On virtual machine',
r'Reconfigure virtual machine',
r'Relocate virtual machine',
r'Suspend virtual machine',
r'Migrate virtual machine',
],
'VmBeingHotMigratedEvent': [],
'VmMessageEvent': [],
'VmMigratedEvent': [],
'VmPoweredOnEvent': [],
'VmPoweredOffEvent': [],
'VmReconfiguredEvent': [],
'VmResumedEvent': [],
'VmSuspendedEvent': [],
}
MORLIST = 'morlist'
METRICS_METADATA = 'metrics_metadata'
LAST = 'last'
INTERVAL = 'interval'
class VSphereEvent(object):
UNKNOWN = 'unknown'
def __init__(self, raw_event, event_config=None):
self.raw_event = raw_event
if self.raw_event and self.raw_event.__class__.__name__.startswith('vim.event'):
self.event_type = self.raw_event.__class__.__name__[10:]
else:
self.event_type = VSphereEvent.UNKNOWN
self.timestamp = int((self.raw_event.createdTime.replace(tzinfo=None) - datetime(1970, 1, 1)).total_seconds())
self.payload = {
"timestamp": self.timestamp,
"event_type": SOURCE_TYPE,
"source_type_name": SOURCE_TYPE,
}
if event_config is None:
self.event_config = {}
else:
self.event_config = event_config
def _is_filtered(self):
# Filter the unwanted types
if self.event_type not in EXCLUDE_FILTERS:
return True
filters = EXCLUDE_FILTERS[self.event_type]
for f in filters:
if re.search(f, self.raw_event.fullFormattedMessage):
return True
return False
def get_datadog_payload(self):
if self._is_filtered():
return None
transform_method = getattr(self, 'transform_%s' % self.event_type.lower(), None)
if callable(transform_method):
return transform_method()
# Default event transformation
self.payload["msg_title"] = u"{0}".format(self.event_type)
self.payload["msg_text"] = u"@@@\n{0}\n@@@".format(self.raw_event.fullFormattedMessage)
return self.payload
def transform_vmbeinghotmigratedevent(self):
self.payload["msg_title"] = u"VM {0} is being migrated".format(self.raw_event.vm.name)
self.payload["msg_text"] = u"{user} has launched a hot migration of this virtual machine:\n".format(user=self.raw_event.userName)
changes = []
pre_host = self.raw_event.host.name
new_host = self.raw_event.destHost.name
pre_dc = self.raw_event.datacenter.name
new_dc = self.raw_event.destDatacenter.name
pre_ds = self.raw_event.ds.name
new_ds = self.raw_event.destDatastore.name
if pre_host == new_host:
changes.append(u"- No host migration: still {0}".format(new_host))
else:
# Insert in front if it's a change
changes = [u"- Host MIGRATION: from {0} to {1}".format(pre_host, new_host)] + changes
if pre_dc == new_dc:
changes.append(u"- No datacenter migration: still {0}".format(new_dc))
else:
# Insert in front if it's a change
changes = [u"- Datacenter MIGRATION: from {0} to {1}".format(pre_dc, new_dc)] + changes
if pre_ds == new_ds:
changes.append(u"- No datastore migration: still {0}".format(new_ds))
else:
# Insert in front if it's a change
changes = [u"- Datastore MIGRATION: from {0} to {1}".format(pre_ds, new_ds)] + changes
self.payload["msg_text"] += "\n".join(changes)
self.payload['host'] = self.raw_event.vm.name
self.payload['tags'] = [
'vsphere_host:%s' % pre_host,
'vsphere_host:%s' % new_host,
'vsphere_datacenter:%s' % pre_dc,
'vsphere_datacenter:%s' % new_dc,
]
return self.payload
def transform_alarmstatuschangedevent(self):
if self.event_config.get('collect_vcenter_alarms') is None:
return None
def get_transition(before, after):
vals = {
'gray': -1,
'green': 0,
'yellow': 1,
'red': 2
}
before = before.lower()
after = after.lower()
if before not in vals or after not in vals:
return None
if vals[before] < vals[after]:
return 'Triggered'
else:
return 'Recovered'
TO_ALERT_TYPE = {
'green': 'success',
'yellow': 'warning',
'red': 'error'
}
def get_agg_key(alarm_event):
return 'h:{0}|dc:{1}|a:{2}'.format(
md5(alarm_event.entity.name).hexdigest()[:10],
md5(alarm_event.datacenter.name).hexdigest()[:10],
md5(alarm_event.alarm.name).hexdigest()[:10]
)
# Get the entity type/name
if self.raw_event.entity.entity.__class__ == vim.VirtualMachine:
host_type = 'VM'
elif self.raw_event.entity.entity.__class__ == vim.HostSystem:
host_type = 'host'
else:
return None
host_name = self.raw_event.entity.name
# Need a getattr because from is a reserved keyword...
trans_before = getattr(self.raw_event, 'from')
trans_after = self.raw_event.to
transition = get_transition(trans_before, trans_after)
# Bad transition, we shouldn't have got this transition
if transition is None:
return None
self.payload['msg_title'] = u"[{transition}] {monitor} on {host_type} {host_name} is now {status}".format(
transition=transition,
monitor=self.raw_event.alarm.name,
host_type=host_type,
host_name=host_name,
status=trans_after
)
self.payload['alert_type'] = TO_ALERT_TYPE[trans_after]
self.payload['event_object'] = get_agg_key(self.raw_event)
self.payload['msg_text'] = u"""vCenter monitor status changed on this alarm, it was {before} and it's now {after}.""".format(
before=trans_before,
after=trans_after
)
self.payload['host'] = host_name
return self.payload
def transform_vmmessageevent(self):
self.payload["msg_title"] = u"VM {0} is reporting".format(self.raw_event.vm.name)
self.payload["msg_text"] = u"@@@\n{0}\n@@@".format(self.raw_event.fullFormattedMessage)
self.payload['host'] = self.raw_event.vm.name
return self.payload
def transform_vmmigratedevent(self):
self.payload["msg_title"] = u"VM {0} has been migrated".format(self.raw_event.vm.name)
self.payload["msg_text"] = u"@@@\n{0}\n@@@".format(self.raw_event.fullFormattedMessage)
self.payload['host'] = self.raw_event.vm.name
return self.payload
def transform_vmpoweredoffevent(self):
self.payload["msg_title"] = u"VM {0} has been powered OFF".format(self.raw_event.vm.name)
self.payload["msg_text"] = u"""{user} has powered off this virtual machine. It was running on:
- datacenter: {dc}
- host: {host}
""".format(
user=self.raw_event.userName,
dc=self.raw_event.datacenter.name,
host=self.raw_event.host.name
)
self.payload['host'] = self.raw_event.vm.name
return self.payload
def transform_vmpoweredonevent(self):
self.payload["msg_title"] = u"VM {0} has been powered ON".format(self.raw_event.vm.name)
self.payload["msg_text"] = u"""{user} has powered on this virtual machine. It is running on:
- datacenter: {dc}
- host: {host}
""".format(
user=self.raw_event.userName,
dc=self.raw_event.datacenter.name,
host=self.raw_event.host.name
)
self.payload['host'] = self.raw_event.vm.name
return self.payload
def transform_vmresumingevent(self):
self.payload["msg_title"] = u"VM {0} is RESUMING".format(self.raw_event.vm.name)
self.payload["msg_text"] = u"""{user} has resumed {vm}. It will soon be powered on.""".format(
user=self.raw_event.userName,
vm=self.raw_event.vm.name
)
self.payload['host'] = self.raw_event.vm.name
return self.payload
def transform_vmsuspendedevent(self):
self.payload["msg_title"] = u"VM {0} has been SUSPENDED".format(self.raw_event.vm.name)
self.payload["msg_text"] = u"""{user} has suspended this virtual machine. It was running on:
- datacenter: {dc}
- host: {host}
""".format(
user=self.raw_event.userName,
dc=self.raw_event.datacenter.name,
host=self.raw_event.host.name
)
self.payload['host'] = self.raw_event.vm.name
return self.payload
def transform_vmreconfiguredevent(self):
self.payload["msg_title"] = u"VM {0} configuration has been changed".format(self.raw_event.vm.name)
self.payload["msg_text"] = u"{user} saved the new configuration:\n@@@\n".format(user=self.raw_event.userName)
# Add lines for configuration change don't show unset, that's hacky...
config_change_lines = [line for line in self.raw_event.configSpec.__repr__().splitlines() if 'unset' not in line]
self.payload["msg_text"] += u"\n".join(config_change_lines)
self.payload["msg_text"] += u"\n@@@"
self.payload['host'] = self.raw_event.vm.name
return self.payload
def atomic_method(method):
""" Decorator to catch the exceptions that happen in detached thread atomic tasks
and display them in the logs.
"""
def wrapper(*args, **kwargs):
try:
method(*args, **kwargs)
except Exception as e:
args[0].exceptionq.put("A worker thread crashed:\n" + traceback.format_exc())
return wrapper
class VSphereCheck(AgentCheck):
""" Get performance metrics from a vCenter server and upload them to Datadog
References:
http://pubs.vmware.com/vsphere-51/index.jsp#com.vmware.wssdk.apiref.doc/vim.PerformanceManager.html
    *_atomic jobs perform one single task asynchronously in the ThreadPool; we
    don't know exactly when they will finish, but we reap them if they're stuck.
The other calls are performed synchronously.
"""
SERVICE_CHECK_NAME = 'vcenter.can_connect'
def __init__(self, name, init_config, agentConfig, instances):
AgentCheck.__init__(self, name, init_config, agentConfig, instances)
self.time_started = time.time()
self.pool_started = False
self.exceptionq = Queue()
# Connections open to vCenter instances
self.server_instances = {}
# Event configuration
self.event_config = {}
# Caching resources, timeouts
self.cache_times = {}
for instance in self.instances:
i_key = self._instance_key(instance)
self.cache_times[i_key] = {
MORLIST: {
LAST: 0,
INTERVAL: init_config.get('refresh_morlist_interval',
REFRESH_MORLIST_INTERVAL)
},
METRICS_METADATA: {
LAST: 0,
INTERVAL: init_config.get('refresh_metrics_metadata_interval',
REFRESH_METRICS_METADATA_INTERVAL)
}
}
self.event_config[i_key] = instance.get('event_config')
# First layer of cache (get entities from the tree)
self.morlist_raw = {}
# Second layer, processed from the first one
self.morlist = {}
# Metrics metadata, basically perfCounterId -> {name, group, description}
self.metrics_metadata = {}
self.latest_event_query = {}
def stop(self):
self.stop_pool()
def start_pool(self):
self.log.info("Starting Thread Pool")
self.pool_size = int(self.init_config.get('threads_count', DEFAULT_SIZE_POOL))
self.pool = Pool(self.pool_size)
self.pool_started = True
self.jobs_status = {}
def stop_pool(self):
self.log.info("Stopping Thread Pool")
if self.pool_started:
self.pool.terminate()
self.pool.join()
self.jobs_status.clear()
assert self.pool.get_nworkers() == 0
self.pool_started = False
def restart_pool(self):
self.stop_pool()
self.start_pool()
def _clean(self):
now = time.time()
# TODO: use that
for name in self.jobs_status.keys():
start_time = self.jobs_status[name]
if now - start_time > JOB_TIMEOUT:
self.log.critical("Restarting Pool. One check is stuck.")
self.restart_pool()
break
def _query_event(self, instance):
i_key = self._instance_key(instance)
last_time = self.latest_event_query.get(i_key)
server_instance = self._get_server_instance(instance)
event_manager = server_instance.content.eventManager
# Be sure we don't duplicate any event, never query the "past"
if not last_time:
last_time = self.latest_event_query[i_key] = \
event_manager.latestEvent.createdTime + timedelta(seconds=1)
query_filter = vim.event.EventFilterSpec()
time_filter = vim.event.EventFilterSpec.ByTime(beginTime=self.latest_event_query[i_key])
query_filter.time = time_filter
try:
new_events = event_manager.QueryEvents(query_filter)
self.log.debug("Got {0} events from vCenter event manager".format(len(new_events)))
for event in new_events:
normalized_event = VSphereEvent(event, self.event_config[i_key])
# Can return None if the event if filtered out
event_payload = normalized_event.get_datadog_payload()
if event_payload is not None:
self.event(event_payload)
last_time = event.createdTime + timedelta(seconds=1)
except Exception as e:
# Don't get stuck on a failure to fetch an event
# Ignore them for next pass
self.log.warning("Unable to fetch Events %s", e)
last_time = event_manager.latestEvent.createdTime + timedelta(seconds=1)
self.latest_event_query[i_key] = last_time
def _instance_key(self, instance):
i_key = instance.get('name')
if i_key is None:
raise Exception("Must define a unique 'name' per vCenter instance")
return i_key
def _should_cache(self, instance, entity):
i_key = self._instance_key(instance)
now = time.time()
return now - self.cache_times[i_key][entity][LAST] > self.cache_times[i_key][entity][INTERVAL]
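    # Illustrative sketch (editor's comment): callers are expected to gate the
    # expensive refreshes on this check, e.g.:
    #
    #     if self._should_cache(instance, MORLIST):
    #         self._cache_morlist_raw(instance)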
def _get_server_instance(self, instance):
i_key = self._instance_key(instance)
service_check_tags = [
'vcenter_server:{0}'.format(instance.get('name')),
'vcenter_host:{0}'.format(instance.get('host')),
]
if i_key not in self.server_instances:
try:
server_instance = connect.SmartConnect(
host=instance.get('host'),
user=instance.get('username'),
pwd=instance.get('password')
)
except Exception as e:
err_msg = "Connection to %s failed: %s" % (instance.get('host'), e)
self.service_check(self.SERVICE_CHECK_NAME, AgentCheck.CRITICAL,
tags=service_check_tags, message=err_msg)
raise Exception(err_msg)
self.server_instances[i_key] = server_instance
# Test if the connection is working
try:
self.server_instances[i_key].RetrieveContent()
self.service_check(self.SERVICE_CHECK_NAME, AgentCheck.OK,
tags=service_check_tags)
except Exception as e:
err_msg = "Connection to %s died unexpectedly: %s" % (instance.get('host'), e)
self.service_check(self.SERVICE_CHECK_NAME, AgentCheck.CRITICAL,
tags=service_check_tags, message=err_msg)
raise Exception(err_msg)
return self.server_instances[i_key]
def _compute_needed_metrics(self, instance, available_metrics):
""" Compare the available metrics for one MOR we have computed and intersect them
with the set of metrics we want to report
"""
if instance.get('all_metrics', False):
return available_metrics
i_key = self._instance_key(instance)
wanted_metrics = []
# Get only the basic metrics
for metric in available_metrics:
# No cache yet, skip it for now
if (i_key not in self.metrics_metadata
or metric.counterId not in self.metrics_metadata[i_key]):
continue
if self.metrics_metadata[i_key][metric.counterId]['name'] in BASIC_METRICS:
wanted_metrics.append(metric)
return wanted_metrics
def get_external_host_tags(self):
""" Returns a list of tags for every host that is detected by the vSphere
integration.
List of pairs (hostname, list_of_tags)
"""
self.log.info("Sending external_host_tags now")
external_host_tags = []
for instance in self.instances:
i_key = self._instance_key(instance)
mor_list = self.morlist[i_key].items()
for mor_name, mor in mor_list:
external_host_tags.append((mor['hostname'], {SOURCE_TYPE: mor['tags']}))
return external_host_tags
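    # Shape of the returned list, as an illustrative sketch (hostnames and tags
    # below are made up, not read from a real vCenter):
    #
    #   [('esx-host-1', {SOURCE_TYPE: ['vcenter_server:vcenter1',
    #                                  'vsphere_datacenter:dc1',
    #                                  'vsphere_type:host']}),
    #    ('vm-web-01', {SOURCE_TYPE: ['vcenter_server:vcenter1',
    #                                 'vsphere_type:vm']})]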
@atomic_method
def _cache_morlist_raw_atomic(self, i_key, obj_type, obj, tags, regexes=None):
""" Compute tags for a single node in the vCenter rootFolder
and queue other such jobs for children nodes.
Usual hierarchy:
rootFolder
- datacenter1
- compute_resource1 == cluster
- host1
- host2
- host3
- compute_resource2
- host5
- vm1
- vm2
If it's a node we want to query metric for, queue it in self.morlist_raw
that will be processed by another job.
"""
### <TEST-INSTRUMENTATION>
t = Timer()
self.log.debug("job_atomic: Exploring MOR {0} (type={1})".format(obj, obj_type))
### </TEST-INSTRUMENTATION>
tags_copy = deepcopy(tags)
if obj_type == 'rootFolder':
for datacenter in obj.childEntity:
# Skip non-datacenter
if not hasattr(datacenter, 'hostFolder'):
continue
self.pool.apply_async(
self._cache_morlist_raw_atomic,
args=(i_key, 'datacenter', datacenter, tags_copy, regexes)
)
elif obj_type == 'datacenter':
dc_tag = "vsphere_datacenter:%s" % obj.name
tags_copy.append(dc_tag)
for compute_resource in obj.hostFolder.childEntity:
# Skip non-compute resource
if not hasattr(compute_resource, 'host'):
continue
self.pool.apply_async(
self._cache_morlist_raw_atomic,
args=(i_key, 'compute_resource', compute_resource, tags_copy, regexes)
)
elif obj_type == 'compute_resource':
if obj.__class__ == vim.ClusterComputeResource:
cluster_tag = "vsphere_cluster:%s" % obj.name
tags_copy.append(cluster_tag)
for host in obj.host:
# Skip non-host
if not hasattr(host, 'vm'):
continue
self.pool.apply_async(
self._cache_morlist_raw_atomic,
args=(i_key, 'host', host, tags_copy, regexes)
)
elif obj_type == 'host':
if regexes and regexes.get('host_include') is not None:
match = re.search(regexes['host_include'], obj.name)
if not match:
self.log.debug(u"Filtered out VM {0} because of host_include_only_regex".format(obj.name))
return
watched_mor = dict(mor_type='host', mor=obj, hostname=obj.name, tags=tags_copy+['vsphere_type:host'])
self.morlist_raw[i_key].append(watched_mor)
host_tag = "vsphere_host:%s" % obj.name
tags_copy.append(host_tag)
for vm in obj.vm:
if vm.runtime.powerState != 'poweredOn':
continue
self.pool.apply_async(
self._cache_morlist_raw_atomic,
args=(i_key, 'vm', vm, tags_copy, regexes)
)
elif obj_type == 'vm':
if regexes and regexes.get('vm_include') is not None:
match = re.search(regexes['vm_include'], obj.name)
if not match:
self.log.debug(u"Filtered out VM {0} because of vm_include_only_regex".format(obj.name))
return
watched_mor = dict(mor_type='vm', mor=obj, hostname=obj.name, tags=tags_copy+['vsphere_type:vm'])
self.morlist_raw[i_key].append(watched_mor)
### <TEST-INSTRUMENTATION>
self.histogram('datadog.agent.vsphere.morlist_raw_atomic.time', t.total())
### </TEST-INSTRUMENTATION>
def _cache_morlist_raw(self, instance):
""" Initiate the first layer to refresh self.morlist by queueing
            _cache_morlist_raw_atomic on the rootFolder in a recursive/async approach
"""
i_key = self._instance_key(instance)
self.log.debug("Caching the morlist for vcenter instance %s" % i_key)
if i_key in self.morlist_raw and len(self.morlist_raw[i_key]) > 0:
self.log.debug(
"Skipping morlist collection now, RAW results "
"processing not over (latest refresh was {0}s ago)".format(
time.time() - self.cache_times[i_key][MORLIST][LAST])
)
return
self.morlist_raw[i_key] = []
server_instance = self._get_server_instance(instance)
root_folder = server_instance.content.rootFolder
instance_tag = "vcenter_server:%s" % instance.get('name')
regexes = {
'host_include': instance.get('host_include_only_regex'),
'vm_include': instance.get('vm_include_only_regex')
}
self.pool.apply_async(
self._cache_morlist_raw_atomic,
args=(i_key, 'rootFolder', root_folder, [instance_tag], regexes)
)
self.cache_times[i_key][MORLIST][LAST] = time.time()
@atomic_method
def _cache_morlist_process_atomic(self, instance, mor):
""" Process one item of the self.morlist_raw list by querying the available
metrics for this MOR and then putting it in self.morlist
"""
### <TEST-INSTRUMENTATION>
t = Timer()
### </TEST-INSTRUMENTATION>
i_key = self._instance_key(instance)
server_instance = self._get_server_instance(instance)
perfManager = server_instance.content.perfManager
self.log.debug(
"job_atomic: Querying available metrics"
" for MOR {0} (type={1})".format(mor['mor'], mor['mor_type'])
)
available_metrics = perfManager.QueryAvailablePerfMetric(
mor['mor'], intervalId=REAL_TIME_INTERVAL)
mor['metrics'] = self._compute_needed_metrics(instance, available_metrics)
mor_name = str(mor['mor'])
if mor_name in self.morlist[i_key]:
# Was already here last iteration
self.morlist[i_key][mor_name]['metrics'] = mor['metrics']
else:
self.morlist[i_key][mor_name] = mor
self.morlist[i_key][mor_name]['last_seen'] = time.time()
### <TEST-INSTRUMENTATION>
self.histogram('datadog.agent.vsphere.morlist_process_atomic.time', t.total())
### </TEST-INSTRUMENTATION>
def _cache_morlist_process(self, instance):
""" Empties the self.morlist_raw by popping items and running asynchronously
the _cache_morlist_process_atomic operation that will get the available
metrics for this MOR and put it in self.morlist
"""
i_key = self._instance_key(instance)
if i_key not in self.morlist:
self.morlist[i_key] = {}
batch_size = self.init_config.get('batch_morlist_size', BATCH_MORLIST_SIZE)
for i in xrange(batch_size):
try:
mor = self.morlist_raw[i_key].pop()
self.pool.apply_async(self._cache_morlist_process_atomic, args=(instance, mor))
except (IndexError, KeyError):
self.log.debug("No more work to process in morlist_raw")
return
def _vacuum_morlist(self, instance):
""" Check if self.morlist doesn't have some old MORs that are gone, ie
we cannot get any metrics from them anyway (or =0)
"""
i_key = self._instance_key(instance)
morlist = self.morlist[i_key].items()
for mor_name, mor in morlist:
last_seen = mor['last_seen']
if (time.time() - last_seen) > 2 * REFRESH_MORLIST_INTERVAL:
del self.morlist[i_key][mor_name]
def _cache_metrics_metadata(self, instance):
""" Get from the server instance, all the performance counters metadata
meaning name/group/description... attached with the corresponding ID
"""
### <TEST-INSTRUMENTATION>
t = Timer()
### </TEST-INSTRUMENTATION>
i_key = self._instance_key(instance)
self.log.info("Warming metrics metadata cache for instance {0}".format(i_key))
server_instance = self._get_server_instance(instance)
perfManager = server_instance.content.perfManager
new_metadata = {}
for counter in perfManager.perfCounter:
d = dict(
name = "%s.%s" % (counter.groupInfo.key, counter.nameInfo.key),
unit = counter.unitInfo.key,
instance_tag = 'instance' # FIXME: replace by what we want to tag!
)
new_metadata[counter.key] = d
self.cache_times[i_key][METRICS_METADATA][LAST] = time.time()
self.log.info("Finished metadata collection for instance {0}".format(i_key))
# Reset metadata
self.metrics_metadata[i_key] = new_metadata
### <TEST-INSTRUMENTATION>
self.histogram('datadog.agent.vsphere.metric_metadata_collection.time', t.total())
### </TEST-INSTRUMENTATION>
def _transform_value(self, instance, counter_id, value):
""" Given the counter_id, look up for the metrics metadata to check the vsphere
type of the counter and apply pre-reporting transformation if needed.
"""
i_key = self._instance_key(instance)
if counter_id in self.metrics_metadata[i_key]:
unit = self.metrics_metadata[i_key][counter_id]['unit']
if unit == 'percent':
return float(value) / 100
# Defaults to return the value without transformation
return value
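    # Worked example for the transformation above (numbers are illustrative):
    # vSphere reports 'percent' counters multiplied by 100, so a raw value of
    # 1270 for a percent counter comes out as 12.70, while counters with any
    # other unit are returned unchanged.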
@atomic_method
def _collect_metrics_atomic(self, instance, mor):
""" Task that collects the metrics listed in the morlist for one MOR
"""
### <TEST-INSTRUMENTATION>
t = Timer()
### </TEST-INSTRUMENTATION>
i_key = self._instance_key(instance)
server_instance = self._get_server_instance(instance)
perfManager = server_instance.content.perfManager
query = vim.PerformanceManager.QuerySpec(maxSample=1,
entity=mor['mor'],
metricId=mor['metrics'],
intervalId=20,
format='normal')
results = perfManager.QueryPerf(querySpec=[query])
if results:
for result in results[0].value:
if result.id.counterId not in self.metrics_metadata[i_key]:
self.log.debug("Skipping this metric value, because there is no metadata about it")
continue
instance_name = result.id.instance or "none"
value = self._transform_value(instance, result.id.counterId, result.value[0])
self.gauge(
"vsphere.%s" % self.metrics_metadata[i_key][result.id.counterId]['name'],
value,
hostname=mor['hostname'],
tags=['instance:%s' % instance_name]
)
### <TEST-INSTRUMENTATION>
self.histogram('datadog.agent.vsphere.metric_colection.time', t.total())
### </TEST-INSTRUMENTATION>
def collect_metrics(self, instance):
""" Calls asynchronously _collect_metrics_atomic on all MORs, as the
job queue is processed the Aggregator will receive the metrics.
"""
i_key = self._instance_key(instance)
if i_key not in self.morlist:
self.log.debug("Not collecting metrics for this instance, nothing to do yet: {0}".format(i_key))
return
mors = self.morlist[i_key].items()
self.log.debug("Collecting metrics of %d mors" % len(mors))
vm_count = 0
for mor_name, mor in mors:
if mor['mor_type'] == 'vm':
vm_count += 1
if 'metrics' not in mor:
# self.log.debug("Skipping entity %s collection because we didn't cache its metrics yet" % mor['hostname'])
continue
self.pool.apply_async(self._collect_metrics_atomic, args=(instance, mor))
self.gauge('vsphere.vm.count', vm_count, tags=["vcenter_server:%s" % instance.get('name')])
def check(self, instance):
if not self.pool_started:
self.start_pool()
### <TEST-INSTRUMENTATION>
self.gauge('datadog.agent.vsphere.queue_size', self.pool._workq.qsize(), tags=['instant:initial'])
### </TEST-INSTRUMENTATION>
# First part: make sure our object repository is neat & clean
if self._should_cache(instance, METRICS_METADATA):
self._cache_metrics_metadata(instance)
if self._should_cache(instance, MORLIST):
self._cache_morlist_raw(instance)
self._cache_morlist_process(instance)
self._vacuum_morlist(instance)
# Second part: do the job
self.collect_metrics(instance)
self._query_event(instance)
# For our own sanity
self._clean()
thread_crashed = False
try:
while True:
self.log.critical(self.exceptionq.get_nowait())
thread_crashed = True
except Empty:
pass
if thread_crashed:
self.stop_pool()
raise Exception("One thread in the pool crashed, check the logs")
### <TEST-INSTRUMENTATION>
self.gauge('datadog.agent.vsphere.queue_size', self.pool._workq.qsize(), tags=['instant:final'])
### </TEST-INSTRUMENTATION>
if __name__ == '__main__':
check, _instances = VSphereCheck.from_yaml('conf.d/vsphere.yaml')
try:
for i in xrange(200):
print "Loop %d" % i
for instance in check.instances:
check.check(instance)
if check.has_events():
print 'Events: %s' % (check.get_events())
print 'Metrics: %d' % (len(check.get_metrics()))
time.sleep(10)
except Exception as e:
print "Whoops something happened {0}".format(traceback.format_exc())
finally:
check.stop()
| bsd-3-clause |
openstack/ironic | ironic/objects/port.py | 1 | 23131 | # coding=utf-8
#
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_utils import netutils
from oslo_utils import strutils
from oslo_utils import uuidutils
from oslo_utils import versionutils
from oslo_versionedobjects import base as object_base
from ironic.common import exception
from ironic.common import utils
from ironic.db import api as dbapi
from ironic.objects import base
from ironic.objects import fields as object_fields
from ironic.objects import notification
@base.IronicObjectRegistry.register
class Port(base.IronicObject, object_base.VersionedObjectDictCompat):
# Version 1.0: Initial version
# Version 1.1: Add get() and get_by_id() and get_by_address() and
# make get_by_uuid() only work with a uuid
# Version 1.2: Add create() and destroy()
# Version 1.3: Add list()
# Version 1.4: Add list_by_node_id()
# Version 1.5: Add list_by_portgroup_id() and new fields
# local_link_connection, portgroup_id and pxe_enabled
# Version 1.6: Add internal_info field
# Version 1.7: Add physical_network field
# Version 1.8: Migrate/copy extra['vif_port_id'] to
# internal_info['tenant_vif_port_id'] (not an explicit db
# change)
# Version 1.9: Add support for Smart NIC port
# Version 1.10: Add name field
VERSION = '1.10'
dbapi = dbapi.get_instance()
fields = {
'id': object_fields.IntegerField(),
'uuid': object_fields.UUIDField(nullable=True),
'node_id': object_fields.IntegerField(nullable=True),
'address': object_fields.MACAddressField(nullable=True),
'extra': object_fields.FlexibleDictField(nullable=True),
'local_link_connection': object_fields.FlexibleDictField(
nullable=True),
'portgroup_id': object_fields.IntegerField(nullable=True),
'pxe_enabled': object_fields.BooleanField(),
'internal_info': object_fields.FlexibleDictField(nullable=True),
'physical_network': object_fields.StringField(nullable=True),
'is_smartnic': object_fields.BooleanField(nullable=True,
default=False),
'name': object_fields.StringField(nullable=True),
}
def _convert_name_field(self, target_version,
remove_unavailable_fields=True):
name_is_set = self.obj_attr_is_set('name')
if target_version >= (1, 10):
# Target version supports name. Set it to its default
# value if it is not set.
if not name_is_set:
self.name = None
elif name_is_set:
# Target version does not support name, and it is set.
if remove_unavailable_fields:
# (De)serialising: remove unavailable fields.
delattr(self, 'name')
elif self.name is not None:
# DB: set unavailable fields to their default.
self.name = None
def _convert_to_version(self, target_version,
remove_unavailable_fields=True):
"""Convert to the target version.
Convert the object to the target version. The target version may be
the same, older, or newer than the version of the object. This is
used for DB interactions as well as for serialization/deserialization.
Version 1.7: physical_network field was added. Its default value is
None. For versions prior to this, it should be set to None (or
removed).
Version 1.8: if extra['vif_port_id'] is specified (non-null) and
internal_info['tenant_vif_port_id'] is not specified, copy the
.extra value to internal_info. There is nothing to do here when
downgrading to an older version.
Version 1.9: remove is_smartnic field for unsupported versions if
remove_unavailable_fields is True.
Version 1.10: remove name field for unsupported versions if
remove_unavailable_fields is True.
:param target_version: the desired version of the object
:param remove_unavailable_fields: True to remove fields that are
unavailable in the target version; set this to True when
(de)serializing. False to set the unavailable fields to appropriate
values; set this to False for DB interactions.
"""
target_version = versionutils.convert_version_to_tuple(target_version)
if target_version >= (1, 8):
if self.obj_attr_is_set('extra'):
vif = self.extra.get('vif_port_id')
if vif:
internal_info = (self.internal_info
if self.obj_attr_is_set('internal_info')
else {})
if 'tenant_vif_port_id' not in internal_info:
internal_info['tenant_vif_port_id'] = vif
self.internal_info = internal_info
# Convert the physical_network field.
physnet_is_set = self.obj_attr_is_set('physical_network')
if target_version >= (1, 7):
# Target version supports physical_network. Set it to its default
# value if it is not set.
if not physnet_is_set:
self.physical_network = None
elif physnet_is_set:
# Target version does not support physical_network, and it is set.
if remove_unavailable_fields:
# (De)serialising: remove unavailable fields.
delattr(self, 'physical_network')
elif self.physical_network is not None:
# DB: set unavailable fields to their default.
self.physical_network = None
# Convert is_smartnic field.
is_smartnic_set = self.obj_attr_is_set('is_smartnic')
if target_version >= (1, 9):
# Target version supports is_smartnic. Set it to its default
# value if it is not set.
if not is_smartnic_set:
self.is_smartnic = False
# handle is_smartnic field in older version
elif is_smartnic_set:
# Target version does not support is_smartnic, and it is set.
if remove_unavailable_fields:
# (De)serialising: remove unavailable fields.
delattr(self, 'is_smartnic')
elif self.is_smartnic is not False:
# DB: set unavailable fields to their default.
self.is_smartnic = False
# Convert the name field.
self._convert_name_field(target_version, remove_unavailable_fields)
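    # Conversion sketch for the method above (field values are illustrative):
    #
    #   port = Port(context, physical_network='physnet1', is_smartnic=True,
    #               name='port0')
    #   port._convert_to_version('1.6', remove_unavailable_fields=True)
    #   # -> physical_network, is_smartnic and name are removed entirely
    #   port._convert_to_version('1.6', remove_unavailable_fields=False)
    #   # -> they are reset to their defaults (None, False, None) for the DB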
# NOTE(xek): We don't want to enable RPC on this call just yet. Remotable
# methods can be used in the future to replace current explicit RPC calls.
# Implications of calling new remote procedures should be thought through.
# @object_base.remotable_classmethod
@classmethod
def get(cls, context, port_id):
"""Find a port.
Find a port based on its id or uuid or name or MAC address and return
a Port object.
:param context: Security context
:param port_id: the id *or* uuid *or* name *or* MAC address of a port.
:returns: a :class:`Port` object.
:raises: InvalidIdentity
"""
if strutils.is_int_like(port_id):
return cls.get_by_id(context, port_id)
elif uuidutils.is_uuid_like(port_id):
return cls.get_by_uuid(context, port_id)
elif netutils.is_valid_mac(port_id):
return cls.get_by_address(context, port_id)
elif utils.is_valid_logical_name(port_id):
return cls.get_by_name(context, port_id)
else:
raise exception.InvalidIdentity(identity=port_id)
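    # Dispatch sketch for get() (identifiers below are illustrative):
    #
    #   Port.get(context, 7)                                       # get_by_id
    #   Port.get(context, '1be26c0b-03f2-4d2e-ae87-c02d7f33c123')  # get_by_uuid
    #   Port.get(context, '52:54:00:cf:2d:31')                     # get_by_address
    #   Port.get(context, 'port-0')                                # get_by_name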
# NOTE(xek): We don't want to enable RPC on this call just yet. Remotable
# methods can be used in the future to replace current explicit RPC calls.
# Implications of calling new remote procedures should be thought through.
# @object_base.remotable_classmethod
@classmethod
def get_by_id(cls, context, port_id):
"""Find a port based on its integer ID and return a Port object.
:param cls: the :class:`Port`
:param context: Security context
:param port_id: the ID of a port.
:returns: a :class:`Port` object.
:raises: PortNotFound
"""
db_port = cls.dbapi.get_port_by_id(port_id)
port = cls._from_db_object(context, cls(), db_port)
return port
# NOTE(xek): We don't want to enable RPC on this call just yet. Remotable
# methods can be used in the future to replace current explicit RPC calls.
# Implications of calling new remote procedures should be thought through.
# @object_base.remotable_classmethod
@classmethod
def get_by_uuid(cls, context, uuid):
"""Find a port based on UUID and return a :class:`Port` object.
:param cls: the :class:`Port`
:param context: Security context
:param uuid: the UUID of a port.
:returns: a :class:`Port` object.
:raises: PortNotFound
"""
db_port = cls.dbapi.get_port_by_uuid(uuid)
port = cls._from_db_object(context, cls(), db_port)
return port
# NOTE(xek): We don't want to enable RPC on this call just yet. Remotable
# methods can be used in the future to replace current explicit RPC calls.
# Implications of calling new remote procedures should be thought through.
# @object_base.remotable_classmethod
@classmethod
def get_by_address(cls, context, address, owner=None, project=None):
"""Find a port based on address and return a :class:`Port` object.
:param cls: the :class:`Port`
:param context: Security context
:param address: the address of a port.
:param owner: DEPRECATED a node owner to match against
:param project: a node owner or lessee to match against
:returns: a :class:`Port` object.
:raises: PortNotFound
"""
if owner and not project:
project = owner
db_port = cls.dbapi.get_port_by_address(address, project=project)
port = cls._from_db_object(context, cls(), db_port)
return port
# NOTE(xek): We don't want to enable RPC on this call just yet. Remotable
# methods can be used in the future to replace current explicit RPC calls.
# Implications of calling new remote procedures should be thought through.
# @object_base.remotable_classmethod
@classmethod
def get_by_name(cls, context, name):
"""Find a port based on name and return a :class:`Port` object.
:param cls: the :class:`Port`
:param context: Security context
:param name: the name of a port.
:returns: a :class:`Port` object.
:raises: PortNotFound
"""
db_port = cls.dbapi.get_port_by_name(name)
port = cls._from_db_object(context, cls(), db_port)
return port
# NOTE(xek): We don't want to enable RPC on this call just yet. Remotable
# methods can be used in the future to replace current explicit RPC calls.
# Implications of calling new remote procedures should be thought through.
# @object_base.remotable_classmethod
@classmethod
def list(cls, context, limit=None, marker=None,
sort_key=None, sort_dir=None, owner=None, project=None):
"""Return a list of Port objects.
:param context: Security context.
:param limit: maximum number of resources to return in a single result.
:param marker: pagination marker for large data sets.
:param sort_key: column to sort results by.
:param sort_dir: direction to sort. "asc" or "desc".
:param owner: DEPRECATED a node owner to match against
:param project: a node owner or lessee to match against
:returns: a list of :class:`Port` object.
:raises: InvalidParameterValue
"""
if owner and not project:
project = owner
db_ports = cls.dbapi.get_port_list(limit=limit,
marker=marker,
sort_key=sort_key,
sort_dir=sort_dir,
project=project)
return cls._from_db_object_list(context, db_ports)
# NOTE(xek): We don't want to enable RPC on this call just yet. Remotable
# methods can be used in the future to replace current explicit RPC calls.
# Implications of calling new remote procedures should be thought through.
# @object_base.remotable_classmethod
@classmethod
def list_by_node_id(cls, context, node_id, limit=None, marker=None,
sort_key=None, sort_dir=None, owner=None,
project=None):
"""Return a list of Port objects associated with a given node ID.
:param context: Security context.
:param node_id: the ID of the node.
:param limit: maximum number of resources to return in a single result.
:param marker: pagination marker for large data sets.
:param sort_key: column to sort results by.
:param sort_dir: direction to sort. "asc" or "desc".
:param owner: DEPRECATED a node owner to match against
:param project: a node owner or lessee to match against
:returns: a list of :class:`Port` object.
"""
if owner and not project:
project = owner
db_ports = cls.dbapi.get_ports_by_node_id(node_id, limit=limit,
marker=marker,
sort_key=sort_key,
sort_dir=sort_dir,
project=project)
return cls._from_db_object_list(context, db_ports)
# NOTE(xek): We don't want to enable RPC on this call just yet. Remotable
# methods can be used in the future to replace current explicit RPC calls.
# Implications of calling new remote procedures should be thought through.
# @object_base.remotable_classmethod
@classmethod
def list_by_portgroup_id(cls, context, portgroup_id, limit=None,
marker=None, sort_key=None, sort_dir=None,
owner=None, project=None):
"""Return a list of Port objects associated with a given portgroup ID.
:param context: Security context.
:param portgroup_id: the ID of the portgroup.
:param limit: maximum number of resources to return in a single result.
:param marker: pagination marker for large data sets.
:param sort_key: column to sort results by.
:param sort_dir: direction to sort. "asc" or "desc".
:param owner: DEPRECATED a node owner to match against
:param project: a node owner or lessee to match against
:returns: a list of :class:`Port` object.
"""
if owner and not project:
project = owner
db_ports = cls.dbapi.get_ports_by_portgroup_id(portgroup_id,
limit=limit,
marker=marker,
sort_key=sort_key,
sort_dir=sort_dir,
project=project)
return cls._from_db_object_list(context, db_ports)
# NOTE(xek): We don't want to enable RPC on this call just yet. Remotable
# methods can be used in the future to replace current explicit RPC calls.
# Implications of calling new remote procedures should be thought through.
# @object_base.remotable
def create(self, context=None):
"""Create a Port record in the DB.
:param context: Security context. NOTE: This should only
be used internally by the indirection_api.
Unfortunately, RPC requires context as the first
argument, even though we don't use it.
A context should be set when instantiating the
object, e.g.: Port(context)
:raises: MACAlreadyExists if 'address' column is not unique
:raises: PortAlreadyExists if 'uuid' column is not unique
"""
values = self.do_version_changes_for_db()
db_port = self.dbapi.create_port(values)
self._from_db_object(self._context, self, db_port)
# NOTE(xek): We don't want to enable RPC on this call just yet. Remotable
# methods can be used in the future to replace current explicit RPC calls.
# Implications of calling new remote procedures should be thought through.
# @object_base.remotable
def destroy(self, context=None):
"""Delete the Port from the DB.
:param context: Security context. NOTE: This should only
be used internally by the indirection_api.
Unfortunately, RPC requires context as the first
argument, even though we don't use it.
A context should be set when instantiating the
object, e.g.: Port(context)
:raises: PortNotFound
"""
self.dbapi.destroy_port(self.uuid)
self.obj_reset_changes()
# NOTE(xek): We don't want to enable RPC on this call just yet. Remotable
# methods can be used in the future to replace current explicit RPC calls.
# Implications of calling new remote procedures should be thought through.
# @object_base.remotable
def save(self, context=None):
"""Save updates to this Port.
Updates will be made column by column based on the result
of self.what_changed().
:param context: Security context. NOTE: This should only
be used internally by the indirection_api.
Unfortunately, RPC requires context as the first
argument, even though we don't use it.
A context should be set when instantiating the
object, e.g.: Port(context)
:raises: PortNotFound
:raises: MACAlreadyExists if 'address' column is not unique
"""
updates = self.do_version_changes_for_db()
updated_port = self.dbapi.update_port(self.uuid, updates)
self._from_db_object(self._context, self, updated_port)
# NOTE(xek): We don't want to enable RPC on this call just yet. Remotable
# methods can be used in the future to replace current explicit RPC calls.
# Implications of calling new remote procedures should be thought through.
# @object_base.remotable
def refresh(self, context=None):
"""Loads updates for this Port.
Loads a port with the same uuid from the database and
checks for updated attributes. Updates are applied from
the loaded port column by column, if there are any updates.
:param context: Security context. NOTE: This should only
be used internally by the indirection_api.
Unfortunately, RPC requires context as the first
argument, even though we don't use it.
A context should be set when instantiating the
object, e.g.: Port(context)
:raises: PortNotFound
"""
current = self.get_by_uuid(self._context, uuid=self.uuid)
self.obj_refresh(current)
self.obj_reset_changes()
@classmethod
def supports_physical_network(cls):
"""Return whether the physical_network field is supported.
:returns: Whether the physical_network field is supported
:raises: ovo_exception.IncompatibleObjectVersion
"""
return cls.supports_version((1, 7))
@classmethod
def supports_is_smartnic(cls):
"""Return whether is_smartnic field is supported.
:returns: Whether is_smartnic field is supported
:raises: ovo_exception.IncompatibleObjectVersion
"""
return cls.supports_version((1, 9))
@base.IronicObjectRegistry.register
class PortCRUDNotification(notification.NotificationBase):
"""Notification emitted when ironic creates, updates or deletes a port."""
# Version 1.0: Initial version
VERSION = '1.0'
fields = {
'payload': object_fields.ObjectField('PortCRUDPayload')
}
@base.IronicObjectRegistry.register
class PortCRUDPayload(notification.NotificationPayloadBase):
# Version 1.0: Initial version
# Version 1.1: Add "portgroup_uuid" field
# Version 1.2: Add "physical_network" field
# Version 1.3: Add "is_smartnic" field
# Version 1.4: Add "name" field
VERSION = '1.4'
SCHEMA = {
'address': ('port', 'address'),
'extra': ('port', 'extra'),
'local_link_connection': ('port', 'local_link_connection'),
'pxe_enabled': ('port', 'pxe_enabled'),
'physical_network': ('port', 'physical_network'),
'created_at': ('port', 'created_at'),
'updated_at': ('port', 'updated_at'),
'uuid': ('port', 'uuid'),
'is_smartnic': ('port', 'is_smartnic'),
'name': ('port', 'name'),
}
fields = {
'address': object_fields.MACAddressField(nullable=True),
'extra': object_fields.FlexibleDictField(nullable=True),
'local_link_connection': object_fields.FlexibleDictField(
nullable=True),
'pxe_enabled': object_fields.BooleanField(nullable=True),
'node_uuid': object_fields.UUIDField(),
'portgroup_uuid': object_fields.UUIDField(nullable=True),
'physical_network': object_fields.StringField(nullable=True),
'created_at': object_fields.DateTimeField(nullable=True),
'updated_at': object_fields.DateTimeField(nullable=True),
'uuid': object_fields.UUIDField(),
'is_smartnic': object_fields.BooleanField(nullable=True,
default=False),
'name': object_fields.StringField(nullable=True),
}
def __init__(self, port, node_uuid, portgroup_uuid):
super(PortCRUDPayload, self).__init__(node_uuid=node_uuid,
portgroup_uuid=portgroup_uuid)
self.populate_schema(port=port)
| apache-2.0 |
pforret/python-for-android | python-build/python-libs/gdata/build/lib/gdata/apps/service.py | 136 | 16595 | #!/usr/bin/python
#
# Copyright (C) 2007 SIOS Technology, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
__author__ = '[email protected] (Takashi MATSUO)'
try:
from xml.etree import cElementTree as ElementTree
except ImportError:
try:
import cElementTree as ElementTree
except ImportError:
try:
from xml.etree import ElementTree
except ImportError:
from elementtree import ElementTree
import urllib
import gdata
import atom.service
import gdata.service
import gdata.apps
import atom
API_VER="2.0"
HTTP_OK=200
UNKOWN_ERROR=1000
USER_DELETED_RECENTLY=1100
USER_SUSPENDED=1101
DOMAIN_USER_LIMIT_EXCEEDED=1200
DOMAIN_ALIAS_LIMIT_EXCEEDED=1201
DOMAIN_SUSPENDED=1202
DOMAIN_FEATURE_UNAVAILABLE=1203
ENTITY_EXISTS=1300
ENTITY_DOES_NOT_EXIST=1301
ENTITY_NAME_IS_RESERVED=1302
ENTITY_NAME_NOT_VALID=1303
INVALID_GIVEN_NAME=1400
INVALID_FAMILY_NAME=1401
INVALID_PASSWORD=1402
INVALID_USERNAME=1403
INVALID_HASH_FUNCTION_NAME=1404
INVALID_HASH_DIGGEST_LENGTH=1405
INVALID_EMAIL_ADDRESS=1406
INVALID_QUERY_PARAMETER_VALUE=1407
TOO_MANY_RECIPIENTS_ON_EMAIL_LIST=1500
DEFAULT_QUOTA_LIMIT='2048'
class Error(Exception):
pass
class AppsForYourDomainException(Error):
def __init__(self, response):
Error.__init__(self, response)
try:
self.element_tree = ElementTree.fromstring(response['body'])
self.error_code = int(self.element_tree[0].attrib['errorCode'])
self.reason = self.element_tree[0].attrib['reason']
self.invalidInput = self.element_tree[0].attrib['invalidInput']
except:
self.error_code = UNKOWN_ERROR
class AppsService(gdata.service.GDataService):
"""Client for the Google Apps Provisioning service."""
def __init__(self, email=None, password=None, domain=None, source=None,
server='apps-apis.google.com', additional_headers=None,
**kwargs):
"""Creates a client for the Google Apps Provisioning service.
Args:
email: string (optional) The user's email address, used for
authentication.
password: string (optional) The user's password.
domain: string (optional) The Google Apps domain name.
source: string (optional) The name of the user's application.
server: string (optional) The name of the server to which a connection
will be opened. Default value: 'apps-apis.google.com'.
**kwargs: The other parameters to pass to gdata.service.GDataService
constructor.
"""
gdata.service.GDataService.__init__(
self, email=email, password=password, service='apps', source=source,
server=server, additional_headers=additional_headers, **kwargs)
self.ssl = True
self.port = 443
self.domain = domain
def _baseURL(self):
return "/a/feeds/%s" % self.domain
def GetGeneratorFromLinkFinder(self, link_finder, func):
"""returns a generator for pagination"""
yield link_finder
next = link_finder.GetNextLink()
while next is not None:
next_feed = func(str(self.Get(next.href)))
yield next_feed
next = next_feed.GetNextLink()
def AddAllElementsFromAllPages(self, link_finder, func):
"""retrieve all pages and add all elements"""
next = link_finder.GetNextLink()
while next is not None:
next_feed = self.Get(next.href, converter=func)
for a_entry in next_feed.entry:
link_finder.entry.append(a_entry)
next = next_feed.GetNextLink()
return link_finder
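  # Pagination sketch for the two helpers above (the feed type used as the
  # converter is illustrative):
  #
  #   first_page = service.RetrievePageOfEmailLists()
  #   full_feed = service.AddAllElementsFromAllPages(
  #       first_page, gdata.apps.EmailListFeedFromString)
  #   # or, generator-style, one page at a time:
  #   for page in service.GetGeneratorFromLinkFinder(
  #       first_page, gdata.apps.EmailListFeedFromString):
  #     ...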
def RetrievePageOfEmailLists(self, start_email_list_name=None):
"""Retrieve one page of email list"""
uri = "%s/emailList/%s" % (self._baseURL(), API_VER)
if start_email_list_name is not None:
uri += "?startEmailListName=%s" % start_email_list_name
try:
return gdata.apps.EmailListFeedFromString(str(self.Get(uri)))
except gdata.service.RequestError, e:
raise AppsForYourDomainException(e.args[0])
def RetrieveAllEmailLists(self):
"""Retrieve all email list of a domain."""
ret = self.RetrievePageOfEmailLists()
# pagination
return self.AddAllElementsFromAllPages(
ret, gdata.apps.EmailListFeedFromString)
def RetrieveEmailList(self, list_name):
"""Retreive a single email list by the list's name."""
uri = "%s/emailList/%s/%s" % (
self._baseURL(), API_VER, list_name)
try:
return self.Get(uri, converter=gdata.apps.EmailListEntryFromString)
except gdata.service.RequestError, e:
raise AppsForYourDomainException(e.args[0])
def RetrieveEmailLists(self, recipient):
"""Retrieve All Email List Subscriptions for an Email Address."""
uri = "%s/emailList/%s?recipient=%s" % (
self._baseURL(), API_VER, recipient)
try:
ret = gdata.apps.EmailListFeedFromString(str(self.Get(uri)))
except gdata.service.RequestError, e:
raise AppsForYourDomainException(e.args[0])
# pagination
return self.AddAllElementsFromAllPages(
ret, gdata.apps.EmailListFeedFromString)
def RemoveRecipientFromEmailList(self, recipient, list_name):
"""Remove recipient from email list."""
uri = "%s/emailList/%s/%s/recipient/%s" % (
self._baseURL(), API_VER, list_name, recipient)
try:
self.Delete(uri)
except gdata.service.RequestError, e:
raise AppsForYourDomainException(e.args[0])
def RetrievePageOfRecipients(self, list_name, start_recipient=None):
"""Retrieve one page of recipient of an email list. """
uri = "%s/emailList/%s/%s/recipient" % (
self._baseURL(), API_VER, list_name)
if start_recipient is not None:
uri += "?startRecipient=%s" % start_recipient
try:
return gdata.apps.EmailListRecipientFeedFromString(str(self.Get(uri)))
except gdata.service.RequestError, e:
raise AppsForYourDomainException(e.args[0])
def RetrieveAllRecipients(self, list_name):
"""Retrieve all recipient of an email list."""
ret = self.RetrievePageOfRecipients(list_name)
# pagination
return self.AddAllElementsFromAllPages(
ret, gdata.apps.EmailListRecipientFeedFromString)
def AddRecipientToEmailList(self, recipient, list_name):
"""Add a recipient to a email list."""
uri = "%s/emailList/%s/%s/recipient" % (
self._baseURL(), API_VER, list_name)
recipient_entry = gdata.apps.EmailListRecipientEntry()
recipient_entry.who = gdata.apps.Who(email=recipient)
try:
return gdata.apps.EmailListRecipientEntryFromString(
str(self.Post(recipient_entry, uri)))
except gdata.service.RequestError, e:
raise AppsForYourDomainException(e.args[0])
def DeleteEmailList(self, list_name):
"""Delete a email list"""
uri = "%s/emailList/%s/%s" % (self._baseURL(), API_VER, list_name)
try:
self.Delete(uri)
except gdata.service.RequestError, e:
raise AppsForYourDomainException(e.args[0])
def CreateEmailList(self, list_name):
"""Create a email list. """
uri = "%s/emailList/%s" % (self._baseURL(), API_VER)
email_list_entry = gdata.apps.EmailListEntry()
email_list_entry.email_list = gdata.apps.EmailList(name=list_name)
try:
return gdata.apps.EmailListEntryFromString(
str(self.Post(email_list_entry, uri)))
except gdata.service.RequestError, e:
raise AppsForYourDomainException(e.args[0])
def DeleteNickname(self, nickname):
"""Delete a nickname"""
uri = "%s/nickname/%s/%s" % (self._baseURL(), API_VER, nickname)
try:
self.Delete(uri)
except gdata.service.RequestError, e:
raise AppsForYourDomainException(e.args[0])
def RetrievePageOfNicknames(self, start_nickname=None):
"""Retrieve one page of nicknames in the domain"""
uri = "%s/nickname/%s" % (self._baseURL(), API_VER)
if start_nickname is not None:
uri += "?startNickname=%s" % start_nickname
try:
return gdata.apps.NicknameFeedFromString(str(self.Get(uri)))
except gdata.service.RequestError, e:
raise AppsForYourDomainException(e.args[0])
def RetrieveAllNicknames(self):
"""Retrieve all nicknames in the domain"""
ret = self.RetrievePageOfNicknames()
# pagination
return self.AddAllElementsFromAllPages(
ret, gdata.apps.NicknameFeedFromString)
def RetrieveNicknames(self, user_name):
"""Retrieve nicknames of the user"""
uri = "%s/nickname/%s?username=%s" % (self._baseURL(), API_VER, user_name)
try:
ret = gdata.apps.NicknameFeedFromString(str(self.Get(uri)))
except gdata.service.RequestError, e:
raise AppsForYourDomainException(e.args[0])
# pagination
return self.AddAllElementsFromAllPages(
ret, gdata.apps.NicknameFeedFromString)
def RetrieveNickname(self, nickname):
"""Retrieve a nickname.
Args:
nickname: string The nickname to retrieve
Returns:
gdata.apps.NicknameEntry
"""
uri = "%s/nickname/%s/%s" % (self._baseURL(), API_VER, nickname)
try:
return gdata.apps.NicknameEntryFromString(str(self.Get(uri)))
except gdata.service.RequestError, e:
raise AppsForYourDomainException(e.args[0])
def CreateNickname(self, user_name, nickname):
"""Create a nickname"""
uri = "%s/nickname/%s" % (self._baseURL(), API_VER)
nickname_entry = gdata.apps.NicknameEntry()
nickname_entry.login = gdata.apps.Login(user_name=user_name)
nickname_entry.nickname = gdata.apps.Nickname(name=nickname)
try:
return gdata.apps.NicknameEntryFromString(
str(self.Post(nickname_entry, uri)))
except gdata.service.RequestError, e:
raise AppsForYourDomainException(e.args[0])
def DeleteUser(self, user_name):
"""Delete a user account"""
uri = "%s/user/%s/%s" % (self._baseURL(), API_VER, user_name)
try:
return self.Delete(uri)
except gdata.service.RequestError, e:
raise AppsForYourDomainException(e.args[0])
def UpdateUser(self, user_name, user_entry):
"""Update a user account."""
uri = "%s/user/%s/%s" % (self._baseURL(), API_VER, user_name)
try:
return gdata.apps.UserEntryFromString(str(self.Put(user_entry, uri)))
except gdata.service.RequestError, e:
raise AppsForYourDomainException(e.args[0])
def CreateUser(self, user_name, family_name, given_name, password,
suspended='false', quota_limit=None,
password_hash_function=None):
"""Create a user account. """
uri = "%s/user/%s" % (self._baseURL(), API_VER)
user_entry = gdata.apps.UserEntry()
user_entry.login = gdata.apps.Login(
user_name=user_name, password=password, suspended=suspended,
hash_function_name=password_hash_function)
user_entry.name = gdata.apps.Name(family_name=family_name,
given_name=given_name)
if quota_limit is not None:
user_entry.quota = gdata.apps.Quota(limit=str(quota_limit))
try:
return gdata.apps.UserEntryFromString(str(self.Post(user_entry, uri)))
except gdata.service.RequestError, e:
raise AppsForYourDomainException(e.args[0])
def SuspendUser(self, user_name):
user_entry = self.RetrieveUser(user_name)
if user_entry.login.suspended != 'true':
user_entry.login.suspended = 'true'
user_entry = self.UpdateUser(user_name, user_entry)
return user_entry
def RestoreUser(self, user_name):
user_entry = self.RetrieveUser(user_name)
if user_entry.login.suspended != 'false':
user_entry.login.suspended = 'false'
user_entry = self.UpdateUser(user_name, user_entry)
return user_entry
def RetrieveUser(self, user_name):
"""Retrieve an user account.
Args:
user_name: string The user name to retrieve
Returns:
gdata.apps.UserEntry
"""
uri = "%s/user/%s/%s" % (self._baseURL(), API_VER, user_name)
try:
return gdata.apps.UserEntryFromString(str(self.Get(uri)))
except gdata.service.RequestError, e:
raise AppsForYourDomainException(e.args[0])
def RetrievePageOfUsers(self, start_username=None):
"""Retrieve one page of users in this domain."""
uri = "%s/user/%s" % (self._baseURL(), API_VER)
if start_username is not None:
uri += "?startUsername=%s" % start_username
try:
return gdata.apps.UserFeedFromString(str(self.Get(uri)))
except gdata.service.RequestError, e:
raise AppsForYourDomainException(e.args[0])
def GetGeneratorForAllUsers(self):
"""Retrieve a generator for all users in this domain."""
first_page = self.RetrievePageOfUsers()
return self.GetGeneratorFromLinkFinder(first_page,
gdata.apps.UserFeedFromString)
def RetrieveAllUsers(self):
"""Retrieve all users in this domain. OBSOLETE"""
ret = self.RetrievePageOfUsers()
# pagination
return self.AddAllElementsFromAllPages(
ret, gdata.apps.UserFeedFromString)
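# Minimal usage sketch for AppsService; the credentials, domain and the
# ProgrammaticLogin() call inherited from gdata.service.GDataService are
# illustrative assumptions, not part of this module:
#
#   service = AppsService(email='admin@example.com', password='secret',
#                         domain='example.com', source='my-app')
#   service.ProgrammaticLogin()
#   user = service.CreateUser('jdoe', 'Doe', 'John', 'password123')
#   nickname = service.CreateNickname('jdoe', 'johnny')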
class PropertyService(gdata.service.GDataService):
"""Client for the Google Apps Property service."""
def __init__(self, email=None, password=None, domain=None, source=None,
server='apps-apis.google.com', additional_headers=None):
gdata.service.GDataService.__init__(self, email=email, password=password,
service='apps', source=source,
server=server,
additional_headers=additional_headers)
self.ssl = True
self.port = 443
self.domain = domain
def AddAllElementsFromAllPages(self, link_finder, func):
"""retrieve all pages and add all elements"""
next = link_finder.GetNextLink()
while next is not None:
next_feed = self.Get(next.href, converter=func)
for a_entry in next_feed.entry:
link_finder.entry.append(a_entry)
next = next_feed.GetNextLink()
return link_finder
def _GetPropertyEntry(self, properties):
property_entry = gdata.apps.PropertyEntry()
property = []
for name, value in properties.iteritems():
if name is not None and value is not None:
property.append(gdata.apps.Property(name=name, value=value))
property_entry.property = property
return property_entry
def _PropertyEntry2Dict(self, property_entry):
properties = {}
for i, property in enumerate(property_entry.property):
properties[property.name] = property.value
return properties
def _GetPropertyFeed(self, uri):
try:
return gdata.apps.PropertyFeedFromString(str(self.Get(uri)))
except gdata.service.RequestError, e:
raise gdata.apps.service.AppsForYourDomainException(e.args[0])
def _GetPropertiesList(self, uri):
property_feed = self._GetPropertyFeed(uri)
# pagination
property_feed = self.AddAllElementsFromAllPages(
property_feed, gdata.apps.PropertyFeedFromString)
properties_list = []
for property_entry in property_feed.entry:
properties_list.append(self._PropertyEntry2Dict(property_entry))
return properties_list
def _GetProperties(self, uri):
try:
return self._PropertyEntry2Dict(gdata.apps.PropertyEntryFromString(
str(self.Get(uri))))
except gdata.service.RequestError, e:
raise gdata.apps.service.AppsForYourDomainException(e.args[0])
def _PostProperties(self, uri, properties):
property_entry = self._GetPropertyEntry(properties)
try:
return self._PropertyEntry2Dict(gdata.apps.PropertyEntryFromString(
str(self.Post(property_entry, uri))))
except gdata.service.RequestError, e:
raise gdata.apps.service.AppsForYourDomainException(e.args[0])
def _PutProperties(self, uri, properties):
property_entry = self._GetPropertyEntry(properties)
try:
return self._PropertyEntry2Dict(gdata.apps.PropertyEntryFromString(
str(self.Put(property_entry, uri))))
except gdata.service.RequestError, e:
raise gdata.apps.service.AppsForYourDomainException(e.args[0])
def _DeleteProperties(self, uri):
try:
self.Delete(uri)
except gdata.service.RequestError, e:
raise gdata.apps.service.AppsForYourDomainException(e.args[0])
| apache-2.0 |
talbarda/kaggle_predict_house_prices | Build Model.py | 1 | 2629 | import matplotlib.pyplot as plt
import matplotlib.animation as animation
import numpy as np
import pandas as pd
import sklearn.linear_model as lm
from sklearn.model_selection import learning_curve
from sklearn.metrics import accuracy_score
from sklearn.metrics import make_scorer
from sklearn.model_selection import GridSearchCV
def get_model(estimator, parameters, X_train, y_train, scoring):
model = GridSearchCV(estimator, param_grid=parameters, scoring=scoring)
model.fit(X_train, y_train)
return model.best_estimator_
def plot_learning_curve(estimator, title, X, y, ylim=None, cv=None,
n_jobs=1, train_sizes=np.linspace(.1, 1.0, 5), scoring='accuracy'):
plt.figure(figsize=(10,6))
plt.title(title)
if ylim is not None:
plt.ylim(*ylim)
plt.xlabel("Training examples")
plt.ylabel(scoring)
train_sizes, train_scores, test_scores = learning_curve(estimator, X, y, cv=cv, scoring=scoring,
n_jobs=n_jobs, train_sizes=train_sizes)
train_scores_mean = np.mean(train_scores, axis=1)
train_scores_std = np.std(train_scores, axis=1)
test_scores_mean = np.mean(test_scores, axis=1)
test_scores_std = np.std(test_scores, axis=1)
plt.grid()
plt.fill_between(train_sizes, train_scores_mean - train_scores_std,
train_scores_mean + train_scores_std, alpha=0.1,
color="r")
plt.fill_between(train_sizes, test_scores_mean - test_scores_std,
test_scores_mean + test_scores_std, alpha=0.1, color="g")
plt.plot(train_sizes, train_scores_mean, 'o-', color="r",
label="Training score")
plt.plot(train_sizes, test_scores_mean, 'o-', color="g",
label="Cross-validation score")
plt.legend(loc="best")
return plt
train = pd.read_csv('input/train.csv')
test = pd.read_csv('input/test.csv')
for c in train:
train[c] = pd.Categorical(train[c].values).codes
X = train.drop(['SalePrice'], axis=1)
X = train[['OverallQual', 'GarageArea', 'GarageCars', 'TotalBsmtSF', 'TotRmsAbvGrd', 'FullBath', 'GrLivArea']]
y = train.SalePrice
from sklearn.model_selection import train_test_split
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.3, random_state=0)
scoring = make_scorer(accuracy_score, greater_is_better=True)
from sklearn.linear_model import RidgeCV
from sklearn.metrics import r2_score
# RidgeCV must be instantiated and fitted before predicting; accuracy_score only
# applies to classification, so the regression predictions are scored with R^2.
clf_ridge = RidgeCV()
clf_ridge.fit(X_train, y_train)
print(r2_score(y_test, clf_ridge.predict(X_test)))
print(clf_ridge)
plt = plot_learning_curve(clf_ridge, 'RidgeCV', X, y, cv=4, scoring='r2')
plt.show() | mit |
XueqingLin/tensorflow | tensorflow/python/training/proximal_gradient_descent.py | 33 | 3580 | # Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""ProximalGradientDescent for TensorFlow."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow.python.framework import ops
# pylint: disable=unused-import
from tensorflow.python.ops import math_ops
# pylint: enable=unused-import
from tensorflow.python.training import optimizer
from tensorflow.python.training import training_ops
class ProximalGradientDescentOptimizer(optimizer.Optimizer):
# pylint: disable=line-too-long
"""Optimizer that implements the proximal gradient descent algorithm.
See this [paper](http://papers.nips.cc/paper/3793-efficient-learning-using-forward-backward-splitting.pdf).
@@__init__
"""
def __init__(self, learning_rate, l1_regularization_strength=0.0,
l2_regularization_strength=0.0, use_locking=False,
name="ProximalGradientDescent"):
"""Construct a new proximal gradient descent optimizer.
Args:
learning_rate: A Tensor or a floating point value. The learning
rate to use.
l1_regularization_strength: A float value, must be greater than or
equal to zero.
l2_regularization_strength: A float value, must be greater than or
equal to zero.
use_locking: If True use locks for update operations.
name: Optional name prefix for the operations created when applying
gradients. Defaults to "GradientDescent".
"""
super(ProximalGradientDescentOptimizer, self).__init__(use_locking, name)
self._learning_rate = learning_rate
self._l1_regularization_strength = l1_regularization_strength
self._l2_regularization_strength = l2_regularization_strength
self._l1_regularization_strength_tensor = None
self._l2_regularization_strength_tensor = None
def _apply_dense(self, grad, var):
return training_ops.apply_proximal_gradient_descent(
var,
self._learning_rate_tensor,
self._l1_regularization_strength_tensor,
self._l2_regularization_strength_tensor,
grad,
use_locking=self._use_locking).op
def _apply_sparse(self, grad, var):
return training_ops.sparse_apply_proximal_gradient_descent(
var,
self._learning_rate_tensor,
self._l1_regularization_strength_tensor,
self._l2_regularization_strength_tensor,
grad.values,
grad.indices,
use_locking=self._use_locking).op
def _prepare(self):
self._learning_rate_tensor = ops.convert_to_tensor(self._learning_rate,
name="learning_rate")
self._l1_regularization_strength_tensor = ops.convert_to_tensor(
self._l1_regularization_strength, name="l1_regularization_strength")
self._l2_regularization_strength_tensor = ops.convert_to_tensor(
self._l2_regularization_strength, name="l2_regularization_strength")
| apache-2.0 |
tanium/pytan | lib/libs_external/any/urllib3/filepost.py | 292 | 2321 | from __future__ import absolute_import
import codecs
from uuid import uuid4
from io import BytesIO
from .packages import six
from .packages.six import b
from .fields import RequestField
writer = codecs.lookup('utf-8')[3]
def choose_boundary():
"""
Our embarrassingly-simple replacement for mimetools.choose_boundary.
"""
return uuid4().hex
def iter_field_objects(fields):
"""
Iterate over fields.
Supports list of (k, v) tuples and dicts, and lists of
:class:`~urllib3.fields.RequestField`.
"""
if isinstance(fields, dict):
i = six.iteritems(fields)
else:
i = iter(fields)
for field in i:
if isinstance(field, RequestField):
yield field
else:
yield RequestField.from_tuples(*field)
def iter_fields(fields):
"""
.. deprecated:: 1.6
Iterate over fields.
The addition of :class:`~urllib3.fields.RequestField` makes this function
obsolete. Instead, use :func:`iter_field_objects`, which returns
:class:`~urllib3.fields.RequestField` objects.
Supports list of (k, v) tuples and dicts.
"""
if isinstance(fields, dict):
return ((k, v) for k, v in six.iteritems(fields))
return ((k, v) for k, v in fields)
def encode_multipart_formdata(fields, boundary=None):
"""
Encode a dictionary of ``fields`` using the multipart/form-data MIME format.
:param fields:
Dictionary of fields or list of (key, :class:`~urllib3.fields.RequestField`).
:param boundary:
If not specified, then a random boundary will be generated using
:func:`mimetools.choose_boundary`.
"""
body = BytesIO()
if boundary is None:
boundary = choose_boundary()
for field in iter_field_objects(fields):
body.write(b('--%s\r\n' % (boundary)))
writer(body).write(field.render_headers())
data = field.data
if isinstance(data, int):
data = str(data) # Backwards compatibility
if isinstance(data, six.text_type):
writer(body).write(data)
else:
body.write(data)
body.write(b'\r\n')
body.write(b('--%s--\r\n' % (boundary)))
content_type = str('multipart/form-data; boundary=%s' % boundary)
return body.getvalue(), content_type
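# Usage sketch (field names and file contents are illustrative):
#
#   fields = {
#       'name': 'value',
#       'attachment': ('report.txt', b'file contents', 'text/plain'),
#   }
#   body, content_type = encode_multipart_formdata(fields)
#   # 'body' is the encoded request body and 'content_type' the matching
#   # multipart/form-data header value, including the boundary.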
| mit |
Danielhiversen/home-assistant | homeassistant/components/switch/anel_pwrctrl.py | 8 | 3373 | """
Support for ANEL PwrCtrl switches.
For more details about this platform, please refer to the documentation at
https://home-assistant.io/components/switch.pwrctrl/
"""
import logging
import socket
from datetime import timedelta
import voluptuous as vol
import homeassistant.helpers.config_validation as cv
from homeassistant.components.switch import (SwitchDevice, PLATFORM_SCHEMA)
from homeassistant.const import (CONF_HOST, CONF_PASSWORD, CONF_USERNAME)
from homeassistant.util import Throttle
REQUIREMENTS = ['anel_pwrctrl-homeassistant==0.0.1.dev2']
_LOGGER = logging.getLogger(__name__)
CONF_PORT_RECV = 'port_recv'
CONF_PORT_SEND = 'port_send'
MIN_TIME_BETWEEN_UPDATES = timedelta(seconds=5)
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({
vol.Required(CONF_PORT_RECV): cv.port,
vol.Required(CONF_PORT_SEND): cv.port,
vol.Required(CONF_USERNAME): cv.string,
vol.Required(CONF_PASSWORD): cv.string,
vol.Optional(CONF_HOST): cv.string,
})
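# Example configuration.yaml entry for this platform; host, credentials and
# ports are placeholders:
#
#   switch:
#     - platform: anel_pwrctrl
#       host: 192.168.0.244
#       username: admin
#       password: anel
#       port_recv: 77
#       port_send: 75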
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up PwrCtrl devices/switches."""
host = config.get(CONF_HOST, None)
username = config.get(CONF_USERNAME)
password = config.get(CONF_PASSWORD)
port_recv = config.get(CONF_PORT_RECV)
port_send = config.get(CONF_PORT_SEND)
from anel_pwrctrl import DeviceMaster
try:
master = DeviceMaster(
username=username, password=password, read_port=port_send,
write_port=port_recv)
master.query(ip_addr=host)
except socket.error as ex:
_LOGGER.error("Unable to discover PwrCtrl device: %s", str(ex))
return False
devices = []
for device in master.devices.values():
parent_device = PwrCtrlDevice(device)
devices.extend(
PwrCtrlSwitch(switch, parent_device)
for switch in device.switches.values()
)
add_entities(devices)
class PwrCtrlSwitch(SwitchDevice):
"""Representation of a PwrCtrl switch."""
def __init__(self, port, parent_device):
"""Initialize the PwrCtrl switch."""
self._port = port
self._parent_device = parent_device
@property
def should_poll(self):
"""Return the polling state."""
return True
@property
def unique_id(self):
"""Return the unique ID of the device."""
return '{device}-{switch_idx}'.format(
device=self._port.device.host,
switch_idx=self._port.get_index()
)
@property
def name(self):
"""Return the name of the device."""
return self._port.label
@property
def is_on(self):
"""Return true if the device is on."""
return self._port.get_state()
def update(self):
"""Trigger update for all switches on the parent device."""
self._parent_device.update()
def turn_on(self, **kwargs):
"""Turn the switch on."""
self._port.on()
def turn_off(self, **kwargs):
"""Turn the switch off."""
self._port.off()
class PwrCtrlDevice:
"""Device representation for per device throttling."""
def __init__(self, device):
"""Initialize the PwrCtrl device."""
self._device = device
@Throttle(MIN_TIME_BETWEEN_UPDATES)
def update(self):
"""Update the device and all its switches."""
self._device.update()
| mit |
oopy/micropython | docs/sphinx_selective_exclude/search_auto_exclude.py | 40 | 1397 | #
# This is a Sphinx documentation tool extension which allows to
# automatically exclude from full-text search index document
# which are not referenced via toctree::. It's intended to be
# used with toctrees conditional on only:: directive, with the
# idea being that if you didn't include it in the ToC, you don't
# want the docs being findable by search either (for example,
# because these docs contain information not pertinent to a
# particular product configuration).
#
# This extension depends on "eager_only" extension and won't work
# without it.
#
# Copyright (c) 2016 Paul Sokolovsky
# Licensed under the terms of BSD license, see LICENSE file.
#
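#
# Typical conf.py wiring, assuming the sphinx_selective_exclude package is
# importable from the docs directory (module paths are an assumption based on
# the package layout, not stated in this file):
#
#   extensions = [
#       'sphinx_selective_exclude.eager_only',
#       'sphinx_selective_exclude.search_auto_exclude',
#   ]
#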
import sphinx
org_StandaloneHTMLBuilder_index_page = None
def StandaloneHTMLBuilder_index_page(self, pagename, doctree, title):
if pagename not in self.env.files_to_rebuild:
if pagename != self.env.config.master_doc and 'orphan' not in self.env.metadata[pagename]:
print("Excluding %s from full-text index because it's not referenced in ToC" % pagename)
return
return org_StandaloneHTMLBuilder_index_page(self, pagename, doctree, title)
def setup(app):
global org_StandaloneHTMLBuilder_index_page
org_StandaloneHTMLBuilder_index_page = sphinx.builders.html.StandaloneHTMLBuilder.index_page
sphinx.builders.html.StandaloneHTMLBuilder.index_page = StandaloneHTMLBuilder_index_page
| mit |
Workday/OpenFrame | tools/json_schema_compiler/highlighters/pygments_highlighter.py | 179 | 1273 | # Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import sys
try:
import pygments
from pygments.lexers import CppLexer
from pygments.formatters import HtmlFormatter
PYGMENTS_IMPORTED = True
except ImportError:
print('It appears that Pygments is not installed. '
'Can be installed using easy_install Pygments or from http://pygments.org.')
PYGMENTS_IMPORTED = False
class PygmentsHighlighter(object):
  """Highlighter that uses the python pygments library to highlight code."""
  def __init__(self):
    if not PYGMENTS_IMPORTED:
      raise ImportError('Pygments not installed')
def GetCSS(self, style):
formatter = HtmlFormatter(linenos=True,
style=pygments.styles.get_style_by_name(style))
return formatter.get_style_defs('.highlight')
def GetCodeElement(self, code, style):
formatter = HtmlFormatter(linenos=True,
style=pygments.styles.get_style_by_name(style))
return pygments.highlight(code, CppLexer(), formatter)
def DisplayName(self):
return 'pygments' + ('' if PYGMENTS_IMPORTED else ' (not installed)')
def GetStyles(self):
return list(pygments.styles.get_all_styles())
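# Minimal usage sketch, not part of the upstream tool. It assumes Pygments is
# importable; 'colorful' is just an illustrative style name.
def _example_usage(style='colorful'):
  highlighter = PygmentsHighlighter()
  css = highlighter.GetCSS(style)
  html = highlighter.GetCodeElement('int main() { return 0; }', style)
  return css, html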
| bsd-3-clause |
Poles/Poles | platforms/windows/JsonCpp/scons-local-2.3.0/SCons/Tool/BitKeeper.py | 11 | 2498 | """SCons.Tool.BitKeeper.py
Tool-specific initialization for the BitKeeper source code control
system.
There normally shouldn't be any need to import this module directly.
It will usually be imported through the generic SCons.Tool.Tool()
selection method.
"""
#
# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011, 2012, 2013 The SCons Foundation
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
__revision__ = "src/engine/SCons/Tool/BitKeeper.py 2013/03/03 09:48:35 garyo"
import SCons.Action
import SCons.Builder
import SCons.Util
def generate(env):
"""Add a Builder factory function and construction variables for
BitKeeper to an Environment."""
def BitKeeperFactory(env=env):
""" """
import SCons.Warnings as W
W.warn(W.DeprecatedSourceCodeWarning, """The BitKeeper() factory is deprecated and there is no replacement.""")
act = SCons.Action.Action("$BITKEEPERCOM", "$BITKEEPERCOMSTR")
return SCons.Builder.Builder(action = act, env = env)
#setattr(env, 'BitKeeper', BitKeeperFactory)
env.BitKeeper = BitKeeperFactory
env['BITKEEPER'] = 'bk'
env['BITKEEPERGET'] = '$BITKEEPER get'
env['BITKEEPERGETFLAGS'] = SCons.Util.CLVar('')
env['BITKEEPERCOM'] = '$BITKEEPERGET $BITKEEPERGETFLAGS $TARGET'
def exists(env):
return env.Detect('bk')
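# Hedged usage sketch for an SConstruct, not part of the upstream tool. The tool
# is deprecated, as the warning above states; SourceCode() refers to the old
# deprecated source-fetching API and the file name is illustrative only.
#
#     env = Environment(tools=['BitKeeper'])
#     env.SourceCode('.', env.BitKeeper())
#     env.Program('hello.c')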
# Local Variables:
# tab-width:4
# indent-tabs-mode:nil
# End:
# vim: set expandtab tabstop=4 shiftwidth=4:
| gpl-3.0 |
SaschaMester/delicium | tools/perf/page_sets/mse_cases.py | 1 | 2010 | # Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from telemetry.page import page as page_module
from telemetry import story
class MseCasesPage(page_module.Page):
def __init__(self, url, page_set):
super(MseCasesPage, self).__init__(url=url, page_set=page_set)
def RunNavigateSteps(self, action_runner):
super(MseCasesPage, self).RunNavigateSteps(action_runner)
action_runner.WaitForJavaScriptCondition('window.__testDone == true')
class MseCasesPageSet(story.StorySet):
""" Media source extensions perf benchmark """
def __init__(self):
super(MseCasesPageSet, self).__init__(
cloud_storage_bucket=story.PUBLIC_BUCKET)
urls_list = [
'file://mse_cases/startup_test.html?testType=AV',
'file://mse_cases/startup_test.html?testType=AV&useAppendStream=true',
# pylint: disable=C0301
'file://mse_cases/startup_test.html?testType=AV&doNotWaitForBodyOnLoad=true',
# pylint: disable=C0301
'file://mse_cases/startup_test.html?testType=AV&useAppendStream=true&doNotWaitForBodyOnLoad=true',
'file://mse_cases/startup_test.html?testType=V',
'file://mse_cases/startup_test.html?testType=V&useAppendStream=true',
# pylint: disable=C0301
'file://mse_cases/startup_test.html?testType=V&doNotWaitForBodyOnLoad=true',
# pylint: disable=C0301
'file://mse_cases/startup_test.html?testType=V&useAppendStream=true&doNotWaitForBodyOnLoad=true',
'file://mse_cases/startup_test.html?testType=A',
'file://mse_cases/startup_test.html?testType=A&useAppendStream=true',
# pylint: disable=C0301
'file://mse_cases/startup_test.html?testType=A&doNotWaitForBodyOnLoad=true',
# pylint: disable=C0301
'file://mse_cases/startup_test.html?testType=A&useAppendStream=true&doNotWaitForBodyOnLoad=true',
]
for url in urls_list:
self.AddUserStory(MseCasesPage(url, self))
| bsd-3-clause |
RPGOne/scikit-learn | sklearn/externals/funcsigs.py | 118 | 29982 | # Copyright 2001-2013 Python Software Foundation; All Rights Reserved
"""Function signature objects for callables
Back port of Python 3.3's function signature tools from the inspect module,
modified to be compatible with Python 2.6, 2.7 and 3.2+.
"""
from __future__ import absolute_import, division, print_function
import itertools
import functools
import re
import types
try:
from collections import OrderedDict
except ImportError:
from .odict import OrderedDict
__version__ = "0.4"
__all__ = ['BoundArguments', 'Parameter', 'Signature', 'signature']
_WrapperDescriptor = type(type.__call__)
_MethodWrapper = type(all.__call__)
_NonUserDefinedCallables = (_WrapperDescriptor,
_MethodWrapper,
types.BuiltinFunctionType)
def formatannotation(annotation, base_module=None):
if isinstance(annotation, type):
if annotation.__module__ in ('builtins', '__builtin__', base_module):
return annotation.__name__
return annotation.__module__+'.'+annotation.__name__
return repr(annotation)
def _get_user_defined_method(cls, method_name, *nested):
try:
if cls is type:
return
meth = getattr(cls, method_name)
for name in nested:
meth = getattr(meth, name, meth)
except AttributeError:
return
else:
if not isinstance(meth, _NonUserDefinedCallables):
# Once '__signature__' will be added to 'C'-level
# callables, this check won't be necessary
return meth
def signature(obj):
'''Get a signature object for the passed callable.'''
if not callable(obj):
raise TypeError('{0!r} is not a callable object'.format(obj))
if isinstance(obj, types.MethodType):
sig = signature(obj.__func__)
if obj.__self__ is None:
# Unbound method: the first parameter becomes positional-only
if sig.parameters:
first = sig.parameters.values()[0].replace(
kind=_POSITIONAL_ONLY)
return sig.replace(
parameters=(first,) + tuple(sig.parameters.values())[1:])
else:
return sig
else:
# In this case we skip the first parameter of the underlying
# function (usually `self` or `cls`).
return sig.replace(parameters=tuple(sig.parameters.values())[1:])
try:
sig = obj.__signature__
except AttributeError:
pass
else:
if sig is not None:
return sig
try:
# Was this function wrapped by a decorator?
wrapped = obj.__wrapped__
except AttributeError:
pass
else:
return signature(wrapped)
if isinstance(obj, types.FunctionType):
return Signature.from_function(obj)
if isinstance(obj, functools.partial):
sig = signature(obj.func)
new_params = OrderedDict(sig.parameters.items())
partial_args = obj.args or ()
partial_keywords = obj.keywords or {}
try:
ba = sig.bind_partial(*partial_args, **partial_keywords)
except TypeError as ex:
msg = 'partial object {0!r} has incorrect arguments'.format(obj)
raise ValueError(msg)
for arg_name, arg_value in ba.arguments.items():
param = new_params[arg_name]
if arg_name in partial_keywords:
# We set a new default value, because the following code
# is correct:
#
# >>> def foo(a): print(a)
# >>> print(partial(partial(foo, a=10), a=20)())
# 20
# >>> print(partial(partial(foo, a=10), a=20)(a=30))
# 30
#
# So, with 'partial' objects, passing a keyword argument is
# like setting a new default value for the corresponding
# parameter
#
# We also mark this parameter with '_partial_kwarg'
# flag. Later, in '_bind', the 'default' value of this
# parameter will be added to 'kwargs', to simulate
# the 'functools.partial' real call.
new_params[arg_name] = param.replace(default=arg_value,
_partial_kwarg=True)
elif (param.kind not in (_VAR_KEYWORD, _VAR_POSITIONAL) and
not param._partial_kwarg):
new_params.pop(arg_name)
return sig.replace(parameters=new_params.values())
sig = None
if isinstance(obj, type):
# obj is a class or a metaclass
# First, let's see if it has an overloaded __call__ defined
# in its metaclass
call = _get_user_defined_method(type(obj), '__call__')
if call is not None:
sig = signature(call)
else:
# Now we check if the 'obj' class has a '__new__' method
new = _get_user_defined_method(obj, '__new__')
if new is not None:
sig = signature(new)
else:
# Finally, we should have at least __init__ implemented
init = _get_user_defined_method(obj, '__init__')
if init is not None:
sig = signature(init)
elif not isinstance(obj, _NonUserDefinedCallables):
# An object with __call__
# We also check that the 'obj' is not an instance of
# _WrapperDescriptor or _MethodWrapper to avoid
# infinite recursion (and even potential segfault)
call = _get_user_defined_method(type(obj), '__call__', 'im_func')
if call is not None:
sig = signature(call)
if sig is not None:
# For classes and objects we skip the first parameter of their
# __call__, __new__, or __init__ methods
return sig.replace(parameters=tuple(sig.parameters.values())[1:])
if isinstance(obj, types.BuiltinFunctionType):
# Raise a nicer error message for builtins
msg = 'no signature found for builtin function {0!r}'.format(obj)
raise ValueError(msg)
raise ValueError('callable {0!r} is not supported by signature'.format(obj))
class _void(object):
'''A private marker - used in Parameter & Signature'''
class _empty(object):
pass
class _ParameterKind(int):
def __new__(self, *args, **kwargs):
obj = int.__new__(self, *args)
obj._name = kwargs['name']
return obj
def __str__(self):
return self._name
def __repr__(self):
return '<_ParameterKind: {0!r}>'.format(self._name)
_POSITIONAL_ONLY = _ParameterKind(0, name='POSITIONAL_ONLY')
_POSITIONAL_OR_KEYWORD = _ParameterKind(1, name='POSITIONAL_OR_KEYWORD')
_VAR_POSITIONAL = _ParameterKind(2, name='VAR_POSITIONAL')
_KEYWORD_ONLY = _ParameterKind(3, name='KEYWORD_ONLY')
_VAR_KEYWORD = _ParameterKind(4, name='VAR_KEYWORD')
class Parameter(object):
'''Represents a parameter in a function signature.
Has the following public attributes:
* name : str
The name of the parameter as a string.
* default : object
The default value for the parameter if specified. If the
parameter has no default value, this attribute is not set.
* annotation
The annotation for the parameter if specified. If the
parameter has no annotation, this attribute is not set.
* kind : str
Describes how argument values are bound to the parameter.
Possible values: `Parameter.POSITIONAL_ONLY`,
`Parameter.POSITIONAL_OR_KEYWORD`, `Parameter.VAR_POSITIONAL`,
`Parameter.KEYWORD_ONLY`, `Parameter.VAR_KEYWORD`.
'''
__slots__ = ('_name', '_kind', '_default', '_annotation', '_partial_kwarg')
POSITIONAL_ONLY = _POSITIONAL_ONLY
POSITIONAL_OR_KEYWORD = _POSITIONAL_OR_KEYWORD
VAR_POSITIONAL = _VAR_POSITIONAL
KEYWORD_ONLY = _KEYWORD_ONLY
VAR_KEYWORD = _VAR_KEYWORD
empty = _empty
def __init__(self, name, kind, default=_empty, annotation=_empty,
_partial_kwarg=False):
if kind not in (_POSITIONAL_ONLY, _POSITIONAL_OR_KEYWORD,
_VAR_POSITIONAL, _KEYWORD_ONLY, _VAR_KEYWORD):
raise ValueError("invalid value for 'Parameter.kind' attribute")
self._kind = kind
if default is not _empty:
if kind in (_VAR_POSITIONAL, _VAR_KEYWORD):
msg = '{0} parameters cannot have default values'.format(kind)
raise ValueError(msg)
self._default = default
self._annotation = annotation
if name is None:
if kind != _POSITIONAL_ONLY:
raise ValueError("None is not a valid name for a "
"non-positional-only parameter")
self._name = name
else:
name = str(name)
if kind != _POSITIONAL_ONLY and not re.match(r'[a-z_]\w*$', name, re.I):
msg = '{0!r} is not a valid parameter name'.format(name)
raise ValueError(msg)
self._name = name
self._partial_kwarg = _partial_kwarg
@property
def name(self):
return self._name
@property
def default(self):
return self._default
@property
def annotation(self):
return self._annotation
@property
def kind(self):
return self._kind
def replace(self, name=_void, kind=_void, annotation=_void,
default=_void, _partial_kwarg=_void):
'''Creates a customized copy of the Parameter.'''
if name is _void:
name = self._name
if kind is _void:
kind = self._kind
if annotation is _void:
annotation = self._annotation
if default is _void:
default = self._default
if _partial_kwarg is _void:
_partial_kwarg = self._partial_kwarg
return type(self)(name, kind, default=default, annotation=annotation,
_partial_kwarg=_partial_kwarg)
def __str__(self):
kind = self.kind
formatted = self._name
if kind == _POSITIONAL_ONLY:
if formatted is None:
formatted = ''
formatted = '<{0}>'.format(formatted)
# Add annotation and default value
if self._annotation is not _empty:
formatted = '{0}:{1}'.format(formatted,
formatannotation(self._annotation))
if self._default is not _empty:
formatted = '{0}={1}'.format(formatted, repr(self._default))
if kind == _VAR_POSITIONAL:
formatted = '*' + formatted
elif kind == _VAR_KEYWORD:
formatted = '**' + formatted
return formatted
def __repr__(self):
return '<{0} at {1:#x} {2!r}>'.format(self.__class__.__name__,
id(self), self.name)
def __hash__(self):
msg = "unhashable type: '{0}'".format(self.__class__.__name__)
raise TypeError(msg)
def __eq__(self, other):
return (issubclass(other.__class__, Parameter) and
self._name == other._name and
self._kind == other._kind and
self._default == other._default and
self._annotation == other._annotation)
def __ne__(self, other):
return not self.__eq__(other)
class BoundArguments(object):
'''Result of `Signature.bind` call. Holds the mapping of arguments
to the function's parameters.
Has the following public attributes:
* arguments : OrderedDict
An ordered mutable mapping of parameters' names to arguments' values.
Does not contain arguments' default values.
* signature : Signature
The Signature object that created this instance.
* args : tuple
Tuple of positional arguments values.
* kwargs : dict
Dict of keyword arguments values.
'''
def __init__(self, signature, arguments):
self.arguments = arguments
self._signature = signature
@property
def signature(self):
return self._signature
@property
def args(self):
args = []
for param_name, param in self._signature.parameters.items():
if (param.kind in (_VAR_KEYWORD, _KEYWORD_ONLY) or
param._partial_kwarg):
# Keyword arguments mapped by 'functools.partial'
# (Parameter._partial_kwarg is True) are mapped
# in 'BoundArguments.kwargs', along with VAR_KEYWORD &
# KEYWORD_ONLY
break
try:
arg = self.arguments[param_name]
except KeyError:
# We're done here. Other arguments
# will be mapped in 'BoundArguments.kwargs'
break
else:
if param.kind == _VAR_POSITIONAL:
# *args
args.extend(arg)
else:
# plain argument
args.append(arg)
return tuple(args)
@property
def kwargs(self):
kwargs = {}
kwargs_started = False
for param_name, param in self._signature.parameters.items():
if not kwargs_started:
if (param.kind in (_VAR_KEYWORD, _KEYWORD_ONLY) or
param._partial_kwarg):
kwargs_started = True
else:
if param_name not in self.arguments:
kwargs_started = True
continue
if not kwargs_started:
continue
try:
arg = self.arguments[param_name]
except KeyError:
pass
else:
if param.kind == _VAR_KEYWORD:
# **kwargs
kwargs.update(arg)
else:
# plain keyword argument
kwargs[param_name] = arg
return kwargs
def __hash__(self):
msg = "unhashable type: '{0}'".format(self.__class__.__name__)
raise TypeError(msg)
def __eq__(self, other):
return (issubclass(other.__class__, BoundArguments) and
self.signature == other.signature and
self.arguments == other.arguments)
def __ne__(self, other):
return not self.__eq__(other)
class Signature(object):
'''A Signature object represents the overall signature of a function.
It stores a Parameter object for each parameter accepted by the
function, as well as information specific to the function itself.
A Signature object has the following public attributes and methods:
* parameters : OrderedDict
An ordered mapping of parameters' names to the corresponding
Parameter objects (keyword-only arguments are in the same order
as listed in `code.co_varnames`).
* return_annotation : object
The annotation for the return type of the function if specified.
If the function has no annotation for its return type, this
attribute is not set.
* bind(*args, **kwargs) -> BoundArguments
Creates a mapping from positional and keyword arguments to
parameters.
* bind_partial(*args, **kwargs) -> BoundArguments
Creates a partial mapping from positional and keyword arguments
to parameters (simulating 'functools.partial' behavior.)
'''
__slots__ = ('_return_annotation', '_parameters')
_parameter_cls = Parameter
_bound_arguments_cls = BoundArguments
empty = _empty
def __init__(self, parameters=None, return_annotation=_empty,
__validate_parameters__=True):
'''Constructs Signature from the given list of Parameter
objects and 'return_annotation'. All arguments are optional.
'''
if parameters is None:
params = OrderedDict()
else:
if __validate_parameters__:
params = OrderedDict()
top_kind = _POSITIONAL_ONLY
for idx, param in enumerate(parameters):
kind = param.kind
if kind < top_kind:
msg = 'wrong parameter order: {0} before {1}'
msg = msg.format(top_kind, param.kind)
raise ValueError(msg)
else:
top_kind = kind
name = param.name
if name is None:
name = str(idx)
param = param.replace(name=name)
if name in params:
msg = 'duplicate parameter name: {0!r}'.format(name)
raise ValueError(msg)
params[name] = param
else:
params = OrderedDict(((param.name, param)
for param in parameters))
self._parameters = params
self._return_annotation = return_annotation
@classmethod
def from_function(cls, func):
'''Constructs Signature for the given python function'''
if not isinstance(func, types.FunctionType):
raise TypeError('{0!r} is not a Python function'.format(func))
Parameter = cls._parameter_cls
# Parameter information.
func_code = func.__code__
pos_count = func_code.co_argcount
arg_names = func_code.co_varnames
positional = tuple(arg_names[:pos_count])
keyword_only_count = getattr(func_code, 'co_kwonlyargcount', 0)
keyword_only = arg_names[pos_count:(pos_count + keyword_only_count)]
annotations = getattr(func, '__annotations__', {})
defaults = func.__defaults__
kwdefaults = getattr(func, '__kwdefaults__', None)
if defaults:
pos_default_count = len(defaults)
else:
pos_default_count = 0
parameters = []
# Non-keyword-only parameters w/o defaults.
non_default_count = pos_count - pos_default_count
for name in positional[:non_default_count]:
annotation = annotations.get(name, _empty)
parameters.append(Parameter(name, annotation=annotation,
kind=_POSITIONAL_OR_KEYWORD))
# ... w/ defaults.
for offset, name in enumerate(positional[non_default_count:]):
annotation = annotations.get(name, _empty)
parameters.append(Parameter(name, annotation=annotation,
kind=_POSITIONAL_OR_KEYWORD,
default=defaults[offset]))
# *args
if func_code.co_flags & 0x04:
name = arg_names[pos_count + keyword_only_count]
annotation = annotations.get(name, _empty)
parameters.append(Parameter(name, annotation=annotation,
kind=_VAR_POSITIONAL))
# Keyword-only parameters.
for name in keyword_only:
default = _empty
if kwdefaults is not None:
default = kwdefaults.get(name, _empty)
annotation = annotations.get(name, _empty)
parameters.append(Parameter(name, annotation=annotation,
kind=_KEYWORD_ONLY,
default=default))
# **kwargs
if func_code.co_flags & 0x08:
index = pos_count + keyword_only_count
if func_code.co_flags & 0x04:
index += 1
name = arg_names[index]
annotation = annotations.get(name, _empty)
parameters.append(Parameter(name, annotation=annotation,
kind=_VAR_KEYWORD))
return cls(parameters,
return_annotation=annotations.get('return', _empty),
__validate_parameters__=False)
@property
def parameters(self):
try:
return types.MappingProxyType(self._parameters)
except AttributeError:
return OrderedDict(self._parameters.items())
@property
def return_annotation(self):
return self._return_annotation
def replace(self, parameters=_void, return_annotation=_void):
'''Creates a customized copy of the Signature.
Pass 'parameters' and/or 'return_annotation' arguments
to override them in the new copy.
'''
if parameters is _void:
parameters = self.parameters.values()
if return_annotation is _void:
return_annotation = self._return_annotation
return type(self)(parameters,
return_annotation=return_annotation)
def __hash__(self):
msg = "unhashable type: '{0}'".format(self.__class__.__name__)
raise TypeError(msg)
def __eq__(self, other):
if (not issubclass(type(other), Signature) or
self.return_annotation != other.return_annotation or
len(self.parameters) != len(other.parameters)):
return False
other_positions = dict((param, idx)
for idx, param in enumerate(other.parameters.keys()))
for idx, (param_name, param) in enumerate(self.parameters.items()):
if param.kind == _KEYWORD_ONLY:
try:
other_param = other.parameters[param_name]
except KeyError:
return False
else:
if param != other_param:
return False
else:
try:
other_idx = other_positions[param_name]
except KeyError:
return False
else:
if (idx != other_idx or
param != other.parameters[param_name]):
return False
return True
def __ne__(self, other):
return not self.__eq__(other)
def _bind(self, args, kwargs, partial=False):
'''Private method. Don't use directly.'''
arguments = OrderedDict()
parameters = iter(self.parameters.values())
parameters_ex = ()
arg_vals = iter(args)
if partial:
# Support for binding arguments to 'functools.partial' objects.
# See 'functools.partial' case in 'signature()' implementation
# for details.
for param_name, param in self.parameters.items():
if (param._partial_kwarg and param_name not in kwargs):
# Simulating 'functools.partial' behavior
kwargs[param_name] = param.default
while True:
# Let's iterate through the positional arguments and corresponding
# parameters
try:
arg_val = next(arg_vals)
except StopIteration:
# No more positional arguments
try:
param = next(parameters)
except StopIteration:
# No more parameters. That's it. Just need to check that
# we have no `kwargs` after this while loop
break
else:
if param.kind == _VAR_POSITIONAL:
# That's OK, just empty *args. Let's start parsing
# kwargs
break
elif param.name in kwargs:
if param.kind == _POSITIONAL_ONLY:
msg = '{arg!r} parameter is positional only, ' \
'but was passed as a keyword'
msg = msg.format(arg=param.name)
raise TypeError(msg)
parameters_ex = (param,)
break
elif (param.kind == _VAR_KEYWORD or
param.default is not _empty):
# That's fine too - we have a default value for this
# parameter. So, lets start parsing `kwargs`, starting
# with the current parameter
parameters_ex = (param,)
break
else:
if partial:
parameters_ex = (param,)
break
else:
msg = '{arg!r} parameter lacking default value'
msg = msg.format(arg=param.name)
raise TypeError(msg)
else:
# We have a positional argument to process
try:
param = next(parameters)
except StopIteration:
raise TypeError('too many positional arguments')
else:
if param.kind in (_VAR_KEYWORD, _KEYWORD_ONLY):
# Looks like we have no parameter for this positional
# argument
raise TypeError('too many positional arguments')
if param.kind == _VAR_POSITIONAL:
# We have an '*args'-like argument, let's fill it with
# all positional arguments we have left and move on to
# the next phase
values = [arg_val]
values.extend(arg_vals)
arguments[param.name] = tuple(values)
break
if param.name in kwargs:
raise TypeError('multiple values for argument '
'{arg!r}'.format(arg=param.name))
arguments[param.name] = arg_val
# Now, we iterate through the remaining parameters to process
# keyword arguments
kwargs_param = None
for param in itertools.chain(parameters_ex, parameters):
if param.kind == _POSITIONAL_ONLY:
# This should never happen in case of a properly built
# Signature object (but let's have this check here
# to ensure correct behaviour just in case)
raise TypeError('{arg!r} parameter is positional only, '
'but was passed as a keyword'. \
format(arg=param.name))
if param.kind == _VAR_KEYWORD:
# Memorize that we have a '**kwargs'-like parameter
kwargs_param = param
continue
param_name = param.name
try:
arg_val = kwargs.pop(param_name)
except KeyError:
# We have no value for this parameter. It's fine though,
# if it has a default value, or it is an '*args'-like
# parameter, left alone by the processing of positional
# arguments.
if (not partial and param.kind != _VAR_POSITIONAL and
param.default is _empty):
raise TypeError('{arg!r} parameter lacking default value'. \
format(arg=param_name))
else:
arguments[param_name] = arg_val
if kwargs:
if kwargs_param is not None:
# Process our '**kwargs'-like parameter
arguments[kwargs_param.name] = kwargs
else:
raise TypeError('too many keyword arguments')
return self._bound_arguments_cls(self, arguments)
def bind(self, *args, **kwargs):
'''Get a BoundArguments object, that maps the passed `args`
and `kwargs` to the function's signature. Raises `TypeError`
if the passed arguments can not be bound.
'''
return self._bind(args, kwargs)
def bind_partial(self, *args, **kwargs):
'''Get a BoundArguments object, that partially maps the
passed `args` and `kwargs` to the function's signature.
Raises `TypeError` if the passed arguments can not be bound.
'''
return self._bind(args, kwargs, partial=True)
def __str__(self):
result = []
render_kw_only_separator = True
for idx, param in enumerate(self.parameters.values()):
formatted = str(param)
kind = param.kind
if kind == _VAR_POSITIONAL:
# OK, we have an '*args'-like parameter, so we won't need
# a '*' to separate keyword-only arguments
render_kw_only_separator = False
elif kind == _KEYWORD_ONLY and render_kw_only_separator:
# We have a keyword-only parameter to render and we haven't
# rendered an '*args'-like parameter before, so add a '*'
# separator to the parameters list ("foo(arg1, *, arg2)" case)
result.append('*')
# This condition should be only triggered once, so
# reset the flag
render_kw_only_separator = False
result.append(formatted)
rendered = '({0})'.format(', '.join(result))
if self.return_annotation is not _empty:
anno = formatannotation(self.return_annotation)
rendered += ' -> {0}'.format(anno)
return rendered
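# Small usage sketch of the public API defined above; not part of the original
# backport. The asserts reflect how bind() maps the example call.
def _example_usage():
    def greet(name, greeting='hello', *args, **kwargs):
        return '%s, %s' % (greeting, name)
    sig = signature(greet)  # str(sig) -> "(name, greeting='hello', *args, **kwargs)"
    bound = sig.bind('world', greeting='hi')
    assert bound.args == ('world', 'hi')
    assert bound.kwargs == {}
    return sig, bound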
| bsd-3-clause |
RNAcentral/rnacentral-import-pipeline | rnacentral_pipeline/cli/rfam.py | 1 | 2267 | # -*- coding: utf-8 -*-
"""
Copyright [2009-2018] EMBL-European Bioinformatics Institute
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from pathlib import Path
import click
from rnacentral_pipeline.databases import rfam
from rnacentral_pipeline.writers import entry_writer
@click.group("rfam")
def cli():
"""
    Commands for processing the Rfam metadata.
"""
pass
@cli.command("parse")
@click.argument("rfam_file", type=click.File("r"))
@click.argument("mapping_file", type=click.File("r"))
@click.argument(
"output",
default=".",
type=click.Path(
writable=True,
dir_okay=True,
file_okay=False,
),
)
def process_rfam(rfam_file, mapping_file, output):
"""
Process Rfam's JSON format into the files to import.
"""
entries = rfam.parser.parse(rfam_file, mapping_file)
with entry_writer(Path(output)) as writer:
writer.write(entries)
@cli.command("families")
@click.argument("filename", default="data.tsv", type=click.File("r"))
@click.argument("output", default="rfam-families.csv", type=click.File("w"))
def rfam_group_families(filename, output):
rfam.families.from_file(filename, output)
@cli.command("clans")
@click.argument("filename", default="data.tsv", type=click.File("r"))
@click.argument("output", default="rfam-clans.csv", type=click.File("w"))
def rfam_group_clans(filename, output):
rfam.clans.from_file(filename, output)
@cli.command("ontology-terms")
@click.argument("filename", default="data.tsv", type=click.File("r"))
@click.argument(
"output",
default=".",
type=click.Path(
writable=True,
dir_okay=True,
file_okay=False,
),
)
def ontologies_rfam_terms(filename, output):
rfam.cross_references.from_file(filename, Path(output))
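# Hedged sketch of invoking these commands, not part of the original module. It
# assumes this click group is mounted on the project's main `rnacentral` CLI;
# the file and directory names are illustrative only.
#
#     rnacentral rfam parse rfam.json mapping.tsv output/
#     rnacentral rfam families families.tsv rfam-families.csv
#     rnacentral rfam clans clans.tsv rfam-clans.csv
#     rnacentral rfam ontology-terms terms.tsv output/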
| apache-2.0 |
Maikflow/django_test | lib/python2.7/site-packages/Django-1.7.1-py2.7.egg/django/utils/dates.py | 115 | 2296 | "Commonly-used date structures"
from django.utils.translation import ugettext_lazy as _, pgettext_lazy
WEEKDAYS = {
0: _('Monday'), 1: _('Tuesday'), 2: _('Wednesday'), 3: _('Thursday'), 4: _('Friday'),
5: _('Saturday'), 6: _('Sunday')
}
WEEKDAYS_ABBR = {
0: _('Mon'), 1: _('Tue'), 2: _('Wed'), 3: _('Thu'), 4: _('Fri'),
5: _('Sat'), 6: _('Sun')
}
WEEKDAYS_REV = {
'monday': 0, 'tuesday': 1, 'wednesday': 2, 'thursday': 3, 'friday': 4,
'saturday': 5, 'sunday': 6
}
MONTHS = {
1: _('January'), 2: _('February'), 3: _('March'), 4: _('April'), 5: _('May'), 6: _('June'),
7: _('July'), 8: _('August'), 9: _('September'), 10: _('October'), 11: _('November'),
12: _('December')
}
MONTHS_3 = {
1: _('jan'), 2: _('feb'), 3: _('mar'), 4: _('apr'), 5: _('may'), 6: _('jun'),
7: _('jul'), 8: _('aug'), 9: _('sep'), 10: _('oct'), 11: _('nov'), 12: _('dec')
}
MONTHS_3_REV = {
'jan': 1, 'feb': 2, 'mar': 3, 'apr': 4, 'may': 5, 'jun': 6, 'jul': 7, 'aug': 8,
'sep': 9, 'oct': 10, 'nov': 11, 'dec': 12
}
MONTHS_AP = { # month names in Associated Press style
1: pgettext_lazy('abbrev. month', 'Jan.'),
2: pgettext_lazy('abbrev. month', 'Feb.'),
3: pgettext_lazy('abbrev. month', 'March'),
4: pgettext_lazy('abbrev. month', 'April'),
5: pgettext_lazy('abbrev. month', 'May'),
6: pgettext_lazy('abbrev. month', 'June'),
7: pgettext_lazy('abbrev. month', 'July'),
8: pgettext_lazy('abbrev. month', 'Aug.'),
9: pgettext_lazy('abbrev. month', 'Sept.'),
10: pgettext_lazy('abbrev. month', 'Oct.'),
11: pgettext_lazy('abbrev. month', 'Nov.'),
12: pgettext_lazy('abbrev. month', 'Dec.')
}
MONTHS_ALT = { # required for long date representation by some locales
1: pgettext_lazy('alt. month', 'January'),
2: pgettext_lazy('alt. month', 'February'),
3: pgettext_lazy('alt. month', 'March'),
4: pgettext_lazy('alt. month', 'April'),
5: pgettext_lazy('alt. month', 'May'),
6: pgettext_lazy('alt. month', 'June'),
7: pgettext_lazy('alt. month', 'July'),
8: pgettext_lazy('alt. month', 'August'),
9: pgettext_lazy('alt. month', 'September'),
10: pgettext_lazy('alt. month', 'October'),
11: pgettext_lazy('alt. month', 'November'),
12: pgettext_lazy('alt. month', 'December')
}
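# Illustrative lookups, not part of the original module. The values are lazy
# translation objects, so forcing them with str() renders them in the active
# language (English shown here).
#
#     >>> from django.utils.dates import MONTHS, WEEKDAYS_ABBR, MONTHS_AP
#     >>> str(MONTHS[1]), str(WEEKDAYS_ABBR[0]), str(MONTHS_AP[9])
#     ('January', 'Mon', 'Sept.')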
| gpl-2.0 |
Mistobaan/tensorflow | tensorflow/contrib/py2tf/pyct/parser.py | 4 | 1152 | # Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Converting code to AST.
Adapted from Tangent.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import textwrap
import gast
from tensorflow.python.util import tf_inspect
def parse_object(obj):
"""Return the AST of given object."""
return parse_str(tf_inspect.getsource(obj))
def parse_str(src):
"""Return the AST of given piece of code."""
return gast.parse(textwrap.dedent(src))
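# Minimal sketch, not part of the original module, showing what parse_str()
# returns (a gast module node whose body holds the parsed definitions).
def _example_parse():
  module_node = parse_str('def double(x):\n  return 2 * x\n')
  return module_node.body[0].name  # -> 'double'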
| apache-2.0 |
andreaso/ansible | lib/ansible/plugins/connection/lxd.py | 133 | 4283 | # (c) 2016 Matt Clay <[email protected]>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import os
from distutils.spawn import find_executable
from subprocess import call, Popen, PIPE
from ansible.errors import AnsibleError, AnsibleConnectionFailure, AnsibleFileNotFound
from ansible.module_utils._text import to_bytes, to_text
from ansible.plugins.connection import ConnectionBase
class Connection(ConnectionBase):
""" lxd based connections """
transport = "lxd"
has_pipelining = True
def __init__(self, play_context, new_stdin, *args, **kwargs):
super(Connection, self).__init__(play_context, new_stdin, *args, **kwargs)
self._host = self._play_context.remote_addr
self._lxc_cmd = find_executable("lxc")
if not self._lxc_cmd:
raise AnsibleError("lxc command not found in PATH")
if self._play_context.remote_user is not None and self._play_context.remote_user != 'root':
self._display.warning('lxd does not support remote_user, using container default: root')
def _connect(self):
"""connect to lxd (nothing to do here) """
super(Connection, self)._connect()
if not self._connected:
self._display.vvv(u"ESTABLISH LXD CONNECTION FOR USER: root", host=self._host)
self._connected = True
def exec_command(self, cmd, in_data=None, sudoable=True):
""" execute a command on the lxd host """
super(Connection, self).exec_command(cmd, in_data=in_data, sudoable=sudoable)
self._display.vvv(u"EXEC {0}".format(cmd), host=self._host)
local_cmd = [self._lxc_cmd, "exec", self._host, "--", self._play_context.executable, "-c", cmd]
local_cmd = [to_bytes(i, errors='surrogate_or_strict') for i in local_cmd]
in_data = to_bytes(in_data, errors='surrogate_or_strict', nonstring='passthru')
process = Popen(local_cmd, stdin=PIPE, stdout=PIPE, stderr=PIPE)
stdout, stderr = process.communicate(in_data)
stdout = to_text(stdout)
stderr = to_text(stderr)
if stderr == "error: Container is not running.\n":
raise AnsibleConnectionFailure("container not running: %s" % self._host)
if stderr == "error: not found\n":
raise AnsibleConnectionFailure("container not found: %s" % self._host)
return process.returncode, stdout, stderr
def put_file(self, in_path, out_path):
""" put a file from local to lxd """
super(Connection, self).put_file(in_path, out_path)
self._display.vvv(u"PUT {0} TO {1}".format(in_path, out_path), host=self._host)
if not os.path.isfile(to_bytes(in_path, errors='surrogate_or_strict')):
raise AnsibleFileNotFound("input path is not a file: %s" % in_path)
local_cmd = [self._lxc_cmd, "file", "push", in_path, self._host + "/" + out_path]
local_cmd = [to_bytes(i, errors='surrogate_or_strict') for i in local_cmd]
call(local_cmd)
def fetch_file(self, in_path, out_path):
""" fetch a file from lxd to local """
super(Connection, self).fetch_file(in_path, out_path)
self._display.vvv(u"FETCH {0} TO {1}".format(in_path, out_path), host=self._host)
local_cmd = [self._lxc_cmd, "file", "pull", self._host + "/" + in_path, out_path]
local_cmd = [to_bytes(i, errors='surrogate_or_strict') for i in local_cmd]
call(local_cmd)
def close(self):
""" close the connection (nothing to do here) """
super(Connection, self).close()
self._connected = False
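# Hedged usage sketch, not part of the original plugin. The container name
# 'mycontainer' is illustrative; the plugin is selected like any other Ansible
# connection plugin via its transport name "lxd":
#
#     ansible all -i 'mycontainer,' -c lxd -m setup
#
# or per host/group in a playbook or inventory:
#
#     connection: lxd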
| gpl-3.0 |
abramhindle/UnnaturalCodeFork | python/testdata/launchpad/lib/lp/registry/browser/personproduct.py | 1 | 2189 | # Copyright 2009 Canonical Ltd. This software is licensed under the
# GNU Affero General Public License version 3 (see the file LICENSE).
"""Views, menus and traversal related to PersonProducts."""
__metaclass__ = type
__all__ = [
'PersonProductBreadcrumb',
'PersonProductFacets',
'PersonProductNavigation',
]
from zope.component import queryAdapter
from zope.traversing.interfaces import IPathAdapter
from lp.app.errors import NotFoundError
from lp.code.interfaces.branchnamespace import get_branch_namespace
from lp.registry.interfaces.personproduct import IPersonProduct
from lp.services.webapp import (
canonical_url,
Link,
Navigation,
StandardLaunchpadFacets,
)
from lp.services.webapp.breadcrumb import Breadcrumb
class PersonProductNavigation(Navigation):
"""Navigation to branches for this person/product."""
usedfor = IPersonProduct
def traverse(self, branch_name):
"""Look for a branch in the person/product namespace."""
namespace = get_branch_namespace(
person=self.context.person, product=self.context.product)
branch = namespace.getByName(branch_name)
if branch is None:
raise NotFoundError
else:
return branch
class PersonProductBreadcrumb(Breadcrumb):
"""Breadcrumb for an `IPersonProduct`."""
@property
def text(self):
return self.context.product.displayname
@property
def url(self):
if self._url is None:
return canonical_url(self.context.product, rootsite=self.rootsite)
else:
return self._url
@property
def icon(self):
return queryAdapter(
self.context.product, IPathAdapter, name='image').icon()
class PersonProductFacets(StandardLaunchpadFacets):
"""The links that will appear in the facet menu for an IPerson."""
usedfor = IPersonProduct
enable_only = ['branches']
def branches(self):
text = 'Code'
summary = ('Bazaar Branches of %s owned by %s' %
(self.context.product.displayname,
self.context.person.displayname))
return Link('', text, summary)
| agpl-3.0 |
Vogeltak/pauselan | lib/python3.4/site-packages/flask/testsuite/basic.py | 406 | 43777 | # -*- coding: utf-8 -*-
"""
flask.testsuite.basic
~~~~~~~~~~~~~~~~~~~~~
The basic functionality.
:copyright: (c) 2011 by Armin Ronacher.
:license: BSD, see LICENSE for more details.
"""
import re
import uuid
import flask
import pickle
import unittest
from datetime import datetime
from threading import Thread
from flask.testsuite import FlaskTestCase, emits_module_deprecation_warning
from flask._compat import text_type
from werkzeug.exceptions import BadRequest, NotFound
from werkzeug.http import parse_date
from werkzeug.routing import BuildError
class BasicFunctionalityTestCase(FlaskTestCase):
def test_options_work(self):
app = flask.Flask(__name__)
@app.route('/', methods=['GET', 'POST'])
def index():
return 'Hello World'
rv = app.test_client().open('/', method='OPTIONS')
self.assert_equal(sorted(rv.allow), ['GET', 'HEAD', 'OPTIONS', 'POST'])
self.assert_equal(rv.data, b'')
def test_options_on_multiple_rules(self):
app = flask.Flask(__name__)
@app.route('/', methods=['GET', 'POST'])
def index():
return 'Hello World'
@app.route('/', methods=['PUT'])
def index_put():
return 'Aha!'
rv = app.test_client().open('/', method='OPTIONS')
self.assert_equal(sorted(rv.allow), ['GET', 'HEAD', 'OPTIONS', 'POST', 'PUT'])
def test_options_handling_disabled(self):
app = flask.Flask(__name__)
def index():
return 'Hello World!'
index.provide_automatic_options = False
app.route('/')(index)
rv = app.test_client().open('/', method='OPTIONS')
self.assert_equal(rv.status_code, 405)
app = flask.Flask(__name__)
def index2():
return 'Hello World!'
index2.provide_automatic_options = True
app.route('/', methods=['OPTIONS'])(index2)
rv = app.test_client().open('/', method='OPTIONS')
self.assert_equal(sorted(rv.allow), ['OPTIONS'])
def test_request_dispatching(self):
app = flask.Flask(__name__)
@app.route('/')
def index():
return flask.request.method
@app.route('/more', methods=['GET', 'POST'])
def more():
return flask.request.method
c = app.test_client()
self.assert_equal(c.get('/').data, b'GET')
rv = c.post('/')
self.assert_equal(rv.status_code, 405)
self.assert_equal(sorted(rv.allow), ['GET', 'HEAD', 'OPTIONS'])
rv = c.head('/')
self.assert_equal(rv.status_code, 200)
self.assert_false(rv.data) # head truncates
self.assert_equal(c.post('/more').data, b'POST')
self.assert_equal(c.get('/more').data, b'GET')
rv = c.delete('/more')
self.assert_equal(rv.status_code, 405)
self.assert_equal(sorted(rv.allow), ['GET', 'HEAD', 'OPTIONS', 'POST'])
def test_url_mapping(self):
app = flask.Flask(__name__)
def index():
return flask.request.method
def more():
return flask.request.method
app.add_url_rule('/', 'index', index)
app.add_url_rule('/more', 'more', more, methods=['GET', 'POST'])
c = app.test_client()
self.assert_equal(c.get('/').data, b'GET')
rv = c.post('/')
self.assert_equal(rv.status_code, 405)
self.assert_equal(sorted(rv.allow), ['GET', 'HEAD', 'OPTIONS'])
rv = c.head('/')
self.assert_equal(rv.status_code, 200)
self.assert_false(rv.data) # head truncates
self.assert_equal(c.post('/more').data, b'POST')
self.assert_equal(c.get('/more').data, b'GET')
rv = c.delete('/more')
self.assert_equal(rv.status_code, 405)
self.assert_equal(sorted(rv.allow), ['GET', 'HEAD', 'OPTIONS', 'POST'])
def test_werkzeug_routing(self):
from werkzeug.routing import Submount, Rule
app = flask.Flask(__name__)
app.url_map.add(Submount('/foo', [
Rule('/bar', endpoint='bar'),
Rule('/', endpoint='index')
]))
def bar():
return 'bar'
def index():
return 'index'
app.view_functions['bar'] = bar
app.view_functions['index'] = index
c = app.test_client()
self.assert_equal(c.get('/foo/').data, b'index')
self.assert_equal(c.get('/foo/bar').data, b'bar')
def test_endpoint_decorator(self):
from werkzeug.routing import Submount, Rule
app = flask.Flask(__name__)
app.url_map.add(Submount('/foo', [
Rule('/bar', endpoint='bar'),
Rule('/', endpoint='index')
]))
@app.endpoint('bar')
def bar():
return 'bar'
@app.endpoint('index')
def index():
return 'index'
c = app.test_client()
self.assert_equal(c.get('/foo/').data, b'index')
self.assert_equal(c.get('/foo/bar').data, b'bar')
def test_session(self):
app = flask.Flask(__name__)
app.secret_key = 'testkey'
@app.route('/set', methods=['POST'])
def set():
flask.session['value'] = flask.request.form['value']
return 'value set'
@app.route('/get')
def get():
return flask.session['value']
c = app.test_client()
self.assert_equal(c.post('/set', data={'value': '42'}).data, b'value set')
self.assert_equal(c.get('/get').data, b'42')
def test_session_using_server_name(self):
app = flask.Flask(__name__)
app.config.update(
SECRET_KEY='foo',
SERVER_NAME='example.com'
)
@app.route('/')
def index():
flask.session['testing'] = 42
return 'Hello World'
rv = app.test_client().get('/', 'http://example.com/')
self.assert_in('domain=.example.com', rv.headers['set-cookie'].lower())
self.assert_in('httponly', rv.headers['set-cookie'].lower())
def test_session_using_server_name_and_port(self):
app = flask.Flask(__name__)
app.config.update(
SECRET_KEY='foo',
SERVER_NAME='example.com:8080'
)
@app.route('/')
def index():
flask.session['testing'] = 42
return 'Hello World'
rv = app.test_client().get('/', 'http://example.com:8080/')
self.assert_in('domain=.example.com', rv.headers['set-cookie'].lower())
self.assert_in('httponly', rv.headers['set-cookie'].lower())
def test_session_using_server_name_port_and_path(self):
app = flask.Flask(__name__)
app.config.update(
SECRET_KEY='foo',
SERVER_NAME='example.com:8080',
APPLICATION_ROOT='/foo'
)
@app.route('/')
def index():
flask.session['testing'] = 42
return 'Hello World'
rv = app.test_client().get('/', 'http://example.com:8080/foo')
self.assert_in('domain=example.com', rv.headers['set-cookie'].lower())
self.assert_in('path=/foo', rv.headers['set-cookie'].lower())
self.assert_in('httponly', rv.headers['set-cookie'].lower())
def test_session_using_application_root(self):
class PrefixPathMiddleware(object):
def __init__(self, app, prefix):
self.app = app
self.prefix = prefix
def __call__(self, environ, start_response):
environ['SCRIPT_NAME'] = self.prefix
return self.app(environ, start_response)
app = flask.Flask(__name__)
app.wsgi_app = PrefixPathMiddleware(app.wsgi_app, '/bar')
app.config.update(
SECRET_KEY='foo',
APPLICATION_ROOT='/bar'
)
@app.route('/')
def index():
flask.session['testing'] = 42
return 'Hello World'
rv = app.test_client().get('/', 'http://example.com:8080/')
self.assert_in('path=/bar', rv.headers['set-cookie'].lower())
def test_session_using_session_settings(self):
app = flask.Flask(__name__)
app.config.update(
SECRET_KEY='foo',
SERVER_NAME='www.example.com:8080',
APPLICATION_ROOT='/test',
SESSION_COOKIE_DOMAIN='.example.com',
SESSION_COOKIE_HTTPONLY=False,
SESSION_COOKIE_SECURE=True,
SESSION_COOKIE_PATH='/'
)
@app.route('/')
def index():
flask.session['testing'] = 42
return 'Hello World'
rv = app.test_client().get('/', 'http://www.example.com:8080/test/')
cookie = rv.headers['set-cookie'].lower()
self.assert_in('domain=.example.com', cookie)
self.assert_in('path=/', cookie)
self.assert_in('secure', cookie)
self.assert_not_in('httponly', cookie)
def test_missing_session(self):
app = flask.Flask(__name__)
def expect_exception(f, *args, **kwargs):
try:
f(*args, **kwargs)
except RuntimeError as e:
self.assert_true(e.args and 'session is unavailable' in e.args[0])
else:
self.assert_true(False, 'expected exception')
with app.test_request_context():
self.assert_true(flask.session.get('missing_key') is None)
expect_exception(flask.session.__setitem__, 'foo', 42)
expect_exception(flask.session.pop, 'foo')
def test_session_expiration(self):
permanent = True
app = flask.Flask(__name__)
app.secret_key = 'testkey'
@app.route('/')
def index():
flask.session['test'] = 42
flask.session.permanent = permanent
return ''
@app.route('/test')
def test():
return text_type(flask.session.permanent)
client = app.test_client()
rv = client.get('/')
self.assert_in('set-cookie', rv.headers)
match = re.search(r'\bexpires=([^;]+)(?i)', rv.headers['set-cookie'])
expires = parse_date(match.group())
expected = datetime.utcnow() + app.permanent_session_lifetime
self.assert_equal(expires.year, expected.year)
self.assert_equal(expires.month, expected.month)
self.assert_equal(expires.day, expected.day)
rv = client.get('/test')
self.assert_equal(rv.data, b'True')
permanent = False
rv = app.test_client().get('/')
self.assert_in('set-cookie', rv.headers)
match = re.search(r'\bexpires=([^;]+)', rv.headers['set-cookie'])
self.assert_true(match is None)
def test_session_stored_last(self):
app = flask.Flask(__name__)
app.secret_key = 'development-key'
app.testing = True
@app.after_request
def modify_session(response):
flask.session['foo'] = 42
return response
@app.route('/')
def dump_session_contents():
return repr(flask.session.get('foo'))
c = app.test_client()
self.assert_equal(c.get('/').data, b'None')
self.assert_equal(c.get('/').data, b'42')
def test_session_special_types(self):
app = flask.Flask(__name__)
app.secret_key = 'development-key'
app.testing = True
now = datetime.utcnow().replace(microsecond=0)
the_uuid = uuid.uuid4()
@app.after_request
def modify_session(response):
flask.session['m'] = flask.Markup('Hello!')
flask.session['u'] = the_uuid
flask.session['dt'] = now
flask.session['b'] = b'\xff'
flask.session['t'] = (1, 2, 3)
return response
@app.route('/')
def dump_session_contents():
return pickle.dumps(dict(flask.session))
c = app.test_client()
c.get('/')
rv = pickle.loads(c.get('/').data)
self.assert_equal(rv['m'], flask.Markup('Hello!'))
self.assert_equal(type(rv['m']), flask.Markup)
self.assert_equal(rv['dt'], now)
self.assert_equal(rv['u'], the_uuid)
self.assert_equal(rv['b'], b'\xff')
self.assert_equal(type(rv['b']), bytes)
self.assert_equal(rv['t'], (1, 2, 3))
def test_flashes(self):
app = flask.Flask(__name__)
app.secret_key = 'testkey'
with app.test_request_context():
self.assert_false(flask.session.modified)
flask.flash('Zap')
flask.session.modified = False
flask.flash('Zip')
self.assert_true(flask.session.modified)
self.assert_equal(list(flask.get_flashed_messages()), ['Zap', 'Zip'])
def test_extended_flashing(self):
# Be sure app.testing=True below, else tests can fail silently.
#
# Specifically, if app.testing is not set to True, the AssertionErrors
# in the view functions will cause a 500 response to the test client
# instead of propagating exceptions.
app = flask.Flask(__name__)
app.secret_key = 'testkey'
app.testing = True
@app.route('/')
def index():
flask.flash(u'Hello World')
flask.flash(u'Hello World', 'error')
flask.flash(flask.Markup(u'<em>Testing</em>'), 'warning')
return ''
@app.route('/test/')
def test():
messages = flask.get_flashed_messages()
self.assert_equal(len(messages), 3)
self.assert_equal(messages[0], u'Hello World')
self.assert_equal(messages[1], u'Hello World')
self.assert_equal(messages[2], flask.Markup(u'<em>Testing</em>'))
return ''
@app.route('/test_with_categories/')
def test_with_categories():
messages = flask.get_flashed_messages(with_categories=True)
self.assert_equal(len(messages), 3)
self.assert_equal(messages[0], ('message', u'Hello World'))
self.assert_equal(messages[1], ('error', u'Hello World'))
self.assert_equal(messages[2], ('warning', flask.Markup(u'<em>Testing</em>')))
return ''
@app.route('/test_filter/')
def test_filter():
messages = flask.get_flashed_messages(category_filter=['message'], with_categories=True)
self.assert_equal(len(messages), 1)
self.assert_equal(messages[0], ('message', u'Hello World'))
return ''
@app.route('/test_filters/')
def test_filters():
messages = flask.get_flashed_messages(category_filter=['message', 'warning'], with_categories=True)
self.assert_equal(len(messages), 2)
self.assert_equal(messages[0], ('message', u'Hello World'))
self.assert_equal(messages[1], ('warning', flask.Markup(u'<em>Testing</em>')))
return ''
@app.route('/test_filters_without_returning_categories/')
def test_filters2():
messages = flask.get_flashed_messages(category_filter=['message', 'warning'])
self.assert_equal(len(messages), 2)
self.assert_equal(messages[0], u'Hello World')
self.assert_equal(messages[1], flask.Markup(u'<em>Testing</em>'))
return ''
# Create new test client on each test to clean flashed messages.
c = app.test_client()
c.get('/')
c.get('/test/')
c = app.test_client()
c.get('/')
c.get('/test_with_categories/')
c = app.test_client()
c.get('/')
c.get('/test_filter/')
c = app.test_client()
c.get('/')
c.get('/test_filters/')
c = app.test_client()
c.get('/')
c.get('/test_filters_without_returning_categories/')
def test_request_processing(self):
app = flask.Flask(__name__)
evts = []
@app.before_request
def before_request():
evts.append('before')
@app.after_request
def after_request(response):
response.data += b'|after'
evts.append('after')
return response
@app.route('/')
def index():
self.assert_in('before', evts)
self.assert_not_in('after', evts)
return 'request'
self.assert_not_in('after', evts)
rv = app.test_client().get('/').data
self.assert_in('after', evts)
self.assert_equal(rv, b'request|after')
def test_after_request_processing(self):
app = flask.Flask(__name__)
app.testing = True
@app.route('/')
def index():
@flask.after_this_request
def foo(response):
response.headers['X-Foo'] = 'a header'
return response
return 'Test'
c = app.test_client()
resp = c.get('/')
self.assertEqual(resp.status_code, 200)
self.assertEqual(resp.headers['X-Foo'], 'a header')
def test_teardown_request_handler(self):
called = []
app = flask.Flask(__name__)
@app.teardown_request
def teardown_request(exc):
called.append(True)
return "Ignored"
@app.route('/')
def root():
return "Response"
rv = app.test_client().get('/')
self.assert_equal(rv.status_code, 200)
self.assert_in(b'Response', rv.data)
self.assert_equal(len(called), 1)
def test_teardown_request_handler_debug_mode(self):
called = []
app = flask.Flask(__name__)
app.testing = True
@app.teardown_request
def teardown_request(exc):
called.append(True)
return "Ignored"
@app.route('/')
def root():
return "Response"
rv = app.test_client().get('/')
self.assert_equal(rv.status_code, 200)
self.assert_in(b'Response', rv.data)
self.assert_equal(len(called), 1)
def test_teardown_request_handler_error(self):
called = []
app = flask.Flask(__name__)
@app.teardown_request
def teardown_request1(exc):
self.assert_equal(type(exc), ZeroDivisionError)
called.append(True)
# This raises a new error and blows away sys.exc_info(), so we can
# test that all teardown_requests get passed the same original
# exception.
try:
raise TypeError()
except:
pass
@app.teardown_request
def teardown_request2(exc):
self.assert_equal(type(exc), ZeroDivisionError)
called.append(True)
# This raises a new error and blows away sys.exc_info(), so we can
# test that all teardown_requests get passed the same original
# exception.
try:
raise TypeError()
except:
pass
@app.route('/')
def fails():
1 // 0
rv = app.test_client().get('/')
self.assert_equal(rv.status_code, 500)
self.assert_in(b'Internal Server Error', rv.data)
self.assert_equal(len(called), 2)
def test_before_after_request_order(self):
called = []
app = flask.Flask(__name__)
@app.before_request
def before1():
called.append(1)
@app.before_request
def before2():
called.append(2)
@app.after_request
def after1(response):
called.append(4)
return response
@app.after_request
def after2(response):
called.append(3)
return response
@app.teardown_request
def finish1(exc):
called.append(6)
@app.teardown_request
def finish2(exc):
called.append(5)
@app.route('/')
def index():
return '42'
rv = app.test_client().get('/')
self.assert_equal(rv.data, b'42')
self.assert_equal(called, [1, 2, 3, 4, 5, 6])
def test_error_handling(self):
app = flask.Flask(__name__)
@app.errorhandler(404)
def not_found(e):
return 'not found', 404
@app.errorhandler(500)
def internal_server_error(e):
return 'internal server error', 500
@app.route('/')
def index():
flask.abort(404)
@app.route('/error')
def error():
1 // 0
c = app.test_client()
rv = c.get('/')
self.assert_equal(rv.status_code, 404)
self.assert_equal(rv.data, b'not found')
rv = c.get('/error')
self.assert_equal(rv.status_code, 500)
self.assert_equal(b'internal server error', rv.data)
def test_before_request_and_routing_errors(self):
app = flask.Flask(__name__)
@app.before_request
def attach_something():
flask.g.something = 'value'
@app.errorhandler(404)
def return_something(error):
return flask.g.something, 404
rv = app.test_client().get('/')
self.assert_equal(rv.status_code, 404)
self.assert_equal(rv.data, b'value')
def test_user_error_handling(self):
class MyException(Exception):
pass
app = flask.Flask(__name__)
@app.errorhandler(MyException)
def handle_my_exception(e):
self.assert_true(isinstance(e, MyException))
return '42'
@app.route('/')
def index():
raise MyException()
c = app.test_client()
self.assert_equal(c.get('/').data, b'42')
def test_trapping_of_bad_request_key_errors(self):
app = flask.Flask(__name__)
app.testing = True
@app.route('/fail')
def fail():
flask.request.form['missing_key']
c = app.test_client()
self.assert_equal(c.get('/fail').status_code, 400)
app.config['TRAP_BAD_REQUEST_ERRORS'] = True
c = app.test_client()
try:
c.get('/fail')
except KeyError as e:
self.assert_true(isinstance(e, BadRequest))
else:
self.fail('Expected exception')
def test_trapping_of_all_http_exceptions(self):
app = flask.Flask(__name__)
app.testing = True
app.config['TRAP_HTTP_EXCEPTIONS'] = True
@app.route('/fail')
def fail():
flask.abort(404)
c = app.test_client()
try:
c.get('/fail')
except NotFound as e:
pass
else:
self.fail('Expected exception')
def test_enctype_debug_helper(self):
from flask.debughelpers import DebugFilesKeyError
app = flask.Flask(__name__)
app.debug = True
@app.route('/fail', methods=['POST'])
def index():
return flask.request.files['foo'].filename
# with statement is important because we leave an exception on the
# stack otherwise and we want to ensure that this is not the case
# to not negatively affect other tests.
with app.test_client() as c:
try:
c.post('/fail', data={'foo': 'index.txt'})
except DebugFilesKeyError as e:
self.assert_in('no file contents were transmitted', str(e))
self.assert_in('This was submitted: "index.txt"', str(e))
else:
self.fail('Expected exception')
def test_response_creation(self):
app = flask.Flask(__name__)
@app.route('/unicode')
def from_unicode():
return u'Hällo Wörld'
@app.route('/string')
def from_string():
return u'Hällo Wörld'.encode('utf-8')
@app.route('/args')
def from_tuple():
return 'Meh', 400, {
'X-Foo': 'Testing',
'Content-Type': 'text/plain; charset=utf-8'
}
c = app.test_client()
self.assert_equal(c.get('/unicode').data, u'Hällo Wörld'.encode('utf-8'))
self.assert_equal(c.get('/string').data, u'Hällo Wörld'.encode('utf-8'))
rv = c.get('/args')
self.assert_equal(rv.data, b'Meh')
self.assert_equal(rv.headers['X-Foo'], 'Testing')
self.assert_equal(rv.status_code, 400)
self.assert_equal(rv.mimetype, 'text/plain')
def test_make_response(self):
app = flask.Flask(__name__)
with app.test_request_context():
rv = flask.make_response()
self.assert_equal(rv.status_code, 200)
self.assert_equal(rv.data, b'')
self.assert_equal(rv.mimetype, 'text/html')
rv = flask.make_response('Awesome')
self.assert_equal(rv.status_code, 200)
self.assert_equal(rv.data, b'Awesome')
self.assert_equal(rv.mimetype, 'text/html')
rv = flask.make_response('W00t', 404)
self.assert_equal(rv.status_code, 404)
self.assert_equal(rv.data, b'W00t')
self.assert_equal(rv.mimetype, 'text/html')
def test_make_response_with_response_instance(self):
app = flask.Flask(__name__)
with app.test_request_context():
rv = flask.make_response(
flask.jsonify({'msg': 'W00t'}), 400)
self.assertEqual(rv.status_code, 400)
self.assertEqual(rv.data, b'{\n "msg": "W00t"\n}')
self.assertEqual(rv.mimetype, 'application/json')
rv = flask.make_response(
flask.Response(''), 400)
self.assertEqual(rv.status_code, 400)
self.assertEqual(rv.data, b'')
self.assertEqual(rv.mimetype, 'text/html')
rv = flask.make_response(
flask.Response('', headers={'Content-Type': 'text/html'}),
400, [('X-Foo', 'bar')])
self.assertEqual(rv.status_code, 400)
self.assertEqual(rv.headers['Content-Type'], 'text/html')
self.assertEqual(rv.headers['X-Foo'], 'bar')
def test_url_generation(self):
app = flask.Flask(__name__)
@app.route('/hello/<name>', methods=['POST'])
def hello():
pass
with app.test_request_context():
self.assert_equal(flask.url_for('hello', name='test x'), '/hello/test%20x')
self.assert_equal(flask.url_for('hello', name='test x', _external=True),
'http://localhost/hello/test%20x')
def test_build_error_handler(self):
app = flask.Flask(__name__)
# Test base case, a URL which results in a BuildError.
with app.test_request_context():
self.assertRaises(BuildError, flask.url_for, 'spam')
# Verify the error is re-raised if not the current exception.
try:
with app.test_request_context():
flask.url_for('spam')
except BuildError as err:
error = err
try:
raise RuntimeError('Test case where BuildError is not current.')
except RuntimeError:
self.assertRaises(BuildError, app.handle_url_build_error, error, 'spam', {})
# Test a custom handler.
def handler(error, endpoint, values):
# Just a test.
return '/test_handler/'
app.url_build_error_handlers.append(handler)
with app.test_request_context():
self.assert_equal(flask.url_for('spam'), '/test_handler/')
def test_custom_converters(self):
from werkzeug.routing import BaseConverter
class ListConverter(BaseConverter):
def to_python(self, value):
return value.split(',')
def to_url(self, value):
base_to_url = super(ListConverter, self).to_url
return ','.join(base_to_url(x) for x in value)
app = flask.Flask(__name__)
app.url_map.converters['list'] = ListConverter
@app.route('/<list:args>')
def index(args):
return '|'.join(args)
c = app.test_client()
self.assert_equal(c.get('/1,2,3').data, b'1|2|3')
def test_static_files(self):
app = flask.Flask(__name__)
app.testing = True
rv = app.test_client().get('/static/index.html')
self.assert_equal(rv.status_code, 200)
self.assert_equal(rv.data.strip(), b'<h1>Hello World!</h1>')
with app.test_request_context():
self.assert_equal(flask.url_for('static', filename='index.html'),
'/static/index.html')
rv.close()
def test_none_response(self):
app = flask.Flask(__name__)
@app.route('/')
def test():
return None
try:
app.test_client().get('/')
except ValueError as e:
self.assert_equal(str(e), 'View function did not return a response')
        else:
            self.fail('Expected ValueError')
def test_request_locals(self):
self.assert_equal(repr(flask.g), '<LocalProxy unbound>')
self.assertFalse(flask.g)
def test_test_app_proper_environ(self):
app = flask.Flask(__name__)
app.config.update(
SERVER_NAME='localhost.localdomain:5000'
)
@app.route('/')
def index():
return 'Foo'
@app.route('/', subdomain='foo')
def subdomain():
return 'Foo SubDomain'
rv = app.test_client().get('/')
self.assert_equal(rv.data, b'Foo')
rv = app.test_client().get('/', 'http://localhost.localdomain:5000')
self.assert_equal(rv.data, b'Foo')
rv = app.test_client().get('/', 'https://localhost.localdomain:5000')
self.assert_equal(rv.data, b'Foo')
app.config.update(SERVER_NAME='localhost.localdomain')
rv = app.test_client().get('/', 'https://localhost.localdomain')
self.assert_equal(rv.data, b'Foo')
try:
app.config.update(SERVER_NAME='localhost.localdomain:443')
rv = app.test_client().get('/', 'https://localhost.localdomain')
# Werkzeug 0.8
self.assert_equal(rv.status_code, 404)
except ValueError as e:
# Werkzeug 0.7
self.assert_equal(str(e), "the server name provided " +
"('localhost.localdomain:443') does not match the " + \
"server name from the WSGI environment ('localhost.localdomain')")
try:
app.config.update(SERVER_NAME='localhost.localdomain')
rv = app.test_client().get('/', 'http://foo.localhost')
# Werkzeug 0.8
self.assert_equal(rv.status_code, 404)
except ValueError as e:
# Werkzeug 0.7
self.assert_equal(str(e), "the server name provided " + \
"('localhost.localdomain') does not match the " + \
"server name from the WSGI environment ('foo.localhost')")
rv = app.test_client().get('/', 'http://foo.localhost.localdomain')
self.assert_equal(rv.data, b'Foo SubDomain')
def test_exception_propagation(self):
        def apprunner(config_key):
app = flask.Flask(__name__)
@app.route('/')
def index():
1 // 0
c = app.test_client()
if config_key is not None:
app.config[config_key] = True
try:
resp = c.get('/')
except Exception:
pass
else:
self.fail('expected exception')
else:
self.assert_equal(c.get('/').status_code, 500)
# we have to run this test in an isolated thread because if the
# debug flag is set to true and an exception happens the context is
# not torn down. This causes other tests that run after this fail
# when they expect no exception on the stack.
for config_key in 'TESTING', 'PROPAGATE_EXCEPTIONS', 'DEBUG', None:
t = Thread(target=apprunner, args=(config_key,))
t.start()
t.join()
def test_max_content_length(self):
app = flask.Flask(__name__)
app.config['MAX_CONTENT_LENGTH'] = 64
@app.before_request
def always_first():
flask.request.form['myfile']
self.assert_true(False)
@app.route('/accept', methods=['POST'])
def accept_file():
flask.request.form['myfile']
self.assert_true(False)
@app.errorhandler(413)
def catcher(error):
return '42'
c = app.test_client()
rv = c.post('/accept', data={'myfile': 'foo' * 100})
self.assert_equal(rv.data, b'42')
def test_url_processors(self):
app = flask.Flask(__name__)
@app.url_defaults
def add_language_code(endpoint, values):
if flask.g.lang_code is not None and \
app.url_map.is_endpoint_expecting(endpoint, 'lang_code'):
values.setdefault('lang_code', flask.g.lang_code)
@app.url_value_preprocessor
def pull_lang_code(endpoint, values):
flask.g.lang_code = values.pop('lang_code', None)
@app.route('/<lang_code>/')
def index():
return flask.url_for('about')
@app.route('/<lang_code>/about')
def about():
return flask.url_for('something_else')
@app.route('/foo')
def something_else():
return flask.url_for('about', lang_code='en')
c = app.test_client()
self.assert_equal(c.get('/de/').data, b'/de/about')
self.assert_equal(c.get('/de/about').data, b'/foo')
self.assert_equal(c.get('/foo').data, b'/en/about')
def test_inject_blueprint_url_defaults(self):
app = flask.Flask(__name__)
bp = flask.Blueprint('foo.bar.baz', __name__,
template_folder='template')
@bp.url_defaults
def bp_defaults(endpoint, values):
values['page'] = 'login'
@bp.route('/<page>')
def view(page): pass
app.register_blueprint(bp)
values = dict()
app.inject_url_defaults('foo.bar.baz.view', values)
expected = dict(page='login')
self.assert_equal(values, expected)
with app.test_request_context('/somepage'):
url = flask.url_for('foo.bar.baz.view')
expected = '/login'
self.assert_equal(url, expected)
def test_nonascii_pathinfo(self):
app = flask.Flask(__name__)
app.testing = True
@app.route(u'/киртест')
def index():
return 'Hello World!'
c = app.test_client()
rv = c.get(u'/киртест')
self.assert_equal(rv.data, b'Hello World!')
def test_debug_mode_complains_after_first_request(self):
app = flask.Flask(__name__)
app.debug = True
@app.route('/')
def index():
return 'Awesome'
self.assert_false(app.got_first_request)
self.assert_equal(app.test_client().get('/').data, b'Awesome')
try:
@app.route('/foo')
def broken():
return 'Meh'
except AssertionError as e:
self.assert_in('A setup function was called', str(e))
else:
self.fail('Expected exception')
app.debug = False
@app.route('/foo')
def working():
return 'Meh'
self.assert_equal(app.test_client().get('/foo').data, b'Meh')
self.assert_true(app.got_first_request)
def test_before_first_request_functions(self):
got = []
app = flask.Flask(__name__)
@app.before_first_request
def foo():
got.append(42)
c = app.test_client()
c.get('/')
self.assert_equal(got, [42])
c.get('/')
self.assert_equal(got, [42])
self.assert_true(app.got_first_request)
def test_routing_redirect_debugging(self):
app = flask.Flask(__name__)
app.debug = True
@app.route('/foo/', methods=['GET', 'POST'])
def foo():
return 'success'
with app.test_client() as c:
try:
c.post('/foo', data={})
except AssertionError as e:
self.assert_in('http://localhost/foo/', str(e))
self.assert_in('Make sure to directly send your POST-request '
'to this URL', str(e))
else:
self.fail('Expected exception')
rv = c.get('/foo', data={}, follow_redirects=True)
self.assert_equal(rv.data, b'success')
app.debug = False
with app.test_client() as c:
rv = c.post('/foo', data={}, follow_redirects=True)
self.assert_equal(rv.data, b'success')
def test_route_decorator_custom_endpoint(self):
app = flask.Flask(__name__)
app.debug = True
@app.route('/foo/')
def foo():
return flask.request.endpoint
@app.route('/bar/', endpoint='bar')
def for_bar():
return flask.request.endpoint
@app.route('/bar/123', endpoint='123')
def for_bar_foo():
return flask.request.endpoint
with app.test_request_context():
assert flask.url_for('foo') == '/foo/'
assert flask.url_for('bar') == '/bar/'
assert flask.url_for('123') == '/bar/123'
c = app.test_client()
self.assertEqual(c.get('/foo/').data, b'foo')
self.assertEqual(c.get('/bar/').data, b'bar')
self.assertEqual(c.get('/bar/123').data, b'123')
def test_preserve_only_once(self):
app = flask.Flask(__name__)
app.debug = True
@app.route('/fail')
def fail_func():
1 // 0
c = app.test_client()
for x in range(3):
with self.assert_raises(ZeroDivisionError):
c.get('/fail')
self.assert_true(flask._request_ctx_stack.top is not None)
self.assert_true(flask._app_ctx_stack.top is not None)
# implicit appctx disappears too
flask._request_ctx_stack.top.pop()
self.assert_true(flask._request_ctx_stack.top is None)
self.assert_true(flask._app_ctx_stack.top is None)
def test_preserve_remembers_exception(self):
app = flask.Flask(__name__)
app.debug = True
errors = []
@app.route('/fail')
def fail_func():
1 // 0
@app.route('/success')
def success_func():
return 'Okay'
@app.teardown_request
def teardown_handler(exc):
errors.append(exc)
c = app.test_client()
# After this failure we did not yet call the teardown handler
with self.assert_raises(ZeroDivisionError):
c.get('/fail')
self.assert_equal(errors, [])
# But this request triggers it, and it's an error
c.get('/success')
self.assert_equal(len(errors), 2)
self.assert_true(isinstance(errors[0], ZeroDivisionError))
# At this point another request does nothing.
c.get('/success')
self.assert_equal(len(errors), 3)
self.assert_equal(errors[1], None)
def test_get_method_on_g(self):
app = flask.Flask(__name__)
app.testing = True
with app.app_context():
self.assert_equal(flask.g.get('x'), None)
self.assert_equal(flask.g.get('x', 11), 11)
flask.g.x = 42
self.assert_equal(flask.g.get('x'), 42)
self.assert_equal(flask.g.x, 42)
def test_g_iteration_protocol(self):
app = flask.Flask(__name__)
app.testing = True
with app.app_context():
flask.g.foo = 23
flask.g.bar = 42
self.assert_equal('foo' in flask.g, True)
self.assert_equal('foos' in flask.g, False)
self.assert_equal(sorted(flask.g), ['bar', 'foo'])
class SubdomainTestCase(FlaskTestCase):
def test_basic_support(self):
app = flask.Flask(__name__)
app.config['SERVER_NAME'] = 'localhost'
@app.route('/')
def normal_index():
return 'normal index'
@app.route('/', subdomain='test')
def test_index():
return 'test index'
c = app.test_client()
rv = c.get('/', 'http://localhost/')
self.assert_equal(rv.data, b'normal index')
rv = c.get('/', 'http://test.localhost/')
self.assert_equal(rv.data, b'test index')
@emits_module_deprecation_warning
def test_module_static_path_subdomain(self):
app = flask.Flask(__name__)
app.config['SERVER_NAME'] = 'example.com'
from subdomaintestmodule import mod
app.register_module(mod)
c = app.test_client()
rv = c.get('/static/hello.txt', 'http://foo.example.com/')
rv.direct_passthrough = False
self.assert_equal(rv.data.strip(), b'Hello Subdomain')
rv.close()
def test_subdomain_matching(self):
app = flask.Flask(__name__)
app.config['SERVER_NAME'] = 'localhost'
@app.route('/', subdomain='<user>')
def index(user):
return 'index for %s' % user
c = app.test_client()
rv = c.get('/', 'http://mitsuhiko.localhost/')
self.assert_equal(rv.data, b'index for mitsuhiko')
def test_subdomain_matching_with_ports(self):
app = flask.Flask(__name__)
app.config['SERVER_NAME'] = 'localhost:3000'
@app.route('/', subdomain='<user>')
def index(user):
return 'index for %s' % user
c = app.test_client()
rv = c.get('/', 'http://mitsuhiko.localhost:3000/')
self.assert_equal(rv.data, b'index for mitsuhiko')
@emits_module_deprecation_warning
def test_module_subdomain_support(self):
app = flask.Flask(__name__)
mod = flask.Module(__name__, 'test', subdomain='testing')
app.config['SERVER_NAME'] = 'localhost'
@mod.route('/test')
def test():
return 'Test'
@mod.route('/outside', subdomain='xtesting')
def bar():
return 'Outside'
app.register_module(mod)
c = app.test_client()
rv = c.get('/test', 'http://testing.localhost/')
self.assert_equal(rv.data, b'Test')
rv = c.get('/outside', 'http://xtesting.localhost/')
self.assert_equal(rv.data, b'Outside')
def test_multi_route_rules(self):
app = flask.Flask(__name__)
@app.route('/')
@app.route('/<test>/')
def index(test='a'):
return test
rv = app.test_client().open('/')
self.assert_equal(rv.data, b'a')
rv = app.test_client().open('/b/')
self.assert_equal(rv.data, b'b')
def test_multi_route_class_views(self):
class View(object):
def __init__(self, app):
app.add_url_rule('/', 'index', self.index)
app.add_url_rule('/<test>/', 'index', self.index)
def index(self, test='a'):
return test
app = flask.Flask(__name__)
_ = View(app)
rv = app.test_client().open('/')
self.assert_equal(rv.data, b'a')
rv = app.test_client().open('/b/')
self.assert_equal(rv.data, b'b')
def suite():
suite = unittest.TestSuite()
suite.addTest(unittest.makeSuite(BasicFunctionalityTestCase))
suite.addTest(unittest.makeSuite(SubdomainTestCase))
return suite
| gpl-2.0 |
victronenergy/dbus-fronius | test/src/fronius_sim/fronius_sim.py | 1 | 1241 | import random
import time
class PowerInfo:
def __init__(self):
self._lastEnergy = 0
self._prevPower = 0
# Use time.perf_counter() instead of time.clock() when using python 3
self._lastTimeStamp = time.perf_counter()
@property
def current(self):
return random.gauss(14, 0.5)
@property
def voltage(self):
return random.gauss(230, 0.05)
@property
def power(self):
p = random.gauss(3000, 100)
t = time.perf_counter()
self._lastEnergy += (self._prevPower + p) * (t - self._lastTimeStamp) / (2 * 3600)
self._lastTimeStamp = t
self._prevPower = p
return p
@property
def nominal_power(self):
return 2000
@property
def energy(self):
p = self.power
return self._lastEnergy
class FroniusSim:
def __init__(self, id, unique_id, device_type, custom_name='', has_3phases=True, modbus_enabled=False,
max_power=5000):
self.main = PowerInfo()
self.has_3phases = has_3phases
self.modbus_enabled = modbus_enabled
self.max_power = max_power
self.power_limit = 100
if has_3phases:
self.l1 = PowerInfo()
self.l2 = PowerInfo()
self.l3 = PowerInfo()
else:
self.l1 = self.main
self.id = id
self.unique_id = unique_id
self.custom_name = custom_name
self.device_type = device_type
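# Illustrative smoke test (not part of the original simulator): build a
# single-phase FroniusSim with placeholder identifiers and print a few
# simulated readings. Reading `power` also advances the integrated energy
# counter via trapezoidal integration over the elapsed time.
if __name__ == '__main__':
    sim = FroniusSim(id='1', unique_id='demo-0001', device_type=123,
                     custom_name='demo', has_3phases=False)
    for _ in range(3):
        print('P=%.1f W  E=%.6f Wh' % (sim.main.power, sim.main.energy))
        time.sleep(0.1)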
| mit |
tsdmgz/ansible | lib/ansible/modules/system/pamd.py | 3 | 23321 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# (c) 2017, Kenneth D. Evensen <[email protected]>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = """
module: pamd
author:
- "Kenneth D. Evensen (@kevensen)"
short_description: Manage PAM Modules
description:
- Edit PAM service's type, control, module path and module arguments.
In order for a PAM rule to be modified, the type, control and
module_path must match an existing rule. See man(5) pam.d for details.
version_added: "2.3"
options:
name:
required: true
description:
- The name generally refers to the PAM service file to
change, for example system-auth.
type:
required: true
description:
- The type of the PAM rule being modified. The type, control
and module_path all must match a rule to be modified.
control:
required: true
description:
- The control of the PAM rule being modified. This may be a
complicated control with brackets. If this is the case, be
sure to put "[bracketed controls]" in quotes. The type,
control and module_path all must match a rule to be modified.
module_path:
required: true
description:
- The module path of the PAM rule being modified. The type,
control and module_path all must match a rule to be modified.
new_type:
description:
- The new type to assign to the new rule.
new_control:
description:
- The new control to assign to the new rule.
new_module_path:
description:
- The new module path to be assigned to the new rule.
module_arguments:
description:
- When state is 'updated', the module_arguments will replace existing
module_arguments. When state is 'args_absent' args matching those
listed in module_arguments will be removed. When state is
'args_present' any args listed in module_arguments are added if
missing from the existing rule. Furthermore, if the module argument
takes a value denoted by '=', the value will be changed to that specified
in module_arguments. Note that module_arguments is a list. Please see
the examples for usage.
state:
default: updated
choices:
- updated
- before
- after
- args_present
- args_absent
- absent
description:
- The default of 'updated' will modify an existing rule if type,
control and module_path all match an existing rule. With 'before',
the new rule will be inserted before a rule matching type, control
and module_path. Similarly, with 'after', the new rule will be inserted
after an existing rule matching type, control and module_path. With
either 'before' or 'after' new_type, new_control, and new_module_path
must all be specified. If state is 'args_absent' or 'args_present',
new_type, new_control, and new_module_path will be ignored. State
'absent' will remove the rule. The 'absent' state was added in version
2.4 and is only available in Ansible versions >= 2.4.
path:
default: /etc/pam.d/
description:
- This is the path to the PAM service files
"""
EXAMPLES = """
- name: Update pamd rule's control in /etc/pam.d/system-auth
pamd:
name: system-auth
type: auth
control: required
module_path: pam_faillock.so
new_control: sufficient
- name: Update pamd rule's complex control in /etc/pam.d/system-auth
pamd:
name: system-auth
type: session
control: '[success=1 default=ignore]'
module_path: pam_succeed_if.so
new_control: '[success=2 default=ignore]'
- name: Insert a new rule before an existing rule
pamd:
name: system-auth
type: auth
control: required
module_path: pam_faillock.so
new_type: auth
new_control: sufficient
new_module_path: pam_faillock.so
state: before
- name: Insert a new rule pam_wheel.so with argument 'use_uid' after an existing rule pam_rootok.so
pamd:
name: su
type: auth
control: sufficient
module_path: pam_rootok.so
new_type: auth
new_control: required
new_module_path: pam_wheel.so
module_arguments: 'use_uid'
state: after
- name: Remove module arguments from an existing rule
pamd:
name: system-auth
type: auth
control: required
module_path: pam_faillock.so
module_arguments: ''
state: updated
- name: Replace all module arguments in an existing rule
pamd:
name: system-auth
type: auth
control: required
module_path: pam_faillock.so
module_arguments: 'preauth
silent
deny=3
unlock_time=604800
fail_interval=900'
state: updated
- name: Remove specific arguments from a rule
pamd:
name: system-auth
    type: session
    control: '[success=1 default=ignore]'
module_path: pam_succeed_if.so
module_arguments: crond,quiet
state: args_absent
- name: Ensure specific arguments are present in a rule
pamd:
name: system-auth
type: session
control: '[success=1 default=ignore]'
module_path: pam_succeed_if.so
module_arguments: crond,quiet
state: args_present
- name: Ensure specific arguments are present in a rule (alternative)
pamd:
name: system-auth
type: session
control: '[success=1 default=ignore]'
module_path: pam_succeed_if.so
module_arguments:
- crond
- quiet
state: args_present
- name: Module arguments requiring commas must be listed as a Yaml list
pamd:
name: special-module
type: account
control: required
module_path: pam_access.so
module_arguments:
- listsep=,
state: args_present
- name: Update specific argument value in a rule
pamd:
name: system-auth
type: auth
control: required
module_path: pam_faillock.so
module_arguments: 'fail_interval=300'
state: args_present
"""
RETURN = '''
change_count:
description: How many rules were changed
type: int
sample: 1
returned: success
version_added: 2.4
new_rule:
description: The changes to the rule
type: string
sample: None None None sha512 shadow try_first_pass use_authtok
returned: success
version_added: 2.4
updated_rule_(n):
description: The rule(s) that was/were changed
type: string
sample:
- password sufficient pam_unix.so sha512 shadow try_first_pass
use_authtok
returned: success
version_added: 2.4
action:
description:
    - "The action that was taken and is one of: update_rule,
insert_before_rule, insert_after_rule, args_present, args_absent,
absent."
returned: always
type: string
sample: "update_rule"
version_added: 2.4
dest:
description:
- "Path to pam.d service that was changed. This is only available in
Ansible version 2.3 and was removed in 2.4."
returned: success
type: string
sample: "/etc/pam.d/system-auth"
...
'''
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.pycompat24 import get_exception
import os
import re
import time
# The PamdRule class encapsulates a rule in a pam.d service
class PamdRule(object):
def __init__(self, rule_type,
rule_control, rule_module_path,
rule_module_args=None):
self.rule_type = rule_type
self.rule_control = rule_control
self.rule_module_path = rule_module_path
try:
if (rule_module_args is not None and
type(rule_module_args) is list):
self.rule_module_args = rule_module_args
elif (rule_module_args is not None and
type(rule_module_args) is str):
self.rule_module_args = rule_module_args.split()
except AttributeError:
self.rule_module_args = []
@classmethod
def rulefromstring(cls, stringline):
pattern = None
rule_type = ''
rule_control = ''
rule_module_path = ''
rule_module_args = ''
complicated = False
if '[' in stringline:
pattern = re.compile(
r"""([\-A-Za-z0-9_]+)\s* # Rule Type
\[([A-Za-z0-9_=\s]+)\]\s* # Rule Control
([A-Za-z0-9_\-\.]+)\s* # Rule Path
([A-Za-z0-9,_=<>\-\s\./]*)""", # Rule Args
re.X)
complicated = True
else:
pattern = re.compile(
r"""([\-A-Za-z0-9_]+)\s* # Rule Type
([A-Za-z0-9_]+)\s* # Rule Control
([A-Za-z0-9_\-\.]+)\s* # Rule Path
([A-Za-z0-9,_=<>\-\s\./]*)""", # Rule Args
re.X)
result = pattern.match(stringline)
rule_type = result.group(1)
if complicated:
rule_control = '[' + result.group(2) + ']'
else:
rule_control = result.group(2)
rule_module_path = result.group(3)
if result.group(4) is not None:
rule_module_args = result.group(4)
return cls(rule_type, rule_control, rule_module_path, rule_module_args)
def get_module_args_as_string(self):
try:
if self.rule_module_args is not None:
return ' '.join(self.rule_module_args)
except AttributeError:
pass
return ''
def __str__(self):
return "%-10s\t%s\t%s %s" % (self.rule_type,
self.rule_control,
self.rule_module_path,
self.get_module_args_as_string())
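# Illustrative example (not part of the module) of how a pam.d line maps onto
# a PamdRule:
#   rule = PamdRule.rulefromstring('auth required pam_unix.so nullok try_first_pass')
#   rule.rule_type        -> 'auth'
#   rule.rule_control     -> 'required'
#   rule.rule_module_path -> 'pam_unix.so'
#   rule.rule_module_args -> ['nullok', 'try_first_pass']
# Bracketed controls such as '[success=1 default=ignore]' are parsed by the
# alternate regular expression branch in rulefromstring.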
# PamdService encapsulates an entire service and contains one or more rules
class PamdService(object):
def __init__(self, ansible=None):
        if ansible is not None:
            self.check = ansible.check_mode
        else:
            self.check = False
self.ansible = ansible
self.preamble = []
self.rules = []
self.fname = None
if ansible is not None:
self.path = self.ansible.params["path"]
self.name = self.ansible.params["name"]
def load_rules_from_file(self):
self.fname = self.path + "/" + self.name
stringline = ''
try:
for line in open(self.fname, 'r'):
stringline += line.rstrip()
stringline += '\n'
self.load_rules_from_string(stringline)
except IOError:
e = get_exception()
self.ansible.fail_json(msg='Unable to open/read PAM module \
file %s with error %s. And line %s' %
(self.fname, str(e), stringline))
def load_rules_from_string(self, stringvalue):
for line in stringvalue.splitlines():
stringline = line.rstrip()
if line.startswith('#') and not line.isspace():
self.preamble.append(line.rstrip())
elif (not line.startswith('#') and
not line.isspace() and
len(line) != 0):
self.rules.append(PamdRule.rulefromstring(stringline))
def write(self):
if self.fname is None:
self.fname = self.path + "/" + self.name
        # If the file is a symbolic link, we'll write to the source.
pamd_file = os.path.realpath(self.fname)
temp_file = "/tmp/" + self.name + "_" + time.strftime("%y%m%d%H%M%S")
try:
f = open(temp_file, 'w')
f.write(str(self))
f.close()
except IOError:
            self.ansible.fail_json(msg='Unable to create temporary \
file %s' % temp_file)
self.ansible.atomic_move(temp_file, pamd_file)
def __str__(self):
stringvalue = ''
previous_rule = None
for amble in self.preamble:
stringvalue += amble
stringvalue += '\n'
for rule in self.rules:
if (previous_rule is not None and
(previous_rule.rule_type.replace('-', '') !=
rule.rule_type.replace('-', ''))):
stringvalue += '\n'
stringvalue += str(rule).rstrip()
stringvalue += '\n'
previous_rule = rule
if stringvalue.endswith('\n'):
stringvalue = stringvalue[:-1]
return stringvalue
def update_rule(service, old_rule, new_rule):
changed = False
change_count = 0
result = {'action': 'update_rule'}
for rule in service.rules:
if (old_rule.rule_type == rule.rule_type and
old_rule.rule_control == rule.rule_control and
old_rule.rule_module_path == rule.rule_module_path):
if (new_rule.rule_type is not None and
new_rule.rule_type != rule.rule_type):
rule.rule_type = new_rule.rule_type
changed = True
if (new_rule.rule_control is not None and
new_rule.rule_control != rule.rule_control):
rule.rule_control = new_rule.rule_control
changed = True
if (new_rule.rule_module_path is not None and
new_rule.rule_module_path != rule.rule_module_path):
rule.rule_module_path = new_rule.rule_module_path
changed = True
try:
if (new_rule.rule_module_args is not None and
new_rule.get_module_args_as_string() !=
rule.get_module_args_as_string()):
rule.rule_module_args = new_rule.rule_module_args
changed = True
except AttributeError:
pass
if changed:
result['updated_rule_' + str(change_count)] = str(rule)
result['new_rule'] = str(new_rule)
change_count += 1
result['change_count'] = change_count
return changed, result
def insert_before_rule(service, old_rule, new_rule):
index = 0
change_count = 0
result = {'action':
'insert_before_rule'}
changed = False
for rule in service.rules:
if (old_rule.rule_type == rule.rule_type and
old_rule.rule_control == rule.rule_control and
old_rule.rule_module_path == rule.rule_module_path):
if index == 0:
service.rules.insert(0, new_rule)
changed = True
elif (new_rule.rule_type != service.rules[index - 1].rule_type or
new_rule.rule_control !=
service.rules[index - 1].rule_control or
new_rule.rule_module_path !=
service.rules[index - 1].rule_module_path):
service.rules.insert(index, new_rule)
changed = True
if changed:
result['new_rule'] = str(new_rule)
result['before_rule_' + str(change_count)] = str(rule)
change_count += 1
index += 1
result['change_count'] = change_count
return changed, result
def insert_after_rule(service, old_rule, new_rule):
index = 0
change_count = 0
result = {'action': 'insert_after_rule'}
changed = False
for rule in service.rules:
if (old_rule.rule_type == rule.rule_type and
old_rule.rule_control == rule.rule_control and
old_rule.rule_module_path == rule.rule_module_path):
if (new_rule.rule_type != service.rules[index + 1].rule_type or
new_rule.rule_control !=
service.rules[index + 1].rule_control or
new_rule.rule_module_path !=
service.rules[index + 1].rule_module_path):
service.rules.insert(index + 1, new_rule)
changed = True
if changed:
result['new_rule'] = str(new_rule)
result['after_rule_' + str(change_count)] = str(rule)
change_count += 1
index += 1
result['change_count'] = change_count
return changed, result
def remove_module_arguments(service, old_rule, module_args):
result = {'action': 'args_absent'}
changed = False
change_count = 0
for rule in service.rules:
if (old_rule.rule_type == rule.rule_type and
old_rule.rule_control == rule.rule_control and
old_rule.rule_module_path == rule.rule_module_path):
for arg_to_remove in module_args:
for arg in rule.rule_module_args:
if arg == arg_to_remove:
rule.rule_module_args.remove(arg)
changed = True
result['removed_arg_' + str(change_count)] = arg
result['from_rule_' + str(change_count)] = str(rule)
change_count += 1
result['change_count'] = change_count
return changed, result
def add_module_arguments(service, old_rule, module_args):
result = {'action': 'args_present'}
changed = False
change_count = 0
for rule in service.rules:
if (old_rule.rule_type == rule.rule_type and
old_rule.rule_control == rule.rule_control and
old_rule.rule_module_path == rule.rule_module_path):
for arg_to_add in module_args:
if "=" in arg_to_add:
pre_string = arg_to_add[:arg_to_add.index('=') + 1]
                    indices = [i for i, arg
in enumerate(rule.rule_module_args)
if arg.startswith(pre_string)]
                    if len(indices) == 0:
rule.rule_module_args.append(arg_to_add)
changed = True
result['added_arg_' + str(change_count)] = arg_to_add
result['to_rule_' + str(change_count)] = str(rule)
change_count += 1
else:
                        for i in indices:
if rule.rule_module_args[i] != arg_to_add:
rule.rule_module_args[i] = arg_to_add
changed = True
result['updated_arg_' +
str(change_count)] = arg_to_add
result['in_rule_' +
str(change_count)] = str(rule)
change_count += 1
elif arg_to_add not in rule.rule_module_args:
rule.rule_module_args.append(arg_to_add)
changed = True
result['added_arg_' + str(change_count)] = arg_to_add
result['to_rule_' + str(change_count)] = str(rule)
change_count += 1
result['change_count'] = change_count
return changed, result
def remove_rule(service, old_rule):
result = {'action': 'absent'}
changed = False
change_count = 0
for rule in service.rules:
if (old_rule.rule_type == rule.rule_type and
old_rule.rule_control == rule.rule_control and
old_rule.rule_module_path == rule.rule_module_path):
service.rules.remove(rule)
changed = True
return changed, result
def main():
module = AnsibleModule(
argument_spec=dict(
name=dict(required=True, type='str'),
type=dict(required=True,
choices=['account', 'auth',
'password', 'session']),
control=dict(required=True, type='str'),
module_path=dict(required=True, type='str'),
new_type=dict(required=False,
choices=['account', 'auth',
'password', 'session']),
new_control=dict(required=False, type='str'),
new_module_path=dict(required=False, type='str'),
module_arguments=dict(required=False, type='list'),
state=dict(required=False, default="updated",
choices=['before', 'after', 'updated',
'args_absent', 'args_present', 'absent']),
path=dict(required=False, default='/etc/pam.d', type='str')
),
supports_check_mode=True,
required_if=[
("state", "args_present", ["module_arguments"]),
("state", "args_absent", ["module_arguments"]),
("state", "before", ["new_control"]),
("state", "before", ["new_type"]),
("state", "before", ["new_module_path"]),
("state", "after", ["new_control"]),
("state", "after", ["new_type"]),
("state", "after", ["new_module_path"])
]
)
service = module.params['name']
old_type = module.params['type']
old_control = module.params['control']
old_module_path = module.params['module_path']
new_type = module.params['new_type']
new_control = module.params['new_control']
new_module_path = module.params['new_module_path']
module_arguments = module.params['module_arguments']
state = module.params['state']
path = module.params['path']
pamd = PamdService(module)
pamd.load_rules_from_file()
old_rule = PamdRule(old_type,
old_control,
old_module_path)
new_rule = PamdRule(new_type,
new_control,
new_module_path,
module_arguments)
if state == 'updated':
change, result = update_rule(pamd,
old_rule,
new_rule)
elif state == 'before':
change, result = insert_before_rule(pamd,
old_rule,
new_rule)
elif state == 'after':
change, result = insert_after_rule(pamd,
old_rule,
new_rule)
elif state == 'args_absent':
change, result = remove_module_arguments(pamd,
old_rule,
module_arguments)
elif state == 'args_present':
change, result = add_module_arguments(pamd,
old_rule,
module_arguments)
elif state == 'absent':
change, result = remove_rule(pamd,
old_rule)
if not module.check_mode and change:
pamd.write()
facts = {}
facts['pamd'] = {'changed': change, 'result': result}
module.params['dest'] = pamd.fname
module.exit_json(changed=change, ansible_facts=facts)
if __name__ == '__main__':
main()
| gpl-3.0 |
ntruchsess/Arduino-1 | arduino-core/src/processing/app/i18n/python/requests/models.py | 151 | 21105 | # -*- coding: utf-8 -*-
"""
requests.models
~~~~~~~~~~~~~~~
This module contains the primary objects that power Requests.
"""
import collections
import logging
import datetime
from io import BytesIO
from .hooks import default_hooks
from .structures import CaseInsensitiveDict
from .auth import HTTPBasicAuth
from .cookies import cookiejar_from_dict, get_cookie_header
from .packages.urllib3.filepost import encode_multipart_formdata
from .exceptions import HTTPError, RequestException, MissingSchema, InvalidURL
from .utils import (
stream_untransfer, guess_filename, requote_uri,
stream_decode_response_unicode, to_key_val_list, parse_header_links,
iter_slices, guess_json_utf, super_len)
from .compat import (
cookielib, urlparse, urlunparse, urlsplit, urlencode, str, bytes, StringIO,
is_py2, chardet, json, builtin_str, basestring)
CONTENT_CHUNK_SIZE = 10 * 1024
ITER_CHUNK_SIZE = 512
log = logging.getLogger(__name__)
class RequestEncodingMixin(object):
@property
def path_url(self):
"""Build the path URL to use."""
url = []
p = urlsplit(self.url)
path = p.path
if not path:
path = '/'
url.append(path)
query = p.query
if query:
url.append('?')
url.append(query)
return ''.join(url)
@staticmethod
def _encode_params(data):
"""Encode parameters in a piece of data.
Will successfully encode parameters when passed as a dict or a list of
        2-tuples. Order is retained if data is a list of 2-tuples but arbitrary
if parameters are supplied as a dict.
"""
if isinstance(data, (str, bytes)):
return data
elif hasattr(data, 'read'):
return data
elif hasattr(data, '__iter__'):
result = []
for k, vs in to_key_val_list(data):
if isinstance(vs, basestring) or not hasattr(vs, '__iter__'):
vs = [vs]
for v in vs:
if v is not None:
result.append(
(k.encode('utf-8') if isinstance(k, str) else k,
v.encode('utf-8') if isinstance(v, str) else v))
return urlencode(result, doseq=True)
else:
return data
@staticmethod
def _encode_files(files, data):
"""Build the body for a multipart/form-data request.
Will successfully encode files when passed as a dict or a list of
        2-tuples. Order is retained if data is a list of 2-tuples but arbitrary
if parameters are supplied as a dict.
"""
if (not files) or isinstance(data, str):
return None
new_fields = []
fields = to_key_val_list(data or {})
files = to_key_val_list(files or {})
for field, val in fields:
if isinstance(val, list):
for v in val:
new_fields.append((field, builtin_str(v)))
else:
new_fields.append((field, builtin_str(val)))
for (k, v) in files:
# support for explicit filename
ft = None
if isinstance(v, (tuple, list)):
if len(v) == 2:
fn, fp = v
else:
fn, fp, ft = v
else:
fn = guess_filename(v) or k
fp = v
if isinstance(fp, str):
fp = StringIO(fp)
if isinstance(fp, bytes):
fp = BytesIO(fp)
if ft:
new_v = (fn, fp.read(), ft)
else:
new_v = (fn, fp.read())
new_fields.append((k, new_v))
body, content_type = encode_multipart_formdata(new_fields)
return body, content_type
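# Rough illustration of what _encode_params produces (a sketch, not part of
# the library source):
#   RequestEncodingMixin._encode_params([('a', '1'), ('b', '2'), ('b', '3')])
#       -> 'a=1&b=2&b=3'
# Strings, bytes and file-like objects are returned unchanged.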
class RequestHooksMixin(object):
def register_hook(self, event, hook):
"""Properly register a hook."""
if isinstance(hook, collections.Callable):
self.hooks[event].append(hook)
elif hasattr(hook, '__iter__'):
self.hooks[event].extend(h for h in hook if isinstance(h, collections.Callable))
def deregister_hook(self, event, hook):
"""Deregister a previously registered hook.
Returns True if the hook existed, False if not.
"""
try:
self.hooks[event].remove(hook)
return True
except ValueError:
return False
class Request(RequestHooksMixin):
"""A user-created :class:`Request <Request>` object.
Used to prepare a :class:`PreparedRequest <PreparedRequest>`, which is sent to the server.
:param method: HTTP method to use.
:param url: URL to send.
:param headers: dictionary of headers to send.
:param files: dictionary of {filename: fileobject} files to multipart upload.
:param data: the body to attach the request. If a dictionary is provided, form-encoding will take place.
:param params: dictionary of URL parameters to append to the URL.
:param auth: Auth handler or (user, pass) tuple.
:param cookies: dictionary or CookieJar of cookies to attach to this request.
:param hooks: dictionary of callback hooks, for internal usage.
Usage::
>>> import requests
>>> req = requests.Request('GET', 'http://httpbin.org/get')
>>> req.prepare()
<PreparedRequest [GET]>
"""
def __init__(self,
method=None,
url=None,
headers=None,
files=None,
data=dict(),
params=dict(),
auth=None,
cookies=None,
hooks=None):
# Default empty dicts for dict params.
data = [] if data is None else data
files = [] if files is None else files
headers = {} if headers is None else headers
params = {} if params is None else params
hooks = {} if hooks is None else hooks
self.hooks = default_hooks()
for (k, v) in list(hooks.items()):
self.register_hook(event=k, hook=v)
self.method = method
self.url = url
self.headers = headers
self.files = files
self.data = data
self.params = params
self.auth = auth
self.cookies = cookies
self.hooks = hooks
def __repr__(self):
return '<Request [%s]>' % (self.method)
def prepare(self):
"""Constructs a :class:`PreparedRequest <PreparedRequest>` for transmission and returns it."""
p = PreparedRequest()
p.prepare_method(self.method)
p.prepare_url(self.url, self.params)
p.prepare_headers(self.headers)
p.prepare_cookies(self.cookies)
p.prepare_body(self.data, self.files)
# Note that prepare_auth must be last to enable authentication schemes
# such as OAuth to work on a fully prepared request.
p.prepare_auth(self.auth)
# This MUST go after prepare_auth. Authenticators could add a hook
p.prepare_hooks(self.hooks)
return p
class PreparedRequest(RequestEncodingMixin, RequestHooksMixin):
"""The fully mutable :class:`PreparedRequest <PreparedRequest>` object,
containing the exact bytes that will be sent to the server.
Generated from either a :class:`Request <Request>` object or manually.
Usage::
>>> import requests
>>> req = requests.Request('GET', 'http://httpbin.org/get')
>>> r = req.prepare()
<PreparedRequest [GET]>
>>> s = requests.Session()
>>> s.send(r)
<Response [200]>
"""
def __init__(self):
#: HTTP verb to send to the server.
self.method = None
#: HTTP URL to send the request to.
self.url = None
#: dictionary of HTTP headers.
self.headers = None
#: request body to send to the server.
self.body = None
#: dictionary of callback hooks, for internal usage.
self.hooks = default_hooks()
def __repr__(self):
return '<PreparedRequest [%s]>' % (self.method)
def prepare_method(self, method):
"""Prepares the given HTTP method."""
self.method = method
if self.method is not None:
self.method = self.method.upper()
def prepare_url(self, url, params):
"""Prepares the given HTTP URL."""
#: Accept objects that have string representations.
try:
url = unicode(url)
except NameError:
# We're on Python 3.
url = str(url)
except UnicodeDecodeError:
pass
# Support for unicode domain names and paths.
scheme, netloc, path, _params, query, fragment = urlparse(url)
if not (scheme and netloc):
raise MissingSchema("Invalid URL %r: No schema supplied" % url)
try:
netloc = netloc.encode('idna').decode('utf-8')
except UnicodeError:
raise InvalidURL('URL has an invalid label.')
# Bare domains aren't valid URLs.
if not path:
path = '/'
if is_py2:
if isinstance(scheme, str):
scheme = scheme.encode('utf-8')
if isinstance(netloc, str):
netloc = netloc.encode('utf-8')
if isinstance(path, str):
path = path.encode('utf-8')
if isinstance(_params, str):
_params = _params.encode('utf-8')
if isinstance(query, str):
query = query.encode('utf-8')
if isinstance(fragment, str):
fragment = fragment.encode('utf-8')
enc_params = self._encode_params(params)
if enc_params:
if query:
query = '%s&%s' % (query, enc_params)
else:
query = enc_params
url = requote_uri(urlunparse([scheme, netloc, path, _params, query, fragment]))
self.url = url
def prepare_headers(self, headers):
"""Prepares the given HTTP headers."""
if headers:
headers = dict((name.encode('ascii'), value) for name, value in headers.items())
self.headers = CaseInsensitiveDict(headers)
else:
self.headers = CaseInsensitiveDict()
def prepare_body(self, data, files):
"""Prepares the given HTTP body data."""
# Check if file, fo, generator, iterator.
# If not, run through normal process.
# Nottin' on you.
body = None
content_type = None
length = None
is_stream = False
is_stream = all([
hasattr(data, '__iter__'),
not isinstance(data, basestring),
not isinstance(data, list),
not isinstance(data, dict)
])
try:
length = str(super_len(data))
except (TypeError, AttributeError):
length = False
if is_stream:
body = data
if files:
raise NotImplementedError('Streamed bodies and files are mutually exclusive.')
if length:
self.headers['Content-Length'] = length
else:
self.headers['Transfer-Encoding'] = 'chunked'
# Check if file, fo, generator, iterator.
# If not, run through normal process.
else:
# Multi-part file uploads.
if files:
(body, content_type) = self._encode_files(files, data)
else:
if data:
body = self._encode_params(data)
if isinstance(data, str) or isinstance(data, builtin_str) or hasattr(data, 'read'):
content_type = None
else:
content_type = 'application/x-www-form-urlencoded'
self.prepare_content_length(body)
# Add content-type if it wasn't explicitly provided.
        if content_type and ('content-type' not in self.headers):
self.headers['Content-Type'] = content_type
self.body = body
def prepare_content_length(self, body):
if hasattr(body, 'seek') and hasattr(body, 'tell'):
body.seek(0, 2)
self.headers['Content-Length'] = str(body.tell())
body.seek(0, 0)
elif body is not None:
self.headers['Content-Length'] = str(len(body))
elif self.method not in ('GET', 'HEAD'):
self.headers['Content-Length'] = '0'
def prepare_auth(self, auth):
"""Prepares the given HTTP auth data."""
if auth:
if isinstance(auth, tuple) and len(auth) == 2:
# special-case basic HTTP auth
auth = HTTPBasicAuth(*auth)
# Allow auth to make its changes.
r = auth(self)
# Update self to reflect the auth changes.
self.__dict__.update(r.__dict__)
# Recompute Content-Length
self.prepare_content_length(self.body)
def prepare_cookies(self, cookies):
"""Prepares the given HTTP cookie data."""
if isinstance(cookies, cookielib.CookieJar):
cookies = cookies
else:
cookies = cookiejar_from_dict(cookies)
if 'cookie' not in self.headers:
cookie_header = get_cookie_header(cookies, self)
if cookie_header is not None:
self.headers['Cookie'] = cookie_header
def prepare_hooks(self, hooks):
"""Prepares the given hooks."""
for event in hooks:
self.register_hook(event, hooks[event])
class Response(object):
"""The :class:`Response <Response>` object, which contains a
server's response to an HTTP request.
"""
def __init__(self):
super(Response, self).__init__()
self._content = False
self._content_consumed = False
#: Integer Code of responded HTTP Status.
self.status_code = None
#: Case-insensitive Dictionary of Response Headers.
#: For example, ``headers['content-encoding']`` will return the
#: value of a ``'Content-Encoding'`` response header.
self.headers = CaseInsensitiveDict()
#: File-like object representation of response (for advanced usage).
        #: Requires that ``stream=True`` on the request.
# This requirement does not apply for use internally to Requests.
self.raw = None
#: Final URL location of Response.
self.url = None
#: Encoding to decode with when accessing r.text.
self.encoding = None
#: A list of :class:`Response <Response>` objects from
#: the history of the Request. Any redirect responses will end
#: up here. The list is sorted from the oldest to the most recent request.
self.history = []
self.reason = None
#: A CookieJar of Cookies the server sent back.
self.cookies = cookiejar_from_dict({})
#: The amount of time elapsed between sending the request
#: and the arrival of the response (as a timedelta)
self.elapsed = datetime.timedelta(0)
def __repr__(self):
return '<Response [%s]>' % (self.status_code)
def __bool__(self):
"""Returns true if :attr:`status_code` is 'OK'."""
return self.ok
def __nonzero__(self):
"""Returns true if :attr:`status_code` is 'OK'."""
return self.ok
def __iter__(self):
"""Allows you to use a response as an iterator."""
return self.iter_content(128)
@property
def ok(self):
try:
self.raise_for_status()
except RequestException:
return False
return True
@property
def apparent_encoding(self):
"""The apparent encoding, provided by the lovely Charade library
(Thanks, Ian!)."""
return chardet.detect(self.content)['encoding']
def iter_content(self, chunk_size=1, decode_unicode=False):
"""Iterates over the response data. This avoids reading the content
at once into memory for large responses. The chunk size is the number
of bytes it should read into memory. This is not necessarily the
length of each item returned as decoding can take place.
"""
if self._content_consumed:
# simulate reading small chunks of the content
return iter_slices(self._content, chunk_size)
def generate():
while 1:
chunk = self.raw.read(chunk_size)
if not chunk:
break
yield chunk
self._content_consumed = True
gen = stream_untransfer(generate(), self)
if decode_unicode:
gen = stream_decode_response_unicode(gen, self)
return gen
def iter_lines(self, chunk_size=ITER_CHUNK_SIZE, decode_unicode=None):
"""Iterates over the response data, one line at a time. This
avoids reading the content at once into memory for large
responses.
"""
pending = None
for chunk in self.iter_content(chunk_size=chunk_size,
decode_unicode=decode_unicode):
if pending is not None:
chunk = pending + chunk
lines = chunk.splitlines()
if lines and lines[-1] and chunk and lines[-1][-1] == chunk[-1]:
pending = lines.pop()
else:
pending = None
for line in lines:
yield line
if pending is not None:
yield pending
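    # Typical streaming usage (illustrative sketch; `r` stands for a Response
    # obtained with stream=True and `handle` is a placeholder for the caller's
    # own processing):
    #     for line in r.iter_lines(chunk_size=1024):
    #         if line:
    #             handle(line)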
@property
def content(self):
"""Content of the response, in bytes."""
if self._content is False:
# Read the contents.
try:
if self._content_consumed:
raise RuntimeError(
'The content for this response was already consumed')
                if self.status_code == 0:
self._content = None
else:
self._content = bytes().join(self.iter_content(CONTENT_CHUNK_SIZE)) or bytes()
except AttributeError:
self._content = None
self._content_consumed = True
# don't need to release the connection; that's been handled by urllib3
# since we exhausted the data.
return self._content
@property
def text(self):
"""Content of the response, in unicode.
if Response.encoding is None and chardet module is available, encoding
will be guessed.
"""
# Try charset from content-type
content = None
encoding = self.encoding
if not self.content:
return str('')
# Fallback to auto-detected encoding.
if self.encoding is None:
encoding = self.apparent_encoding
# Decode unicode from given encoding.
try:
content = str(self.content, encoding, errors='replace')
except (LookupError, TypeError):
# A LookupError is raised if the encoding was not found which could
# indicate a misspelling or similar mistake.
#
# A TypeError can be raised if encoding is None
#
# So we try blindly encoding.
content = str(self.content, errors='replace')
return content
def json(self, **kwargs):
"""Returns the json-encoded content of a response, if any.
:param \*\*kwargs: Optional arguments that ``json.loads`` takes.
"""
if not self.encoding and len(self.content) > 3:
# No encoding set. JSON RFC 4627 section 3 states we should expect
# UTF-8, -16 or -32. Detect which one to use; If the detection or
# decoding fails, fall back to `self.text` (using chardet to make
# a best guess).
encoding = guess_json_utf(self.content)
if encoding is not None:
return json.loads(self.content.decode(encoding), **kwargs)
return json.loads(self.text or self.content, **kwargs)
@property
def links(self):
"""Returns the parsed header links of the response, if any."""
header = self.headers['link']
# l = MultiDict()
l = {}
if header:
links = parse_header_links(header)
for link in links:
key = link.get('rel') or link.get('url')
l[key] = link
return l
def raise_for_status(self):
"""Raises stored :class:`HTTPError`, if one occurred."""
http_error_msg = ''
if 400 <= self.status_code < 500:
http_error_msg = '%s Client Error: %s' % (self.status_code, self.reason)
elif 500 <= self.status_code < 600:
http_error_msg = '%s Server Error: %s' % (self.status_code, self.reason)
if http_error_msg:
raise HTTPError(http_error_msg, response=self)
def close(self):
return self.raw.release_conn()
| lgpl-2.1 |
Dandandan/wikiprogramming | jsrepl/extern/python/reloop-closured/lib/python2.7/ntpath.py | 81 | 18082 | # Module 'ntpath' -- common operations on WinNT/Win95 pathnames
"""Common pathname manipulations, WindowsNT/95 version.
Instead of importing this module directly, import os and refer to this
module as os.path.
"""
import os
import sys
import stat
import genericpath
import warnings
from genericpath import *
__all__ = ["normcase","isabs","join","splitdrive","split","splitext",
"basename","dirname","commonprefix","getsize","getmtime",
"getatime","getctime", "islink","exists","lexists","isdir","isfile",
"ismount","walk","expanduser","expandvars","normpath","abspath",
"splitunc","curdir","pardir","sep","pathsep","defpath","altsep",
"extsep","devnull","realpath","supports_unicode_filenames","relpath"]
# strings representing various path-related bits and pieces
curdir = '.'
pardir = '..'
extsep = '.'
sep = '\\'
pathsep = ';'
altsep = '/'
defpath = '.;C:\\bin'
if 'ce' in sys.builtin_module_names:
defpath = '\\Windows'
elif 'os2' in sys.builtin_module_names:
# OS/2 w/ VACPP
altsep = '/'
devnull = 'nul'
# Normalize the case of a pathname and map slashes to backslashes.
# Other normalizations (such as optimizing '../' away) are not done
# (this is done by normpath).
def normcase(s):
"""Normalize case of pathname.
Makes all characters lowercase and all slashes into backslashes."""
return s.replace("/", "\\").lower()
# Return whether a path is absolute.
# Trivial in Posix, harder on the Mac or MS-DOS.
# For DOS it is absolute if it starts with a slash or backslash (current
# volume), or if a pathname after the volume letter and colon / UNC resource
# starts with a slash or backslash.
def isabs(s):
"""Test whether a path is absolute"""
s = splitdrive(s)[1]
return s != '' and s[:1] in '/\\'
# Join two (or more) paths.
def join(a, *p):
"""Join two or more pathname components, inserting "\\" as needed.
If any component is an absolute path, all previous path components
will be discarded."""
path = a
for b in p:
b_wins = 0 # set to 1 iff b makes path irrelevant
if path == "":
b_wins = 1
elif isabs(b):
# This probably wipes out path so far. However, it's more
# complicated if path begins with a drive letter:
# 1. join('c:', '/a') == 'c:/a'
# 2. join('c:/', '/a') == 'c:/a'
# But
# 3. join('c:/a', '/b') == '/b'
# 4. join('c:', 'd:/') = 'd:/'
# 5. join('c:/', 'd:/') = 'd:/'
if path[1:2] != ":" or b[1:2] == ":":
# Path doesn't start with a drive letter, or cases 4 and 5.
b_wins = 1
# Else path has a drive letter, and b doesn't but is absolute.
elif len(path) > 3 or (len(path) == 3 and
path[-1] not in "/\\"):
# case 3
b_wins = 1
if b_wins:
path = b
else:
# Join, and ensure there's a separator.
assert len(path) > 0
if path[-1] in "/\\":
if b and b[0] in "/\\":
path += b[1:]
else:
path += b
elif path[-1] == ":":
path += b
elif b:
if b[0] in "/\\":
path += b
else:
path += "\\" + b
else:
# path is not empty and does not end with a backslash,
# but b is empty; since, e.g., split('a/') produces
# ('a', ''), it's best if join() adds a backslash in
# this case.
path += '\\'
return path
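# A few concrete cases of the drive-letter handling described above
# (illustrative, mirroring the numbered comments inside join):
#   join('c:', '/a')   -> 'c:/a'   (case 1)
#   join('c:/a', '/b') -> '/b'     (case 3)
#   join('c:', 'd:/')  -> 'd:/'    (case 4)
#   join('a', 'b')     -> 'a\\b'   (i.e. a\b)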
# Split a path in a drive specification (a drive letter followed by a
# colon) and the path specification.
# It is always true that drivespec + pathspec == p
def splitdrive(p):
"""Split a pathname into drive and path specifiers. Returns a 2-tuple
"(drive,path)"; either part may be empty"""
if p[1:2] == ':':
return p[0:2], p[2:]
return '', p
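# For example (illustrative): splitdrive('c:/dir') -> ('c:', '/dir') and
# splitdrive('dir/file') -> ('', 'dir/file').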
# Parse UNC paths
def splitunc(p):
"""Split a pathname into UNC mount point and relative path specifiers.
Return a 2-tuple (unc, rest); either part may be empty.
If unc is not empty, it has the form '//host/mount' (or similar
using backslashes). unc+rest is always the input path.
Paths containing drive letters never have an UNC part.
"""
if p[1:2] == ':':
return '', p # Drive letter present
firstTwo = p[0:2]
if firstTwo == '//' or firstTwo == '\\\\':
# is a UNC path:
# vvvvvvvvvvvvvvvvvvvv equivalent to drive letter
# \\machine\mountpoint\directories...
# directory ^^^^^^^^^^^^^^^
normp = normcase(p)
index = normp.find('\\', 2)
if index == -1:
##raise RuntimeError, 'illegal UNC path: "' + p + '"'
return ("", p)
index = normp.find('\\', index + 1)
if index == -1:
index = len(p)
return p[:index], p[index:]
return '', p
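# For example (illustrative):
#   splitunc('//host/mount/dir/file') -> ('//host/mount', '/dir/file')
#   splitunc('c:/dir')                -> ('', 'c:/dir')   # drive letters never have a UNC part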
# Split a path in head (everything up to the last '/') and tail (the
# rest). After the trailing '/' is stripped, the invariant
# join(head, tail) == p holds.
# The resulting head won't end in '/' unless it is the root.
def split(p):
"""Split a pathname.
Return tuple (head, tail) where tail is everything after the final slash.
Either part may be empty."""
d, p = splitdrive(p)
# set i to index beyond p's last slash
i = len(p)
while i and p[i-1] not in '/\\':
i = i - 1
head, tail = p[:i], p[i:] # now tail has no slashes
# remove trailing slashes from head, unless it's all slashes
head2 = head
while head2 and head2[-1] in '/\\':
head2 = head2[:-1]
head = head2 or head
return d + head, tail
# Split a path in root and extension.
# The extension is everything starting at the last dot in the last
# pathname component; the root is everything before that.
# It is always true that root + ext == p.
def splitext(p):
return genericpath._splitext(p, sep, altsep, extsep)
splitext.__doc__ = genericpath._splitext.__doc__
# Return the tail (basename) part of a path.
def basename(p):
"""Returns the final component of a pathname"""
return split(p)[1]
# Return the head (dirname) part of a path.
def dirname(p):
"""Returns the directory component of a pathname"""
return split(p)[0]
# Is a path a symbolic link?
# This will always return false on systems where posix.lstat doesn't exist.
def islink(path):
"""Test for symbolic link.
On WindowsNT/95 and OS/2 always returns false
"""
return False
# alias exists to lexists
lexists = exists
# Is a path a mount point? Either a root (with or without drive letter)
# or an UNC path with at most a / or \ after the mount point.
def ismount(path):
"""Test whether a path is a mount point (defined as root of drive)"""
unc, rest = splitunc(path)
if unc:
return rest in ("", "/", "\\")
p = splitdrive(path)[1]
return len(p) == 1 and p[0] in '/\\'
# Directory tree walk.
# For each directory under top (including top itself, but excluding
# '.' and '..'), func(arg, dirname, filenames) is called, where
# dirname is the name of the directory and filenames is the list
# of files (and subdirectories etc.) in the directory.
# The func may modify the filenames list, to implement a filter,
# or to impose a different order of visiting.
def walk(top, func, arg):
"""Directory tree walk with callback function.
For each directory in the directory tree rooted at top (including top
itself, but excluding '.' and '..'), call func(arg, dirname, fnames).
dirname is the name of the directory, and fnames a list of the names of
the files and subdirectories in dirname (excluding '.' and '..'). func
may modify the fnames list in-place (e.g. via del or slice assignment),
and walk will only recurse into the subdirectories whose names remain in
fnames; this can be used to implement a filter, or to impose a specific
order of visiting. No semantics are defined for, or required of, arg,
beyond that arg is always passed to func. It can be used, e.g., to pass
a filename pattern, or a mutable object designed to accumulate
statistics. Passing None for arg is common."""
warnings.warnpy3k("In 3.x, os.path.walk is removed in favor of os.walk.",
stacklevel=2)
try:
names = os.listdir(top)
except os.error:
return
func(arg, top, names)
for name in names:
name = join(top, name)
if isdir(name):
walk(name, func, arg)
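# Illustrative sketch (added, not part of the original module): a walk()
# callback that prunes '.git' directories by editing fnames in place, as the
# docstring above describes.
def _walk_example(top):  # pragma: no cover
    def visit(collected, dirname, fnames):
        if '.git' in fnames:
            fnames.remove('.git')       # walk() will not recurse into it
        collected.append((dirname, list(fnames)))
    results = []
    walk(top, visit, results)
    return results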
# Expand paths beginning with '~' or '~user'.
# '~' means $HOME; '~user' means that user's home directory.
# If the path doesn't begin with '~', or if the user or $HOME is unknown,
# the path is returned unchanged (leaving error reporting to whatever
# function is called with the expanded path as argument).
# See also module 'glob' for expansion of *, ? and [...] in pathnames.
# (A function should also be defined to do full *sh-style environment
# variable expansion.)
def expanduser(path):
"""Expand ~ and ~user constructs.
If user or $HOME is unknown, do nothing."""
if path[:1] != '~':
return path
i, n = 1, len(path)
while i < n and path[i] not in '/\\':
i = i + 1
if 'HOME' in os.environ:
userhome = os.environ['HOME']
elif 'USERPROFILE' in os.environ:
userhome = os.environ['USERPROFILE']
elif not 'HOMEPATH' in os.environ:
return path
else:
try:
drive = os.environ['HOMEDRIVE']
except KeyError:
drive = ''
userhome = join(drive, os.environ['HOMEPATH'])
if i != 1: #~user
userhome = join(dirname(userhome), path[1:i])
return userhome + path[i:]
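# Illustrative sketch (added): typical expanduser() results, assuming the
# environment provides USERPROFILE (or HOMEDRIVE/HOMEPATH) as C:\Users\guido.
def _expanduser_examples():  # pragma: no cover
    return [
        expanduser('~\\Documents'),       # e.g. 'C:\\Users\\guido\\Documents'
        expanduser('~other\\Documents'),  # e.g. 'C:\\Users\\other\\Documents'
        expanduser('plain\\path'),        # returned unchanged (no leading '~')
    ]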
# Expand paths containing shell variable substitutions.
# The following rules apply:
# - no expansion within single quotes
# - '$$' is translated into '$'
# - '%%' is translated into '%' if '%%' are not seen in %var1%%var2%
# - ${varname} is accepted.
# - $varname is accepted.
# - %varname% is accepted.
# - varnames can be made out of letters, digits and the characters '_-'
# (though is not verified in the ${varname} and %varname% cases)
# XXX With COMMAND.COM you can use any characters in a variable name,
# XXX except '^|<>='.
def expandvars(path):
"""Expand shell variables of the forms $var, ${var} and %var%.
Unknown variables are left unchanged."""
if '$' not in path and '%' not in path:
return path
import string
varchars = string.ascii_letters + string.digits + '_-'
res = ''
index = 0
pathlen = len(path)
while index < pathlen:
c = path[index]
if c == '\'': # no expansion within single quotes
path = path[index + 1:]
pathlen = len(path)
try:
index = path.index('\'')
res = res + '\'' + path[:index + 1]
except ValueError:
res = res + path
index = pathlen - 1
elif c == '%': # variable or '%'
if path[index + 1:index + 2] == '%':
res = res + c
index = index + 1
else:
path = path[index+1:]
pathlen = len(path)
try:
index = path.index('%')
except ValueError:
res = res + '%' + path
index = pathlen - 1
else:
var = path[:index]
if var in os.environ:
res = res + os.environ[var]
else:
res = res + '%' + var + '%'
elif c == '$': # variable or '$$'
if path[index + 1:index + 2] == '$':
res = res + c
index = index + 1
elif path[index + 1:index + 2] == '{':
path = path[index+2:]
pathlen = len(path)
try:
index = path.index('}')
var = path[:index]
if var in os.environ:
res = res + os.environ[var]
else:
res = res + '${' + var + '}'
except ValueError:
res = res + '${' + path
index = pathlen - 1
else:
var = ''
index = index + 1
c = path[index:index + 1]
while c != '' and c in varchars:
var = var + c
index = index + 1
c = path[index:index + 1]
if var in os.environ:
res = res + os.environ[var]
else:
res = res + '$' + var
if c != '':
index = index - 1
else:
res = res + c
index = index + 1
return res
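# Illustrative sketch (added): the expansion rules listed above, assuming a
# TEMP environment variable is set.
def _expandvars_examples():  # pragma: no cover
    return [
        expandvars('%TEMP%\\scratch'),   # %var% form
        expandvars('$TEMP\\scratch'),    # $var form
        expandvars('${TEMP}\\scratch'),  # ${var} form
        expandvars("'%TEMP%' quoted"),   # no expansion inside single quotes
        expandvars('100%%'),             # '%%' collapses to '%': '100%'
    ]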
# Normalize a path, e.g. A//B, A/./B and A/foo/../B all become A\B.
# Previously, this function also truncated pathnames to 8+3 format,
# but as this module is called "ntpath", that's obviously wrong!
def normpath(path):
"""Normalize path, eliminating double slashes, etc."""
# Preserve unicode (if path is unicode)
backslash, dot = (u'\\', u'.') if isinstance(path, unicode) else ('\\', '.')
if path.startswith(('\\\\.\\', '\\\\?\\')):
# in the case of paths with these prefixes:
# \\.\ -> device names
# \\?\ -> literal paths
# do not do any normalization, but return the path unchanged
return path
path = path.replace("/", "\\")
prefix, path = splitdrive(path)
# We need to be careful here. If the prefix is empty, and the path starts
# with a backslash, it could either be an absolute path on the current
# drive (\dir1\dir2\file) or a UNC filename (\\server\mount\dir1\file). It
# is therefore imperative NOT to collapse multiple backslashes blindly in
# that case.
# The code below preserves multiple backslashes when there is no drive
# letter. This means that the invalid filename \\\a\b is preserved
# unchanged, where a\\\b is normalised to a\b. It's not clear that there
# is any better behaviour for such edge cases.
if prefix == '':
# No drive letter - preserve initial backslashes
while path[:1] == "\\":
prefix = prefix + backslash
path = path[1:]
else:
# We have a drive letter - collapse initial backslashes
if path.startswith("\\"):
prefix = prefix + backslash
path = path.lstrip("\\")
comps = path.split("\\")
i = 0
while i < len(comps):
if comps[i] in ('.', ''):
del comps[i]
elif comps[i] == '..':
if i > 0 and comps[i-1] != '..':
del comps[i-1:i+1]
i -= 1
elif i == 0 and prefix.endswith("\\"):
del comps[i]
else:
i += 1
else:
i += 1
# If the path is now empty, substitute '.'
if not prefix and not comps:
comps.append(dot)
return prefix + backslash.join(comps)
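# Illustrative sketch (added): normalization results described above.
def _normpath_examples():  # pragma: no cover
    return [
        normpath('A//B'),               # 'A\\B'
        normpath('A/./B'),              # 'A\\B'
        normpath('A/foo/../B'),         # 'A\\B'
        normpath('C:/spam/../ham'),     # 'C:\\ham'
        normpath('\\\\?\\C:/literal'),  # returned unchanged (literal path prefix)
    ]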
# Return an absolute path.
try:
from nt import _getfullpathname
except ImportError: # not running on Windows - mock up something sensible
def abspath(path):
"""Return the absolute version of a path."""
if not isabs(path):
if isinstance(path, unicode):
cwd = os.getcwdu()
else:
cwd = os.getcwd()
path = join(cwd, path)
return normpath(path)
else: # use native Windows method on Windows
def abspath(path):
"""Return the absolute version of a path."""
if path: # Empty path must return current working directory.
try:
path = _getfullpathname(path)
except WindowsError:
pass # Bad path - return unchanged.
elif isinstance(path, unicode):
path = os.getcwdu()
else:
path = os.getcwd()
return normpath(path)
# realpath is a no-op on systems without islink support
realpath = abspath
# Win9x family and earlier have no Unicode filename support.
supports_unicode_filenames = (hasattr(sys, "getwindowsversion") and
sys.getwindowsversion()[3] >= 2)
def _abspath_split(path):
abs = abspath(normpath(path))
prefix, rest = splitunc(abs)
is_unc = bool(prefix)
if not is_unc:
prefix, rest = splitdrive(abs)
return is_unc, prefix, [x for x in rest.split(sep) if x]
def relpath(path, start=curdir):
"""Return a relative version of a path"""
if not path:
raise ValueError("no path specified")
start_is_unc, start_prefix, start_list = _abspath_split(start)
path_is_unc, path_prefix, path_list = _abspath_split(path)
if path_is_unc ^ start_is_unc:
raise ValueError("Cannot mix UNC and non-UNC paths (%s and %s)"
% (path, start))
if path_prefix.lower() != start_prefix.lower():
if path_is_unc:
raise ValueError("path is on UNC root %s, start on UNC root %s"
% (path_prefix, start_prefix))
else:
raise ValueError("path is on drive %s, start on drive %s"
% (path_prefix, start_prefix))
# Work out how much of the filepath is shared by start and path.
i = 0
for e1, e2 in zip(start_list, path_list):
if e1.lower() != e2.lower():
break
i += 1
rel_list = [pardir] * (len(start_list)-i) + path_list[i:]
if not rel_list:
return curdir
return join(*rel_list)
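# Illustrative sketch (added): relative paths computed between absolute
# locations on the same drive; mixing drives or UNC/non-UNC raises ValueError.
def _relpath_examples():  # pragma: no cover
    return [
        relpath('c:\\spam\\eggs', 'c:\\spam'),   # 'eggs'
        relpath('c:\\spam', 'c:\\spam\\eggs'),   # '..'
        relpath('c:\\ham', 'c:\\spam\\eggs'),    # '..\\..\\ham'
    ]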
| mit |
gustavo-guimaraes/siga | backend/appengine/lib/pip/_vendor/six.py | 322 | 22857 | """Utilities for writing code that runs on Python 2 and 3"""
# Copyright (c) 2010-2014 Benjamin Peterson
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
import operator
import sys
import types
__author__ = "Benjamin Peterson <[email protected]>"
__version__ = "1.5.2"
# Useful for very coarse version differentiation.
PY2 = sys.version_info[0] == 2
PY3 = sys.version_info[0] == 3
if PY3:
string_types = str,
integer_types = int,
class_types = type,
text_type = str
binary_type = bytes
MAXSIZE = sys.maxsize
else:
string_types = basestring,
integer_types = (int, long)
class_types = (type, types.ClassType)
text_type = unicode
binary_type = str
if sys.platform.startswith("java"):
# Jython always uses 32 bits.
MAXSIZE = int((1 << 31) - 1)
else:
# It's possible to have sizeof(long) != sizeof(Py_ssize_t).
class X(object):
def __len__(self):
return 1 << 31
try:
len(X())
except OverflowError:
# 32-bit
MAXSIZE = int((1 << 31) - 1)
else:
# 64-bit
MAXSIZE = int((1 << 63) - 1)
del X
def _add_doc(func, doc):
"""Add documentation to a function."""
func.__doc__ = doc
def _import_module(name):
"""Import module, returning the module after the last dot."""
__import__(name)
return sys.modules[name]
class _LazyDescr(object):
def __init__(self, name):
self.name = name
def __get__(self, obj, tp):
result = self._resolve()
setattr(obj, self.name, result) # Invokes __set__.
# This is a bit ugly, but it avoids running this again.
delattr(obj.__class__, self.name)
return result
class MovedModule(_LazyDescr):
def __init__(self, name, old, new=None):
super(MovedModule, self).__init__(name)
if PY3:
if new is None:
new = name
self.mod = new
else:
self.mod = old
def _resolve(self):
return _import_module(self.mod)
def __getattr__(self, attr):
# Hack around the Django autoreloader. The reloader tries to get
# __file__ or __name__ of every module in sys.modules. This doesn't work
        # well if this MovedModule is for a module that is unavailable on this
# machine (like winreg on Unix systems). Thus, we pretend __file__ and
# __name__ don't exist if the module hasn't been loaded yet. See issues
# #51 and #53.
if attr in ("__file__", "__name__") and self.mod not in sys.modules:
raise AttributeError
_module = self._resolve()
value = getattr(_module, attr)
setattr(self, attr, value)
return value
class _LazyModule(types.ModuleType):
def __init__(self, name):
super(_LazyModule, self).__init__(name)
self.__doc__ = self.__class__.__doc__
def __dir__(self):
attrs = ["__doc__", "__name__"]
attrs += [attr.name for attr in self._moved_attributes]
return attrs
# Subclasses should override this
_moved_attributes = []
class MovedAttribute(_LazyDescr):
def __init__(self, name, old_mod, new_mod, old_attr=None, new_attr=None):
super(MovedAttribute, self).__init__(name)
if PY3:
if new_mod is None:
new_mod = name
self.mod = new_mod
if new_attr is None:
if old_attr is None:
new_attr = name
else:
new_attr = old_attr
self.attr = new_attr
else:
self.mod = old_mod
if old_attr is None:
old_attr = name
self.attr = old_attr
def _resolve(self):
module = _import_module(self.mod)
return getattr(module, self.attr)
class _MovedItems(_LazyModule):
"""Lazy loading of moved objects"""
_moved_attributes = [
MovedAttribute("cStringIO", "cStringIO", "io", "StringIO"),
MovedAttribute("filter", "itertools", "builtins", "ifilter", "filter"),
MovedAttribute("filterfalse", "itertools", "itertools", "ifilterfalse", "filterfalse"),
MovedAttribute("input", "__builtin__", "builtins", "raw_input", "input"),
MovedAttribute("map", "itertools", "builtins", "imap", "map"),
MovedAttribute("range", "__builtin__", "builtins", "xrange", "range"),
MovedAttribute("reload_module", "__builtin__", "imp", "reload"),
MovedAttribute("reduce", "__builtin__", "functools"),
MovedAttribute("StringIO", "StringIO", "io"),
MovedAttribute("UserString", "UserString", "collections"),
MovedAttribute("xrange", "__builtin__", "builtins", "xrange", "range"),
MovedAttribute("zip", "itertools", "builtins", "izip", "zip"),
MovedAttribute("zip_longest", "itertools", "itertools", "izip_longest", "zip_longest"),
MovedModule("builtins", "__builtin__"),
MovedModule("configparser", "ConfigParser"),
MovedModule("copyreg", "copy_reg"),
MovedModule("dbm_gnu", "gdbm", "dbm.gnu"),
MovedModule("http_cookiejar", "cookielib", "http.cookiejar"),
MovedModule("http_cookies", "Cookie", "http.cookies"),
MovedModule("html_entities", "htmlentitydefs", "html.entities"),
MovedModule("html_parser", "HTMLParser", "html.parser"),
MovedModule("http_client", "httplib", "http.client"),
MovedModule("email_mime_multipart", "email.MIMEMultipart", "email.mime.multipart"),
MovedModule("email_mime_text", "email.MIMEText", "email.mime.text"),
MovedModule("email_mime_base", "email.MIMEBase", "email.mime.base"),
MovedModule("BaseHTTPServer", "BaseHTTPServer", "http.server"),
MovedModule("CGIHTTPServer", "CGIHTTPServer", "http.server"),
MovedModule("SimpleHTTPServer", "SimpleHTTPServer", "http.server"),
MovedModule("cPickle", "cPickle", "pickle"),
MovedModule("queue", "Queue"),
MovedModule("reprlib", "repr"),
MovedModule("socketserver", "SocketServer"),
MovedModule("_thread", "thread", "_thread"),
MovedModule("tkinter", "Tkinter"),
MovedModule("tkinter_dialog", "Dialog", "tkinter.dialog"),
MovedModule("tkinter_filedialog", "FileDialog", "tkinter.filedialog"),
MovedModule("tkinter_scrolledtext", "ScrolledText", "tkinter.scrolledtext"),
MovedModule("tkinter_simpledialog", "SimpleDialog", "tkinter.simpledialog"),
MovedModule("tkinter_tix", "Tix", "tkinter.tix"),
MovedModule("tkinter_ttk", "ttk", "tkinter.ttk"),
MovedModule("tkinter_constants", "Tkconstants", "tkinter.constants"),
MovedModule("tkinter_dnd", "Tkdnd", "tkinter.dnd"),
MovedModule("tkinter_colorchooser", "tkColorChooser",
"tkinter.colorchooser"),
MovedModule("tkinter_commondialog", "tkCommonDialog",
"tkinter.commondialog"),
MovedModule("tkinter_tkfiledialog", "tkFileDialog", "tkinter.filedialog"),
MovedModule("tkinter_font", "tkFont", "tkinter.font"),
MovedModule("tkinter_messagebox", "tkMessageBox", "tkinter.messagebox"),
MovedModule("tkinter_tksimpledialog", "tkSimpleDialog",
"tkinter.simpledialog"),
MovedModule("urllib_parse", __name__ + ".moves.urllib_parse", "urllib.parse"),
MovedModule("urllib_error", __name__ + ".moves.urllib_error", "urllib.error"),
MovedModule("urllib", __name__ + ".moves.urllib", __name__ + ".moves.urllib"),
MovedModule("urllib_robotparser", "robotparser", "urllib.robotparser"),
MovedModule("xmlrpc_client", "xmlrpclib", "xmlrpc.client"),
MovedModule("winreg", "_winreg"),
]
for attr in _moved_attributes:
setattr(_MovedItems, attr.name, attr)
if isinstance(attr, MovedModule):
sys.modules[__name__ + ".moves." + attr.name] = attr
del attr
_MovedItems._moved_attributes = _moved_attributes
moves = sys.modules[__name__ + ".moves"] = _MovedItems(__name__ + ".moves")
class Module_six_moves_urllib_parse(_LazyModule):
"""Lazy loading of moved objects in six.moves.urllib_parse"""
_urllib_parse_moved_attributes = [
MovedAttribute("ParseResult", "urlparse", "urllib.parse"),
MovedAttribute("parse_qs", "urlparse", "urllib.parse"),
MovedAttribute("parse_qsl", "urlparse", "urllib.parse"),
MovedAttribute("urldefrag", "urlparse", "urllib.parse"),
MovedAttribute("urljoin", "urlparse", "urllib.parse"),
MovedAttribute("urlparse", "urlparse", "urllib.parse"),
MovedAttribute("urlsplit", "urlparse", "urllib.parse"),
MovedAttribute("urlunparse", "urlparse", "urllib.parse"),
MovedAttribute("urlunsplit", "urlparse", "urllib.parse"),
MovedAttribute("quote", "urllib", "urllib.parse"),
MovedAttribute("quote_plus", "urllib", "urllib.parse"),
MovedAttribute("unquote", "urllib", "urllib.parse"),
MovedAttribute("unquote_plus", "urllib", "urllib.parse"),
MovedAttribute("urlencode", "urllib", "urllib.parse"),
]
for attr in _urllib_parse_moved_attributes:
setattr(Module_six_moves_urllib_parse, attr.name, attr)
del attr
Module_six_moves_urllib_parse._moved_attributes = _urllib_parse_moved_attributes
sys.modules[__name__ + ".moves.urllib_parse"] = sys.modules[__name__ + ".moves.urllib.parse"] = Module_six_moves_urllib_parse(__name__ + ".moves.urllib_parse")
class Module_six_moves_urllib_error(_LazyModule):
"""Lazy loading of moved objects in six.moves.urllib_error"""
_urllib_error_moved_attributes = [
MovedAttribute("URLError", "urllib2", "urllib.error"),
MovedAttribute("HTTPError", "urllib2", "urllib.error"),
MovedAttribute("ContentTooShortError", "urllib", "urllib.error"),
]
for attr in _urllib_error_moved_attributes:
setattr(Module_six_moves_urllib_error, attr.name, attr)
del attr
Module_six_moves_urllib_error._moved_attributes = _urllib_error_moved_attributes
sys.modules[__name__ + ".moves.urllib_error"] = sys.modules[__name__ + ".moves.urllib.error"] = Module_six_moves_urllib_error(__name__ + ".moves.urllib.error")
class Module_six_moves_urllib_request(_LazyModule):
"""Lazy loading of moved objects in six.moves.urllib_request"""
_urllib_request_moved_attributes = [
MovedAttribute("urlopen", "urllib2", "urllib.request"),
MovedAttribute("install_opener", "urllib2", "urllib.request"),
MovedAttribute("build_opener", "urllib2", "urllib.request"),
MovedAttribute("pathname2url", "urllib", "urllib.request"),
MovedAttribute("url2pathname", "urllib", "urllib.request"),
MovedAttribute("getproxies", "urllib", "urllib.request"),
MovedAttribute("Request", "urllib2", "urllib.request"),
MovedAttribute("OpenerDirector", "urllib2", "urllib.request"),
MovedAttribute("HTTPDefaultErrorHandler", "urllib2", "urllib.request"),
MovedAttribute("HTTPRedirectHandler", "urllib2", "urllib.request"),
MovedAttribute("HTTPCookieProcessor", "urllib2", "urllib.request"),
MovedAttribute("ProxyHandler", "urllib2", "urllib.request"),
MovedAttribute("BaseHandler", "urllib2", "urllib.request"),
MovedAttribute("HTTPPasswordMgr", "urllib2", "urllib.request"),
MovedAttribute("HTTPPasswordMgrWithDefaultRealm", "urllib2", "urllib.request"),
MovedAttribute("AbstractBasicAuthHandler", "urllib2", "urllib.request"),
MovedAttribute("HTTPBasicAuthHandler", "urllib2", "urllib.request"),
MovedAttribute("ProxyBasicAuthHandler", "urllib2", "urllib.request"),
MovedAttribute("AbstractDigestAuthHandler", "urllib2", "urllib.request"),
MovedAttribute("HTTPDigestAuthHandler", "urllib2", "urllib.request"),
MovedAttribute("ProxyDigestAuthHandler", "urllib2", "urllib.request"),
MovedAttribute("HTTPHandler", "urllib2", "urllib.request"),
MovedAttribute("HTTPSHandler", "urllib2", "urllib.request"),
MovedAttribute("FileHandler", "urllib2", "urllib.request"),
MovedAttribute("FTPHandler", "urllib2", "urllib.request"),
MovedAttribute("CacheFTPHandler", "urllib2", "urllib.request"),
MovedAttribute("UnknownHandler", "urllib2", "urllib.request"),
MovedAttribute("HTTPErrorProcessor", "urllib2", "urllib.request"),
MovedAttribute("urlretrieve", "urllib", "urllib.request"),
MovedAttribute("urlcleanup", "urllib", "urllib.request"),
MovedAttribute("URLopener", "urllib", "urllib.request"),
MovedAttribute("FancyURLopener", "urllib", "urllib.request"),
MovedAttribute("proxy_bypass", "urllib", "urllib.request"),
]
for attr in _urllib_request_moved_attributes:
setattr(Module_six_moves_urllib_request, attr.name, attr)
del attr
Module_six_moves_urllib_request._moved_attributes = _urllib_request_moved_attributes
sys.modules[__name__ + ".moves.urllib_request"] = sys.modules[__name__ + ".moves.urllib.request"] = Module_six_moves_urllib_request(__name__ + ".moves.urllib.request")
class Module_six_moves_urllib_response(_LazyModule):
"""Lazy loading of moved objects in six.moves.urllib_response"""
_urllib_response_moved_attributes = [
MovedAttribute("addbase", "urllib", "urllib.response"),
MovedAttribute("addclosehook", "urllib", "urllib.response"),
MovedAttribute("addinfo", "urllib", "urllib.response"),
MovedAttribute("addinfourl", "urllib", "urllib.response"),
]
for attr in _urllib_response_moved_attributes:
setattr(Module_six_moves_urllib_response, attr.name, attr)
del attr
Module_six_moves_urllib_response._moved_attributes = _urllib_response_moved_attributes
sys.modules[__name__ + ".moves.urllib_response"] = sys.modules[__name__ + ".moves.urllib.response"] = Module_six_moves_urllib_response(__name__ + ".moves.urllib.response")
class Module_six_moves_urllib_robotparser(_LazyModule):
"""Lazy loading of moved objects in six.moves.urllib_robotparser"""
_urllib_robotparser_moved_attributes = [
MovedAttribute("RobotFileParser", "robotparser", "urllib.robotparser"),
]
for attr in _urllib_robotparser_moved_attributes:
setattr(Module_six_moves_urllib_robotparser, attr.name, attr)
del attr
Module_six_moves_urllib_robotparser._moved_attributes = _urllib_robotparser_moved_attributes
sys.modules[__name__ + ".moves.urllib_robotparser"] = sys.modules[__name__ + ".moves.urllib.robotparser"] = Module_six_moves_urllib_robotparser(__name__ + ".moves.urllib.robotparser")
class Module_six_moves_urllib(types.ModuleType):
"""Create a six.moves.urllib namespace that resembles the Python 3 namespace"""
parse = sys.modules[__name__ + ".moves.urllib_parse"]
error = sys.modules[__name__ + ".moves.urllib_error"]
request = sys.modules[__name__ + ".moves.urllib_request"]
response = sys.modules[__name__ + ".moves.urllib_response"]
robotparser = sys.modules[__name__ + ".moves.urllib_robotparser"]
def __dir__(self):
return ['parse', 'error', 'request', 'response', 'robotparser']
sys.modules[__name__ + ".moves.urllib"] = Module_six_moves_urllib(__name__ + ".moves.urllib")
def add_move(move):
"""Add an item to six.moves."""
setattr(_MovedItems, move.name, move)
def remove_move(name):
"""Remove item from six.moves."""
try:
delattr(_MovedItems, name)
except AttributeError:
try:
del moves.__dict__[name]
except KeyError:
raise AttributeError("no such move, %r" % (name,))
if PY3:
_meth_func = "__func__"
_meth_self = "__self__"
_func_closure = "__closure__"
_func_code = "__code__"
_func_defaults = "__defaults__"
_func_globals = "__globals__"
_iterkeys = "keys"
_itervalues = "values"
_iteritems = "items"
_iterlists = "lists"
else:
_meth_func = "im_func"
_meth_self = "im_self"
_func_closure = "func_closure"
_func_code = "func_code"
_func_defaults = "func_defaults"
_func_globals = "func_globals"
_iterkeys = "iterkeys"
_itervalues = "itervalues"
_iteritems = "iteritems"
_iterlists = "iterlists"
try:
advance_iterator = next
except NameError:
def advance_iterator(it):
return it.next()
next = advance_iterator
try:
callable = callable
except NameError:
def callable(obj):
return any("__call__" in klass.__dict__ for klass in type(obj).__mro__)
if PY3:
def get_unbound_function(unbound):
return unbound
create_bound_method = types.MethodType
Iterator = object
else:
def get_unbound_function(unbound):
return unbound.im_func
def create_bound_method(func, obj):
return types.MethodType(func, obj, obj.__class__)
class Iterator(object):
def next(self):
return type(self).__next__(self)
callable = callable
_add_doc(get_unbound_function,
"""Get the function out of a possibly unbound function""")
get_method_function = operator.attrgetter(_meth_func)
get_method_self = operator.attrgetter(_meth_self)
get_function_closure = operator.attrgetter(_func_closure)
get_function_code = operator.attrgetter(_func_code)
get_function_defaults = operator.attrgetter(_func_defaults)
get_function_globals = operator.attrgetter(_func_globals)
def iterkeys(d, **kw):
"""Return an iterator over the keys of a dictionary."""
return iter(getattr(d, _iterkeys)(**kw))
def itervalues(d, **kw):
"""Return an iterator over the values of a dictionary."""
return iter(getattr(d, _itervalues)(**kw))
def iteritems(d, **kw):
"""Return an iterator over the (key, value) pairs of a dictionary."""
return iter(getattr(d, _iteritems)(**kw))
def iterlists(d, **kw):
"""Return an iterator over the (key, [values]) pairs of a dictionary."""
return iter(getattr(d, _iterlists)(**kw))
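# Illustrative sketch (added, not part of six itself): the iter* helpers give
# iterator-style access to dict contents that behaves the same on Python 2
# (iterkeys/itervalues/iteritems) and Python 3 (keys/values/items views).
def _dict_iteration_example():  # pragma: no cover
    d = {"a": 1, "b": 2}
    return (sorted(iterkeys(d)),     # ['a', 'b']
            sorted(itervalues(d)),   # [1, 2]
            sorted(iteritems(d)))    # [('a', 1), ('b', 2)]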
if PY3:
def b(s):
return s.encode("latin-1")
def u(s):
return s
unichr = chr
if sys.version_info[1] <= 1:
def int2byte(i):
return bytes((i,))
else:
# This is about 2x faster than the implementation above on 3.2+
int2byte = operator.methodcaller("to_bytes", 1, "big")
byte2int = operator.itemgetter(0)
indexbytes = operator.getitem
iterbytes = iter
import io
StringIO = io.StringIO
BytesIO = io.BytesIO
else:
def b(s):
return s
# Workaround for standalone backslash
def u(s):
return unicode(s.replace(r'\\', r'\\\\'), "unicode_escape")
unichr = unichr
int2byte = chr
def byte2int(bs):
return ord(bs[0])
def indexbytes(buf, i):
return ord(buf[i])
def iterbytes(buf):
return (ord(byte) for byte in buf)
import StringIO
StringIO = BytesIO = StringIO.StringIO
_add_doc(b, """Byte literal""")
_add_doc(u, """Text literal""")
if PY3:
exec_ = getattr(moves.builtins, "exec")
def reraise(tp, value, tb=None):
if value.__traceback__ is not tb:
raise value.with_traceback(tb)
raise value
else:
def exec_(_code_, _globs_=None, _locs_=None):
"""Execute code in a namespace."""
if _globs_ is None:
frame = sys._getframe(1)
_globs_ = frame.f_globals
if _locs_ is None:
_locs_ = frame.f_locals
del frame
elif _locs_ is None:
_locs_ = _globs_
exec("""exec _code_ in _globs_, _locs_""")
exec_("""def reraise(tp, value, tb=None):
raise tp, value, tb
""")
print_ = getattr(moves.builtins, "print", None)
if print_ is None:
def print_(*args, **kwargs):
"""The new-style print function for Python 2.4 and 2.5."""
fp = kwargs.pop("file", sys.stdout)
if fp is None:
return
def write(data):
if not isinstance(data, basestring):
data = str(data)
# If the file has an encoding, encode unicode with it.
if (isinstance(fp, file) and
isinstance(data, unicode) and
fp.encoding is not None):
errors = getattr(fp, "errors", None)
if errors is None:
errors = "strict"
data = data.encode(fp.encoding, errors)
fp.write(data)
want_unicode = False
sep = kwargs.pop("sep", None)
if sep is not None:
if isinstance(sep, unicode):
want_unicode = True
elif not isinstance(sep, str):
raise TypeError("sep must be None or a string")
end = kwargs.pop("end", None)
if end is not None:
if isinstance(end, unicode):
want_unicode = True
elif not isinstance(end, str):
raise TypeError("end must be None or a string")
if kwargs:
raise TypeError("invalid keyword arguments to print()")
if not want_unicode:
for arg in args:
if isinstance(arg, unicode):
want_unicode = True
break
if want_unicode:
newline = unicode("\n")
space = unicode(" ")
else:
newline = "\n"
space = " "
if sep is None:
sep = space
if end is None:
end = newline
for i, arg in enumerate(args):
if i:
write(sep)
write(arg)
write(end)
_add_doc(reraise, """Reraise an exception.""")
def with_metaclass(meta, *bases):
"""Create a base class with a metaclass."""
return meta("NewBase", bases, {})
def add_metaclass(metaclass):
"""Class decorator for creating a class with a metaclass."""
def wrapper(cls):
orig_vars = cls.__dict__.copy()
orig_vars.pop('__dict__', None)
orig_vars.pop('__weakref__', None)
slots = orig_vars.get('__slots__')
if slots is not None:
if isinstance(slots, str):
slots = [slots]
for slots_var in slots:
orig_vars.pop(slots_var)
return metaclass(cls.__name__, cls.__bases__, orig_vars)
return wrapper
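# Illustrative sketch (added, not part of six itself): using the decorator
# above to attach a metaclass without the incompatible PY2/PY3 class syntax.
def _add_metaclass_example():  # pragma: no cover
    class Meta(type):
        def __new__(mcs, name, bases, attrs):
            attrs.setdefault("tagged", True)
            return super(Meta, mcs).__new__(mcs, name, bases, attrs)

    @add_metaclass(Meta)
    class Tagged(object):
        pass

    return Tagged.tagged  # True under both Python 2 and 3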
| mit |
tareqalayan/ansible | test/units/modules/network/onyx/test_onyx_lldp_interface.py | 50 | 3151 | #
# Copyright: Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
from ansible.compat.tests.mock import patch
from ansible.modules.network.onyx import onyx_lldp_interface
from units.modules.utils import set_module_args
from .onyx_module import TestOnyxModule, load_fixture
class TestOnyxLldpInterfaceModule(TestOnyxModule):
module = onyx_lldp_interface
def setUp(self):
super(TestOnyxLldpInterfaceModule, self).setUp()
self.mock_get_config = patch.object(
onyx_lldp_interface.OnyxLldpInterfaceModule,
"_get_lldp_config")
self.get_config = self.mock_get_config.start()
self.mock_load_config = patch(
'ansible.module_utils.network.onyx.onyx.load_config')
self.load_config = self.mock_load_config.start()
def tearDown(self):
super(TestOnyxLldpInterfaceModule, self).tearDown()
self.mock_get_config.stop()
self.mock_load_config.stop()
def load_fixtures(self, commands=None, transport='cli'):
config_file = 'onyx_lldp_interface_show.cfg'
self.get_config.return_value = load_fixture(config_file)
self.load_config.return_value = None
def test_lldp_no_change(self):
set_module_args(dict(name='Eth1/1', state='present'))
self.execute_module(changed=False)
def test_no_lldp_no_change(self):
set_module_args(dict(name='Eth1/2', state='absent'))
self.execute_module(changed=False)
def test_no_lldp_change(self):
set_module_args(dict(name='Eth1/2', state='present'))
commands = ['interface ethernet 1/2 lldp receive',
'interface ethernet 1/2 lldp transmit']
self.execute_module(changed=True, commands=commands)
def test_lldp_change(self):
set_module_args(dict(name='Eth1/1', state='absent'))
commands = ['interface ethernet 1/1 no lldp receive',
'interface ethernet 1/1 no lldp transmit']
self.execute_module(changed=True, commands=commands)
def test_lldp_aggregate(self):
aggregate = [dict(name='Eth1/1'), dict(name='Eth1/2')]
set_module_args(dict(aggregate=aggregate, state='present'))
commands = ['interface ethernet 1/2 lldp receive',
'interface ethernet 1/2 lldp transmit']
self.execute_module(changed=True, commands=commands)
def test_lldp_aggregate_purge(self):
aggregate = [dict(name='Eth1/3'), dict(name='Eth1/2')]
set_module_args(dict(aggregate=aggregate, state='present', purge=True))
commands = ['interface ethernet 1/2 lldp receive',
'interface ethernet 1/2 lldp transmit',
'interface ethernet 1/3 lldp receive',
'interface ethernet 1/3 lldp transmit',
'interface ethernet 1/1 no lldp receive',
'interface ethernet 1/1 no lldp transmit']
self.execute_module(changed=True, commands=commands)
| gpl-3.0 |
sgraf812/Celero | test/gtest-1.7.0/test/gtest_help_test.py | 2968 | 5856 | #!/usr/bin/env python
#
# Copyright 2009, Google Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Tests the --help flag of Google C++ Testing Framework.
SYNOPSIS
gtest_help_test.py --build_dir=BUILD/DIR
# where BUILD/DIR contains the built gtest_help_test_ file.
gtest_help_test.py
"""
__author__ = '[email protected] (Zhanyong Wan)'
import os
import re
import gtest_test_utils
IS_LINUX = os.name == 'posix' and os.uname()[0] == 'Linux'
IS_WINDOWS = os.name == 'nt'
PROGRAM_PATH = gtest_test_utils.GetTestExecutablePath('gtest_help_test_')
FLAG_PREFIX = '--gtest_'
DEATH_TEST_STYLE_FLAG = FLAG_PREFIX + 'death_test_style'
STREAM_RESULT_TO_FLAG = FLAG_PREFIX + 'stream_result_to'
UNKNOWN_FLAG = FLAG_PREFIX + 'unknown_flag_for_testing'
LIST_TESTS_FLAG = FLAG_PREFIX + 'list_tests'
INCORRECT_FLAG_VARIANTS = [re.sub('^--', '-', LIST_TESTS_FLAG),
re.sub('^--', '/', LIST_TESTS_FLAG),
re.sub('_', '-', LIST_TESTS_FLAG)]
INTERNAL_FLAG_FOR_TESTING = FLAG_PREFIX + 'internal_flag_for_testing'
SUPPORTS_DEATH_TESTS = "DeathTest" in gtest_test_utils.Subprocess(
[PROGRAM_PATH, LIST_TESTS_FLAG]).output
# The help message must match this regex.
HELP_REGEX = re.compile(
FLAG_PREFIX + r'list_tests.*' +
FLAG_PREFIX + r'filter=.*' +
FLAG_PREFIX + r'also_run_disabled_tests.*' +
FLAG_PREFIX + r'repeat=.*' +
FLAG_PREFIX + r'shuffle.*' +
FLAG_PREFIX + r'random_seed=.*' +
FLAG_PREFIX + r'color=.*' +
FLAG_PREFIX + r'print_time.*' +
FLAG_PREFIX + r'output=.*' +
FLAG_PREFIX + r'break_on_failure.*' +
FLAG_PREFIX + r'throw_on_failure.*' +
FLAG_PREFIX + r'catch_exceptions=0.*',
re.DOTALL)
def RunWithFlag(flag):
"""Runs gtest_help_test_ with the given flag.
Returns:
the exit code and the text output as a tuple.
Args:
flag: the command-line flag to pass to gtest_help_test_, or None.
"""
if flag is None:
command = [PROGRAM_PATH]
else:
command = [PROGRAM_PATH, flag]
child = gtest_test_utils.Subprocess(command)
return child.exit_code, child.output
class GTestHelpTest(gtest_test_utils.TestCase):
"""Tests the --help flag and its equivalent forms."""
def TestHelpFlag(self, flag):
"""Verifies correct behavior when help flag is specified.
    The right message must be printed and the tests must
    be skipped when the given flag is specified.
Args:
flag: A flag to pass to the binary or None.
"""
exit_code, output = RunWithFlag(flag)
self.assertEquals(0, exit_code)
self.assert_(HELP_REGEX.search(output), output)
if IS_LINUX:
self.assert_(STREAM_RESULT_TO_FLAG in output, output)
else:
self.assert_(STREAM_RESULT_TO_FLAG not in output, output)
if SUPPORTS_DEATH_TESTS and not IS_WINDOWS:
self.assert_(DEATH_TEST_STYLE_FLAG in output, output)
else:
self.assert_(DEATH_TEST_STYLE_FLAG not in output, output)
def TestNonHelpFlag(self, flag):
"""Verifies correct behavior when no help flag is specified.
Verifies that when no help flag is specified, the tests are run
and the help message is not printed.
Args:
flag: A flag to pass to the binary or None.
"""
exit_code, output = RunWithFlag(flag)
self.assert_(exit_code != 0)
self.assert_(not HELP_REGEX.search(output), output)
def testPrintsHelpWithFullFlag(self):
self.TestHelpFlag('--help')
def testPrintsHelpWithShortFlag(self):
self.TestHelpFlag('-h')
def testPrintsHelpWithQuestionFlag(self):
self.TestHelpFlag('-?')
def testPrintsHelpWithWindowsStyleQuestionFlag(self):
self.TestHelpFlag('/?')
def testPrintsHelpWithUnrecognizedGoogleTestFlag(self):
self.TestHelpFlag(UNKNOWN_FLAG)
def testPrintsHelpWithIncorrectFlagStyle(self):
for incorrect_flag in INCORRECT_FLAG_VARIANTS:
self.TestHelpFlag(incorrect_flag)
def testRunsTestsWithoutHelpFlag(self):
"""Verifies that when no help flag is specified, the tests are run
and the help message is not printed."""
self.TestNonHelpFlag(None)
def testRunsTestsWithGtestInternalFlag(self):
"""Verifies that the tests are run and no help message is printed when
a flag starting with Google Test prefix and 'internal_' is supplied."""
self.TestNonHelpFlag(INTERNAL_FLAG_FOR_TESTING)
if __name__ == '__main__':
gtest_test_utils.Main()
| apache-2.0 |
openstack/keystone | keystone/common/sql/migrate_repo/versions/108_add_failed_auth_columns.py | 5 | 1065 | # Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import sqlalchemy as sql
def upgrade(migrate_engine):
meta = sql.MetaData()
meta.bind = migrate_engine
failed_auth_count = sql.Column('failed_auth_count', sql.Integer,
nullable=True)
failed_auth_at = sql.Column('failed_auth_at', sql.DateTime(),
nullable=True)
local_user_table = sql.Table('local_user', meta, autoload=True)
local_user_table.create_column(failed_auth_count)
local_user_table.create_column(failed_auth_at)
| apache-2.0 |
platinhom/ManualHom | Coding/Python/scipy-html-0.16.1/generated/scipy-stats-probplot-1.py | 1 | 1101 | from scipy import stats
import matplotlib.pyplot as plt
nsample = 100
np.random.seed(7654321)
# A t distribution with small degrees of freedom:
ax1 = plt.subplot(221)
x = stats.t.rvs(3, size=nsample)
res = stats.probplot(x, plot=plt)
# A t distribution with larger degrees of freedom:
ax2 = plt.subplot(222)
x = stats.t.rvs(25, size=nsample)
res = stats.probplot(x, plot=plt)
# A mixture of two normal distributions with broadcasting:
ax3 = plt.subplot(223)
x = stats.norm.rvs(loc=[0,5], scale=[1,1.5],
                   size=(nsample//2, 2)).ravel()
res = stats.probplot(x, plot=plt)
# A standard normal distribution:
ax4 = plt.subplot(224)
x = stats.norm.rvs(loc=0, scale=1, size=nsample)
res = stats.probplot(x, plot=plt)
# Produce a new figure with a loggamma distribution, using the ``dist`` and
# ``sparams`` keywords:
fig = plt.figure()
ax = fig.add_subplot(111)
x = stats.loggamma.rvs(c=2.5, size=500)
stats.probplot(x, dist=stats.loggamma, sparams=(2.5,), plot=ax)
ax.set_title("Probplot for loggamma dist with shape parameter 2.5")
# Show the results with Matplotlib:
plt.show()
| gpl-2.0 |
joebowen/movement_validation_cloud | djangodev/lib/python2.7/site-packages/boto/beanstalk/__init__.py | 145 | 1680 | # Copyright (c) 2013 Amazon.com, Inc. or its affiliates.
# All Rights Reserved
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish, dis-
# tribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the fol-
# lowing conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
#
from boto.regioninfo import RegionInfo, get_regions
def regions():
"""
Get all available regions for the AWS Elastic Beanstalk service.
:rtype: list
:return: A list of :class:`boto.regioninfo.RegionInfo`
"""
import boto.beanstalk.layer1
return get_regions(
'elasticbeanstalk',
connection_cls=boto.beanstalk.layer1.Layer1
)
def connect_to_region(region_name, **kw_params):
for region in regions():
if region.name == region_name:
return region.connect(**kw_params)
return None
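# Illustrative usage sketch (added): looking up a region by name and opening a
# Layer1 connection to it. The region name and credentials are placeholders,
# not values shipped with this module.
def _example_connect():  # pragma: no cover
    return connect_to_region('us-east-1',
                             aws_access_key_id='<access-key-id>',
                             aws_secret_access_key='<secret-key>')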
| mit |
danieluct/ntv2generator | ntv2generator/ntv2writer.py | 1 | 15705 | """
This file is part of ntv2generator.
ntv2generator is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
ntv2generator is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with ntv2generator. If not, see <http://www.gnu.org/licenses/>.
"""
import datetime
import os
import struct
def _format_8bit_str(input_string):
return "{0:<8}".format(input_string[:8])
def _format_ntv2_record(name, value, type_='f', binary_format=True):
if name == "RECORD":
if binary_format:
return struct.pack("<4f",*value)
else:
return " ".join(["{0:6f}".format(x) for x in value]) + "\n"
else:
if type_ == "s":
if binary_format:
return struct.pack("<8s8s",
_format_8bit_str(name),
_format_8bit_str(value))
else:
return (_format_8bit_str(name) + " " +
_format_8bit_str(value) + "\n")
elif type_ == "i":
if binary_format:
return struct.pack("<8si4x",
_format_8bit_str(name),
value)
else:
return _format_8bit_str(name) + " " + str(int(value)) + "\n"
elif type_ == "f":
if binary_format:
return struct.pack("<8sd",
_format_8bit_str(name),
value)
else:
return (_format_8bit_str(name) + " " +
"{0:4f}".format(value) + "\n")
else:
raise Exception("Unknown record format!")
class CRSDef:
def __init__(self, name, major_axis, minor_axis):
self.name = name
self.major_axis = major_axis
self.minor_axis = minor_axis
ETRS89_CRS = CRSDef("ETRS89", 6378137.000, 6356752.314)
class BoundingBox:
def __init__(self, north, south, west, east):
self.north = north
self.south = south
self.east = east
self.west = west
class NTv2File:
def __init__(self,
coord_unit="SECONDS"):
self.has_overview = False
self.added_sub_files = 0
self.subfiles_dict ={}
if coord_unit not in ["SECONDS", "MINUTES", "DEGREES"]:
raise Exception("Unknown unit for coordinates!")
else:
self.gridshift_data_type = coord_unit
def set_ref_systems(self, crs_from, crs_to, overwrite=False):
if self.has_overview and not overwrite:
raise Exception("Header was previously set!")
self.crs_from = crs_from
self.crs_to = crs_to
self.has_overview = True
def add_subfile(self, subFile, overwrite=False):
if subFile.name in self.subfiles_dict.keys() and not overwrite:
raise Exception(
"Subfile with name {0} already exists!".format(subFile.name)
)
if (subFile.parent != "NONE"
and subFile.parent not in self.subfiles_dict.keys()):
raise Exception(
"Parent with name {0} was not defined!".format(subFile.name)
)
self.subfiles_dict[subFile.name] = subFile
    def create_subfile(self, name, parent='NONE', overwrite=False):
        if name in self.subfiles_dict.keys() and not overwrite:
            raise Exception(
                "Subfile with name {0} already exists!".format(name)
                )
        if parent != "NONE" and parent not in self.subfiles_dict.keys():
            raise Exception(
                "Parent with name {0} was not defined!".format(parent)
                )
subFile = NTv2SubFile(name, parent)
self.subfiles_dict[name] = subFile
return subFile
def write_to_file(self, path, name, f_format='b',
overwrite=False):
self.file_name = os.path.join(path, name)
if os.path.exists(self.file_name) and not overwrite:
raise Exception("File already exists!")
if f_format == 'a' or f_format == 'A':
binary_format = False
elif f_format == 'b' or f_format == 'B':
binary_format = True
else:
raise Exception("Unknown format!")
if not self.has_overview:
raise Exception("Header info was not set!")
if not self.subfiles_dict.keys():
raise Exception("No subfiles have been defined!")
if binary_format:
output_file = open(self.file_name, "wb")
else:
output_file = open(self.file_name, "w")
self._write_header(output_file, binary_format)
for key in self.subfiles_dict.keys():
self.subfiles_dict[key].write_to_file(output_file, binary_format)
self._write_eof(output_file, binary_format)
output_file.close()
def _write_eof(self, output_file, binary_format=True):
if binary_format:
output_file.write(struct.pack("<8s8x", "END "))
else:
output_file.write("END")
def _write_header(self, output_file, binary_format=True):
if not self.has_overview:
raise Exception("No overview file defined!")
output_file.write(_format_ntv2_record("NUM_OREC", 11,
'i', binary_format))
output_file.write(_format_ntv2_record("NUM_SREC", 11,
'i', binary_format))
output_file.write(_format_ntv2_record("NUM_FILE",
len(self.subfiles_dict.keys()),
'i', binary_format))
output_file.write(_format_ntv2_record("GS_TYPE",
self.gridshift_data_type,
's', binary_format))
output_file.write(_format_ntv2_record("VERSION", "NTv2.0",
's', binary_format))
output_file.write(_format_ntv2_record("SYSTEM_F", self.crs_from.name,
's', binary_format))
output_file.write(_format_ntv2_record("SYSTEM_T", self.crs_to.name,
's', binary_format))
output_file.write(_format_ntv2_record("MAJOR_F ",
self.crs_from.major_axis,
'f', binary_format))
output_file.write(_format_ntv2_record("MINOR_F ",
self.crs_from.minor_axis,
'f', binary_format))
output_file.write(_format_ntv2_record("MAJOR_T ",
self.crs_to.major_axis,
'f', binary_format))
output_file.write(_format_ntv2_record("MINOR_T ",
self.crs_to.minor_axis,
'f', binary_format))
if not binary_format:
output_file.write("\n")
class NTv2SubFile:
def __init__(self, name, parent ='NONE'):
self.name = name
self.parent = parent
self.bbox_set = False
self.inc_set = False
self.dates_set = False
self.gs_count = 0
self.gs_list = []
def set_limits(self, bounding_box, overwrite=False):
if self.bbox_set and not overwrite:
raise Exception("Subfile limits have already been set!")
self.bounding_box = bounding_box
self.bbox_set = True
def set_coord_increment(self, lat_increment,
long_increment, overwrite=False):
if not self.bbox_set:
raise Exception(
"Subfile limits have to be set before setting increments!"
)
if self.inc_set and not overwrite:
raise Exception(
"Subfile coordinate increments have already been set!"
)
self.lat_increase = lat_increment
self.long_increase = long_increment
self.inc_set = True
self.gs_count = int(
(abs(self.bounding_box.north-self.bounding_box.south)/
self.lat_increase)
+ 1
)* int(
(abs(self.bounding_box.east-self.bounding_box.west)/
self.long_increase)
+ 1)
def set_dates(self, create_date, update_date=None, overwrite=False):
if self.dates_set and not overwrite:
raise Exception("Subfile date have already been set!")
self.date_created = create_date
if update_date is None:
self.date_updated = self.date_created
else:
self.date_updated = update_date
self.dates_set = True
    def set_gridshifts(self, grid_shift_array, overwrite=False):
if not self.bbox_set or not self.inc_set:
raise Exception(
"Subfile limits and increments have to be set before "
"setting grid shifts!"
)
if self.gs_list and not overwrite:
raise Exception("Grid shift have already been set!")
if len(grid_shift_array) < self.gs_count:
raise Exception(
"Input array does not contain enough grid shifts. "
"Required entries: {0}.".format(self.gc_count)
)
self.gs_list = grid_shift_array
    def clear_gridshifts(self):
self.gs_list = []
    def add_gridshift(self, latitude_shift, longitude_shift,
latitude_accuracy, longitude_accuracy):
if len(self.gs_list) + 1 > self.gs_count:
raise Exception("All grid shifts have already been added!")
else:
self.gs_list.append([
latitude_shift, longitude_shift,
latitude_accuracy, longitude_accuracy
])
def write_to_file(self, output_file, binary_format=True):
if not self.bbox_set:
raise Exception(
"Subfile limits have to be set before saving subfile!"
)
if not self.inc_set:
raise Exception(
"Subfile increments have to be set before saving subfile!"
)
if not self.dates_set:
raise Exception(
"Subfile dates have to be set before saving subfile!"
)
if len(self.gs_list) < self.gs_count:
raise Exception(
"All grid shift points have to be added before saving "
"subfile " + self.name + "! "
"Current entries: {0}. Expected: {1}".format(len(self.gs_list),
self.gs_count))
self._write_header(output_file, binary_format)
for grid_shift in self.gs_list:
self._write_record(output_file,
grid_shift[0], grid_shift[1],
grid_shift[2], grid_shift[3],
binary_format)
if not binary_format:
output_file.write("\n")
def _write_header(self, output_file, binary_format=True):
if not self.bbox_set:
raise Exception(
"Subfile limits have not been set!"
)
if not self.inc_set:
raise Exception(
"Subfile coordinate increments have not been set!"
)
if not self.dates_set:
raise Exception(
"Subfile dates have not been set!"
)
if self.gs_count == 0:
raise Exception(
"There is something wrong with the limits and/or increments!"
)
output_file.write(_format_ntv2_record("SUB_NAME", self.name,
"s", binary_format))
output_file.write(_format_ntv2_record("PARENT", self.parent,
"s", binary_format))
output_file.write(_format_ntv2_record("CREATED ",
self.date_created.strftime("%d%m%Y"),
"s", binary_format))
output_file.write(_format_ntv2_record("UPDATED ",
self.date_updated.strftime("%d%m%Y"),
"s", binary_format))
output_file.write(_format_ntv2_record("S_LAT", self.bounding_box.south,
"f", binary_format))
output_file.write(_format_ntv2_record("N_LAT", self.bounding_box.north,
"f", binary_format))
output_file.write(_format_ntv2_record("E_LONG",
self.bounding_box.east*-1,
"f", binary_format))
output_file.write(_format_ntv2_record("W_LONG",
self.bounding_box.west*-1,
"f", binary_format))
output_file.write(_format_ntv2_record("LAT_INC", self.lat_increase,
"f", binary_format))
output_file.write(_format_ntv2_record("LONG_INC", self.long_increase,
"f", binary_format))
output_file.write(_format_ntv2_record("GS_COUNT", self.gs_count,
"i", binary_format))
if not binary_format:
output_file.write("\n")
    def _write_record(self, output_file,
latitude_shift, longitude_shift,
latitude_accuracy, longitude_accuracy,
binary_format=True):
output_file.write(_format_ntv2_record("RECORD",
[
latitude_shift, longitude_shift,
latitude_accuracy, longitude_accuracy
],
"f", binary_format))
def _test():
f_test = NTv2File()
crs_from = CRSDef("Stereo70", 6378245.0, 6356863.019)
crs_to = ETRS89_CRS
f_test.set_ref_systems(crs_from, crs_to)
subFile = f_test.create_subfile("ANCPI+TNS")
bounding_box = BoundingBox(174422.502, 156677.502, 72415.3775, 107465.3775)
lat_inc = 35.0
long_inc = 50.0
subFile.set_limits(bounding_box)
subFile.set_coord_increment(lat_inc, long_inc)
subFile.set_dates(datetime.datetime.now())
f_test.write_to_file(r"D:\Data\Data\ntv2",
"test2.txt", f_format='a',
overwrite=True)
| gpl-2.0 |
sunlightlabs/django-meetup | meetup/models.py | 1 | 3123 | from django.conf import settings
from django.db import models
from meetup.api import MeetupClient
import datetime
STATUSES = [(s, s) for s in ('past','pending','upcoming')]
API_KEY = getattr(settings, 'MEETUP_KEY', None)
class Account(models.Model):
key = models.CharField(max_length=128)
description = models.CharField(max_length=128)
slug = models.SlugField()
container_id = models.CharField(max_length=16, blank=True)
meetup_url = models.URLField(verify_exists=False, blank=True)
sync = models.BooleanField(default=True)
def __unicode__(self):
return self.slug
def past_events(self):
return self.events.filter(status='past')
def upcoming_events(self):
return self.events.exclude(status='past')
class EventManager(models.Manager):
def past(self):
return Event.objects.filter(status='past')
def upcoming(self):
return Event.objects.exclude(status='past')
class Event(models.Model):
objects = EventManager()
account = models.ForeignKey(Account, related_name="events")
# Meetup.com fields
id = models.CharField(max_length=255, primary_key=True)
meetup_url = models.URLField(verify_exists=False)
title = models.CharField(max_length=255, blank=True)
description = models.TextField(blank=True)
start_time = models.DateTimeField(blank=True, null=True)
location = models.CharField(max_length=255, blank=True)
address = models.CharField(max_length=128, blank=True)
city = models.CharField(max_length=64, blank=True)
state = models.CharField(max_length=64, blank=True)
zipcode = models.CharField(max_length=10, blank=True)
latitude = models.CharField(max_length=16, blank=True)
longitude = models.CharField(max_length=16, blank=True)
url = models.URLField(verify_exists=False, max_length=255, blank=True)
rsvp_count = models.IntegerField(default=0)
timestamp = models.DateTimeField()
status = models.CharField(max_length=16, choices=STATUSES)
organizer_id = models.CharField(max_length=32, blank=True)
organizer_name = models.CharField(max_length=128, blank=True)
# user defined fields
# none for now, add tags later
class Meta:
ordering = ('start_time',)
def __unicode__(self):
return self.pk
def save(self, sync=True, **kwargs):
super(Event, self).save(**kwargs)
# if sync:
# api_client = MeetupClient(self.account.key)
# api_client.update_event(self.pk, udf_category=self.category)
def city_state(self):
if self.city:
if self.state:
return "%s, %s" % (self.city, self.state)
else:
return self.city
elif self.state:
return self.state
else:
return ''
def short_description(self, length=64):
if len(self.description) > length:
desc = self.description[:length]
if desc.endswith(' '):
desc = desc[:-1]
return desc + '...'
return self.description | bsd-3-clause |
Creworker/FreeCAD | src/Mod/Path/PathScripts/PathSelection.py | 15 | 8856 | # -*- coding: utf-8 -*-
#***************************************************************************
#* *
#* Copyright (c) 2015 Dan Falck <[email protected]> *
#* *
#* This program is free software; you can redistribute it and/or modify *
#* it under the terms of the GNU Lesser General Public License (LGPL) *
#* as published by the Free Software Foundation; either version 2 of *
#* the License, or (at your option) any later version. *
#* for detail see the LICENCE text file. *
#* *
#* This program is distributed in the hope that it will be useful, *
#* but WITHOUT ANY WARRANTY; without even the implied warranty of *
#* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the *
#* GNU Library General Public License for more details. *
#* *
#* You should have received a copy of the GNU Library General Public *
#* License along with this program; if not, write to the Free Software *
#* Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 *
#* USA *
#* *
#***************************************************************************
'''Path selection function select a face or faces, two edges, etc to get a dictionary with what was selected in order '''
import FreeCAD,FreeCADGui
import Part
from FreeCAD import Vector
def equals(p1,p2):
'''returns True if vertexes have same coordinates within precision amount of digits '''
precision = 12 #hardcoded
p=precision
u = Vector(p1.X,p1.Y,p1.Z)
v = Vector(p2.X,p2.Y,p2.Z)
vector = (u.sub(v))
isNull = (round(vector.x,p)==0 and round(vector.y,p)==0 and round(vector.z,p)==0)
return isNull
def Sort2Edges(edgelist):
'''Sort2Edges(edgelist) simple function to reorder the start and end pts of two edges
based on their selection order. Returns the list, the start point,
and their common point, => edgelist, vertex, vertex'''
if len(edgelist)>=2:
vlist = []
e0 = edgelist[0]
e1=edgelist[1]
a0 = e0.Vertexes[0]
a1 = e0.Vertexes[1]
b0 = e1.Vertexes[0]
b1 = e1.Vertexes[1]
# comparison routine to order two edges:
if equals(a1,b0):
vlist.append((a0.Point.x,a0.Point.y))
vlist.append((a1.Point.x,a1.Point.y))
vlist.append((b1.Point.x,b1.Point.y))
if equals(a0,b0):
vlist.append((a1.Point.x,a1.Point.y))
vlist.append((a0.Point.x,a0.Point.y))
vlist.append((b1.Point.x,b1.Point.y))
if equals(a0,b1):
vlist.append((a1.Point.x,a1.Point.y))
vlist.append((a0.Point.x,a0.Point.y))
vlist.append((b0.Point.x,b0.Point.y))
if equals(a1,b1):
vlist.append((a0.Point.x,a0.Point.y))
vlist.append((a1.Point.x,a1.Point.y))
vlist.append((b0.Point.x,b0.Point.y))
edgestart = Vector(vlist[0][0],vlist[0][1],e0.Vertexes[1].Z)
edgecommon = Vector(vlist[1][0],vlist[1][1],e0.Vertexes[1].Z)
return vlist,edgestart,edgecommon
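# Illustrative sketch (added for clarity, not part of the original file): with two
# connected line edges, Sort2Edges returns the ordered 2D point list plus the start
# point and the shared point. Part.makeLine is assumed here only to build sample
# geometry; the coordinates are hypothetical.
def _sort2edges_demo():
    e0 = Part.makeLine((0, 0, 0), (10, 0, 0))
    e1 = Part.makeLine((10, 0, 0), (10, 5, 0))
    vlist, start, common = Sort2Edges([e0, e1])
    # expected (roughly): start at (0,0,0), common point at (10,0,0)
    return vlist, start, common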
def segments(poly):
'''Return successive segment pairs from a sequence of (x, y) coordinate pairs, wrapping back to the first point to close the polygon '''
return zip(poly, poly[1:] + [poly[0]])
def check_clockwise(poly):
'''
check_clockwise(poly) returns True if the selected wire is clockwise and False if it is
counter-clockwise, based on point order. poly = [(x1,y1),(x2,y2),(x3,y3)]
'''
clockwise = False
if (sum(x0*y1 - x1*y0 for ((x0, y0), (x1, y1)) in segments(poly))) < 0:
clockwise = not clockwise
return clockwise
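# Quick sanity example (added for clarity, not in the original file): the sign of the
# shoelace sum decides the orientation, so reversing the point order flips the result.
# The triangle coordinates are arbitrary sample data.
def _check_clockwise_demo():
    cw = [(0, 0), (0, 1), (1, 1)]        # walked in clockwise order
    ccw = list(reversed(cw))             # same triangle, opposite direction
    return check_clockwise(cw), check_clockwise(ccw)   # expected: (True, False)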
def multiSelect():
'''
multiSelect() A function for selecting elements of an object for CNC path operations.
Select just a face, an edge, or two edges to indicate direction, a vertex on the object, a point not on the object,
or some combination. Returns a dictionary.
'''
sel = FreeCADGui.Selection.getSelectionEx()
numobjs = len([selobj.Object for selobj in sel])
if numobjs == 0:
FreeCAD.Console.PrintError('Please select some objects and try again.\n')
return
goodselect = False
for s in sel:
for i in s.SubObjects:
if i.ShapeType == 'Face':
goodselect = True
if i.ShapeType == 'Edge':
goodselect = True
if i.ShapeType == 'Vertex':
goodselect = True
if not goodselect:
FreeCAD.Console.PrintError('Please select a face and/or edges along with points (optional) and try again.\n')
return
selItems = {}
selItems['objname']=None #the parent object name - a 3D solid
selItems['pointlist']=None #start and end points
selItems['pointnames']=None #names of points for document object
selItems['facenames']=None # the selected face name
selItems['facelist']=None #list of faces selected
selItems['edgelist']=None #some edges that could be selected along with points and faces
selItems['edgenames']=None
selItems['pathwire']=None #the whole wire around edges of the face
selItems['clockwise']=None
selItems['circles']=None
facenames = []
edgelist =[]
edgenames=[]
ptlist=[]
ptnames=[]
circlelist=[]
face = False
edges = False
points = False
wireobj = False
circles = False
facelist= []
for s in sel:
if s.Object.Shape.ShapeType in ['Solid','Compound','Wire','Vertex']:
if not (s.Object.Shape.ShapeType =='Vertex'):
objname = s.ObjectName
selItems['objname'] =objname
if s.Object.Shape.ShapeType == 'Wire':
wireobj = True
if s.Object.Shape.ShapeType == 'Vertex':
ptnames.append(s.ObjectName)
# ptlist.append(s.Object)
points = True
for sub in s.SubObjects:
if sub.ShapeType =='Face':
facelist.append(sub)
face = True
if sub.ShapeType =='Edge':
edge = sub
edgelist.append(edge)
edges = True
if isinstance(sub.Curve,Part.Circle):
circlelist.append(edge)
circles = True
if sub.ShapeType =='Vertex':
ptlist.append(sub)
points = True
for sub in s.SubElementNames:
if 'Face' in sub:
facename = sub
facenames.append(facename)
if 'Edge' in sub:
edgenames.append(sub)
# now indicate which wire is going to be processed, based on which edges are selected
if facelist:
selItems['facelist']=facelist
if edges:
if face:
selItems['edgelist'] =edgelist
for fw in facelist[0].Wires:
for e in fw.Edges:
if e.isSame(edge):
pathwire = fw
selItems['pathwire'] =pathwire
elif wireobj:
selItems['pathwire'] =s.Object.Shape
selItems['edgelist'] =edgelist
else:
for w in s.Object.Shape.Wires:
for e in w.Edges:
if e.BoundBox.ZMax == e.BoundBox.ZMin: # only consider edges lying at constant Z (flat), like the selected edge
if e.isSame(edge):
pathwire = w
selItems['pathwire'] =pathwire
selItems['edgelist'] =edgelist
if not edges:
if face:
selItems['pathwire'] =facelist[0].OuterWire
if edges and (len(edgelist)>=2):
vlist,edgestart,edgecommon=Sort2Edges(edgelist)
edgepts ={}
edgepts['vlist'] = vlist
edgepts['edgestart']=edgestart # start point of edges selected
edgepts['edgecommon']=edgecommon # point where the two edges join - will be the last point in the first gcode line
selItems['edgepts']=edgepts
if check_clockwise(vlist):
selItems['clockwise']=True
elif check_clockwise(vlist) == False:
selItems['clockwise']=False
if points:
selItems['pointlist'] = ptlist
selItems['pointnames'] = ptnames
if edges:
selItems['edgenames']=edgenames
if face:
selItems['facenames'] = facenames
if circles:
selItems['circles'] = circlelist
return selItems
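# Hedged usage sketch (added; not part of the original file): a Path operation would
# typically call multiSelect() after the user picks geometry in the 3D view, then
# branch on the returned dictionary. The key names used below are the ones set above.
def _multiselect_demo():
    selection = multiSelect()
    if selection and selection['pathwire'] is not None:
        FreeCAD.Console.PrintMessage('selected wire on %s (clockwise: %s)\n'
                                     % (selection['objname'], selection['clockwise']))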
| lgpl-2.1 |
Huskerboy/startbootstrap-freelancer | freelancer_env/Lib/site-packages/pip/utils/__init__.py | 323 | 27187 | from __future__ import absolute_import
from collections import deque
import contextlib
import errno
import io
import locale
# we have a submodule named 'logging' which would shadow this if we used the
# regular name:
import logging as std_logging
import re
import os
import posixpath
import shutil
import stat
import subprocess
import sys
import tarfile
import zipfile
from pip.exceptions import InstallationError
from pip.compat import console_to_str, expanduser, stdlib_pkgs
from pip.locations import (
site_packages, user_site, running_under_virtualenv, virtualenv_no_global,
write_delete_marker_file,
)
from pip._vendor import pkg_resources
from pip._vendor.six.moves import input
from pip._vendor.six import PY2
from pip._vendor.retrying import retry
if PY2:
from io import BytesIO as StringIO
else:
from io import StringIO
__all__ = ['rmtree', 'display_path', 'backup_dir',
'ask', 'splitext',
'format_size', 'is_installable_dir',
'is_svn_page', 'file_contents',
'split_leading_dir', 'has_leading_dir',
'normalize_path',
'renames', 'get_terminal_size', 'get_prog',
'unzip_file', 'untar_file', 'unpack_file', 'call_subprocess',
'captured_stdout', 'ensure_dir',
'ARCHIVE_EXTENSIONS', 'SUPPORTED_EXTENSIONS',
'get_installed_version']
logger = std_logging.getLogger(__name__)
BZ2_EXTENSIONS = ('.tar.bz2', '.tbz')
XZ_EXTENSIONS = ('.tar.xz', '.txz', '.tlz', '.tar.lz', '.tar.lzma')
ZIP_EXTENSIONS = ('.zip', '.whl')
TAR_EXTENSIONS = ('.tar.gz', '.tgz', '.tar')
ARCHIVE_EXTENSIONS = (
ZIP_EXTENSIONS + BZ2_EXTENSIONS + TAR_EXTENSIONS + XZ_EXTENSIONS)
SUPPORTED_EXTENSIONS = ZIP_EXTENSIONS + TAR_EXTENSIONS
try:
import bz2 # noqa
SUPPORTED_EXTENSIONS += BZ2_EXTENSIONS
except ImportError:
logger.debug('bz2 module is not available')
try:
# Only for Python 3.3+
import lzma # noqa
SUPPORTED_EXTENSIONS += XZ_EXTENSIONS
except ImportError:
logger.debug('lzma module is not available')
def import_or_raise(pkg_or_module_string, ExceptionType, *args, **kwargs):
try:
return __import__(pkg_or_module_string)
except ImportError:
raise ExceptionType(*args, **kwargs)
def ensure_dir(path):
"""os.path.makedirs without EEXIST."""
try:
os.makedirs(path)
except OSError as e:
if e.errno != errno.EEXIST:
raise
def get_prog():
try:
if os.path.basename(sys.argv[0]) in ('__main__.py', '-c'):
return "%s -m pip" % sys.executable
except (AttributeError, TypeError, IndexError):
pass
return 'pip'
# Retry every half second for up to 3 seconds
@retry(stop_max_delay=3000, wait_fixed=500)
def rmtree(dir, ignore_errors=False):
shutil.rmtree(dir, ignore_errors=ignore_errors,
onerror=rmtree_errorhandler)
def rmtree_errorhandler(func, path, exc_info):
"""On Windows, the files in .svn are read-only, so when rmtree() tries to
remove them, an exception is thrown. We catch that here, remove the
read-only attribute, and hopefully continue without problems."""
# if file type currently read only
if os.stat(path).st_mode & stat.S_IREAD:
# convert to read/write
os.chmod(path, stat.S_IWRITE)
# use the original function to repeat the operation
func(path)
return
else:
raise
def display_path(path):
"""Gives the display value for a given path, making it relative to cwd
if possible."""
path = os.path.normcase(os.path.abspath(path))
if sys.version_info[0] == 2:
path = path.decode(sys.getfilesystemencoding(), 'replace')
path = path.encode(sys.getdefaultencoding(), 'replace')
if path.startswith(os.getcwd() + os.path.sep):
path = '.' + path[len(os.getcwd()):]
return path
def backup_dir(dir, ext='.bak'):
"""Figure out the name of a directory to back up the given dir to
(adding .bak, .bak2, etc)"""
n = 1
extension = ext
while os.path.exists(dir + extension):
n += 1
extension = ext + str(n)
return dir + extension
def ask_path_exists(message, options):
for action in os.environ.get('PIP_EXISTS_ACTION', '').split():
if action in options:
return action
return ask(message, options)
def ask(message, options):
"""Ask the message interactively, with the given possible responses"""
while 1:
if os.environ.get('PIP_NO_INPUT'):
raise Exception(
'No input was expected ($PIP_NO_INPUT set); question: %s' %
message
)
response = input(message)
response = response.strip().lower()
if response not in options:
print(
'Your response (%r) was not one of the expected responses: '
'%s' % (response, ', '.join(options))
)
else:
return response
def format_size(bytes):
if bytes > 1000 * 1000:
return '%.1fMB' % (bytes / 1000.0 / 1000)
elif bytes > 10 * 1000:
return '%ikB' % (bytes / 1000)
elif bytes > 1000:
return '%.1fkB' % (bytes / 1000.0)
else:
return '%ibytes' % bytes
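# Added illustration (not part of pip): the thresholds are decimal (powers of 1000),
# so a few sample values map as follows.
def _format_size_examples():
    assert format_size(1200) == '1.2kB'
    assert format_size(20000) == '20kB'
    assert format_size(2500000) == '2.5MB'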
def is_installable_dir(path):
"""Return True if `path` is a directory containing a setup.py file."""
if not os.path.isdir(path):
return False
setup_py = os.path.join(path, 'setup.py')
if os.path.isfile(setup_py):
return True
return False
def is_svn_page(html):
"""
Returns true if the page appears to be the index page of an svn repository
"""
return (re.search(r'<title>[^<]*Revision \d+:', html) and
re.search(r'Powered by (?:<a[^>]*?>)?Subversion', html, re.I))
def file_contents(filename):
with open(filename, 'rb') as fp:
return fp.read().decode('utf-8')
def read_chunks(file, size=io.DEFAULT_BUFFER_SIZE):
"""Yield pieces of data from a file-like object until EOF."""
while True:
chunk = file.read(size)
if not chunk:
break
yield chunk
def split_leading_dir(path):
path = path.lstrip('/').lstrip('\\')
if '/' in path and (('\\' in path and path.find('/') < path.find('\\')) or
'\\' not in path):
return path.split('/', 1)
elif '\\' in path:
return path.split('\\', 1)
else:
return path, ''
def has_leading_dir(paths):
"""Returns true if all the paths have the same leading path name
(i.e., everything is in one subdirectory in an archive)"""
common_prefix = None
for path in paths:
prefix, rest = split_leading_dir(path)
if not prefix:
return False
elif common_prefix is None:
common_prefix = prefix
elif prefix != common_prefix:
return False
return True
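# Added example (not part of pip): has_leading_dir() is what lets the unpack helpers
# strip a single top-level directory from an archive listing; the paths are sample data.
def _has_leading_dir_examples():
    assert has_leading_dir(['pkg-1.0/setup.py', 'pkg-1.0/pkg/__init__.py'])
    assert not has_leading_dir(['setup.py', 'pkg-1.0/pkg/__init__.py'])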
def normalize_path(path, resolve_symlinks=True):
"""
Convert a path to its canonical, case-normalized, absolute version.
"""
path = expanduser(path)
if resolve_symlinks:
path = os.path.realpath(path)
else:
path = os.path.abspath(path)
return os.path.normcase(path)
def splitext(path):
"""Like os.path.splitext, but take off .tar too"""
base, ext = posixpath.splitext(path)
if base.lower().endswith('.tar'):
ext = base[-4:] + ext
base = base[:-4]
return base, ext
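# Added example (not part of pip): unlike os.path.splitext, the compound '.tar.*'
# extension is kept together.
def _splitext_examples():
    assert splitext('archive.tar.gz') == ('archive', '.tar.gz')
    assert splitext('archive.zip') == ('archive', '.zip')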
def renames(old, new):
"""Like os.renames(), but handles renaming across devices."""
# Implementation borrowed from os.renames().
head, tail = os.path.split(new)
if head and tail and not os.path.exists(head):
os.makedirs(head)
shutil.move(old, new)
head, tail = os.path.split(old)
if head and tail:
try:
os.removedirs(head)
except OSError:
pass
def is_local(path):
"""
Return True if path is within sys.prefix, if we're running in a virtualenv.
If we're not in a virtualenv, all paths are considered "local."
"""
if not running_under_virtualenv():
return True
return normalize_path(path).startswith(normalize_path(sys.prefix))
def dist_is_local(dist):
"""
Return True if given Distribution object is installed locally
(i.e. within current virtualenv).
Always True if we're not in a virtualenv.
"""
return is_local(dist_location(dist))
def dist_in_usersite(dist):
"""
Return True if given Distribution is installed in user site.
"""
norm_path = normalize_path(dist_location(dist))
return norm_path.startswith(normalize_path(user_site))
def dist_in_site_packages(dist):
"""
Return True if given Distribution is installed in
distutils.sysconfig.get_python_lib().
"""
return normalize_path(
dist_location(dist)
).startswith(normalize_path(site_packages))
def dist_is_editable(dist):
"""Is distribution an editable install?"""
for path_item in sys.path:
egg_link = os.path.join(path_item, dist.project_name + '.egg-link')
if os.path.isfile(egg_link):
return True
return False
def get_installed_distributions(local_only=True,
skip=stdlib_pkgs,
include_editables=True,
editables_only=False,
user_only=False):
"""
Return a list of installed Distribution objects.
If ``local_only`` is True (default), only return installations
local to the current virtualenv, if in a virtualenv.
``skip`` argument is an iterable of lower-case project names to
ignore; defaults to stdlib_pkgs
If ``include_editables`` is False, don't report editables.
If ``editables_only`` is True, only report editables.
If ``user_only`` is True, only report installations in the user
site directory.
"""
if local_only:
local_test = dist_is_local
else:
def local_test(d):
return True
if include_editables:
def editable_test(d):
return True
else:
def editable_test(d):
return not dist_is_editable(d)
if editables_only:
def editables_only_test(d):
return dist_is_editable(d)
else:
def editables_only_test(d):
return True
if user_only:
user_test = dist_in_usersite
else:
def user_test(d):
return True
return [d for d in pkg_resources.working_set
if local_test(d) and
d.key not in skip and
editable_test(d) and
editables_only_test(d) and
user_test(d)
]
def egg_link_path(dist):
"""
Return the path for the .egg-link file if it exists, otherwise, None.
There are 3 scenarios:
1) not in a virtualenv
try to find in site.USER_SITE, then site_packages
2) in a no-global virtualenv
try to find in site_packages
3) in a yes-global virtualenv
try to find in site_packages, then site.USER_SITE
(don't look in global location)
For #1 and #3, there could be odd cases, where there's an egg-link in 2
locations.
This method will just return the first one found.
"""
sites = []
if running_under_virtualenv():
if virtualenv_no_global():
sites.append(site_packages)
else:
sites.append(site_packages)
if user_site:
sites.append(user_site)
else:
if user_site:
sites.append(user_site)
sites.append(site_packages)
for site in sites:
egglink = os.path.join(site, dist.project_name) + '.egg-link'
if os.path.isfile(egglink):
return egglink
def dist_location(dist):
"""
Get the site-packages location of this distribution. Generally
this is dist.location, except in the case of develop-installed
packages, where dist.location is the source code location, and we
want to know where the egg-link file is.
"""
egg_link = egg_link_path(dist)
if egg_link:
return egg_link
return dist.location
def get_terminal_size():
"""Returns a tuple (x, y) representing the width(x) and the height(x)
in characters of the terminal window."""
def ioctl_GWINSZ(fd):
try:
import fcntl
import termios
import struct
cr = struct.unpack(
'hh',
fcntl.ioctl(fd, termios.TIOCGWINSZ, '1234')
)
except:
return None
if cr == (0, 0):
return None
return cr
cr = ioctl_GWINSZ(0) or ioctl_GWINSZ(1) or ioctl_GWINSZ(2)
if not cr:
try:
fd = os.open(os.ctermid(), os.O_RDONLY)
cr = ioctl_GWINSZ(fd)
os.close(fd)
except:
pass
if not cr:
cr = (os.environ.get('LINES', 25), os.environ.get('COLUMNS', 80))
return int(cr[1]), int(cr[0])
def current_umask():
"""Get the current umask which involves having to set it temporarily."""
mask = os.umask(0)
os.umask(mask)
return mask
def unzip_file(filename, location, flatten=True):
"""
Unzip the file (with path `filename`) to the destination `location`. All
files are written based on system defaults and umask (i.e. permissions are
not preserved), except that regular file members with any execute
permissions (user, group, or world) have "chmod +x" applied after being
written. Note that for windows, any execute changes using os.chmod are
no-ops per the python docs.
"""
ensure_dir(location)
zipfp = open(filename, 'rb')
try:
zip = zipfile.ZipFile(zipfp, allowZip64=True)
leading = has_leading_dir(zip.namelist()) and flatten
for info in zip.infolist():
name = info.filename
data = zip.read(name)
fn = name
if leading:
fn = split_leading_dir(name)[1]
fn = os.path.join(location, fn)
dir = os.path.dirname(fn)
if fn.endswith('/') or fn.endswith('\\'):
# A directory
ensure_dir(fn)
else:
ensure_dir(dir)
fp = open(fn, 'wb')
try:
fp.write(data)
finally:
fp.close()
mode = info.external_attr >> 16
# if mode and regular file and any execute permissions for
# user/group/world?
if mode and stat.S_ISREG(mode) and mode & 0o111:
# make dest file have execute for user/group/world
# (chmod +x) no-op on windows per python docs
os.chmod(fn, (0o777 - current_umask() | 0o111))
finally:
zipfp.close()
def untar_file(filename, location):
"""
Untar the file (with path `filename`) to the destination `location`.
All files are written based on system defaults and umask (i.e. permissions
are not preserved), except that regular file members with any execute
permissions (user, group, or world) have "chmod +x" applied after being
written. Note that for windows, any execute changes using os.chmod are
no-ops per the python docs.
"""
ensure_dir(location)
if filename.lower().endswith('.gz') or filename.lower().endswith('.tgz'):
mode = 'r:gz'
elif filename.lower().endswith(BZ2_EXTENSIONS):
mode = 'r:bz2'
elif filename.lower().endswith(XZ_EXTENSIONS):
mode = 'r:xz'
elif filename.lower().endswith('.tar'):
mode = 'r'
else:
logger.warning(
'Cannot determine compression type for file %s', filename,
)
mode = 'r:*'
tar = tarfile.open(filename, mode)
try:
# note: python<=2.5 doesn't seem to know about pax headers, filter them
leading = has_leading_dir([
member.name for member in tar.getmembers()
if member.name != 'pax_global_header'
])
for member in tar.getmembers():
fn = member.name
if fn == 'pax_global_header':
continue
if leading:
fn = split_leading_dir(fn)[1]
path = os.path.join(location, fn)
if member.isdir():
ensure_dir(path)
elif member.issym():
try:
tar._extract_member(member, path)
except Exception as exc:
# Some corrupt tar files seem to produce this
# (specifically bad symlinks)
logger.warning(
'In the tar file %s the member %s is invalid: %s',
filename, member.name, exc,
)
continue
else:
try:
fp = tar.extractfile(member)
except (KeyError, AttributeError) as exc:
# Some corrupt tar files seem to produce this
# (specifically bad symlinks)
logger.warning(
'In the tar file %s the member %s is invalid: %s',
filename, member.name, exc,
)
continue
ensure_dir(os.path.dirname(path))
with open(path, 'wb') as destfp:
shutil.copyfileobj(fp, destfp)
fp.close()
# Update the timestamp (useful for cython compiled files)
tar.utime(member, path)
# member have any execute permissions for user/group/world?
if member.mode & 0o111:
# make dest file have execute for user/group/world
# no-op on windows per python docs
os.chmod(path, (0o777 - current_umask() | 0o111))
finally:
tar.close()
def unpack_file(filename, location, content_type, link):
filename = os.path.realpath(filename)
if (content_type == 'application/zip' or
filename.lower().endswith(ZIP_EXTENSIONS) or
zipfile.is_zipfile(filename)):
unzip_file(
filename,
location,
flatten=not filename.endswith('.whl')
)
elif (content_type == 'application/x-gzip' or
tarfile.is_tarfile(filename) or
filename.lower().endswith(
TAR_EXTENSIONS + BZ2_EXTENSIONS + XZ_EXTENSIONS)):
untar_file(filename, location)
elif (content_type and content_type.startswith('text/html') and
is_svn_page(file_contents(filename))):
# We don't really care about this
from pip.vcs.subversion import Subversion
Subversion('svn+' + link.url).unpack(location)
else:
# FIXME: handle?
# FIXME: magic signatures?
logger.critical(
'Cannot unpack file %s (downloaded from %s, content-type: %s); '
'cannot detect archive format',
filename, location, content_type,
)
raise InstallationError(
'Cannot determine archive format of %s' % location
)
def call_subprocess(cmd, show_stdout=True, cwd=None,
on_returncode='raise',
command_desc=None,
extra_environ=None, spinner=None):
# This function's handling of subprocess output is confusing and I
# previously broke it terribly, so as penance I will write a long comment
# explaining things.
#
# The obvious thing that affects output is the show_stdout=
# kwarg. show_stdout=True means, let the subprocess write directly to our
# stdout. Even though it is nominally the default, it is almost never used
# inside pip (and should not be used in new code without a very good
# reason); as of 2016-02-22 it is only used in a few places inside the VCS
# wrapper code. Ideally we should get rid of it entirely, because it
# creates a lot of complexity here for a rarely used feature.
#
# Most places in pip set show_stdout=False. What this means is:
# - We connect the child stdout to a pipe, which we read.
# - By default, we hide the output but show a spinner -- unless the
# subprocess exits with an error, in which case we show the output.
# - If the --verbose option was passed (= loglevel is DEBUG), then we show
# the output unconditionally. (But in this case we don't want to show
# the output a second time if it turns out that there was an error.)
#
# stderr is always merged with stdout (even if show_stdout=True).
if show_stdout:
stdout = None
else:
stdout = subprocess.PIPE
if command_desc is None:
cmd_parts = []
for part in cmd:
if ' ' in part or '\n' in part or '"' in part or "'" in part:
part = '"%s"' % part.replace('"', '\\"')
cmd_parts.append(part)
command_desc = ' '.join(cmd_parts)
logger.debug("Running command %s", command_desc)
env = os.environ.copy()
if extra_environ:
env.update(extra_environ)
try:
proc = subprocess.Popen(
cmd, stderr=subprocess.STDOUT, stdin=None, stdout=stdout,
cwd=cwd, env=env)
except Exception as exc:
logger.critical(
"Error %s while executing command %s", exc, command_desc,
)
raise
if stdout is not None:
all_output = []
while True:
line = console_to_str(proc.stdout.readline())
if not line:
break
line = line.rstrip()
all_output.append(line + '\n')
if logger.getEffectiveLevel() <= std_logging.DEBUG:
# Show the line immediately
logger.debug(line)
else:
# Update the spinner
if spinner is not None:
spinner.spin()
proc.wait()
if spinner is not None:
if proc.returncode:
spinner.finish("error")
else:
spinner.finish("done")
if proc.returncode:
if on_returncode == 'raise':
if (logger.getEffectiveLevel() > std_logging.DEBUG and
not show_stdout):
logger.info(
'Complete output from command %s:', command_desc,
)
logger.info(
''.join(all_output) +
'\n----------------------------------------'
)
raise InstallationError(
'Command "%s" failed with error code %s in %s'
% (command_desc, proc.returncode, cwd))
elif on_returncode == 'warn':
logger.warning(
'Command "%s" had error code %s in %s',
command_desc, proc.returncode, cwd,
)
elif on_returncode == 'ignore':
pass
else:
raise ValueError('Invalid value: on_returncode=%s' %
repr(on_returncode))
if not show_stdout:
return ''.join(all_output)
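# Hedged example (added; not part of pip itself): a typical internal call hides the
# output, shows a spinner, and raises InstallationError on a non-zero exit code.
# The git invocation is only illustrative and assumes git is on PATH.
def _call_subprocess_example():
    return call_subprocess(
        ['git', '--version'],
        show_stdout=False,        # capture output instead of streaming it
        on_returncode='raise',    # fail loudly, mirroring the default
    )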
def read_text_file(filename):
"""Return the contents of *filename*.
Try to decode the file contents with utf-8, the preferred system encoding
(e.g., cp1252 on some Windows machines), and latin1, in that order.
Decoding a byte string with latin1 will never raise an error. In the worst
case, the returned string will contain some garbage characters.
"""
with open(filename, 'rb') as fp:
data = fp.read()
encodings = ['utf-8', locale.getpreferredencoding(False), 'latin1']
for enc in encodings:
try:
data = data.decode(enc)
except UnicodeDecodeError:
continue
break
assert type(data) != bytes # Latin1 should have worked.
return data
def _make_build_dir(build_dir):
os.makedirs(build_dir)
write_delete_marker_file(build_dir)
class FakeFile(object):
"""Wrap a list of lines in an object with readline() to make
ConfigParser happy."""
def __init__(self, lines):
self._gen = (l for l in lines)
def readline(self):
try:
try:
return next(self._gen)
except NameError:
return self._gen.next()
except StopIteration:
return ''
def __iter__(self):
return self._gen
class StreamWrapper(StringIO):
@classmethod
def from_stream(cls, orig_stream):
cls.orig_stream = orig_stream
return cls()
# compileall.compile_dir() needs stdout.encoding to print to stdout
@property
def encoding(self):
return self.orig_stream.encoding
@contextlib.contextmanager
def captured_output(stream_name):
"""Return a context manager used by captured_stdout/stdin/stderr
that temporarily replaces the sys stream *stream_name* with a StringIO.
Taken from Lib/support/__init__.py in the CPython repo.
"""
orig_stdout = getattr(sys, stream_name)
setattr(sys, stream_name, StreamWrapper.from_stream(orig_stdout))
try:
yield getattr(sys, stream_name)
finally:
setattr(sys, stream_name, orig_stdout)
def captured_stdout():
"""Capture the output of sys.stdout:
with captured_stdout() as stdout:
print('hello')
self.assertEqual(stdout.getvalue(), 'hello\n')
Taken from Lib/support/__init__.py in the CPython repo.
"""
return captured_output('stdout')
class cached_property(object):
"""A property that is only computed once per instance and then replaces
itself with an ordinary attribute. Deleting the attribute resets the
property.
Source: https://github.com/bottlepy/bottle/blob/0.11.5/bottle.py#L175
"""
def __init__(self, func):
self.__doc__ = getattr(func, '__doc__')
self.func = func
def __get__(self, obj, cls):
if obj is None:
# We're being accessed from the class itself, not from an object
return self
value = obj.__dict__[self.func.__name__] = self.func(obj)
return value
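# Illustrative usage sketch (added; not part of pip): the first attribute access runs
# the wrapped function and stores the result on the instance, so later accesses skip
# the computation entirely. The demo class below is a made-up example.
class _CachedPropertyDemo(object):
    @cached_property
    def answer(self):
        print('computing...')   # printed only on the first access
        return 42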
def get_installed_version(dist_name, lookup_dirs=None):
"""Get the installed version of dist_name avoiding pkg_resources cache"""
# Create a requirement that we'll look for inside of setuptools.
req = pkg_resources.Requirement.parse(dist_name)
# We want to avoid having this cached, so we need to construct a new
# working set each time.
if lookup_dirs is None:
working_set = pkg_resources.WorkingSet()
else:
working_set = pkg_resources.WorkingSet(lookup_dirs)
# Get the installed distribution from our working set
dist = working_set.find(req)
# Check to see if we got an installed distribution or not; if we did,
# we want to return its version.
return dist.version if dist else None
def consume(iterator):
"""Consume an iterable at C speed."""
deque(iterator, maxlen=0)
| mit |
djhenderson/ctypesgen | test/testsuite.py | 12 | 9617 | #!/usr/bin/env python
# -*- coding: ascii -*-
# vim:ts=4:sw=4:softtabstop=4:smarttab:expandtab
#
"""Simple test suite using unittest.
By clach04 (Chris Clark).
Calling:
python test/testsuite.py
or
cd test
./testsuite.py
Could use any unitest compatible test runner (nose, etc.)
Aims to test for regressions. Where possible use stdlib to
avoid the need to compile C code.
Known to run clean with:
* 32bit Linux (python 2.5.2, 2.6)
* 32bit Windows XP (python 2.4, 2.5, 2.6.1)
"""
import sys
import os
import ctypes
import math
import unittest
import logging
test_directory = os.path.abspath(os.path.dirname(__file__))
sys.path.append(test_directory)
sys.path.append(os.path.join(test_directory, '..'))
import ctypesgentest # TODO consider moving test() from ctypesgentest into this module
class StdlibTest(unittest.TestCase):
def setUp(self):
"""NOTE this is called once for each test* method
(it is not called once per class).
FIXME This is slightly inefficient as it is called *way* more times than it needs to be.
"""
header_str = '#include <stdlib.h>\n'
if sys.platform == "win32":
# pick something from %windir%\system32\msvc*.dll that includes stdlib;
# ctypes resolves the plain name "msvcrt" to msvcrt.dll on Windows
libraries = ["msvcrt"]
elif sys.platform.startswith("linux"):
libraries = ["libc.so.6"]
else:
libraries = ["libc"]
self.module, output = ctypesgentest.test(header_str, libraries=libraries, all_headers=True)
def tearDown(self):
del self.module
ctypesgentest.cleanup()
def test_getenv_returns_string(self):
"""Issue 8 - Regression for crash with 64 bit and bad strings on 32 bit.
See http://code.google.com/p/ctypesgen/issues/detail?id=8
Test that we get a valid (non-NULL, non-empty) string back
"""
module = self.module
if sys.platform == "win32":
# Check a variable that is already set
env_var_name = 'USERNAME' # this is always set (as is windir, ProgramFiles, USERPROFILE, etc.)
expect_result = os.environ[env_var_name]
self.assert_(expect_result, 'this should not be None or empty')
# reason for using an existing OS variable is that unless the
# MSVCRT dll imported is the exact same one that Python was
# built with you can't share structures, see
# http://msdn.microsoft.com/en-us/library/ms235460.aspx
# "Potential Errors Passing CRT Objects Across DLL Boundaries"
else:
env_var_name = 'HELLO'
os.environ[env_var_name] = 'WORLD' # This doesn't work under win32
expect_result = 'WORLD'
result = module.getenv(env_var_name)
self.failUnlessEqual(expect_result, result)
def test_getenv_returns_null(self):
"""Related to issue 8. Test getenv of unset variable.
"""
module = self.module
env_var_name = 'NOT SET'
expect_result = None
try:
# ensure variable is not set, ignoring not set errors
del os.environ[env_var_name]
except KeyError:
pass
result = module.getenv(env_var_name)
self.failUnlessEqual(expect_result, result)
class StdBoolTest(unittest.TestCase):
"Test correct parsing and generation of bool type"
def setUp(self):
"""NOTE this is called once for each test* method
(it is not called once per class).
FIXME This is slightly inefficient as it is called *way* more times than it needs to be.
"""
header_str = '''
#include <stdbool.h>
struct foo
{
bool is_bar;
int a;
};
'''
self.module, _ = ctypesgentest.test(header_str)#, all_headers=True)
def tearDown(self):
del self.module
ctypesgentest.cleanup()
def test_stdbool_type(self):
"""Test is bool is correctly parsed"""
module = self.module
struct_foo = module.struct_foo
self.failUnlessEqual(struct_foo._fields_, [("is_bar", ctypes.c_bool), ("a", ctypes.c_int)])
class SimpleMacrosTest(unittest.TestCase):
"""Based on simple_macros.py
"""
def setUp(self):
"""NOTE this is called once for each test* method
(it is not called once per class).
FIXME This is slightly inefficient as it is called *way* more times than it needs to be.
"""
header_str = '''
#define A 1
#define B(x,y) x+y
#define C(a,b,c) a?b:c
#define funny(x) "funny" #x
#define multipler_macro(x,y) x*y
#define minus_macro(x,y) x-y
#define divide_macro(x,y) x/y
#define mod_macro(x,y) x%y
'''
libraries = None
self.module, output = ctypesgentest.test(header_str)
def tearDown(self):
del self.module
ctypesgentest.cleanup()
def test_macro_constant_int(self):
"""Tests from simple_macros.py
"""
module = self.module
self.failUnlessEqual(module.A, 1)
def test_macro_addition(self):
"""Tests from simple_macros.py
"""
module = self.module
self.failUnlessEqual(module.B(2, 2), 4)
def test_macro_ternary_true(self):
"""Tests from simple_macros.py
"""
module = self.module
self.failUnlessEqual(module.C(True, 1, 2), 1)
def test_macro_ternary_false(self):
"""Tests from simple_macros.py
"""
module = self.module
self.failUnlessEqual(module.C(False, 1, 2), 2)
def test_macro_ternary_true_complex(self):
"""Test ?: with true, using values that can not be confused between True and 1
"""
module = self.module
self.failUnlessEqual(module.C(True, 99, 100), 99)
def test_macro_ternary_false_complex(self):
"""Test ?: with false, using values that can not be confused between True and 1
"""
module = self.module
self.failUnlessEqual(module.C(False, 99, 100), 100)
def test_macro_string_compose(self):
"""Tests from simple_macros.py
"""
module = self.module
self.failUnlessEqual(module.funny("bunny"), "funnybunny")
def test_macro_math_multipler(self):
module = self.module
x, y = 2, 5
self.failUnlessEqual(module.multipler_macro(x, y), x * y)
def test_macro_math_minus(self):
module = self.module
x, y = 2, 5
self.failUnlessEqual(module.minus_macro(x, y), x - y)
def test_macro_math_divide(self):
module = self.module
x, y = 2, 5
self.failUnlessEqual(module.divide_macro(x, y), x / y)
def test_macro_math_mod(self):
module = self.module
x, y = 2, 5
self.failUnlessEqual(module.mod_macro(x, y), x % y)
class StructuresTest(unittest.TestCase):
"""Based on structures.py
"""
def setUp(self):
"""NOTE this is called once for each test* method
(it is not called once per class).
FIXME This is slightly inefficient as it is called *way* more times than it needs to be.
"""
header_str = '''
struct foo
{
int a;
int b;
int c;
};
'''
libraries = None
self.module, output = ctypesgentest.test(header_str)
def tearDown(self):
del self.module
ctypesgentest.cleanup()
def test_structures(self):
"""Tests from structures.py
"""
module = self.module
struct_foo = module.struct_foo
self.failUnlessEqual(struct_foo._fields_, [("a", ctypes.c_int), ("b", ctypes.c_int), ("c", ctypes.c_int)])
class MathTest(unittest.TestCase):
"""Based on math_functions.py"""
def setUp(self):
"""NOTE this is called once for each test* method
(it is not called once per class).
FIXME This is slightly inefficient as it is called *way* more times than it needs to be.
"""
header_str = '#include <math.h>\n'
if sys.platform == "win32":
# pick something from %windir%\system32\msvc*.dll that includes the C math routines;
# ctypes resolves the plain name "msvcrt" to msvcrt.dll on Windows
libraries = ["msvcrt"]
elif sys.platform.startswith("linux"):
libraries = ["libm.so.6"]
else:
libraries = ["libc"]
self.module, output = ctypesgentest.test(header_str, libraries=libraries, all_headers=True)
def tearDown(self):
del self.module
ctypesgentest.cleanup()
def test_sin(self):
"""Based on math_functions.py"""
module = self.module
self.failUnlessEqual(module.sin(2), math.sin(2))
def test_sqrt(self):
"""Based on math_functions.py"""
module = self.module
self.failUnlessEqual(module.sqrt(4), 2)
def local_test():
module.sin("foobar")
self.failUnlessRaises(ctypes.ArgumentError, local_test)
def test_bad_args_string_not_number(self):
"""Based on math_functions.py"""
module = self.module
def local_test():
module.sin("foobar")
self.failUnlessRaises(ctypes.ArgumentError, local_test)
def main(argv=None):
if argv is None:
argv = sys.argv
ctypesgentest.ctypesgencore.messages.log.setLevel(logging.CRITICAL) # do not log anything
unittest.main()
return 0
if __name__ == "__main__":
sys.exit(main())
| bsd-3-clause |
lnielsen/zenodo | zenodo/factory.py | 2 | 4550 | # -*- coding: utf-8 -*-
#
# This file is part of Zenodo.
# Copyright (C) 2015 CERN.
#
# Zenodo is free software; you can redistribute it
# and/or modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 2 of the
# License, or (at your option) any later version.
#
# Zenodo is distributed in the hope that it will be
# useful, but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Zenodo; if not, write to the
# Free Software Foundation, Inc., 59 Temple Place, Suite 330, Boston,
# MA 02111-1307, USA.
#
# In applying this license, CERN does not
# waive the privileges and immunities granted to it by virtue of its status
# as an Intergovernmental Organization or submit itself to any jurisdiction.
"""Zenodo application factories."""
from __future__ import absolute_import
import os
import sys
from invenio_base.app import create_app_factory
from invenio_base.wsgi import create_wsgi_factory, wsgi_proxyfix
from invenio_config import create_conf_loader
from invenio_files_rest.app import Flask
from statsd import StatsClient
from werkzeug.contrib.fixers import HeaderRewriterFix
from wsgi_statsd import StatsdTimingMiddleware
from zenodo.modules.cache.bccache import RedisBytecodeCache
from . import config
env_prefix = 'APP'
invenio_conf_loader = create_conf_loader(config=config, env_prefix=env_prefix)
instance_path = os.getenv(env_prefix + '_INSTANCE_PATH') or \
os.path.join(sys.prefix, 'var', 'instance')
"""Path to instance folder.
Defaults to ``<virtualenv>/var/instance/``. Can be overwritten using the
environment variable ``APP_INSTANCE_PATH``.
"""
static_folder = os.getenv(env_prefix + '_STATIC_FOLDER') or \
os.path.join(instance_path, 'static')
"""Path to static folder.
Defaults to ``<virtualenv>/var/instance/static/``. Can be overwritten
using the environment variable ``APP_STATIC_FOLDER``
"""
def conf_loader(app, **kwargs_config):
"""Zenodo conf loader."""
app.url_map.strict_slashes = False # Legacy support
app.jinja_options = dict(
app.jinja_options,
cache_size=1000,
bytecode_cache=RedisBytecodeCache(app)
)
invenio_conf_loader(app, **kwargs_config)
def create_wsgi_statsd_factory(mounts_factories):
"""Create WSGI statsd factory."""
wsgi_factory = create_wsgi_factory(mounts_factories)
def create_wsgi(app, **kwargs):
application = wsgi_factory(app, **kwargs)
# Remove X-Forwarded-For headers because Flask-Security doesn't know
# how to deal with them properly. Note REMOTE_ADDR has already been
# set correctly at this point by the ``wsgi_proxyfix`` factory.
if app.config.get('WSGI_PROXIES'):
application = HeaderRewriterFix(
application,
remove_headers=['X-Forwarded-For']
)
host = app.config.get('STATSD_HOST')
port = app.config.get('STATSD_PORT', 8125)
prefix = app.config.get('STATSD_PREFIX')
if host and port and prefix:
client = StatsClient(prefix=prefix, host=host, port=port)
return StatsdTimingMiddleware(application, client)
return application
return create_wsgi
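# Hedged configuration sketch (added; not part of Zenodo): the statsd timing
# middleware above is only attached when host, port and prefix are all configured.
# The values below are placeholders for illustration only.
_EXAMPLE_STATSD_CONFIG = dict(
    STATSD_HOST='localhost',
    STATSD_PORT=8125,
    STATSD_PREFIX='zenodo',
)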
create_celery = create_app_factory(
'zenodo',
config_loader=conf_loader,
extension_entry_points=['invenio_base.apps'],
blueprint_entry_points=['invenio_base.blueprints'],
converter_entry_points=['invenio_base.converters'],
instance_path=instance_path,
static_folder=static_folder,
)
"""Create CLI/Celery application."""
create_api = create_app_factory(
'zenodo',
config_loader=conf_loader,
extension_entry_points=['invenio_base.api_apps'],
blueprint_entry_points=['invenio_base.api_blueprints'],
converter_entry_points=['invenio_base.api_converters'],
instance_path=instance_path,
app_class=Flask,
)
"""Create Flask API application."""
create_app = create_app_factory(
'zenodo',
config_loader=conf_loader,
extension_entry_points=['invenio_base.apps'],
blueprint_entry_points=['invenio_base.blueprints'],
converter_entry_points=['invenio_base.converters'],
wsgi_factory=wsgi_proxyfix(
create_wsgi_statsd_factory({'/api': create_api})),
instance_path=instance_path,
static_folder=static_folder,
)
"""Create Flask UI application."""
| gpl-2.0 |