code | repo_name | path | language | license | size
---|---|---|---|---|---
###########################################################
#
# Copyright (c) 2014, Southpaw Technology
# All Rights Reserved
#
# PROPRIETARY INFORMATION. This software is proprietary to
# Southpaw Technology, and is not to be reproduced, transmitted,
# or disclosed in any way without written permission.
#
#
#
__all__ = ['ScrollbarWdg', 'TestScrollbarWdg']
from tactic.ui.common import BaseRefreshWdg
from pyasm.web import DivWdg
class TestScrollbarWdg(BaseRefreshWdg):
def get_display(my):
top = my.top
top.add_style("width: 600px")
top.add_style("height: 400px")
return top
class ScrollbarWdg(BaseRefreshWdg):
def get_display(my):
top = my.top
top.add_class("spt_scrollbar_top")
content = my.kwargs.get("content")
content_class = my.kwargs.get("content_class")
if not content_class:
content_class = "spt_content"
width = 8
top.add_style("width: %s" % width)
top.add_style("position: absolute")
top.add_style("top: 0px")
top.add_style("right: 0px")
top.add_color("background", "background")
top.add_style("margin: 3px 5px")
top.add_style("opacity: 0.0")
top.add_behavior( {
'type': 'load',
'cbjs_action': my.get_onload_js()
} )
top.add_behavior( {
'type': 'load',
'content_class': content_class,
'cbjs_action': '''
var parent = bvr.src_el.getParent("." + bvr.content_class);
var size = parent.getSize();
bvr.src_el.setStyle("height", size.y);
var scrollbar = parent.getElement(".spt_scrollbar_top");
parent.addEvent("mouseenter", function() {
new Fx.Tween(scrollbar, {duration: 250}).start("opacity", 1.0);
} );
parent.addEvent("mouseleave", function() {
new Fx.Tween(scrollbar, {duration: 250}).start("opacity", 0.0);
} );
parent.addEvent("keypress", function(evt) {
new Fx.Tween(scrollbar, {duration: 250}).start("opacity", 0.0);
console.log(evt);
} );
parent.addEvent("mousewheel", function(evt) {
evt.stopPropagation();
spt.scrollbar.content = parent;
if (evt.wheel == 1) {
spt.scrollbar.scroll(15)
}
else {
spt.scrollbar.scroll(-15)
}
} );
'''
} )
bar = DivWdg()
bar.add_class("spt_scrollbar")
bar.add_class("hand")
top.add(bar)
bar.add_style("width: %s" % width)
bar.add_style("height: 30px")
bar.add_style("border: solid 1px black")
bar.add_color("background", "background3")
#bar.add_border()
bar.add_style("border-radius: 5")
bar.add_style("position: absolute")
bar.add_style("top: 0px")
top.add_behavior( {
'type': 'smart_drag',
'bvr_match_class': 'spt_scrollbar',
'ignore_default_motion' : True,
"cbjs_setup": 'spt.scrollbar.drag_setup( evt, bvr, mouse_411 );',
"cbjs_motion": 'spt.scrollbar.drag_motion( evt, bvr, mouse_411 );'
} )
return top
def get_onload_js(my):
return r'''
spt.scrollbar = {};
spt.scrollbar.mouse_start_y = null;
spt.scrollbar.el_start_y = null;
spt.scrollbar.top = null;
spt.scrollbar.content = null;
spt.scrollbar.drag_setup = function(evt, bvr, mouse_411) {
spt.scrollbar.mouse_start_y = mouse_411.curr_y;
var src_el = spt.behavior.get_bvr_src( bvr );
var pos_y = parseInt(src_el.getStyle("top").replace("px", ""));
spt.scrollbar.el_start_y = pos_y;
spt.scrollbar.content = $("spt_SCROLL");
spt.scrollbar.top = src_el.getParent(".spt_scrollbar_top")
}
spt.scrollbar.drag_motion = function(evt, bvr, mouse_411) {
var src_el = spt.behavior.get_bvr_src( bvr );
var dy = mouse_411.curr_y - spt.scrollbar.mouse_start_y;
var pos_y = spt.scrollbar.el_start_y + dy;
if (pos_y < 0) {
return;
}
var content = spt.scrollbar.content;
var content_size = spt.scrollbar.content.getSize();
var top_size = spt.scrollbar.top.getSize();
var bar_size = src_el.getSize();
if (pos_y > top_size.y - bar_size.y - 5) {
return;
}
bvr.src_el.setStyle("top", pos_y);
//var content = bvr.src_el.getParent(".spt_content");
content.setStyle("margin-top", -dy);
}
spt.scrollbar.scroll = function(dy) {
spt.scrollbar.content = $("spt_SCROLL");
var content = spt.scrollbar.content;
var pos_y = parseInt(content.getStyle("margin-top").replace("px", ""));
content.setStyle("margin-top", pos_y + dy);
}
'''
| CeltonMcGrath/TACTIC | src/tactic/ui/widget/scrollbar_wdg.py | Python | epl-1.0 | 4,809 |
#!/usr/bin/env python
from runtest import TestBase
class TestCase(TestBase):
def __init__(self):
TestBase.__init__(self, 'abc', """
# DURATION    TID     FUNCTION
  62.202 us [28141] | __cxa_atexit();
            [28141] | main() {
            [28141] |   a() {
            [28141] |     b() {
            [28141] |       c() {
   0.753 us [28141] |         getpid();
   1.430 us [28141] |       } /* c */
   1.915 us [28141] |     } /* b */
   2.405 us [28141] |   } /* a */
   3.005 us [28141] | } /* main */
""")
| namhyung/uftrace | tests/t001_basic.py | Python | gpl-2.0 | 530 |
# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4 -*-
#
# Copyright 2002 Ben Escoto <ben@emerose.org>
# Copyright 2007 Kenneth Loafman <kenneth@loafman.com>
#
# This file is part of duplicity.
#
# Duplicity is free software; you can redistribute it and/or modify it
# under the terms of the GNU General Public License as published by the
# Free Software Foundation; either version 2 of the License, or (at your
# option) any later version.
#
# Duplicity is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with duplicity; if not, write to the Free Software Foundation,
# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
import helper
import sys, os, unittest
from duplicity import tempdir
helper.setup()
class TempDirTest(unittest.TestCase):
def test_all(self):
td = tempdir.default()
self.assert_(td.mktemp() != td.mktemp())
dir = td.mktemp()
os.mkdir(dir)
os.rmdir(dir)
fd, fname = td.mkstemp()
os.close(fd)
os.unlink(fname)
td.forget(fname)
fo, fname = td.mkstemp_file()
fo.close() # close the file object, but don't forget() the name; removal is left to td.cleanup()
td.cleanup()
if __name__ == "__main__":
unittest.main()
| AZed/duplicity | testing/tests/tempdirtest.py | Python | gpl-2.0 | 1,458 |
# Authors:
# Petr Vobornik <pvoborni@redhat.com>
#
# Copyright (C) 2013 Red Hat
# see file 'COPYING' for use and warranty information
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
Range tests
"""
import ipatests.test_webui.test_trust as trust_mod
from ipatests.test_webui.ui_driver import screenshot
from ipatests.test_webui.task_range import range_tasks
ENTITY = 'idrange'
PKEY = 'itest-range'
class test_range(range_tasks):
@screenshot
def test_crud(self):
"""
Basic CRUD: range
"""
self.init_app()
self.get_shifts()
self.basic_crud(ENTITY, self.get_data(PKEY))
@screenshot
def test_types(self):
"""
Test range types
Only 'local' and 'ipa-ad-trust' types are tested since range validation
makes the other types quite hard to test:
- 'ipa-ad-trust-posix' can be tested only with subdomains.
- 'ipa-ad-winsync' and 'ipa-ipa-trust' are not supported yet:
https://fedorahosted.org/freeipa/ticket/4323
"""
self.init_app()
self.get_shifts()
pkey_local = 'itest-local'
pkey_ad = 'itest-ad'
pkey_posix = 'itest-ad-posix'
pkey_winsync = 'itest-ad-winsync'
pkey_trust = 'itest-ipa-trust'
column = 'iparangetype'
add = self.get_add_data(pkey_local)
data = self.get_data(pkey_local, add_data=add)
self.add_record(ENTITY, data)
self.assert_record_value('local domain range', pkey_local, column)
if self.has_trusts():
trust_tasks = trust_mod.trust_tasks()
trust_data = trust_tasks.get_data()
self.add_record(trust_mod.ENTITY, trust_data)
domain = self.get_domain()
self.navigate_to_entity(ENTITY)
add = self.get_add_data(pkey_ad, range_type='ipa-ad-trust', domain=domain)
data = self.get_data(pkey_ad, add_data=add)
self.add_record(ENTITY, data, navigate=False)
self.assert_record_value('Active Directory domain range', pkey_ad, column)
self.delete(trust_mod.ENTITY, [trust_data])
self.navigate_to_entity(ENTITY)
self.delete_record(pkey_ad)
self.delete_record(pkey_local)
| pspacek/freeipa | ipatests/test_webui/test_range.py | Python | gpl-3.0 | 2,865 |
import os.path
import time
from django.core.management.base import BaseCommand
from django.conf import settings
import mitxmako.middleware as middleware
from django.core.mail import send_mass_mail
import sys
import datetime
middleware.MakoMiddleware()
def chunks(l, n):
""" Yield successive n-sized chunks from l.
"""
for i in xrange(0, len(l), n):
yield l[i:i + n]
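# Illustration of chunks() with hypothetical values:
#   list(chunks([1, 2, 3, 4, 5], 2)) == [[1, 2], [3, 4], [5]]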
class Command(BaseCommand):
help = \
'''Sends an e-mail to all users in a text file.
E.g.
manage.py massemailtxt userlist.txt message logfile.txt rate
userlist.txt -- file listing recipient e-mail addresses, one per line
message -- prefix of the e-mail templates (emails/<message>_body.txt and emails/<message>_subject.txt)
logfile.txt -- where to log progress
rate -- messages per second
'''
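# A hypothetical invocation, assuming templates emails/welcome_body.txt and
# emails/welcome_subject.txt exist and a rate of 10 messages per second:
#   python manage.py massemailtxt userlist.txt welcome logfile.txt 10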
log_file = None
def hard_log(self, text):
self.log_file.write(datetime.datetime.utcnow().isoformat() + ' -- ' + text + '\n')
def handle(self, *args, **options):
(user_file, message_base, logfilename, ratestr) = args
users = [u.strip() for u in open(user_file).readlines()]
message = middleware.lookup['main'].get_template('emails/' + message_base + "_body.txt").render()
subject = middleware.lookup['main'].get_template('emails/' + message_base + "_subject.txt").render().strip()
rate = int(ratestr)
self.log_file = open(logfilename, "a+", buffering=0)
i = 0
for user_chunk in chunks(users, rate):
emails = [(subject, message, settings.DEFAULT_FROM_EMAIL, [u]) for u in user_chunk]
self.hard_log(" ".join(user_chunk))
send_mass_mail(emails, fail_silently=False)
time.sleep(1)
print datetime.datetime.utcnow().isoformat(), i
i = i + len(user_chunk)
# Emergency interrupter: create /tmp/stopemails.txt to abort the run
if os.path.exists("/tmp/stopemails.txt"):
self.log_file.close()
sys.exit(-1)
self.log_file.close()
| kalebhartje/schoolboost | common/djangoapps/student/management/commands/massemailtxt.py | Python | agpl-3.0 | 1,862 |
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2009 Tiny SPRL (<http://tiny.be>).
# Copyright (C) 2010-2014 OpenERP s.a. (<http://openerp.com>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import itertools
import logging
from functools import partial
from itertools import repeat
from lxml import etree
from lxml.builder import E
import openerp
from openerp import SUPERUSER_ID, models
from openerp import tools
import openerp.exceptions
from openerp.osv import fields, osv, expression
from openerp.tools.translate import _
from openerp.http import request
_logger = logging.getLogger(__name__)
#----------------------------------------------------------
# Basic res.groups and res.users
#----------------------------------------------------------
class res_groups(osv.osv):
_name = "res.groups"
_description = "Access Groups"
_rec_name = 'full_name'
_order = 'name'
def _get_full_name(self, cr, uid, ids, field, arg, context=None):
res = {}
for g in self.browse(cr, uid, ids, context):
if g.category_id:
res[g.id] = '%s / %s' % (g.category_id.name, g.name)
else:
res[g.id] = g.name
return res
def _search_group(self, cr, uid, obj, name, args, context=None):
operand = args[0][2]
operator = args[0][1]
lst = True
if isinstance(operand, bool):
domains = [[('name', operator, operand)], [('category_id.name', operator, operand)]]
# parenthesized: without parens Python would chain the comparison
if (operator in expression.NEGATIVE_TERM_OPERATORS) == (not operand):
return expression.AND(domains)
else:
return expression.OR(domains)
if isinstance(operand, basestring):
lst = False
operand = [operand]
where = []
for group in operand:
values = filter(bool, group.split('/'))
group_name = values.pop().strip()
category_name = values and '/'.join(values).strip() or group_name
group_domain = [('name', operator, lst and [group_name] or group_name)]
category_domain = [('category_id.name', operator, lst and [category_name] or category_name)]
if operator in expression.NEGATIVE_TERM_OPERATORS and not values:
category_domain = expression.OR([category_domain, [('category_id', '=', False)]])
if (operator in expression.NEGATIVE_TERM_OPERATORS) == (not values):
sub_where = expression.AND([group_domain, category_domain])
else:
sub_where = expression.OR([group_domain, category_domain])
if operator in expression.NEGATIVE_TERM_OPERATORS:
where = expression.AND([where, sub_where])
else:
where = expression.OR([where, sub_where])
return where
_columns = {
'name': fields.char('Name', required=True, translate=True),
'users': fields.many2many('res.users', 'res_groups_users_rel', 'gid', 'uid', 'Users'),
'model_access': fields.one2many('ir.model.access', 'group_id', 'Access Controls'),
'rule_groups': fields.many2many('ir.rule', 'rule_group_rel',
'group_id', 'rule_group_id', 'Rules', domain=[('global', '=', False)]),
'menu_access': fields.many2many('ir.ui.menu', 'ir_ui_menu_group_rel', 'gid', 'menu_id', 'Access Menu'),
'view_access': fields.many2many('ir.ui.view', 'ir_ui_view_group_rel', 'group_id', 'view_id', 'Views'),
'comment' : fields.text('Comment', size=250, translate=True),
'category_id': fields.many2one('ir.module.category', 'Application', select=True),
'full_name': fields.function(_get_full_name, type='char', string='Group Name', fnct_search=_search_group),
}
_sql_constraints = [
('name_uniq', 'unique (category_id, name)', 'The name of the group must be unique within an application!')
]
def search(self, cr, uid, args, offset=0, limit=None, order=None, context=None, count=False):
# add explicit ordering if search is sorted on full_name
if order and order.startswith('full_name'):
ids = super(res_groups, self).search(cr, uid, args, context=context)
gs = self.browse(cr, uid, ids, context)
gs.sort(key=lambda g: g.full_name, reverse=order.endswith('DESC'))
gs = gs[offset:offset+limit] if limit else gs[offset:]
return map(int, gs)
return super(res_groups, self).search(cr, uid, args, offset, limit, order, context, count)
def copy(self, cr, uid, id, default=None, context=None):
group_name = self.read(cr, uid, [id], ['name'])[0]['name']
default = dict(default or {})
default.update({'name': _('%s (copy)') % group_name})
return super(res_groups, self).copy(cr, uid, id, default, context)
def write(self, cr, uid, ids, vals, context=None):
if 'name' in vals:
if vals['name'].startswith('-'):
raise osv.except_osv(_('Error'),
_('The name of the group can not start with "-"'))
res = super(res_groups, self).write(cr, uid, ids, vals, context=context)
self.pool['ir.model.access'].call_cache_clearing_methods(cr)
self.pool['res.users'].has_group.clear_cache(self.pool['res.users'])
return res
class res_users(osv.osv):
""" User class. A res.users record models an OpenERP user and is different
from an employee.
res.users class now inherits from res.partner. The partner model is
used to store the data related to the partner: lang, name, address,
avatar, ... The user model is now dedicated to technical data.
"""
__admin_ids = {}
_uid_cache = {}
_inherits = {
'res.partner': 'partner_id',
}
_name = "res.users"
_description = 'Users'
def _set_new_password(self, cr, uid, id, name, value, args, context=None):
if value is False:
# Do not update the password if no value is provided, ignore silently.
# For example web client submits False values for all empty fields.
return
if uid == id:
# To change their own password users must use the client-specific change password wizard,
# so that the new password is immediately used for further RPC requests, otherwise the user
# will face unexpected 'Access Denied' exceptions.
raise osv.except_osv(_('Operation Canceled'), _('Please use the change password wizard (in User Preferences or User menu) to change your own password.'))
self.write(cr, uid, id, {'password': value})
def _get_password(self, cr, uid, ids, arg, karg, context=None):
return dict.fromkeys(ids, '')
_columns = {
'id': fields.integer('ID'),
'login_date': fields.date('Latest connection', select=1, copy=False),
'partner_id': fields.many2one('res.partner', required=True,
string='Related Partner', ondelete='restrict',
help='Partner-related data of the user', auto_join=True),
'login': fields.char('Login', size=64, required=True,
help="Used to log into the system"),
'password': fields.char('Password', size=64, invisible=True, copy=False,
help="Keep empty if you don't want the user to be able to connect on the system."),
'new_password': fields.function(_get_password, type='char', size=64,
fnct_inv=_set_new_password, string='Set Password',
help="Specify a value only when creating a user or if you're "\
"changing the user's password, otherwise leave empty. After "\
"a change of password, the user has to login again."),
'signature': fields.html('Signature'),
'active': fields.boolean('Active'),
'action_id': fields.many2one('ir.actions.actions', 'Home Action', help="If specified, this action will be opened at log on for this user, in addition to the standard menu."),
'groups_id': fields.many2many('res.groups', 'res_groups_users_rel', 'uid', 'gid', 'Groups'),
# Special behavior for this field: res.company.search() will only return the companies
# available to the current user (should be the user's companies?), when the user_preference
# context is set.
'company_id': fields.many2one('res.company', 'Company', required=True,
help='The company this user is currently working for.', context={'user_preference': True}),
'company_ids':fields.many2many('res.company','res_company_users_rel','user_id','cid','Companies'),
}
# overridden inherited fields to bypass access rights, in case you have
# access to the user but not its corresponding partner
name = openerp.fields.Char(related='partner_id.name', inherited=True)
email = openerp.fields.Char(related='partner_id.email', inherited=True)
def on_change_login(self, cr, uid, ids, login, context=None):
if login and tools.single_email_re.match(login):
return {'value': {'email': login}}
return {}
def onchange_state(self, cr, uid, ids, state_id, context=None):
partner_ids = [user.partner_id.id for user in self.browse(cr, uid, ids, context=context)]
return self.pool.get('res.partner').onchange_state(cr, uid, partner_ids, state_id, context=context)
def onchange_type(self, cr, uid, ids, is_company, context=None):
""" Wrapper on the user.partner onchange_type, because some calls to the
partner form view applied to the user may trigger the
partner.onchange_type method, but applied to the user object.
"""
partner_ids = [user.partner_id.id for user in self.browse(cr, uid, ids, context=context)]
return self.pool['res.partner'].onchange_type(cr, uid, partner_ids, is_company, context=context)
def onchange_address(self, cr, uid, ids, use_parent_address, parent_id, context=None):
""" Wrapper on the user.partner onchange_address, because some calls to the
partner form view applied to the user may trigger the
partner.onchange_type method, but applied to the user object.
"""
partner_ids = [user.partner_id.id for user in self.browse(cr, uid, ids, context=context)]
return self.pool['res.partner'].onchange_address(cr, uid, partner_ids, use_parent_address, parent_id, context=context)
def _check_company(self, cr, uid, ids, context=None):
return all(((this.company_id in this.company_ids) or not this.company_ids) for this in self.browse(cr, uid, ids, context))
_constraints = [
(_check_company, 'The chosen company is not in the allowed companies for this user', ['company_id', 'company_ids']),
]
_sql_constraints = [
('login_key', 'UNIQUE (login)', 'You can not have two users with the same login !')
]
def _get_company(self,cr, uid, context=None, uid2=False):
if not uid2:
uid2 = uid
# Use read() to compute default company, and pass load=_classic_write to
# avoid useless name_get() calls. This will avoid prefetching fields
# while computing default values for new db columns, as the
# db backend may not be fully initialized yet.
user_data = self.pool['res.users'].read(cr, uid, uid2, ['company_id'],
context=context, load='_classic_write')
comp_id = user_data['company_id']
return comp_id or False
def _get_companies(self, cr, uid, context=None):
c = self._get_company(cr, uid, context)
if c:
return [c]
return False
def _get_group(self,cr, uid, context=None):
dataobj = self.pool.get('ir.model.data')
result = []
try:
dummy,group_id = dataobj.get_object_reference(cr, SUPERUSER_ID, 'base', 'group_user')
result.append(group_id)
dummy,group_id = dataobj.get_object_reference(cr, SUPERUSER_ID, 'base', 'group_partner_manager')
result.append(group_id)
except ValueError:
# If these groups do not exist anymore
pass
return result
def _get_default_image(self, cr, uid, context=None):
return self.pool['res.partner']._get_default_image(cr, uid, False, colorize=True, context=context)
_defaults = {
'password': '',
'active': True,
'customer': False,
'company_id': _get_company,
'company_ids': _get_companies,
'groups_id': _get_group,
'image': _get_default_image,
}
# User can write on a few of his own fields (but not his groups for example)
SELF_WRITEABLE_FIELDS = ['password', 'signature', 'action_id', 'company_id', 'email', 'name', 'image', 'image_medium', 'image_small', 'lang', 'tz']
# User can read a few of his own fields
SELF_READABLE_FIELDS = ['signature', 'company_id', 'login', 'email', 'name', 'image', 'image_medium', 'image_small', 'lang', 'tz', 'tz_offset', 'groups_id', 'partner_id', '__last_update']
def read(self, cr, uid, ids, fields=None, context=None, load='_classic_read'):
def override_password(o):
if 'password' in o and ('id' not in o or o['id'] != uid):
o['password'] = '********'
return o
if fields and (ids == [uid] or ids == uid):
for key in fields:
if not (key in self.SELF_READABLE_FIELDS or key.startswith('context_')):
break
else:
# safe fields only, so we read as super-user to bypass access rights
uid = SUPERUSER_ID
result = super(res_users, self).read(cr, uid, ids, fields=fields, context=context, load=load)
canwrite = self.pool['ir.model.access'].check(cr, uid, 'res.users', 'write', False)
if not canwrite:
if isinstance(ids, (int, long)):
result = override_password(result)
else:
result = map(override_password, result)
return result
def create(self, cr, uid, vals, context=None):
user_id = super(res_users, self).create(cr, uid, vals, context=context)
user = self.browse(cr, uid, user_id, context=context)
if user.partner_id.company_id:
user.partner_id.write({'company_id': user.company_id.id})
return user_id
def write(self, cr, uid, ids, values, context=None):
if not hasattr(ids, '__iter__'):
ids = [ids]
if ids == [uid]:
for key in values.keys():
if not (key in self.SELF_WRITEABLE_FIELDS or key.startswith('context_')):
break
else:
if 'company_id' in values:
user = self.browse(cr, SUPERUSER_ID, uid, context=context)
if not (values['company_id'] in user.company_ids.ids):
del values['company_id']
uid = 1 # safe fields only, so we write as super-user to bypass access rights
res = super(res_users, self).write(cr, uid, ids, values, context=context)
if 'company_id' in values:
for user in self.browse(cr, uid, ids, context=context):
# if partner is global we keep it that way
if user.partner_id.company_id and user.partner_id.company_id.id != values['company_id']:
user.partner_id.write({'company_id': user.company_id.id})
# clear caches linked to the users
self.pool['ir.model.access'].call_cache_clearing_methods(cr)
clear = partial(self.pool['ir.rule'].clear_cache, cr)
map(clear, ids)
db = cr.dbname
if db in self._uid_cache:
for id in ids:
if id in self._uid_cache[db]:
del self._uid_cache[db][id]
self.context_get.clear_cache(self)
self.has_group.clear_cache(self)
return res
def unlink(self, cr, uid, ids, context=None):
if 1 in ids:
raise osv.except_osv(_('Can not remove root user!'), _('You can not remove the admin user as it is used internally for resources created by Odoo (updates, module installation, ...)'))
db = cr.dbname
if db in self._uid_cache:
for id in ids:
if id in self._uid_cache[db]:
del self._uid_cache[db][id]
return super(res_users, self).unlink(cr, uid, ids, context=context)
def name_search(self, cr, user, name='', args=None, operator='ilike', context=None, limit=100):
if not args:
args=[]
if not context:
context={}
ids = []
if name and operator in ['=', 'ilike']:
ids = self.search(cr, user, [('login','=',name)]+ args, limit=limit, context=context)
if not ids:
ids = self.search(cr, user, [('name',operator,name)]+ args, limit=limit, context=context)
return self.name_get(cr, user, ids, context=context)
def copy(self, cr, uid, id, default=None, context=None):
user2copy = self.read(cr, uid, [id], ['login','name'])[0]
default = dict(default or {})
if ('name' not in default) and ('partner_id' not in default):
default['name'] = _("%s (copy)") % user2copy['name']
if 'login' not in default:
default['login'] = _("%s (copy)") % user2copy['login']
return super(res_users, self).copy(cr, uid, id, default, context)
@tools.ormcache(skiparg=2)
def context_get(self, cr, uid, context=None):
user = self.browse(cr, SUPERUSER_ID, uid, context)
result = {}
for k in self._fields:
if k.startswith('context_'):
context_key = k[8:]
elif k in ['lang', 'tz']:
context_key = k
else:
context_key = False
if context_key:
res = getattr(user, k) or False
if isinstance(res, models.BaseModel):
res = res.id
result[context_key] = res or False
return result
def action_get(self, cr, uid, context=None):
dataobj = self.pool['ir.model.data']
data_id = dataobj._get_id(cr, SUPERUSER_ID, 'base', 'action_res_users_my')
return dataobj.browse(cr, uid, data_id, context=context).res_id
def check_super(self, passwd):
if passwd == tools.config['admin_passwd']:
return True
else:
raise openerp.exceptions.AccessDenied()
def check_credentials(self, cr, uid, password):
""" Override this method to plug additional authentication methods"""
res = self.search(cr, SUPERUSER_ID, [('id','=',uid),('password','=',password)])
if not res:
raise openerp.exceptions.AccessDenied()
def _login(self, db, login, password):
if not password:
return False
user_id = False
cr = self.pool.cursor()
try:
# autocommit: our single update request will be performed atomically.
# (In this way, there is no opportunity to have two transactions
# interleaving their cr.execute()..cr.commit() calls and have one
# of them rolled back due to a concurrent access.)
cr.autocommit(True)
# check if user exists
res = self.search(cr, SUPERUSER_ID, [('login','=',login)])
if res:
user_id = res[0]
# check credentials
self.check_credentials(cr, user_id, password)
# We effectively unconditionally write the res_users line.
# Even w/ autocommit there's a chance the user row will be locked,
# in which case we can't delay the login just for the purpose of
# update the last login date - hence we use FOR UPDATE NOWAIT to
# try to get the lock - fail-fast
# Failing to acquire the lock on the res_users row probably means
# another request is holding it. No big deal, we don't want to
# prevent/delay login in that case. It will also have been logged
# as a SQL error, if anyone cares.
try:
# NO KEY introduced in PostgreSQL 9.3 http://www.postgresql.org/docs/9.3/static/release-9-3.html#AEN115299
update_clause = 'NO KEY UPDATE' if cr._cnx.server_version >= 90300 else 'UPDATE'
cr.execute("SELECT id FROM res_users WHERE id=%%s FOR %s NOWAIT" % update_clause, (user_id,), log_exceptions=False)
cr.execute("UPDATE res_users SET login_date = now() AT TIME ZONE 'UTC' WHERE id=%s", (user_id,))
self.invalidate_cache(cr, user_id, ['login_date'], [user_id])
except Exception:
_logger.debug("Failed to update last_login for db:%s login:%s", db, login, exc_info=True)
except openerp.exceptions.AccessDenied:
_logger.info("Login failed for db:%s login:%s", db, login)
user_id = False
finally:
cr.close()
return user_id
def authenticate(self, db, login, password, user_agent_env):
"""Verifies and returns the user ID corresponding to the given
``login`` and ``password`` combination, or False if there was
no matching user.
:param str db: the database on which user is trying to authenticate
:param str login: username
:param str password: user password
:param dict user_agent_env: environment dictionary describing any
relevant environment attributes
"""
uid = self._login(db, login, password)
if uid == openerp.SUPERUSER_ID:
# Successfully logged in as admin!
# Attempt to guess the web base url...
if user_agent_env and user_agent_env.get('base_location'):
cr = self.pool.cursor()
try:
base = user_agent_env['base_location']
ICP = self.pool['ir.config_parameter']
if not ICP.get_param(cr, uid, 'web.base.url.freeze'):
ICP.set_param(cr, uid, 'web.base.url', base)
cr.commit()
except Exception:
_logger.exception("Failed to update web.base.url configuration parameter")
finally:
cr.close()
return uid
def check(self, db, uid, passwd):
"""Verifies that the given (uid, password) is authorized for the database ``db`` and
raise an exception if it is not."""
if not passwd:
# empty passwords disallowed for obvious security reasons
raise openerp.exceptions.AccessDenied()
if self._uid_cache.get(db, {}).get(uid) == passwd:
return
cr = self.pool.cursor()
try:
self.check_credentials(cr, uid, passwd)
if db in self._uid_cache:
self._uid_cache[db][uid] = passwd
else:
self._uid_cache[db] = {uid: passwd}
finally:
cr.close()
def change_password(self, cr, uid, old_passwd, new_passwd, context=None):
"""Change current user password. Old password must be provided explicitly
to prevent hijacking an existing user session, or for cases where the cleartext
password is not used to authenticate requests.
:return: True
:raise: openerp.exceptions.AccessDenied when old password is wrong
:raise: except_osv when new password is not set or empty
"""
self.check(cr.dbname, uid, old_passwd)
if new_passwd:
return self.write(cr, uid, uid, {'password': new_passwd})
raise osv.except_osv(_('Warning!'), _("Setting empty passwords is not allowed for security reasons!"))
def preference_save(self, cr, uid, ids, context=None):
return {
'type': 'ir.actions.client',
'tag': 'reload_context',
}
def preference_change_password(self, cr, uid, ids, context=None):
return {
'type': 'ir.actions.client',
'tag': 'change_password',
'target': 'new',
}
@tools.ormcache(skiparg=2)
def has_group(self, cr, uid, group_ext_id):
"""Checks whether user belongs to given group.
:param str group_ext_id: external ID (XML ID) of the group.
Must be provided in fully-qualified form (``module.ext_id``), as there
is no implicit module to use.
:return: True if the current user is a member of the group with the
given external ID (XML ID), else False.
"""
assert group_ext_id and '.' in group_ext_id, "External ID must be fully qualified"
module, ext_id = group_ext_id.split('.')
cr.execute("""SELECT 1 FROM res_groups_users_rel WHERE uid=%s AND gid IN
(SELECT res_id FROM ir_model_data WHERE module=%s AND name=%s)""",
(uid, module, ext_id))
return bool(cr.fetchone())
#----------------------------------------------------------
# Implied groups
#
# Extension of res.groups and res.users with a relation for "implied"
# or "inherited" groups. Once a user belongs to a group, it
# automatically belongs to the implied groups (transitively).
#----------------------------------------------------------
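# For example (hypothetical groups): if "Sales / Manager" implies
# "Sales / User", adding a user to the manager group also adds them to the
# user group; and since implication is transitive, any group implied by
# "Sales / User" is added as well.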
class cset(object):
""" A cset (constrained set) is a set of elements that may be constrained to
be a subset of other csets. Elements added to a cset are automatically
added to its supersets. Cycles in the subset constraints are supported.
"""
def __init__(self, xs):
self.supersets = set()
self.elements = set(xs)
def subsetof(self, other):
if other is not self:
self.supersets.add(other)
other.update(self.elements)
def update(self, xs):
xs = set(xs) - self.elements
if xs: # xs will eventually be empty in case of a cycle
self.elements.update(xs)
for s in self.supersets:
s.update(xs)
def __iter__(self):
return iter(self.elements)
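# A minimal sketch of cset in action (hypothetical values): constraining
# a to be a subset of b makes every element added to a propagate to b.
#   a = cset([1]); b = cset([2])
#   a.subsetof(b)    # b now contains {1, 2}
#   a.update([3])    # propagates: b now contains {1, 2, 3}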
concat = itertools.chain.from_iterable
class groups_implied(osv.osv):
_inherit = 'res.groups'
def _get_trans_implied(self, cr, uid, ids, field, arg, context=None):
"computes the transitive closure of relation implied_ids"
memo = {} # use a memo for performance and cycle avoidance
def computed_set(g):
if g not in memo:
memo[g] = cset(g.implied_ids)
for h in g.implied_ids:
computed_set(h).subsetof(memo[g])
return memo[g]
res = {}
for g in self.browse(cr, SUPERUSER_ID, ids, context):
res[g.id] = map(int, computed_set(g))
return res
_columns = {
'implied_ids': fields.many2many('res.groups', 'res_groups_implied_rel', 'gid', 'hid',
string='Inherits', help='Users of this group automatically inherit those groups'),
'trans_implied_ids': fields.function(_get_trans_implied,
type='many2many', relation='res.groups', string='Transitively inherits'),
}
def create(self, cr, uid, values, context=None):
users = values.pop('users', None)
gid = super(groups_implied, self).create(cr, uid, values, context)
if users:
# delegate addition of users to add implied groups
self.write(cr, uid, [gid], {'users': users}, context)
return gid
def write(self, cr, uid, ids, values, context=None):
res = super(groups_implied, self).write(cr, uid, ids, values, context)
if values.get('users') or values.get('implied_ids'):
# add all implied groups (to all users of each group)
for g in self.browse(cr, uid, ids, context=context):
gids = map(int, g.trans_implied_ids)
vals = {'users': [(4, u.id) for u in g.users]}
super(groups_implied, self).write(cr, uid, gids, vals, context)
return res
class users_implied(osv.osv):
_inherit = 'res.users'
def create(self, cr, uid, values, context=None):
groups = values.pop('groups_id', None)
user_id = super(users_implied, self).create(cr, uid, values, context)
if groups:
# delegate addition of groups to add implied groups
self.write(cr, uid, [user_id], {'groups_id': groups}, context)
self.pool['ir.ui.view'].clear_cache()
return user_id
def write(self, cr, uid, ids, values, context=None):
if not isinstance(ids,list):
ids = [ids]
res = super(users_implied, self).write(cr, uid, ids, values, context)
if values.get('groups_id'):
# add implied groups for all users
for user in self.browse(cr, uid, ids):
gs = set(concat(g.trans_implied_ids for g in user.groups_id))
vals = {'groups_id': [(4, g.id) for g in gs]}
super(users_implied, self).write(cr, uid, [user.id], vals, context)
self.pool['ir.ui.view'].clear_cache()
return res
#----------------------------------------------------------
# Virtual checkbox and selection fields for the res.users form view
#
# Extension of res.groups and res.users for the special groups view in the users
# form. This extension presents groups with selection and boolean widgets:
# - Groups are shown by application, with boolean and/or selection fields.
# Selection fields typically define a role "Name" for the given application.
# - Uncategorized groups are presented as boolean fields and grouped in a
# section "Others".
#
# The user form view is modified by an inherited view (base.user_groups_view);
# the inherited view replaces the field 'groups_id' by a set of reified group
# fields (boolean or selection fields). The arch of that view is regenerated
# each time groups are changed.
#
# Naming conventions for reified groups fields:
# - boolean field 'in_group_ID' is True iff
# ID is in 'groups_id'
# - selection field 'sel_groups_ID1_..._IDk' is ID iff
# ID is in 'groups_id' and ID is maximal in the set {ID1, ..., IDk}
#----------------------------------------------------------
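# For example (hypothetical group ids): a user whose groups_id contains
# groups 7 and 9, where 9 implies 7, gets in_group_7 = True and
# sel_groups_7_9 = 9 (9 being maximal among the selected ids).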
def name_boolean_group(id):
return 'in_group_' + str(id)
def name_selection_groups(ids):
return 'sel_groups_' + '_'.join(map(str, ids))
def is_boolean_group(name):
return name.startswith('in_group_')
def is_selection_groups(name):
return name.startswith('sel_groups_')
def is_reified_group(name):
return is_boolean_group(name) or is_selection_groups(name)
def get_boolean_group(name):
return int(name[9:])
def get_selection_groups(name):
return map(int, name[11:].split('_'))
def partition(f, xs):
"return a pair equivalent to (filter(f, xs), filter(lambda x: not f(x), xs))"
yes, nos = [], []
for x in xs:
(yes if f(x) else nos).append(x)
return yes, nos
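# Illustration of partition() with hypothetical values:
#   partition(lambda x: x % 2 == 0, [1, 2, 3, 4]) == ([2, 4], [1, 3])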
def parse_m2m(commands):
"return a list of ids corresponding to a many2many value"
ids = []
for command in commands:
if isinstance(command, (tuple, list)):
if command[0] in (1, 4):
# commands (1, id, values) and (4, id) carry the record id at index 1
ids.append(command[1])
elif command[0] == 5:
ids = []
elif command[0] == 6:
ids = list(command[2])
else:
ids.append(command)
return ids
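# Illustration of parse_m2m() with hypothetical m2m commands:
#   parse_m2m([(4, 42), (6, 0, [1, 2]), 7]) == [1, 2, 7]
# (the (6, 0, ids) command resets the list, so the earlier (4, 42) is dropped)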
class groups_view(osv.osv):
_inherit = 'res.groups'
def create(self, cr, uid, values, context=None):
res = super(groups_view, self).create(cr, uid, values, context)
self.update_user_groups_view(cr, uid, context)
return res
def write(self, cr, uid, ids, values, context=None):
res = super(groups_view, self).write(cr, uid, ids, values, context)
self.update_user_groups_view(cr, uid, context)
return res
def unlink(self, cr, uid, ids, context=None):
res = super(groups_view, self).unlink(cr, uid, ids, context)
self.update_user_groups_view(cr, uid, context)
return res
def update_user_groups_view(self, cr, uid, context=None):
# the view with id 'base.user_groups_view' inherits the user form view,
# and introduces the reified group fields
# we check that the view exists because at first init it does not yet,
# while some basic groups are already being created
view = self.pool['ir.model.data'].xmlid_to_object(cr, SUPERUSER_ID, 'base.user_groups_view', context=context)
if view and view.exists() and view._name == 'ir.ui.view':
xml1, xml2 = [], []
xml1.append(E.separator(string=_('Application'), colspan="4"))
for app, kind, gs in self.get_groups_by_application(cr, uid, context):
# hide groups in category 'Hidden' (except to group_no_one)
attrs = {'groups': 'base.group_no_one'} if app and app.xml_id == 'base.module_category_hidden' else {}
if kind == 'selection':
# application name with a selection field
field_name = name_selection_groups(map(int, gs))
xml1.append(E.field(name=field_name, **attrs))
xml1.append(E.newline())
else:
# application separator with boolean fields
app_name = app and app.name or _('Other')
xml2.append(E.separator(string=app_name, colspan="4", **attrs))
for g in gs:
field_name = name_boolean_group(g.id)
xml2.append(E.field(name=field_name, **attrs))
xml = E.field(*(xml1 + xml2), name="groups_id", position="replace")
xml.addprevious(etree.Comment("GENERATED AUTOMATICALLY BY GROUPS"))
xml_content = etree.tostring(xml, pretty_print=True, xml_declaration=True, encoding="utf-8")
view.write({'arch': xml_content})
return True
def get_application_groups(self, cr, uid, domain=None, context=None):
return self.search(cr, uid, domain or [])
def get_groups_by_application(self, cr, uid, context=None):
""" return all groups classified by application (module category), as a list of pairs:
[(app, kind, [group, ...]), ...],
where app and group are browse records, and kind is either 'boolean' or 'selection'.
Applications are given in sequence order. If kind is 'selection', the groups are
given in reverse implication order.
"""
def linearized(gs):
gs = set(gs)
# determine sequence order: a group should appear after its implied groups
order = dict.fromkeys(gs, 0)
for g in gs:
for h in gs.intersection(g.trans_implied_ids):
order[h] -= 1
# check whether order is total, i.e., sequence orders are distinct
if len(set(order.itervalues())) == len(gs):
return sorted(gs, key=lambda g: order[g])
return None
# classify all groups by application
gids = self.get_application_groups(cr, uid, context=context)
by_app, others = {}, []
for g in self.browse(cr, uid, gids, context):
if g.category_id:
by_app.setdefault(g.category_id, []).append(g)
else:
others.append(g)
# build the result
res = []
apps = sorted(by_app.iterkeys(), key=lambda a: a.sequence or 0)
for app in apps:
gs = linearized(by_app[app])
if gs:
res.append((app, 'selection', gs))
else:
res.append((app, 'boolean', by_app[app]))
if others:
res.append((False, 'boolean', others))
return res
class users_view(osv.osv):
_inherit = 'res.users'
def create(self, cr, uid, values, context=None):
values = self._remove_reified_groups(values)
return super(users_view, self).create(cr, uid, values, context)
def write(self, cr, uid, ids, values, context=None):
values = self._remove_reified_groups(values)
return super(users_view, self).write(cr, uid, ids, values, context)
def _remove_reified_groups(self, values):
""" return `values` without reified group fields """
add, rem = [], []
values1 = {}
for key, val in values.iteritems():
if is_boolean_group(key):
(add if val else rem).append(get_boolean_group(key))
elif is_selection_groups(key):
rem += get_selection_groups(key)
if val:
add.append(val)
else:
values1[key] = val
if 'groups_id' not in values and (add or rem):
# remove group ids in `rem` and add group ids in `add`
values1['groups_id'] = zip(repeat(3), rem) + zip(repeat(4), add)
return values1
def default_get(self, cr, uid, fields, context=None):
group_fields, fields = partition(is_reified_group, fields)
fields1 = (fields + ['groups_id']) if group_fields else fields
values = super(users_view, self).default_get(cr, uid, fields1, context)
self._add_reified_groups(group_fields, values)
# add "default_groups_ref" inside the context to set default value for group_id with xml values
if 'groups_id' in fields and isinstance(context.get("default_groups_ref"), list):
groups = []
ir_model_data = self.pool.get('ir.model.data')
for group_xml_id in context["default_groups_ref"]:
group_split = group_xml_id.split('.')
if len(group_split) != 2:
raise osv.except_osv(_('Invalid context value'), _('Invalid context default_groups_ref value (model.name_id) : "%s"') % group_xml_id)
try:
temp, group_id = ir_model_data.get_object_reference(cr, uid, group_split[0], group_split[1])
except ValueError:
group_id = False
groups += [group_id]
values['groups_id'] = groups
return values
def read(self, cr, uid, ids, fields=None, context=None, load='_classic_read'):
# determine whether reified groups fields are required, and which ones
fields1 = fields or self.fields_get(cr, uid, context=context).keys()
group_fields, other_fields = partition(is_reified_group, fields1)
# read regular fields (other_fields); add 'groups_id' if necessary
drop_groups_id = False
if group_fields and fields:
if 'groups_id' not in other_fields:
other_fields.append('groups_id')
drop_groups_id = True
else:
other_fields = fields
res = super(users_view, self).read(cr, uid, ids, other_fields, context=context, load=load)
# post-process result to add reified group fields
if group_fields:
for values in (res if isinstance(res, list) else [res]):
self._add_reified_groups(group_fields, values)
if drop_groups_id:
values.pop('groups_id', None)
return res
def _add_reified_groups(self, fields, values):
""" add the given reified group fields into `values` """
gids = set(parse_m2m(values.get('groups_id') or []))
for f in fields:
if is_boolean_group(f):
values[f] = get_boolean_group(f) in gids
elif is_selection_groups(f):
selected = [gid for gid in get_selection_groups(f) if gid in gids]
values[f] = selected and selected[-1] or False
def fields_get(self, cr, uid, allfields=None, context=None, write_access=True):
res = super(users_view, self).fields_get(cr, uid, allfields, context, write_access)
# add reified groups fields
for app, kind, gs in self.pool['res.groups'].get_groups_by_application(cr, uid, context):
if kind == 'selection':
# selection group field
tips = ['%s: %s' % (g.name, g.comment) for g in gs if g.comment]
res[name_selection_groups(map(int, gs))] = {
'type': 'selection',
'string': app and app.name or _('Other'),
'selection': [(False, '')] + [(g.id, g.name) for g in gs],
'help': '\n'.join(tips),
'exportable': False,
'selectable': False,
}
else:
# boolean group fields
for g in gs:
res[name_boolean_group(g.id)] = {
'type': 'boolean',
'string': g.name,
'help': g.comment,
'exportable': False,
'selectable': False,
}
return res
#----------------------------------------------------------
# change password wizard
#----------------------------------------------------------
class change_password_wizard(osv.TransientModel):
"""
A wizard to manage the change of users' passwords
"""
_name = "change.password.wizard"
_description = "Change Password Wizard"
_columns = {
'user_ids': fields.one2many('change.password.user', 'wizard_id', string='Users'),
}
def _default_user_ids(self, cr, uid, context=None):
if context is None:
context = {}
user_model = self.pool['res.users']
user_ids = context.get('active_model') == 'res.users' and context.get('active_ids') or []
return [
(0, 0, {'user_id': user.id, 'user_login': user.login})
for user in user_model.browse(cr, uid, user_ids, context=context)
]
_defaults = {
'user_ids': _default_user_ids,
}
def change_password_button(self, cr, uid, ids, context=None):
wizard = self.browse(cr, uid, ids, context=context)[0]
need_reload = any(uid == user.user_id.id for user in wizard.user_ids)
line_ids = [user.id for user in wizard.user_ids]
self.pool.get('change.password.user').change_password_button(cr, uid, line_ids, context=context)
if need_reload:
return {
'type': 'ir.actions.client',
'tag': 'reload'
}
return {'type': 'ir.actions.act_window_close'}
class change_password_user(osv.TransientModel):
"""
A model to configure users in the change password wizard
"""
_name = 'change.password.user'
_description = 'Change Password Wizard User'
_columns = {
'wizard_id': fields.many2one('change.password.wizard', string='Wizard', required=True),
'user_id': fields.many2one('res.users', string='User', required=True),
'user_login': fields.char('User Login', readonly=True),
'new_passwd': fields.char('New Password'),
}
_defaults = {
'new_passwd': '',
}
def change_password_button(self, cr, uid, ids, context=None):
for line in self.browse(cr, uid, ids, context=context):
line.user_id.write({'password': line.new_passwd})
# don't keep temporary passwords in the database longer than necessary
self.write(cr, uid, ids, {'new_passwd': False}, context=context)
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| danielharbor/openerp | openerp/addons/base/res/res_users.py | Python | agpl-3.0 | 44,634 |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('organizations', '0002_migrate_locations_to_facilities'),
('notifications', '0003_auto_20150912_2049'),
]
operations = [
migrations.AlterField(
model_name='notification',
name='location',
field=models.ForeignKey(verbose_name='facility', to='organizations.Facility'),
),
migrations.RenameField(
model_name='notification',
old_name='location',
new_name='facility',
),
migrations.AlterField(
model_name='notification',
name='facility',
field=models.ForeignKey(to='organizations.Facility'),
),
]
| alper/volunteer_planner | notifications/migrations/0004_auto_20151003_2033.py | Python | agpl-3.0 | 847 |
# -*- coding: utf-8 -*-
# Copyright (c) 2017, Frappe Technologies Pvt. Ltd. and Contributors
# See license.txt
from __future__ import unicode_literals
import frappe
import erpnext
import unittest
from frappe.utils import nowdate, add_days
from erpnext.tests.utils import create_test_contact_and_address
from erpnext.stock.doctype.delivery_trip.delivery_trip import notify_customers, get_contact_and_address
class TestDeliveryTrip(unittest.TestCase):
def setUp(self):
create_driver()
create_vehicle()
create_delivery_notification()
create_test_contact_and_address()
def test_delivery_trip(self):
contact = get_contact_and_address("_Test Customer")
if not frappe.db.exists("Delivery Trip", "TOUR-00000"):
delivery_trip = frappe.new_doc("Delivery Trip")
delivery_trip.company = erpnext.get_default_company()
delivery_trip.date = add_days(nowdate(), 5)
delivery_trip.driver = "DRIVER-00001"
delivery_trip.vehicle = "JB 007"
delivery_trip.append("delivery_stops", {
"customer": "_Test Customer",
"address": contact.shipping_address.parent,
"contact": contact.contact_person.parent
})
delivery_trip.delivery_notification = 'Delivery Notification'
delivery_trip.insert()
sender_email = frappe.db.get_value("User", frappe.session.user, "email")
notify_customers(docname=delivery_trip.name, date=delivery_trip.date, driver=delivery_trip.driver,
vehicle=delivery_trip.vehicle,
sender_email=sender_email, delivery_notification=delivery_trip.delivery_notification)
self.assertEquals(delivery_trip.get("delivery_stops")[0].notified_by_email, 0)
def create_driver():
if not frappe.db.exists("Driver", "Newton Scmander"):
driver = frappe.new_doc("Driver")
driver.full_name = "Newton Scmander"
driver.cell_number = "98343424242"
driver.license_number = "B809"
driver.insert()
def create_delivery_notification():
if not frappe.db.exists("Standard Reply", "Delivery Notification"):
frappe.get_doc({
'doctype': 'Standard Reply',
'name': 'Delivery Notification',
'response': 'Test Delivery Trip',
'subject': 'Test Subject',
'owner': frappe.session.user
}).insert()
def create_vehicle():
if not frappe.db.exists("Vehicle", "JB 007"):
vehicle = frappe.get_doc({
"doctype": "Vehicle",
"license_plate": "JB 007",
"make": "Maruti",
"model": "PCM",
"last_odometer": 5000,
"acquisition_date": frappe.utils.nowdate(),
"location": "Mumbai",
"chassis_no": "1234ABCD",
"uom": "Litre",
"vehicle_value": frappe.utils.flt(500000)
})
vehicle.insert()
| indictranstech/erpnext | erpnext/stock/doctype/delivery_trip/test_delivery_trip.py | Python | agpl-3.0 | 2,568 |
# ----------------------------------------------------------------------
# Numenta Platform for Intelligent Computing (NuPIC)
# Copyright (C) 2013, Numenta, Inc. Unless you have an agreement
# with Numenta, Inc., for a separate license for this software code, the
# following terms and conditions apply:
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero Public License version 3 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
# See the GNU Affero Public License for more details.
#
# You should have received a copy of the GNU Affero Public License
# along with this program. If not, see http://www.gnu.org/licenses.
#
# http://numenta.org/licenses/
# ----------------------------------------------------------------------
from cStringIO import StringIO
import sys
import tempfile
import unittest2 as unittest
import numpy
from nupic.encoders.base import defaultDtype
from nupic.data import SENTINEL_VALUE_FOR_MISSING_DATA
from nupic.data.fieldmeta import FieldMetaType
from nupic.support.unittesthelpers.algorithm_test_helpers import getSeed
from nupic.encoders.random_distributed_scalar import (
RandomDistributedScalarEncoder
)
try:
import capnp
except ImportError:
capnp = None
if capnp:
from nupic.encoders.random_distributed_scalar_capnp import (
RandomDistributedScalarEncoderProto
)
# Disable warnings about accessing protected members
# pylint: disable=W0212
def computeOverlap(x, y):
"""
Given two binary arrays, compute their overlap. The overlap is the number
of bits where x[i] and y[i] are both 1
"""
return (x & y).sum()
def validateEncoder(encoder, subsampling):
"""
Given an encoder, calculate overlaps statistics and ensure everything is ok.
We don't check every possible combination for speed reasons.
"""
for i in range(encoder.minIndex, encoder.maxIndex+1, 1):
for j in range(i+1, encoder.maxIndex+1, subsampling):
if not encoder._overlapOK(i, j):
return False
return True
class RandomDistributedScalarEncoderTest(unittest.TestCase):
"""
Unit tests for RandomDistributedScalarEncoder class.
"""
def testEncoding(self):
"""
Test basic encoding functionality. Create encodings without crashing and
check they contain the correct number of on and off bits. Check some
encodings for expected overlap. Test that encodings for old values don't
change once we generate new buckets.
"""
# Initialize with non-default parameters and encode with a number close to
# the offset
encoder = RandomDistributedScalarEncoder(name="encoder", resolution=1.0,
w=23, n=500, offset=0.0)
e0 = encoder.encode(-0.1)
self.assertEqual(e0.sum(), 23, "Number of on bits is incorrect")
self.assertEqual(e0.size, 500, "Width of the vector is incorrect")
self.assertEqual(encoder.getBucketIndices(0.0)[0], encoder._maxBuckets / 2,
"Offset doesn't correspond to middle bucket")
self.assertEqual(len(encoder.bucketMap), 1, "Number of buckets is not 1")
# Encode with a number that is resolution away from offset. Now we should
# have two buckets and this encoding should be one bit away from e0
e1 = encoder.encode(1.0)
self.assertEqual(len(encoder.bucketMap), 2, "Number of buckets is not 2")
self.assertEqual(e1.sum(), 23, "Number of on bits is incorrect")
self.assertEqual(e1.size, 500, "Width of the vector is incorrect")
self.assertEqual(computeOverlap(e0, e1), 22, "Overlap is not equal to w-1")
# Encode with a number that is resolution*w away from offset. Now we should
# have many buckets and this encoding should have very little overlap with
# e0
e25 = encoder.encode(25.0)
self.assertGreater(len(encoder.bucketMap), 23,
"Number of buckets is not 2")
self.assertEqual(e25.sum(), 23, "Number of on bits is incorrect")
self.assertEqual(e25.size, 500, "Width of the vector is incorrect")
self.assertLess(computeOverlap(e0, e25), 4, "Overlap is too high")
# Test encoding consistency. The encodings for previous numbers
# shouldn't change even though we have added additional buckets
self.assertTrue(numpy.array_equal(e0, encoder.encode(-0.1)),
"Encodings are not consistent - they have changed after new buckets "
"have been created")
self.assertTrue(numpy.array_equal(e1, encoder.encode(1.0)),
"Encodings are not consistent - they have changed after new buckets "
"have been created")
def testMissingValues(self):
"""
Test that missing values and NaN return all zero's.
"""
encoder = RandomDistributedScalarEncoder(name="encoder", resolution=1.0)
empty = encoder.encode(SENTINEL_VALUE_FOR_MISSING_DATA)
self.assertEqual(empty.sum(), 0)
empty = encoder.encode(float("nan"))
self.assertEqual(empty.sum(), 0)
def testResolution(self):
"""
Test that numbers within the same resolution return the same encoding.
Numbers outside the resolution should return different encodings.
"""
encoder = RandomDistributedScalarEncoder(name="encoder", resolution=1.0)
# Since 23.0 is the first encoded number, it will be the offset.
# Since resolution is 1, 22.9 and 23.1 should have the same bucket index and
# encoding.
e23 = encoder.encode(23.0)
e23p1 = encoder.encode(23.1)
e22p9 = encoder.encode(22.9)
e24 = encoder.encode(24.0)
self.assertEqual(e23.sum(), encoder.w)
self.assertEqual((e23 == e23p1).sum(), encoder.getWidth(),
"Numbers within resolution don't have the same encoding")
self.assertEqual((e23 == e22p9).sum(), encoder.getWidth(),
"Numbers within resolution don't have the same encoding")
self.assertNotEqual((e23 == e24).sum(), encoder.getWidth(),
"Numbers outside resolution have the same encoding")
e22p5 = encoder.encode(22.5)
self.assertNotEqual((e23 == e22p5).sum(), encoder.getWidth(),
"Numbers outside resolution have the same encoding")
def testMapBucketIndexToNonZeroBits(self):
"""
Test that mapBucketIndexToNonZeroBits works and that max buckets and
clipping are handled properly.
"""
encoder = RandomDistributedScalarEncoder(resolution=1.0, w=11, n=150)
# Set a low number of max buckets
encoder._initializeBucketMap(10, None)
encoder.encode(0.0)
encoder.encode(-7.0)
encoder.encode(7.0)
self.assertEqual(len(encoder.bucketMap), encoder._maxBuckets,
"_maxBuckets exceeded")
self.assertTrue(
numpy.array_equal(encoder.mapBucketIndexToNonZeroBits(-1),
encoder.bucketMap[0]),
"mapBucketIndexToNonZeroBits did not handle negative"
" index")
self.assertTrue(
numpy.array_equal(encoder.mapBucketIndexToNonZeroBits(1000),
encoder.bucketMap[9]),
"mapBucketIndexToNonZeroBits did not handle negative index")
e23 = encoder.encode(23.0)
e6 = encoder.encode(6)
self.assertEqual((e23 == e6).sum(), encoder.getWidth(),
"Values not clipped correctly during encoding")
ep8 = encoder.encode(-8)
ep7 = encoder.encode(-7)
self.assertEqual((ep8 == ep7).sum(), encoder.getWidth(),
"Values not clipped correctly during encoding")
self.assertEqual(encoder.getBucketIndices(-8)[0], 0,
"getBucketIndices returned negative bucket index")
self.assertEqual(encoder.getBucketIndices(23)[0], encoder._maxBuckets-1,
"getBucketIndices returned bucket index that is too"
" large")
def testParameterChecks(self):
"""
Test that some bad construction parameters get handled.
"""
# n must be >= 6*w
with self.assertRaises(ValueError):
RandomDistributedScalarEncoder(name="mv", resolution=1.0, n=int(5.9*21))
# n must be an int
with self.assertRaises(ValueError):
RandomDistributedScalarEncoder(name="mv", resolution=1.0, n=5.9*21)
# w can't be negative
with self.assertRaises(ValueError):
RandomDistributedScalarEncoder(name="mv", resolution=1.0, w=-1)
# resolution can't be negative
with self.assertRaises(ValueError):
RandomDistributedScalarEncoder(name="mv", resolution=-2)
def testOverlapStatistics(self):
"""
Check that the overlaps for the encodings are within the expected range.
Here we ask the encoder to create a bunch of representations under somewhat
stressful conditions, and then verify they are correct. We rely on the fact
that the _overlapOK and _countOverlapIndices methods are working correctly.
"""
seed = getSeed()
# Generate about 600 encodings. Set n relatively low to increase
# chance of false overlaps
encoder = RandomDistributedScalarEncoder(resolution=1.0, w=11, n=150,
seed=seed)
encoder.encode(0.0)
encoder.encode(-300.0)
encoder.encode(300.0)
self.assertTrue(validateEncoder(encoder, subsampling=3),
"Illegal overlap encountered in encoder")
def testGetMethods(self):
"""
Test that the getWidth, getDescription, and getDecoderOutputFieldTypes
methods work.
"""
encoder = RandomDistributedScalarEncoder(name="theName", resolution=1.0, n=500)
self.assertEqual(encoder.getWidth(), 500,
"getWidth doesn't return the correct result")
self.assertEqual(encoder.getDescription(), [("theName", 0)],
"getDescription doesn't return the correct result")
self.assertEqual(encoder.getDecoderOutputFieldTypes(),
(FieldMetaType.float, ),
"getDecoderOutputFieldTypes doesn't return the correct"
" result")
def testOffset(self):
"""
Test that offset is working properly
"""
encoder = RandomDistributedScalarEncoder(name="encoder", resolution=1.0)
encoder.encode(23.0)
self.assertEqual(encoder._offset, 23.0,
"Offset not specified and not initialized to first input")
encoder = RandomDistributedScalarEncoder(name="encoder", resolution=1.0,
offset=25.0)
encoder.encode(23.0)
self.assertEqual(encoder._offset, 25.0,
"Offset not initialized to specified constructor"
" parameter")
def testSeed(self):
"""
    Test that initializing twice with the same seed returns identical
    encodings, and that different seeds produce different encodings.
"""
encoder1 = RandomDistributedScalarEncoder(name="encoder1", resolution=1.0,
seed=42)
encoder2 = RandomDistributedScalarEncoder(name="encoder2", resolution=1.0,
seed=42)
encoder3 = RandomDistributedScalarEncoder(name="encoder3", resolution=1.0,
seed=-1)
encoder4 = RandomDistributedScalarEncoder(name="encoder4", resolution=1.0,
seed=-1)
e1 = encoder1.encode(23.0)
e2 = encoder2.encode(23.0)
e3 = encoder3.encode(23.0)
e4 = encoder4.encode(23.0)
self.assertEqual((e1 == e2).sum(), encoder1.getWidth(),
"Same seed gives rise to different encodings")
    self.assertNotEqual((e1 == e3).sum(), encoder1.getWidth(),
                        "Different seeds give rise to the same encodings")
    self.assertNotEqual((e3 == e4).sum(), encoder1.getWidth(),
                        "Seeds of -1 give rise to the same encodings")
def testCountOverlapIndices(self):
"""
Test that the internal method _countOverlapIndices works as expected.
"""
# Create a fake set of encodings.
encoder = RandomDistributedScalarEncoder(name="encoder", resolution=1.0,
w=5, n=5*20)
    midIdx = encoder._maxBuckets // 2
encoder.bucketMap[midIdx-2] = numpy.array(range(3, 8))
encoder.bucketMap[midIdx-1] = numpy.array(range(4, 9))
encoder.bucketMap[midIdx] = numpy.array(range(5, 10))
encoder.bucketMap[midIdx+1] = numpy.array(range(6, 11))
encoder.bucketMap[midIdx+2] = numpy.array(range(7, 12))
encoder.bucketMap[midIdx+3] = numpy.array(range(8, 13))
encoder.minIndex = midIdx - 2
encoder.maxIndex = midIdx + 3
# Indices must exist
with self.assertRaises(ValueError):
encoder._countOverlapIndices(midIdx-3, midIdx-2)
with self.assertRaises(ValueError):
encoder._countOverlapIndices(midIdx-2, midIdx-3)
# Test some overlaps
self.assertEqual(encoder._countOverlapIndices(midIdx-2, midIdx-2), 5,
"_countOverlapIndices didn't work")
self.assertEqual(encoder._countOverlapIndices(midIdx-1, midIdx-2), 4,
"_countOverlapIndices didn't work")
self.assertEqual(encoder._countOverlapIndices(midIdx+1, midIdx-2), 2,
"_countOverlapIndices didn't work")
self.assertEqual(encoder._countOverlapIndices(midIdx-2, midIdx+3), 0,
"_countOverlapIndices didn't work")
def testOverlapOK(self):
"""
Test that the internal method _overlapOK works as expected.
"""
# Create a fake set of encodings.
encoder = RandomDistributedScalarEncoder(name="encoder", resolution=1.0,
w=5, n=5*20)
    midIdx = encoder._maxBuckets // 2
encoder.bucketMap[midIdx-3] = numpy.array(range(4, 9)) # Not ok with
# midIdx-1
encoder.bucketMap[midIdx-2] = numpy.array(range(3, 8))
encoder.bucketMap[midIdx-1] = numpy.array(range(4, 9))
encoder.bucketMap[midIdx] = numpy.array(range(5, 10))
encoder.bucketMap[midIdx+1] = numpy.array(range(6, 11))
encoder.bucketMap[midIdx+2] = numpy.array(range(7, 12))
encoder.bucketMap[midIdx+3] = numpy.array(range(8, 13))
encoder.minIndex = midIdx - 3
encoder.maxIndex = midIdx + 3
self.assertTrue(encoder._overlapOK(midIdx, midIdx-1),
"_overlapOK didn't work")
self.assertTrue(encoder._overlapOK(midIdx-2, midIdx+3),
"_overlapOK didn't work")
self.assertFalse(encoder._overlapOK(midIdx-3, midIdx-1),
"_overlapOK didn't work")
    # Test _overlapOK against explicitly supplied indices and overlap counts
self.assertTrue(encoder._overlapOK(100, 50, 0),
"_overlapOK didn't work for far values")
self.assertTrue(encoder._overlapOK(100, 50, encoder._maxOverlap),
"_overlapOK didn't work for far values")
self.assertFalse(encoder._overlapOK(100, 50, encoder._maxOverlap+1),
"_overlapOK didn't work for far values")
self.assertTrue(encoder._overlapOK(50, 50, 5),
"_overlapOK didn't work for near values")
self.assertTrue(encoder._overlapOK(48, 50, 3),
"_overlapOK didn't work for near values")
self.assertTrue(encoder._overlapOK(46, 50, 1),
"_overlapOK didn't work for near values")
self.assertTrue(encoder._overlapOK(45, 50, encoder._maxOverlap),
"_overlapOK didn't work for near values")
self.assertFalse(encoder._overlapOK(48, 50, 4),
"_overlapOK didn't work for near values")
self.assertFalse(encoder._overlapOK(48, 50, 2),
"_overlapOK didn't work for near values")
self.assertFalse(encoder._overlapOK(46, 50, 2),
"_overlapOK didn't work for near values")
self.assertFalse(encoder._overlapOK(50, 50, 6),
"_overlapOK didn't work for near values")
def testCountOverlap(self):
"""
Test that the internal method _countOverlap works as expected.
"""
encoder = RandomDistributedScalarEncoder(name="encoder", resolution=1.0,
n=500)
r1 = numpy.array([1, 2, 3, 4, 5, 6])
r2 = numpy.array([1, 2, 3, 4, 5, 6])
self.assertEqual(encoder._countOverlap(r1, r2), 6,
"_countOverlap result is incorrect")
r1 = numpy.array([1, 2, 3, 4, 5, 6])
r2 = numpy.array([1, 2, 3, 4, 5, 7])
self.assertEqual(encoder._countOverlap(r1, r2), 5,
"_countOverlap result is incorrect")
r1 = numpy.array([1, 2, 3, 4, 5, 6])
r2 = numpy.array([6, 5, 4, 3, 2, 1])
self.assertEqual(encoder._countOverlap(r1, r2), 6,
"_countOverlap result is incorrect")
r1 = numpy.array([1, 2, 8, 4, 5, 6])
r2 = numpy.array([1, 2, 3, 4, 9, 6])
self.assertEqual(encoder._countOverlap(r1, r2), 4,
"_countOverlap result is incorrect")
r1 = numpy.array([1, 2, 3, 4, 5, 6])
r2 = numpy.array([1, 2, 3])
self.assertEqual(encoder._countOverlap(r1, r2), 3,
"_countOverlap result is incorrect")
r1 = numpy.array([7, 8, 9, 10, 11, 12])
r2 = numpy.array([1, 2, 3, 4, 5, 6])
self.assertEqual(encoder._countOverlap(r1, r2), 0,
"_countOverlap result is incorrect")
def testVerbosity(self):
"""
Test that nothing is printed out when verbosity=0
"""
_stdout = sys.stdout
sys.stdout = _stringio = StringIO()
encoder = RandomDistributedScalarEncoder(name="mv", resolution=1.0,
verbosity=0)
output = numpy.zeros(encoder.getWidth(), dtype=defaultDtype)
encoder.encodeIntoArray(23.0, output)
encoder.getBucketIndices(23.0)
sys.stdout = _stdout
self.assertEqual(len(_stringio.getvalue()), 0,
"zero verbosity doesn't lead to zero output")
def testEncodeInvalidInputType(self):
encoder = RandomDistributedScalarEncoder(name="encoder", resolution=1.0,
verbosity=0)
with self.assertRaises(TypeError):
encoder.encode("String")
@unittest.skipUnless(
capnp, "pycapnp is not installed, skipping serialization test.")
def testWriteRead(self):
original = RandomDistributedScalarEncoder(
name="encoder", resolution=1.0, w=23, n=500, offset=0.0)
originalValue = original.encode(1)
proto1 = RandomDistributedScalarEncoderProto.new_message()
original.write(proto1)
# Write the proto to a temp file and read it back into a new proto
with tempfile.TemporaryFile() as f:
proto1.write(f)
f.seek(0)
proto2 = RandomDistributedScalarEncoderProto.read(f)
encoder = RandomDistributedScalarEncoder.read(proto2)
self.assertIsInstance(encoder, RandomDistributedScalarEncoder)
self.assertEqual(encoder.resolution, original.resolution)
self.assertEqual(encoder.w, original.w)
self.assertEqual(encoder.n, original.n)
self.assertEqual(encoder.name, original.name)
self.assertEqual(encoder.verbosity, original.verbosity)
self.assertEqual(encoder.minIndex, original.minIndex)
self.assertEqual(encoder.maxIndex, original.maxIndex)
encodedFromOriginal = original.encode(1)
encodedFromNew = encoder.encode(1)
self.assertTrue(numpy.array_equal(encodedFromNew, originalValue))
self.assertEqual(original.decode(encodedFromNew),
encoder.decode(encodedFromOriginal))
self.assertEqual(original.random.getSeed(), encoder.random.getSeed())
for key, value in original.bucketMap.items():
self.assertTrue(numpy.array_equal(value, encoder.bucketMap[key]))
if __name__ == "__main__":
unittest.main()
| badlogicmanpreet/nupic | tests/unit/nupic/encoders/random_distributed_scalar_test.py | Python | agpl-3.0 | 19,742 |
# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
class PyIlmbase(AutotoolsPackage):
    """The PyIlmBase libraries provide Python bindings for the IlmBase libraries."""
homepage = "https://github.com/AcademySoftwareFoundation/openexr/tree/v2.3.0/PyIlmBase"
url = "https://github.com/AcademySoftwareFoundation/openexr/releases/download/v2.3.0/pyilmbase-2.3.0.tar.gz"
version('2.3.0', sha256='9c898bb16e7bc916c82bebdf32c343c0f2878fc3eacbafa49937e78f2079a425')
depends_on('ilmbase')
depends_on('boost+python')
# https://github.com/AcademySoftwareFoundation/openexr/issues/336
parallel = False
def configure_args(self):
spec = self.spec
args = [
'--with-boost-python-libname=boost_python{0}'.format(
spec['python'].version.up_to(2).joined)
]
return args
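# Illustrative note (not part of the original recipe): with a hypothetical
# python dependency at version 3.7, spec['python'].version.up_to(2).joined
# evaluates to '37', so configure_args() would return
# ['--with-boost-python-libname=boost_python37'].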
| LLNL/spack | var/spack/repos/builtin/packages/py-ilmbase/package.py | Python | lgpl-2.1 | 1,026 |
#
# This file is part of Dragonfly.
# (c) Copyright 2007, 2008 by Christo Butcher
# Licensed under the LGPL.
#
# Dragonfly is free software: you can redistribute it and/or modify it
# under the terms of the GNU Lesser General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Dragonfly is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with Dragonfly. If not, see
# <http://www.gnu.org/licenses/>.
#
"""
Arabic language implementations of Integer and Digits classes
============================================================================
"""
from ..base.integer_internal import (MapIntBuilder, CollectionIntBuilder,
MagnitudeIntBuilder, IntegerContentBase)
from ..base.digits_internal import DigitsContentBase
#---------------------------------------------------------------------------
int_0 = MapIntBuilder({
"صفر": 0,
})
int_1_9 = MapIntBuilder({
"واحد": 1,
"اثنان": 2,
"ثلاثة": 3,
"اربعة": 4,
"خمسة": 5,
"ستة": 6,
"سبعة": 7,
"ثمانية": 8,
"تسعة": 9,
})
int_10_19 = MapIntBuilder({
"عشرة": 10,
"احدى عشر": 11,
"اثنا عشر": 12,
"ثلاثة عشر": 13,
"اربعة عشر": 14,
"خمسة عشر": 15,
"ستة عشر": 16,
"سبعة عشر": 17,
"ثمانية عشر": 18,
"تسعة عشر": 19,
})
int_20_90_10 = MapIntBuilder({
"عشرون": 2,
"ثلاثون": 3,
"اربعون": 4,
"خمسون": 5,
"ستون": 6,
"سبعون": 7,
"ثمانون": 8,
"تسعون": 9,
})
int_20_99 = MagnitudeIntBuilder(
factor = 10,
spec = "<multiplier> [<remainder>]",
multipliers = [int_20_90_10],
remainders = [int_1_9],
)
int_and_1_99 = CollectionIntBuilder(
spec = "[و] <element>",
set = [int_1_9, int_10_19, int_20_99],
)
int_100s = MagnitudeIntBuilder(
factor = 100,
spec = "[<multiplier>] hundred [<remainder>]",
multipliers = [int_1_9],
remainders = [int_and_1_99],
)
int_100big = MagnitudeIntBuilder(
factor = 100,
spec = "[<multiplier>] hundred [<remainder>]",
multipliers = [int_10_19, int_20_99],
    remainders = [int_and_1_99]
)
int_1000s = MagnitudeIntBuilder(
factor = 1000,
spec = "[<multiplier>] thousand [<remainder>]",
multipliers = [int_1_9, int_10_19, int_20_99, int_100s],
    remainders = [int_and_1_99, int_100s]
)
int_1000000s = MagnitudeIntBuilder(
factor = 1000000,
spec = "[<multiplier>] million [<remainder>]",
multipliers = [int_1_9, int_10_19, int_20_99, int_100s, int_1000s],
    remainders = [int_and_1_99, int_100s, int_1000s],
)
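# Illustrative note (not part of the original module): each
# MagnitudeIntBuilder composes value = multiplier * factor + remainder, so
# under int_20_99 the words "عشرون" (multiplier 2) and "واحد" (remainder 1)
# parse as 2 * 10 + 1 = 21, and under int_1000s a multiplier of 3 with a
# remainder of 40 parses as 3 * 1000 + 40 = 3040.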
#---------------------------------------------------------------------------
class IntegerContent(IntegerContentBase):
builders = [int_0, int_1_9, int_10_19, int_20_99,
int_100s, int_100big, int_1000s, int_1000000s]
class DigitsContent(DigitsContentBase):
digits = [("صفر", "اووه"), "واحد", "اثنان", "ثلاثة", "اربعة",
"خمسة", "ستة", "سبعة", "ثمانية", "تسعة"] | summermk/dragonfly | dragonfly/language/other/number_arabic.py | Python | lgpl-3.0 | 5,267 |
"""
=====================
SVM: Weighted samples
=====================
Plot decision function of a weighted dataset, where the size of points
is proportional to its weight.
"""
print __doc__
import numpy as np
import pylab as pl
from sklearn import svm
# we create 20 points
np.random.seed(0)
X = np.r_[np.random.randn(10, 2) + [1, 1], np.random.randn(10, 2)]
Y = [1] * 10 + [-1] * 10
sample_weight = 100 * np.abs(np.random.randn(20))
# and assign a bigger weight to the first 10 samples
sample_weight[:10] *= 10
# fit the model
clf = svm.SVC()
clf.fit(X, Y, sample_weight=sample_weight)
# plot the decision function
xx, yy = np.meshgrid(np.linspace(-4, 5, 500), np.linspace(-4, 5, 500))
Z = clf.decision_function(np.c_[xx.ravel(), yy.ravel()])
Z = Z.reshape(xx.shape)
# plot the line, the points, and the nearest vectors to the plane
pl.contourf(xx, yy, Z, alpha=0.75, cmap=pl.cm.bone)
pl.scatter(X[:, 0], X[:, 1], c=Y, s=sample_weight, alpha=0.9, cmap=pl.cm.bone)
pl.axis('off')
pl.show()
| seckcoder/lang-learn | python/sklearn/examples/svm/plot_weighted_samples.py | Python | unlicense | 999 |
import urllib
import urlparse
def get_path(url):
scheme, host, path, query, fragment = urlparse.urlsplit(url)
return path
def get_host(url):
scheme, host, path, query, fragment = urlparse.urlsplit(url)
return host
def add_path(url, new_path):
"""Given a url and path, return a new url that combines
the two.
"""
scheme, host, path, query, fragment = urlparse.urlsplit(url)
new_path = new_path.lstrip('/')
if path.endswith('/'):
path += new_path
else:
path += '/' + new_path
return urlparse.urlunsplit([scheme, host, path, query, fragment])
def _query_param(key, value):
"""ensure that a query parameter's value is a string
of bytes in UTF-8 encoding.
"""
if isinstance(value, unicode):
pass
elif isinstance(value, str):
value = value.decode('utf-8')
else:
value = unicode(value)
return key, value.encode('utf-8')
def _make_query_tuples(params):
if hasattr(params, 'items'):
return [_query_param(*param) for param in params.items()]
else:
return [_query_param(*params)]
def add_query_params(url, params):
    """Use update_query_params with update=False to append the given
    parameters to the url's query string, preserving any existing
    values.
    """
return update_query_params(url, params, update=False)
def update_query_params(url, params, update=True):
"""Given a url and a tuple or dict of parameters, return
a url that includes the parameters as a properly formatted
query string.
If update is True, change any existing values to new values
given in params.
"""
scheme, host, path, query, fragment = urlparse.urlsplit(url)
# urlparse.parse_qsl gives back url-decoded byte strings. Leave these as
# they are: they will be re-urlencoded below
query_bits = [(k, v) for k, v in urlparse.parse_qsl(query)]
if update:
query_bits = dict(query_bits)
query_bits.update(_make_query_tuples(params))
else:
query_bits.extend(_make_query_tuples(params))
query = urllib.urlencode(query_bits)
return urlparse.urlunsplit([scheme, host, path, query, fragment])
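# Illustrative usage sketch (not part of the original module). The assertions
# below only pin behavior that is stable across urllib.urlencode orderings.
if __name__ == '__main__':
    assert get_path('http://example.com/a/b?x=1') == '/a/b'
    assert get_host('http://example.com/a/b?x=1') == 'example.com'
    assert add_path('http://example.com/a', 'b') == 'http://example.com/a/b'
    assert add_path('http://example.com/a/', '/b') == 'http://example.com/a/b'
    updated = update_query_params('http://example.com/?x=1', {'x': '2'})
    assert 'x=2' in updated and 'x=1' not in updated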
| c-oreills/pyFaceGraph | src/facegraph/url_operations.py | Python | unlicense | 2,148 |
# coding: utf-8
from __future__ import unicode_literals, division, absolute_import, print_function
from getpass import getuser
import ctypes
from ctypes.util import find_library
from ctypes import c_void_p, c_uint32, POINTER, c_bool, byref
from .core_foundation import CoreFoundation, unicode_to_cfstring, cfstring_to_unicode
from .._types import str_cls, type_name
od_path = find_library('OpenDirectory')
OpenDirectory = ctypes.CDLL(od_path, use_errno=True)
ODAttributeType = CoreFoundation.CFStringRef
ODMatchType = c_uint32
ODRecordType = CoreFoundation.CFStringRef
ODSessionRef = c_void_p
ODNodeRef = c_void_p
ODQueryRef = c_void_p
ODRecordRef = c_void_p
OpenDirectory.ODSessionCreate.argtypes = [
CoreFoundation.CFAllocatorRef,
CoreFoundation.CFDictionaryRef,
POINTER(CoreFoundation.CFErrorRef)
]
OpenDirectory.ODSessionCreate.restype = ODSessionRef
OpenDirectory.ODNodeCreateWithName.argtypes = [
CoreFoundation.CFAllocatorRef,
ODSessionRef,
CoreFoundation.CFStringRef,
POINTER(CoreFoundation.CFErrorRef)
]
OpenDirectory.ODNodeCreateWithName.restype = ODNodeRef
OpenDirectory.ODQueryCreateWithNode.argtypes = [
CoreFoundation.CFAllocatorRef,
ODNodeRef,
CoreFoundation.CFTypeRef,
ODAttributeType,
ODMatchType,
CoreFoundation.CFTypeRef,
CoreFoundation.CFTypeRef,
CoreFoundation.CFIndex,
POINTER(CoreFoundation.CFErrorRef)
]
OpenDirectory.ODQueryCreateWithNode.restype = ODQueryRef
OpenDirectory.ODQueryCopyResults.argtypes = [
ODQueryRef,
c_bool,
POINTER(CoreFoundation.CFErrorRef)
]
OpenDirectory.ODQueryCopyResults.restype = CoreFoundation.CFArrayRef
OpenDirectory.ODRecordCopyValues.argtypes = [
ODRecordRef,
ODAttributeType,
POINTER(CoreFoundation.CFErrorRef)
]
OpenDirectory.ODRecordCopyValues.restype = CoreFoundation.CFArrayRef
kODMatchEqualTo = ODMatchType(0x2001)
kODRecordTypeUsers = ODRecordType.in_dll(OpenDirectory, 'kODRecordTypeUsers')
kODAttributeTypeRecordName = ODAttributeType.in_dll(OpenDirectory, 'kODAttributeTypeRecordName')
kODAttributeTypeUserShell = ODAttributeType.in_dll(OpenDirectory, 'kODAttributeTypeUserShell')
_login_shells = {}
def get_user_login_shell(username=None):
"""
Uses OS X's OpenDirectory.framework to get the user's login shell
:param username:
A unicode string of the user to get the shell for - None for the
current user
:return:
A unicode string of the user's login shell
"""
if username is None:
username = getuser()
if not isinstance(username, str_cls):
username = username.decode('utf-8')
if not isinstance(username, str_cls):
raise TypeError('username must be a unicode string, not %s' % type_name(username))
if username not in _login_shells:
error_ref = CoreFoundation.CFErrorRef()
session = OpenDirectory.ODSessionCreate(
CoreFoundation.kCFAllocatorDefault,
None,
byref(error_ref)
)
if bool(error_ref):
            raise OSError('Error creating OpenDirectory session')
node = OpenDirectory.ODNodeCreateWithName(
CoreFoundation.kCFAllocatorDefault,
session,
unicode_to_cfstring("/Local/Default"),
byref(error_ref)
)
if bool(error_ref):
            raise OSError('Error creating OpenDirectory node')
query = OpenDirectory.ODQueryCreateWithNode(
CoreFoundation.kCFAllocatorDefault,
node,
kODRecordTypeUsers,
kODAttributeTypeRecordName,
kODMatchEqualTo,
unicode_to_cfstring(username),
kODAttributeTypeUserShell,
1,
byref(error_ref)
)
if bool(error_ref):
            raise OSError('Error creating OpenDirectory query')
results = OpenDirectory.ODQueryCopyResults(
query,
False,
byref(error_ref)
)
if bool(error_ref):
            raise OSError('Error copying OpenDirectory query results')
login_shell = None
num_results = CoreFoundation.CFArrayGetCount(results)
if num_results == 1:
od_record = CoreFoundation.CFArrayGetValueAtIndex(results, 0)
attributes = OpenDirectory.ODRecordCopyValues(od_record, kODAttributeTypeUserShell, byref(error_ref))
if bool(error_ref):
                raise OSError('Error copying OpenDirectory record values')
            num_attributes = CoreFoundation.CFArrayGetCount(attributes)
if num_attributes == 1:
string_ref = CoreFoundation.CFArrayGetValueAtIndex(attributes, 0)
login_shell = cfstring_to_unicode(string_ref)
_login_shells[username] = login_shell
return _login_shells.get(username)
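# Illustrative usage sketch (not part of the original module); the returned
# shell depends entirely on the local OS X directory records.
if __name__ == '__main__':
    print(get_user_login_shell())        # current user, e.g. u'/bin/bash'
    print(get_user_login_shell('root'))  # typically u'/bin/sh'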
| EnTeQuAk/dotfiles | sublime-text-3/Packages/shellenv/all/shellenv/_osx/open_directory.py | Python | unlicense | 4,675 |
# Copyright (c) 2015 FUJITSU LIMITED
# Copyright (c) 2012 EMC Corporation.
# Copyright (c) 2012 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
"""
FibreChannel Cinder Volume driver for Fujitsu ETERNUS DX S3 series.
"""
from oslo_log import log as logging
import six
from cinder import interface
from cinder.volume import driver
from cinder.volume.drivers.fujitsu import eternus_dx_common
from cinder.zonemanager import utils as fczm_utils
LOG = logging.getLogger(__name__)
@interface.volumedriver
class FJDXFCDriver(driver.FibreChannelDriver):
"""FC Cinder Volume Driver for Fujitsu ETERNUS DX S3 series."""
# ThirdPartySystems wiki page
CI_WIKI_NAME = "Fujitsu_ETERNUS_CI"
VERSION = eternus_dx_common.FJDXCommon.VERSION
def __init__(self, *args, **kwargs):
super(FJDXFCDriver, self).__init__(*args, **kwargs)
self.common = eternus_dx_common.FJDXCommon(
'fc',
configuration=self.configuration)
self.VERSION = self.common.VERSION
def check_for_setup_error(self):
pass
def create_volume(self, volume):
"""Create volume."""
LOG.debug('create_volume, '
'volume id: %s, enter method.', volume['id'])
location, metadata = self.common.create_volume(volume)
v_metadata = self._get_metadata(volume)
metadata.update(v_metadata)
LOG.debug('create_volume, info: %s, exit method.', metadata)
return {'provider_location': six.text_type(location),
'metadata': metadata}
def create_volume_from_snapshot(self, volume, snapshot):
"""Creates a volume from a snapshot."""
LOG.debug('create_volume_from_snapshot, '
'volume id: %(vid)s, snap id: %(sid)s, enter method.',
{'vid': volume['id'], 'sid': snapshot['id']})
location, metadata = (
self.common.create_volume_from_snapshot(volume, snapshot))
v_metadata = self._get_metadata(volume)
metadata.update(v_metadata)
LOG.debug('create_volume_from_snapshot, '
'info: %s, exit method.', metadata)
return {'provider_location': six.text_type(location),
'metadata': metadata}
def create_cloned_volume(self, volume, src_vref):
"""Create cloned volume."""
LOG.debug('create_cloned_volume, '
'target volume id: %(tid)s, '
'source volume id: %(sid)s, enter method.',
{'tid': volume['id'], 'sid': src_vref['id']})
location, metadata = (
self.common.create_cloned_volume(volume, src_vref))
v_metadata = self._get_metadata(volume)
metadata.update(v_metadata)
LOG.debug('create_cloned_volume, '
'info: %s, exit method.', metadata)
return {'provider_location': six.text_type(location),
'metadata': metadata}
def delete_volume(self, volume):
"""Delete volume on ETERNUS."""
LOG.debug('delete_volume, '
'volume id: %s, enter method.', volume['id'])
vol_exist = self.common.delete_volume(volume)
LOG.debug('delete_volume, '
'delete: %s, exit method.', vol_exist)
def create_snapshot(self, snapshot):
"""Creates a snapshot."""
LOG.debug('create_snapshot, '
'snap id: %(sid)s, volume id: %(vid)s, enter method.',
{'sid': snapshot['id'], 'vid': snapshot['volume_id']})
location, metadata = self.common.create_snapshot(snapshot)
LOG.debug('create_snapshot, info: %s, exit method.', metadata)
return {'provider_location': six.text_type(location)}
def delete_snapshot(self, snapshot):
"""Deletes a snapshot."""
LOG.debug('delete_snapshot, '
'snap id: %(sid)s, volume id: %(vid)s, enter method.',
{'sid': snapshot['id'], 'vid': snapshot['volume_id']})
vol_exist = self.common.delete_snapshot(snapshot)
LOG.debug('delete_snapshot, '
'delete: %s, exit method.', vol_exist)
def ensure_export(self, context, volume):
"""Driver entry point to get the export info for an existing volume."""
return
def create_export(self, context, volume, connector):
"""Driver entry point to get the export info for a new volume."""
return
def remove_export(self, context, volume):
"""Driver entry point to remove an export for a volume."""
return
@fczm_utils.AddFCZone
def initialize_connection(self, volume, connector):
"""Allow connection to connector and return connection info."""
LOG.debug('initialize_connection, volume id: %(vid)s, '
'wwpns: %(wwpns)s, enter method.',
{'vid': volume['id'], 'wwpns': connector['wwpns']})
info = self.common.initialize_connection(volume, connector)
data = info['data']
init_tgt_map = (
self.common.build_fc_init_tgt_map(connector, data['target_wwn']))
data['initiator_target_map'] = init_tgt_map
info['data'] = data
LOG.debug('initialize_connection, '
'info: %s, exit method.', info)
return info
@fczm_utils.RemoveFCZone
def terminate_connection(self, volume, connector, **kwargs):
"""Disallow connection from connector."""
LOG.debug('terminate_connection, volume id: %(vid)s, '
'wwpns: %(wwpns)s, enter method.',
{'vid': volume['id'], 'wwpns': connector['wwpns']})
map_exist = self.common.terminate_connection(volume, connector)
attached = self.common.check_attached_volume_in_zone(connector)
info = {'driver_volume_type': 'fibre_channel',
'data': {}}
if not attached:
# No more volumes attached to the host
init_tgt_map = self.common.build_fc_init_tgt_map(connector)
info['data'] = {'initiator_target_map': init_tgt_map}
LOG.debug('terminate_connection, unmap: %(unmap)s, '
'connection info: %(info)s, exit method',
{'unmap': map_exist, 'info': info})
return info
def get_volume_stats(self, refresh=False):
"""Get volume stats."""
LOG.debug('get_volume_stats, refresh: %s, enter method.', refresh)
pool_name = None
if refresh is True:
data, pool_name = self.common.update_volume_stats()
backend_name = self.configuration.safe_get('volume_backend_name')
data['volume_backend_name'] = backend_name or 'FJDXFCDriver'
data['storage_protocol'] = 'FC'
self._stats = data
LOG.debug('get_volume_stats, '
'pool name: %s, exit method.', pool_name)
return self._stats
def extend_volume(self, volume, new_size):
"""Extend volume."""
LOG.debug('extend_volume, '
'volume id: %s, enter method.', volume['id'])
used_pool_name = self.common.extend_volume(volume, new_size)
LOG.debug('extend_volume, '
'used pool name: %s, exit method.', used_pool_name)
def _get_metadata(self, volume):
v_metadata = volume.get('volume_metadata')
if v_metadata:
ret = {data['key']: data['value'] for data in v_metadata}
else:
ret = volume.get('metadata', {})
return ret
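    # Illustrative note (not part of the original driver): for a volume whose
    # volume_metadata is [{'key': 'FJ_Pool', 'value': 'pool1'}] (hypothetical
    # entries), _get_metadata returns {'FJ_Pool': 'pool1'}; when that list is
    # absent it falls back to volume.get('metadata', {}).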
| Hybrid-Cloud/cinder | cinder/volume/drivers/fujitsu/eternus_dx_fc.py | Python | apache-2.0 | 8,064 |
"""Provides device automations for ZHA devices that emit events."""
import voluptuous as vol
import homeassistant.components.automation.event as event
from homeassistant.components.device_automation import TRIGGER_BASE_SCHEMA
from homeassistant.components.device_automation.exceptions import (
InvalidDeviceAutomationConfig,
)
from homeassistant.const import CONF_DEVICE_ID, CONF_DOMAIN, CONF_PLATFORM, CONF_TYPE
from . import DOMAIN
from .core.helpers import async_get_zha_device
CONF_SUBTYPE = "subtype"
DEVICE = "device"
DEVICE_IEEE = "device_ieee"
ZHA_EVENT = "zha_event"
TRIGGER_SCHEMA = TRIGGER_BASE_SCHEMA.extend(
{vol.Required(CONF_TYPE): str, vol.Required(CONF_SUBTYPE): str}
)
async def async_validate_trigger_config(hass, config):
"""Validate config."""
config = TRIGGER_SCHEMA(config)
if "zha" in hass.config.components:
trigger = (config[CONF_TYPE], config[CONF_SUBTYPE])
try:
zha_device = await async_get_zha_device(hass, config[CONF_DEVICE_ID])
except (KeyError, AttributeError):
raise InvalidDeviceAutomationConfig
if (
zha_device.device_automation_triggers is None
or trigger not in zha_device.device_automation_triggers
):
raise InvalidDeviceAutomationConfig
return config
async def async_attach_trigger(hass, config, action, automation_info):
    """Listen for ZHA events based on configuration."""
trigger = (config[CONF_TYPE], config[CONF_SUBTYPE])
try:
zha_device = await async_get_zha_device(hass, config[CONF_DEVICE_ID])
except (KeyError, AttributeError):
return None
if trigger not in zha_device.device_automation_triggers:
return None
trigger = zha_device.device_automation_triggers[trigger]
event_config = {
event.CONF_PLATFORM: "event",
event.CONF_EVENT_TYPE: ZHA_EVENT,
event.CONF_EVENT_DATA: {DEVICE_IEEE: str(zha_device.ieee), **trigger},
}
event_config = event.TRIGGER_SCHEMA(event_config)
return await event.async_attach_trigger(
hass, event_config, action, automation_info, platform_type="device"
)
async def async_get_triggers(hass, device_id):
"""List device triggers.
    Make sure the device supports device automations and, if it does,
    return the trigger list.
"""
zha_device = await async_get_zha_device(hass, device_id)
if not zha_device.device_automation_triggers:
return
triggers = []
for trigger, subtype in zha_device.device_automation_triggers.keys():
triggers.append(
{
CONF_DEVICE_ID: device_id,
CONF_DOMAIN: DOMAIN,
CONF_PLATFORM: DEVICE,
CONF_TYPE: trigger,
CONF_SUBTYPE: subtype,
}
)
return triggers
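# Illustrative note (not part of the original module): each entry returned by
# async_get_triggers takes this shape, where the type/subtype strings are
# hypothetical and come from the device's quirk definition:
#   {"device_id": "abc123", "domain": DOMAIN, "platform": "device",
#    "type": "remote_button_short_press", "subtype": "turn_on"}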
| pschmitt/home-assistant | homeassistant/components/zha/device_trigger.py | Python | apache-2.0 | 2,852 |
# (c) Copyright 2013 Hewlett-Packard Development Company, L.P.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
# @author: Swaminathan Vasudevan, Hewlett-Packard.
#
"""VPN Utilities and helper functions."""
from neutronclient.common import exceptions
from neutronclient.i18n import _
dpd_supported_actions = ['hold', 'clear', 'restart',
'restart-by-peer', 'disabled']
dpd_supported_keys = ['action', 'interval', 'timeout']
lifetime_keys = ['units', 'value']
lifetime_units = ['seconds']
def validate_dpd_dict(dpd_dict):
for key, value in dpd_dict.items():
if key not in dpd_supported_keys:
message = _(
"DPD Dictionary KeyError: "
"Reason-Invalid DPD key : "
"'%(key)s' not in %(supported_key)s ") % {
'key': key, 'supported_key': dpd_supported_keys}
raise exceptions.CommandError(message)
if key == 'action' and value not in dpd_supported_actions:
message = _(
"DPD Dictionary ValueError: "
"Reason-Invalid DPD action : "
"'%(key_value)s' not in %(supported_action)s ") % {
'key_value': value,
'supported_action': dpd_supported_actions}
raise exceptions.CommandError(message)
if key in ('interval', 'timeout'):
try:
if int(value) <= 0:
raise ValueError()
except ValueError:
message = _(
"DPD Dictionary ValueError: "
"Reason-Invalid positive integer value: "
"'%(key)s' = %(value)s ") % {
'key': key, 'value': value}
raise exceptions.CommandError(message)
else:
dpd_dict[key] = int(value)
return
def validate_lifetime_dict(lifetime_dict):
for key, value in lifetime_dict.items():
if key not in lifetime_keys:
message = _(
"Lifetime Dictionary KeyError: "
"Reason-Invalid unit key : "
"'%(key)s' not in %(supported_key)s ") % {
'key': key, 'supported_key': lifetime_keys}
raise exceptions.CommandError(message)
if key == 'units' and value not in lifetime_units:
message = _(
"Lifetime Dictionary ValueError: "
"Reason-Invalid units : "
"'%(key_value)s' not in %(supported_units)s ") % {
'key_value': key, 'supported_units': lifetime_units}
raise exceptions.CommandError(message)
if key == 'value':
try:
if int(value) < 60:
raise ValueError()
except ValueError:
message = _(
"Lifetime Dictionary ValueError: "
"Reason-Invalid value should be at least 60:"
"'%(key_value)s' = %(value)s ") % {
'key_value': key, 'value': value}
raise exceptions.CommandError(message)
else:
lifetime_dict['value'] = int(value)
return
def lifetime_help(policy):
lifetime = _("%s lifetime attributes. "
"'units'-seconds, default:seconds. "
"'value'-non negative integer, default:3600.") % policy
return lifetime
def dpd_help(policy):
dpd = _(" %s Dead Peer Detection attributes."
" 'action'-hold,clear,disabled,restart,restart-by-peer."
" 'interval' and 'timeout' are non negative integers. "
" 'interval' should be less than 'timeout' value. "
" 'action', default:hold 'interval', default:30, "
" 'timeout', default:120.") % policy.capitalize()
return dpd
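# Illustrative usage sketch (not part of the original module); the sample
# dictionaries are hypothetical CLI inputs. Both validators coerce numeric
# strings to ints in place.
if __name__ == '__main__':
    dpd = {'action': 'hold', 'interval': '30', 'timeout': '120'}
    validate_dpd_dict(dpd)
    assert dpd == {'action': 'hold', 'interval': 30, 'timeout': 120}
    lifetime = {'units': 'seconds', 'value': '3600'}
    validate_lifetime_dict(lifetime)
    assert lifetime == {'units': 'seconds', 'value': 3600}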
| varunarya10/python-neutronclient | neutronclient/neutron/v2_0/vpn/utils.py | Python | apache-2.0 | 4,400 |
# Copyright 2012 NEC Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import logging
from django.core.urlresolvers import reverse
from django import template
from django.template import defaultfilters as filters
from django.utils.translation import pgettext_lazy
from django.utils.translation import ugettext_lazy as _
from django.utils.translation import ungettext_lazy
from horizon import exceptions
from horizon import tables
from openstack_dashboard import api
from openstack_dashboard import policy
from openstack_dashboard.usage import quotas
LOG = logging.getLogger(__name__)
class CheckNetworkEditable(object):
    """Mixin class to determine whether the specified network is editable."""
def allowed(self, request, datum=None):
# Only administrator is allowed to create and manage shared networks.
if datum and datum.shared:
return False
return True
class DeleteNetwork(policy.PolicyTargetMixin, CheckNetworkEditable,
tables.DeleteAction):
@staticmethod
def action_present(count):
return ungettext_lazy(
u"Delete Network",
u"Delete Networks",
count
)
@staticmethod
def action_past(count):
return ungettext_lazy(
u"Deleted Network",
u"Deleted Networks",
count
)
policy_rules = (("network", "delete_network"),)
def delete(self, request, network_id):
network_name = network_id
try:
# Retrieve the network list.
network = api.neutron.network_get(request, network_id,
expand_subnet=False)
network_name = network.name
LOG.debug('Network %(network_id)s has subnets: %(subnets)s',
{'network_id': network_id, 'subnets': network.subnets})
for subnet_id in network.subnets:
api.neutron.subnet_delete(request, subnet_id)
LOG.debug('Deleted subnet %s', subnet_id)
api.neutron.network_delete(request, network_id)
LOG.debug('Deleted network %s successfully', network_id)
except Exception:
msg = _('Failed to delete network %s')
LOG.info(msg, network_id)
redirect = reverse("horizon:project:networks:index")
exceptions.handle(request, msg % network_name, redirect=redirect)
class CreateNetwork(tables.LinkAction):
name = "create"
verbose_name = _("Create Network")
url = "horizon:project:networks:create"
classes = ("ajax-modal",)
icon = "plus"
policy_rules = (("network", "create_network"),)
def allowed(self, request, datum=None):
usages = quotas.tenant_quota_usages(request)
# when Settings.OPENSTACK_NEUTRON_NETWORK['enable_quotas'] = False
# usages["networks"] is empty
if usages.get('networks', {}).get('available', 1) <= 0:
if "disabled" not in self.classes:
self.classes = [c for c in self.classes] + ["disabled"]
self.verbose_name = _("Create Network (Quota exceeded)")
else:
self.verbose_name = _("Create Network")
self.classes = [c for c in self.classes if c != "disabled"]
return True
class EditNetwork(policy.PolicyTargetMixin, CheckNetworkEditable,
tables.LinkAction):
name = "update"
verbose_name = _("Edit Network")
url = "horizon:project:networks:update"
classes = ("ajax-modal",)
icon = "pencil"
policy_rules = (("network", "update_network"),)
class CreateSubnet(policy.PolicyTargetMixin, CheckNetworkEditable,
tables.LinkAction):
name = "subnet"
verbose_name = _("Add Subnet")
url = "horizon:project:networks:addsubnet"
classes = ("ajax-modal",)
icon = "plus"
policy_rules = (("network", "create_subnet"),)
# neutron has used both in their policy files, supporting both
policy_target_attrs = (("network:tenant_id", "tenant_id"),
("network:project_id", "tenant_id"),)
def allowed(self, request, datum=None):
usages = quotas.tenant_quota_usages(request)
# when Settings.OPENSTACK_NEUTRON_NETWORK['enable_quotas'] = False
# usages["subnets'] is empty
if usages.get('subnets', {}).get('available', 1) <= 0:
if 'disabled' not in self.classes:
self.classes = [c for c in self.classes] + ['disabled']
self.verbose_name = _('Add Subnet (Quota exceeded)')
else:
self.verbose_name = _('Add Subnet')
self.classes = [c for c in self.classes if c != 'disabled']
return True
def get_subnets(network):
template_name = 'project/networks/_network_ips.html'
context = {"subnets": network.subnets}
return template.loader.render_to_string(template_name, context)
DISPLAY_CHOICES = (
("up", pgettext_lazy("Admin state of a Network", u"UP")),
("down", pgettext_lazy("Admin state of a Network", u"DOWN")),
)
STATUS_DISPLAY_CHOICES = (
("active", pgettext_lazy("Current status of a Network", u"Active")),
("build", pgettext_lazy("Current status of a Network", u"Build")),
("down", pgettext_lazy("Current status of a Network", u"Down")),
("error", pgettext_lazy("Current status of a Network", u"Error")),
)
class ProjectNetworksFilterAction(tables.FilterAction):
name = "filter_project_networks"
filter_type = "server"
filter_choices = (('name', _("Name ="), True),
('shared', _("Shared ="), True,
_("e.g. Yes / No")),
('router:external', _("External ="), True,
_("e.g. Yes / No")),
('status', _("Status ="), True),
('admin_state_up', _("Admin State ="), True,
_("e.g. UP / DOWN")))
class NetworksTable(tables.DataTable):
name = tables.WrappingColumn("name_or_id",
verbose_name=_("Name"),
link='horizon:project:networks:detail')
subnets = tables.Column(get_subnets,
verbose_name=_("Subnets Associated"),)
shared = tables.Column("shared", verbose_name=_("Shared"),
filters=(filters.yesno, filters.capfirst))
external = tables.Column("router:external", verbose_name=_("External"),
filters=(filters.yesno, filters.capfirst))
status = tables.Column("status", verbose_name=_("Status"),
display_choices=STATUS_DISPLAY_CHOICES)
admin_state = tables.Column("admin_state",
verbose_name=_("Admin State"),
display_choices=DISPLAY_CHOICES)
class Meta(object):
name = "networks"
verbose_name = _("Networks")
table_actions = (CreateNetwork, DeleteNetwork,
ProjectNetworksFilterAction)
row_actions = (EditNetwork, CreateSubnet, DeleteNetwork)
| coreycb/horizon | openstack_dashboard/dashboards/project/networks/tables.py | Python | apache-2.0 | 7,635 |
# Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for tf.layers.core."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import collections
import numpy as np
from tensorflow.python.eager import context
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import ops
from tensorflow.python.framework import tensor_shape
from tensorflow.python.framework import test_util
from tensorflow.python.layers import core as core_layers
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import init_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import nn_ops
from tensorflow.python.ops import random_ops
from tensorflow.python.ops import variable_scope
from tensorflow.python.ops import variables
from tensorflow.python.platform import test
class DenseTest(test.TestCase):
@test_util.run_in_graph_and_eager_modes()
def testDenseProperties(self):
dense = core_layers.Dense(2, activation=nn_ops.relu, name='my_dense')
self.assertEqual(dense.units, 2)
self.assertEqual(dense.activation, nn_ops.relu)
self.assertEqual(dense.kernel_regularizer, None)
self.assertEqual(dense.bias_regularizer, None)
self.assertEqual(dense.activity_regularizer, None)
self.assertEqual(dense.use_bias, True)
# Test auto-naming
dense = core_layers.Dense(2, activation=nn_ops.relu)
dense.apply(random_ops.random_uniform((5, 2)))
self.assertEqual(dense.name, 'dense_1')
dense = core_layers.Dense(2, activation=nn_ops.relu)
dense.apply(random_ops.random_uniform((5, 2)))
self.assertEqual(dense.name, 'dense_2')
def testVariableInput(self):
with self.test_session():
v = variable_scope.get_variable(
'X', initializer=init_ops.zeros_initializer(), shape=(1, 1))
x = core_layers.Dense(1)(v)
variables.global_variables_initializer().run()
self.assertAllEqual(x.eval(), [[0.0]])
@test_util.run_in_graph_and_eager_modes()
def testCall(self):
dense = core_layers.Dense(2, activation=nn_ops.relu, name='my_dense')
inputs = random_ops.random_uniform((5, 4), seed=1)
outputs = dense(inputs)
self.assertListEqual([5, 2], outputs.get_shape().as_list())
self.assertListEqual(dense.variables, [dense.kernel, dense.bias])
self.assertListEqual(dense.trainable_variables,
[dense.kernel, dense.bias])
self.assertListEqual(dense.non_trainable_variables, [])
if context.in_graph_mode():
self.assertEqual(
len(ops.get_collection(ops.GraphKeys.TRAINABLE_VARIABLES)), 2)
self.assertEqual(dense.kernel.name, 'my_dense/kernel:0')
self.assertEqual(dense.bias.name, 'my_dense/bias:0')
@test_util.run_in_graph_and_eager_modes()
def testCallTensorDot(self):
dense = core_layers.Dense(2, activation=nn_ops.relu, name='my_dense')
inputs = random_ops.random_uniform((5, 4, 3), seed=1)
outputs = dense(inputs)
self.assertListEqual([5, 4, 2], outputs.get_shape().as_list())
@test_util.run_in_graph_and_eager_modes()
def testNoBias(self):
dense = core_layers.Dense(2, use_bias=False, name='my_dense')
inputs = random_ops.random_uniform((5, 2), seed=1)
_ = dense(inputs)
self.assertListEqual(dense.variables, [dense.kernel])
self.assertListEqual(dense.trainable_variables, [dense.kernel])
self.assertListEqual(dense.non_trainable_variables, [])
if context.in_graph_mode():
self.assertEqual(
len(ops.get_collection(ops.GraphKeys.TRAINABLE_VARIABLES)), 1)
self.assertEqual(dense.kernel.name, 'my_dense/kernel:0')
self.assertEqual(dense.bias, None)
@test_util.run_in_graph_and_eager_modes()
def testNonTrainable(self):
dense = core_layers.Dense(2, trainable=False, name='my_dense')
inputs = random_ops.random_uniform((5, 2), seed=1)
_ = dense(inputs)
self.assertListEqual(dense.variables, [dense.kernel, dense.bias])
self.assertListEqual(dense.non_trainable_variables,
[dense.kernel, dense.bias])
self.assertListEqual(dense.trainable_variables, [])
if context.in_graph_mode():
self.assertEqual(
len(ops.get_collection(ops.GraphKeys.TRAINABLE_VARIABLES)), 0)
@test_util.run_in_graph_and_eager_modes()
def testOutputShape(self):
dense = core_layers.Dense(7, activation=nn_ops.relu, name='my_dense')
inputs = random_ops.random_uniform((5, 3), seed=1)
outputs = dense.apply(inputs)
self.assertEqual(outputs.get_shape().as_list(), [5, 7])
inputs = random_ops.random_uniform((5, 2, 3), seed=1)
outputs = dense(inputs)
self.assertEqual(outputs.get_shape().as_list(), [5, 2, 7])
inputs = random_ops.random_uniform((1, 2, 4, 3), seed=1)
outputs = dense.apply(inputs)
self.assertEqual(outputs.get_shape().as_list(), [1, 2, 4, 7])
def testCallOnPlaceHolder(self):
inputs = array_ops.placeholder(dtype=dtypes.float32)
dense = core_layers.Dense(4, name='my_dense')
with self.assertRaises(ValueError):
dense(inputs)
inputs = array_ops.placeholder(dtype=dtypes.float32, shape=[None, None])
dense = core_layers.Dense(4, name='my_dense')
with self.assertRaises(ValueError):
dense(inputs)
inputs = array_ops.placeholder(
dtype=dtypes.float32, shape=[None, None, None])
dense = core_layers.Dense(4, name='my_dense')
with self.assertRaises(ValueError):
dense(inputs)
inputs = array_ops.placeholder(dtype=dtypes.float32, shape=[None, 3])
dense = core_layers.Dense(4, name='my_dense')
dense(inputs)
inputs = array_ops.placeholder(dtype=dtypes.float32, shape=[None, None, 3])
dense = core_layers.Dense(4, name='my_dense')
dense(inputs)
@test_util.run_in_graph_and_eager_modes()
def testActivation(self):
dense = core_layers.Dense(2, activation=nn_ops.relu, name='dense1')
inputs = random_ops.random_uniform((5, 3), seed=1)
outputs = dense(inputs)
if context.in_graph_mode():
self.assertEqual(outputs.op.name, 'dense1/Relu')
dense = core_layers.Dense(2, name='dense2')
inputs = random_ops.random_uniform((5, 3), seed=1)
outputs = dense(inputs)
if context.in_graph_mode():
self.assertEqual(outputs.op.name, 'dense2/BiasAdd')
def testActivityRegularizer(self):
regularizer = lambda x: math_ops.reduce_sum(x) * 1e-3
dense = core_layers.Dense(
2, name='my_dense', activity_regularizer=regularizer)
inputs = random_ops.random_uniform((5, 3), seed=1)
_ = dense(inputs)
loss_keys = ops.get_collection(ops.GraphKeys.REGULARIZATION_LOSSES)
self.assertEqual(len(loss_keys), 1)
self.assertListEqual(dense.losses, loss_keys)
def testKernelRegularizer(self):
regularizer = lambda x: math_ops.reduce_sum(x) * 1e-3
dense = core_layers.Dense(
2, name='my_dense', kernel_regularizer=regularizer)
inputs = random_ops.random_uniform((5, 3), seed=1)
_ = dense(inputs)
loss_keys = ops.get_collection(ops.GraphKeys.REGULARIZATION_LOSSES)
self.assertEqual(len(loss_keys), 1)
self.assertListEqual(dense.losses, loss_keys)
def testKernelRegularizerWithReuse(self):
regularizer = lambda x: math_ops.reduce_sum(x) * 1e-3
inputs = random_ops.random_uniform((5, 3), seed=1)
_ = core_layers.dense(
inputs, 2, name='my_dense', kernel_regularizer=regularizer)
self.assertEqual(
len(ops.get_collection(ops.GraphKeys.REGULARIZATION_LOSSES)), 1)
_ = core_layers.dense(
inputs, 2, name='my_dense', kernel_regularizer=regularizer, reuse=True)
self.assertEqual(
len(ops.get_collection(ops.GraphKeys.REGULARIZATION_LOSSES)), 1)
def testBiasRegularizer(self):
regularizer = lambda x: math_ops.reduce_sum(x) * 1e-3
dense = core_layers.Dense(2, name='my_dense', bias_regularizer=regularizer)
inputs = random_ops.random_uniform((5, 3), seed=1)
_ = dense(inputs)
loss_keys = ops.get_collection(ops.GraphKeys.REGULARIZATION_LOSSES)
self.assertEqual(len(loss_keys), 1)
self.assertListEqual(dense.losses, loss_keys)
def testFunctionalDense(self):
with self.test_session():
inputs = random_ops.random_uniform((5, 3), seed=1)
outputs = core_layers.dense(
inputs, 2, activation=nn_ops.relu, name='my_dense')
self.assertEqual(
len(ops.get_collection(ops.GraphKeys.TRAINABLE_VARIABLES)), 2)
self.assertEqual(outputs.op.name, 'my_dense/Relu')
def testFunctionalDenseTwice(self):
inputs = random_ops.random_uniform((5, 3), seed=1)
core_layers.dense(inputs, 2)
vars1 = _get_variable_dict_from_varstore().values()
core_layers.dense(inputs, 2)
vars2 = _get_variable_dict_from_varstore().values()
self.assertEqual(len(vars1), 2)
self.assertEqual(len(vars2), 4)
# TODO(alive): get this to work in eager mode.
def testFunctionalDenseTwiceReuse(self):
with self.test_session():
inputs = random_ops.random_uniform((5, 3), seed=1)
core_layers.dense(inputs, 2, name='my_dense')
vars1 = variables.trainable_variables()
core_layers.dense(inputs, 2, name='my_dense', reuse=True)
vars2 = variables.trainable_variables()
self.assertEqual(vars1, vars2)
# TODO(alive): get this to work in eager mode.
def testFunctionalDenseTwiceReuseFromScope(self):
with self.test_session():
with variable_scope.variable_scope('scope'):
inputs = random_ops.random_uniform((5, 3), seed=1)
core_layers.dense(inputs, 2, name='my_dense')
vars1 = variables.trainable_variables()
with variable_scope.variable_scope('scope', reuse=True):
core_layers.dense(inputs, 2, name='my_dense')
vars2 = variables.trainable_variables()
self.assertEqual(vars1, vars2)
def testFunctionalDenseInitializerFromScope(self):
with variable_scope.variable_scope(
'scope', initializer=init_ops.ones_initializer()), self.test_session():
inputs = random_ops.random_uniform((5, 3), seed=1)
core_layers.dense(inputs, 2)
variables.global_variables_initializer().run()
weights = _get_variable_dict_from_varstore()
self.assertEqual(len(weights), 2)
# Check that the matrix weights got initialized to ones (from scope).
self.assertAllClose(weights['scope/dense/kernel'].read_value().eval(),
np.ones((3, 2)))
# Check that the bias still got initialized to zeros.
self.assertAllClose(weights['scope/dense/bias'].read_value().eval(),
np.zeros((2)))
def testEagerExecution(self):
with context.eager_mode():
container = variable_scope.EagerVariableStore()
x = constant_op.constant([[2.0]])
with container.as_default():
y = core_layers.dense(
x, 1, name='my_dense',
kernel_initializer=init_ops.ones_initializer())
self.assertAllEqual(y, [[2.0]])
self.assertEqual(len(container.variables()), 2)
# Recreate the layer to test reuse.
with container.as_default():
core_layers.dense(
x, 1, name='my_dense',
kernel_initializer=init_ops.ones_initializer())
self.assertEqual(len(container.variables()), 2)
def testFunctionalDenseWithCustomGetter(self):
called = [0]
def custom_getter(getter, *args, **kwargs):
called[0] += 1
return getter(*args, **kwargs)
with variable_scope.variable_scope('test', custom_getter=custom_getter):
inputs = random_ops.random_uniform((5, 3), seed=1)
core_layers.dense(inputs, 2)
self.assertEqual(called[0], 2)
def testFunctionalDenseInScope(self):
with self.test_session():
with variable_scope.variable_scope('test'):
inputs = random_ops.random_uniform((5, 3), seed=1)
core_layers.dense(inputs, 2, name='my_dense')
var_dict = _get_variable_dict_from_varstore()
var_key = 'test/my_dense/kernel'
self.assertEqual(var_dict[var_key].name, '%s:0' % var_key)
with variable_scope.variable_scope('test1') as scope:
inputs = random_ops.random_uniform((5, 3), seed=1)
core_layers.dense(inputs, 2, name=scope)
var_dict = _get_variable_dict_from_varstore()
var_key = 'test1/kernel'
self.assertEqual(var_dict[var_key].name, '%s:0' % var_key)
with variable_scope.variable_scope('test2'):
inputs = random_ops.random_uniform((5, 3), seed=1)
core_layers.dense(inputs, 2)
var_dict = _get_variable_dict_from_varstore()
var_key = 'test2/dense/kernel'
self.assertEqual(var_dict[var_key].name, '%s:0' % var_key)
@test_util.run_in_graph_and_eager_modes()
def testComputeOutputShape(self):
dense = core_layers.Dense(2, activation=nn_ops.relu, name='dense1')
ts = tensor_shape.TensorShape
# pylint: disable=protected-access
with self.assertRaises(ValueError):
dense._compute_output_shape(ts(None))
with self.assertRaises(ValueError):
dense._compute_output_shape(ts([]))
with self.assertRaises(ValueError):
dense._compute_output_shape(ts([1]))
self.assertEqual(
[None, 2],
dense._compute_output_shape((None, 3)).as_list())
self.assertEqual(
[None, 2],
dense._compute_output_shape(ts([None, 3])).as_list())
self.assertEqual(
[None, 4, 2],
dense._compute_output_shape(ts([None, 4, 3])).as_list())
# pylint: enable=protected-access
@test_util.run_in_graph_and_eager_modes()
def testConstraints(self):
k_constraint = lambda x: x / math_ops.reduce_sum(x)
b_constraint = lambda x: x / math_ops.reduce_max(x)
dense = core_layers.Dense(2,
kernel_constraint=k_constraint,
bias_constraint=b_constraint)
inputs = random_ops.random_uniform((5, 3), seed=1)
dense(inputs)
self.assertEqual(dense.kernel_constraint, k_constraint)
self.assertEqual(dense.bias_constraint, b_constraint)
def _get_variable_dict_from_varstore():
var_dict = variable_scope._get_default_variable_store()._vars # pylint: disable=protected-access
sorted_var_dict = collections.OrderedDict(
sorted(var_dict.items(), key=lambda t: t[0]))
return sorted_var_dict
class DropoutTest(test.TestCase):
@test_util.run_in_graph_and_eager_modes()
def testDropoutProperties(self):
dp = core_layers.Dropout(0.5, name='dropout')
self.assertEqual(dp.rate, 0.5)
self.assertEqual(dp.noise_shape, None)
dp.apply(array_ops.ones(()))
self.assertEqual(dp.name, 'dropout')
@test_util.run_in_graph_and_eager_modes()
def testBooleanLearningPhase(self):
dp = core_layers.Dropout(0.5)
inputs = array_ops.ones((5, 3))
dropped = dp.apply(inputs, training=True)
if context.in_graph_mode():
self.evaluate(variables.global_variables_initializer())
np_output = self.evaluate(dropped)
self.assertAlmostEqual(0., np_output.min())
dropped = dp.apply(inputs, training=False)
np_output = self.evaluate(dropped)
self.assertAllClose(np.ones((5, 3)), np_output)
def testDynamicLearningPhase(self):
with self.test_session() as sess:
dp = core_layers.Dropout(0.5, seed=1)
inputs = array_ops.ones((5, 5))
training = array_ops.placeholder(dtype='bool')
dropped = dp.apply(inputs, training=training)
self.evaluate(variables.global_variables_initializer())
np_output = sess.run(dropped, feed_dict={training: True})
self.assertAlmostEqual(0., np_output.min())
np_output = sess.run(dropped, feed_dict={training: False})
self.assertAllClose(np.ones((5, 5)), np_output)
@test_util.run_in_graph_and_eager_modes()
def testDynamicNoiseShape(self):
inputs = array_ops.ones((5, 3, 2))
noise_shape = [None, 1, None]
dp = core_layers.Dropout(0.5, noise_shape=noise_shape, seed=1)
dropped = dp.apply(inputs, training=True)
self.evaluate(variables.global_variables_initializer())
np_output = self.evaluate(dropped)
self.assertAlmostEqual(0., np_output.min())
self.assertAllClose(np_output[:, 0, :], np_output[:, 1, :])
def testCustomNoiseShape(self):
inputs = array_ops.ones((5, 3, 2))
noise_shape = [5, 1, 2]
dp = core_layers.Dropout(0.5, noise_shape=noise_shape, seed=1)
dropped = dp.apply(inputs, training=True)
self.evaluate(variables.global_variables_initializer())
np_output = self.evaluate(dropped)
self.assertAlmostEqual(0., np_output.min())
self.assertAllClose(np_output[:, 0, :], np_output[:, 1, :])
def testFunctionalDropout(self):
with self.test_session():
inputs = array_ops.ones((5, 5))
dropped = core_layers.dropout(inputs, 0.5, training=True, seed=1)
variables.global_variables_initializer().run()
np_output = self.evaluate(dropped)
self.assertAlmostEqual(0., np_output.min())
dropped = core_layers.dropout(inputs, 0.5, training=False, seed=1)
np_output = self.evaluate(dropped)
self.assertAllClose(np.ones((5, 5)), np_output)
def testDynamicRate(self):
with self.test_session() as sess:
rate = array_ops.placeholder(dtype='float32', name='rate')
dp = core_layers.Dropout(rate, name='dropout')
inputs = array_ops.ones((5, 5))
dropped = dp.apply(inputs, training=True)
sess.run(variables.global_variables_initializer())
np_output = sess.run(dropped, feed_dict={rate: 0.5})
self.assertAlmostEqual(0., np_output.min())
np_output = sess.run(dropped, feed_dict={rate: 0.0})
self.assertAllClose(np.ones((5, 5)), np_output)
class FlattenTest(test.TestCase):
def testCreateFlatten(self):
with self.test_session() as sess:
x = array_ops.placeholder(shape=(None, 2, 3), dtype='float32')
y = core_layers.Flatten()(x)
np_output = sess.run(y, feed_dict={x: np.zeros((3, 2, 3))})
self.assertEqual(list(np_output.shape), [3, 6])
self.assertEqual(y.get_shape().as_list(), [None, 6])
x = array_ops.placeholder(shape=(1, 2, 3, 2), dtype='float32')
y = core_layers.Flatten()(x)
np_output = sess.run(y, feed_dict={x: np.zeros((1, 2, 3, 2))})
self.assertEqual(list(np_output.shape), [1, 12])
self.assertEqual(y.get_shape().as_list(), [1, 12])
def testComputeShape(self):
shape = core_layers.Flatten()._compute_output_shape((1, 2, 3, 2))
self.assertEqual(shape.as_list(), [1, 12])
shape = core_layers.Flatten()._compute_output_shape((None, 3, 2))
self.assertEqual(shape.as_list(), [None, 6])
shape = core_layers.Flatten()._compute_output_shape((None, 3, None))
self.assertEqual(shape.as_list(), [None, None])
def testFunctionalFlatten(self):
x = array_ops.placeholder(shape=(None, 2, 3), dtype='float32')
y = core_layers.flatten(x, name='flatten')
self.assertEqual(y.get_shape().as_list(), [None, 6])
def testFlattenValueError(self):
x = array_ops.placeholder(shape=(None,), dtype='float32')
with self.assertRaises(ValueError):
core_layers.Flatten()(x)
def testFlattenUnknownAxes(self):
with self.test_session() as sess:
x = array_ops.placeholder(shape=(5, None, None), dtype='float32')
y = core_layers.Flatten()(x)
np_output = sess.run(y, feed_dict={x: np.zeros((5, 2, 3))})
self.assertEqual(list(np_output.shape), [5, 6])
self.assertEqual(y.get_shape().as_list(), [5, None])
x = array_ops.placeholder(shape=(5, None, 2), dtype='float32')
y = core_layers.Flatten()(x)
np_output = sess.run(y, feed_dict={x: np.zeros((5, 3, 2))})
self.assertEqual(list(np_output.shape), [5, 6])
self.assertEqual(y.get_shape().as_list(), [5, None])
if __name__ == '__main__':
test.main()
| eadgarchen/tensorflow | tensorflow/python/layers/core_test.py | Python | apache-2.0 | 20,438 |
# coding=utf-8
# Copyright 2022 The init2winit Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
| google/init2winit | init2winit/optimizer_lib/__init__.py | Python | apache-2.0 | 603 |
from __future__ import print_function
import shlex
import subprocess
import sys
from .config import Configuration
class PkgConfig(object):
class Error(Exception):
"""Raised when information could not be obtained from pkg-config."""
def __init__(self, package_name):
"""Query pkg-config for information about a package.
:type package_name: str
:param package_name: The name of the package to query.
:raises PkgConfig.Error: When a call to pkg-config fails.
"""
self.package_name = package_name
self._cflags = self._call("--cflags")
self._cflags_only_I = self._call("--cflags-only-I")
self._cflags_only_other = self._call("--cflags-only-other")
self._libs = self._call("--libs")
self._libs_only_l = self._call("--libs-only-l")
self._libs_only_L = self._call("--libs-only-L")
self._libs_only_other = self._call("--libs-only-other")
def _call(self, *pkg_config_args):
try:
cmd = [Configuration.current.pkg_config] + list(pkg_config_args) + [self.package_name]
print("Executing command '{}'".format(cmd), file=sys.stderr)
return shlex.split(subprocess.check_output(cmd).decode('utf-8'))
except subprocess.CalledProcessError as e:
raise self.Error("pkg-config exited with error code {}".format(e.returncode))
@property
def swiftc_flags(self):
"""Flags for this package in a format suitable for passing to `swiftc`.
:rtype: list[str]
"""
return (
["-Xcc {}".format(s) for s in self._cflags_only_other]
+ ["-Xlinker {}".format(s) for s in self._libs_only_other]
+ self._cflags_only_I
+ self._libs_only_L
+ self._libs_only_l)
@property
def cflags(self):
"""CFLAGS for this package.
:rtype: list[str]
"""
return self._cflags
@property
def ldflags(self):
"""LDFLAGS for this package.
:rtype: list[str]
"""
return self._libs
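# A minimal usage sketch, assuming Configuration.current.pkg_config points at
# a working pkg-config binary; the package name queried below is illustrative.
def _example_query():
    try:
        pkg = PkgConfig("zlib")
        print("swiftc flags:", pkg.swiftc_flags)
        print("CFLAGS:", pkg.cflags)
        print("LDFLAGS:", pkg.ldflags)
    except PkgConfig.Error as error:
        print("pkg-config query failed: {}".format(error), file=sys.stderr)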
| JGiola/swift-corelibs-foundation | lib/pkg_config.py | Python | apache-2.0 | 2,114 |
# -*- coding: utf-8 -*-
#
# cloudtracker documentation build configuration file, created by
# sphinx-quickstart on Fri Aug 5 12:45:40 2011.
#
# This file is execfile()d with the current directory set to its containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys, os
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
sys.path.insert(0, os.path.abspath('..'))
sys.path.insert(0, os.path.abspath('../cloudtracker/'))
# -- General configuration -----------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be extensions
# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = ['sphinx.ext.autodoc', 'sphinx.ext.doctest', 'sphinx.ext.pngmath']
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'cloudtracker'
copyright = u'2011, Jordan Dawe'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = '1.0'
# The full version, including alpha/beta/rc tags.
release = '1.0'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['_build']
# The reST default role (used for this markup: `text`) to use for all documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# -- Options for HTML output ---------------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'default'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Output file base name for HTML help builder.
htmlhelp_basename = 'cloudtrackerdoc'
# -- Options for LaTeX output --------------------------------------------------
# The paper size ('letter' or 'a4').
#latex_paper_size = 'letter'
# The font size ('10pt', '11pt' or '12pt').
#latex_font_size = '10pt'
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass [howto/manual]).
latex_documents = [
('index', 'cloudtracker.tex', u'cloudtracker Documentation',
u'Jordan Dawe', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Additional stuff for the LaTeX preamble.
#latex_preamble = ''
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# -- Options for manual page output --------------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
('index', 'cloudtracker', u'cloudtracker Documentation',
[u'Jordan Dawe'], 1)
]
| freedryk/cloudtracker | doc/conf.py | Python | bsd-2-clause | 7,125 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import datetime
import os
# simplejson is a third-party library you need to install on python 2.5
#import simplejson as json
# json comes bundled with python 2.6. use one or the other
import json
def run():
print "starting"
from receiver.models import Submission
from xformmanager.models import FormDefModel
# this part of the script walks through all the registered
# form definitions and bundles them with the original xsd
# schema for resubmission
domain = None
# you can manually set a single domain here. if you don't then
# all the data will be exported.
domain = "Grameen"
if domain:
all_schemas = FormDefModel.objects.filter(domain__name__iexact=domain)
else:
all_schemas = FormDefModel.objects.all()
for schema in all_schemas:
print "processsing %s" % schema
file_loc = schema.xsd_file_location
print "xsd file: %s" % file_loc
if file_loc:
headers = {
"original-submit-time" : str(schema.submit_time),
"original-submit-ip" : str(schema.submit_ip),
"bytes-received" : schema.bytes_received,
"form-name" : schema.form_name,
"form-display-name" : schema.form_display_name,
"target-namespace" : schema.target_namespace,
"date-created" : str(schema.date_created),
"domain" : str(schema.get_domain)
}
dir, filename = os.path.split(file_loc)
new_dir = os.path.join(dir, "export")
if not os.path.exists(new_dir):
os.makedirs(new_dir)
write_file = os.path.join(new_dir, filename.replace(".xml", ".xsdexport"))
fout = open(write_file, 'w')
jsoned = json.dumps(headers)
print jsoned
fout.write(jsoned)
fout.write("\n\n")
xsd_file = open(file_loc, "r")
payload = xsd_file.read()
xsd_file.close()
fout.write(payload)
fout.close()
# this part of the script walks through all the submissions
# and bundles them in an exportable format with the original
# submitting IP and time, as well as a reference to the
# original post
#all_submissions = Submission.objects.all()
if domain:
all_submissions = Submission.objects.filter(domain__name__iexact=domain)
else:
all_submissions = Submission.objects.all()
for submission in all_submissions:
#print "processing %s (%s)" % (submission,submission.raw_post)
post_file = open(submission.raw_post, "r")
submit_time = str(submission.submit_time)
# first line is content type
content_type = post_file.readline().split(":")[1].strip()
# second line is content length
content_length = post_file.readline().split(":")[1].strip()
# third line is empty
post_file.readline()
# the rest is the actual body of the post
headers = { "content-type" : content_type,
"content-length" : content_length,
"time-received" : str(submission.submit_time),
"original-ip" : str(submission.submit_ip),
"domain" : submission.domain.name
}
# check the directory and create it if it doesn't exist
dir, filename = os.path.split(submission.raw_post)
new_dir = os.path.join(dir, "export")
if not os.path.exists(new_dir):
os.makedirs(new_dir)
# the format will be:
# {headers} (dict)
# (empty line)
# <body>
write_file = os.path.join(new_dir, filename.replace("postdata", "postexport"))
fout = open(write_file, 'w')
jsoned = json.dumps(headers)
fout.write(jsoned)
fout.write("\n\n")
try:
payload = post_file.read()
fout.write(payload)
except Exception:
print "error processing %s" % write_file
fout.close()
print "done"
| commtrack/temp-aquatest | utilities/data_migration/data_export_script_new.py | Python | bsd-3-clause | 4,187 |
"""
Control global computation context
"""
from collections import defaultdict
_globals = defaultdict(lambda: None)
_globals['callbacks'] = set()
class set_options(object):
""" Set global state within controled context
This lets you specify various global settings in a tightly controlled with
block
Valid keyword arguments currently include:
get - the scheduler to use
pool - a thread or process pool
cache - Cache to use for intermediate results
func_loads/func_dumps - loads/dumps functions for serialization of data
likely to contain functions. Defaults to dill.loads/dill.dumps
rerun_exceptions_locally - rerun failed tasks in master process
Example
-------
>>> with set_options(get=dask.get): # doctest: +SKIP
... x = np.array(x) # uses dask.get internally
"""
def __init__(self, **kwargs):
self.old = _globals.copy()
_globals.update(kwargs)
def __enter__(self):
return
def __exit__(self, type, value, traceback):
_globals.clear()
_globals.update(self.old)
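# A minimal usage sketch (the option key below is illustrative): settings
# changed inside the block are restored when the block exits.
if __name__ == '__main__':
    with set_options(rerun_exceptions_locally=True):
        assert _globals['rerun_exceptions_locally'] is True
    assert _globals['rerun_exceptions_locally'] is None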
| wiso/dask | dask/context.py | Python | bsd-3-clause | 1,121 |
from __future__ import unicode_literals
from django.core.exceptions import PermissionDenied
from django.core.mail import send_mail
from django.core import validators
from django.db import models
from django.db.models.manager import EmptyManager
from django.utils.crypto import get_random_string, salted_hmac
from django.utils import six
from django.utils.translation import ugettext_lazy as _
from django.utils import timezone
from django.contrib import auth
from django.contrib.auth.hashers import (
check_password, make_password, is_password_usable)
from django.contrib.auth.signals import user_logged_in
from django.contrib.contenttypes.models import ContentType
from django.utils.encoding import python_2_unicode_compatible
def update_last_login(sender, user, **kwargs):
"""
A signal receiver which updates the last_login date for
the user logging in.
"""
user.last_login = timezone.now()
user.save(update_fields=['last_login'])
user_logged_in.connect(update_last_login)
class PermissionManager(models.Manager):
def get_by_natural_key(self, codename, app_label, model):
return self.get(
codename=codename,
content_type=ContentType.objects.db_manager(self.db).get_by_natural_key(app_label, model),
)
@python_2_unicode_compatible
class Permission(models.Model):
"""
The permissions system provides a way to assign permissions to specific
users and groups of users.
The permission system is used by the Django admin site, but may also be
useful in your own code. The Django admin site uses permissions as follows:
- The "add" permission limits the user's ability to view the "add" form
and add an object.
- The "change" permission limits a user's ability to view the change
list, view the "change" form and change an object.
- The "delete" permission limits the ability to delete an object.
Permissions are set globally per type of object, not per specific object
instance. It is possible to say "Mary may change news stories," but it's
not currently possible to say "Mary may change news stories, but only the
ones she created herself" or "Mary may only change news stories that have a
certain status or publication date."
Three basic permissions -- add, change and delete -- are automatically
created for each Django model.
"""
name = models.CharField(_('name'), max_length=255)
content_type = models.ForeignKey(ContentType)
codename = models.CharField(_('codename'), max_length=100)
objects = PermissionManager()
class Meta:
verbose_name = _('permission')
verbose_name_plural = _('permissions')
unique_together = (('content_type', 'codename'),)
ordering = ('content_type__app_label', 'content_type__model',
'codename')
def __str__(self):
return "%s | %s | %s" % (
six.text_type(self.content_type.app_label),
six.text_type(self.content_type),
six.text_type(self.name))
def natural_key(self):
return (self.codename,) + self.content_type.natural_key()
natural_key.dependencies = ['contenttypes.contenttype']
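# A hedged illustration (not part of this module): application code checks a
# permission created for a model via the conventional
# "<app_label>.<codename>" string, e.g. user.has_perm('news.change_story').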
class GroupManager(models.Manager):
"""
The manager for the auth's Group model.
"""
def get_by_natural_key(self, name):
return self.get(name=name)
@python_2_unicode_compatible
class Group(models.Model):
"""
Groups are a generic way of categorizing users to apply permissions, or
some other label, to those users. A user can belong to any number of
groups.
A user in a group automatically has all the permissions granted to that
group. For example, if the group Site editors has the permission
can_edit_home_page, any user in that group will have that permission.
Beyond permissions, groups are a convenient way to categorize users to
apply some label, or extended functionality, to them. For example, you
could create a group 'Special users', and you could write code that would
do special things to those users -- such as giving them access to a
members-only portion of your site, or sending them members-only email
messages.
"""
name = models.CharField(_('name'), max_length=80, unique=True)
permissions = models.ManyToManyField(Permission,
verbose_name=_('permissions'), blank=True)
objects = GroupManager()
class Meta:
verbose_name = _('group')
verbose_name_plural = _('groups')
def __str__(self):
return self.name
def natural_key(self):
return (self.name,)
class BaseUserManager(models.Manager):
@classmethod
def normalize_email(cls, email):
"""
Normalize the address by lowercasing the domain part of the email
address.
"""
email = email or ''
try:
email_name, domain_part = email.strip().rsplit('@', 1)
except ValueError:
pass
else:
email = '@'.join([email_name, domain_part.lower()])
return email
def make_random_password(self, length=10,
allowed_chars='abcdefghjkmnpqrstuvwxyz'
'ABCDEFGHJKLMNPQRSTUVWXYZ'
'23456789'):
"""
Generates a random password with the given length and given
allowed_chars. Note that the default value of allowed_chars does not
have "I" or "O" or letters and digits that look similar -- just to
avoid confusion.
"""
return get_random_string(length, allowed_chars)
def get_by_natural_key(self, username):
return self.get(**{self.model.USERNAME_FIELD: username})
class UserManager(BaseUserManager):
def _create_user(self, username, email, password,
is_staff, is_superuser, **extra_fields):
"""
Creates and saves a User with the given username, email and password.
"""
now = timezone.now()
if not username:
raise ValueError('The given username must be set')
email = self.normalize_email(email)
user = self.model(username=username, email=email,
is_staff=is_staff, is_active=True,
is_superuser=is_superuser,
date_joined=now, **extra_fields)
user.set_password(password)
user.save(using=self._db)
return user
def create_user(self, username, email=None, password=None, **extra_fields):
return self._create_user(username, email, password, False, False,
**extra_fields)
def create_superuser(self, username, email, password, **extra_fields):
return self._create_user(username, email, password, True, True,
**extra_fields)
@python_2_unicode_compatible
class AbstractBaseUser(models.Model):
password = models.CharField(_('password'), max_length=128)
last_login = models.DateTimeField(_('last login'), blank=True, null=True)
is_active = True
REQUIRED_FIELDS = []
class Meta:
abstract = True
def get_username(self):
"Return the identifying username for this User"
return getattr(self, self.USERNAME_FIELD)
def __str__(self):
return self.get_username()
def natural_key(self):
return (self.get_username(),)
def is_anonymous(self):
"""
Always returns False. This is a way of comparing User objects to
anonymous users.
"""
return False
def is_authenticated(self):
"""
Always return True. This is a way to tell if the user has been
authenticated in templates.
"""
return True
def set_password(self, raw_password):
self.password = make_password(raw_password)
def check_password(self, raw_password):
"""
Returns a boolean of whether the raw_password was correct. Handles
hashing formats behind the scenes.
"""
def setter(raw_password):
self.set_password(raw_password)
self.save(update_fields=["password"])
return check_password(raw_password, self.password, setter)
def set_unusable_password(self):
# Sets a value that will never be a valid hash
self.password = make_password(None)
def has_usable_password(self):
return is_password_usable(self.password)
def get_full_name(self):
raise NotImplementedError('subclasses of AbstractBaseUser must provide a get_full_name() method')
def get_short_name(self):
raise NotImplementedError('subclasses of AbstractBaseUser must provide a get_short_name() method.')
def get_session_auth_hash(self):
"""
Returns an HMAC of the password field.
"""
key_salt = "django.contrib.auth.models.AbstractBaseUser.get_session_auth_hash"
return salted_hmac(key_salt, self.password).hexdigest()
# A few helper functions for common logic between User and AnonymousUser.
def _user_get_all_permissions(user, obj):
permissions = set()
for backend in auth.get_backends():
if hasattr(backend, "get_all_permissions"):
permissions.update(backend.get_all_permissions(user, obj))
return permissions
def _user_has_perm(user, perm, obj):
"""
A backend can raise `PermissionDenied` to short-circuit permission checking.
"""
for backend in auth.get_backends():
if not hasattr(backend, 'has_perm'):
continue
try:
if backend.has_perm(user, perm, obj):
return True
except PermissionDenied:
return False
return False
def _user_has_module_perms(user, app_label):
"""
A backend can raise `PermissionDenied` to short-circuit permission checking.
"""
for backend in auth.get_backends():
if not hasattr(backend, 'has_module_perms'):
continue
try:
if backend.has_module_perms(user, app_label):
return True
except PermissionDenied:
return False
return False
class PermissionsMixin(models.Model):
"""
A mixin class that adds the fields and methods necessary to support
Django's Group and Permission model using the ModelBackend.
"""
is_superuser = models.BooleanField(_('superuser status'), default=False,
help_text=_('Designates that this user has all permissions without '
'explicitly assigning them.'))
groups = models.ManyToManyField(Group, verbose_name=_('groups'),
blank=True, help_text=_('The groups this user belongs to. A user will '
'get all permissions granted to each of '
'their groups.'),
related_name="user_set", related_query_name="user")
user_permissions = models.ManyToManyField(Permission,
verbose_name=_('user permissions'), blank=True,
help_text=_('Specific permissions for this user.'),
related_name="user_set", related_query_name="user")
class Meta:
abstract = True
def get_group_permissions(self, obj=None):
"""
Returns a list of permission strings that this user has through their
groups. This method queries all available auth backends. If an object
is passed in, only permissions matching this object are returned.
"""
permissions = set()
for backend in auth.get_backends():
if hasattr(backend, "get_group_permissions"):
permissions.update(backend.get_group_permissions(self, obj))
return permissions
def get_all_permissions(self, obj=None):
return _user_get_all_permissions(self, obj)
def has_perm(self, perm, obj=None):
"""
Returns True if the user has the specified permission. This method
queries all available auth backends, but returns immediately if any
backend returns True. Thus, a user who has permission from a single
auth backend is assumed to have permission in general. If an object is
provided, permissions for this specific object are checked.
"""
# Active superusers have all permissions.
if self.is_active and self.is_superuser:
return True
# Otherwise we need to check the backends.
return _user_has_perm(self, perm, obj)
def has_perms(self, perm_list, obj=None):
"""
Returns True if the user has each of the specified permissions. If
object is passed, it checks if the user has all required perms for this
object.
"""
for perm in perm_list:
if not self.has_perm(perm, obj):
return False
return True
def has_module_perms(self, app_label):
"""
Returns True if the user has any permissions in the given app label.
Uses pretty much the same logic as has_perm, above.
"""
# Active superusers have all permissions.
if self.is_active and self.is_superuser:
return True
return _user_has_module_perms(self, app_label)
class AbstractUser(AbstractBaseUser, PermissionsMixin):
"""
An abstract base class implementing a fully featured User model with
admin-compliant permissions.
Username, password and email are required. Other fields are optional.
"""
username = models.CharField(_('username'), max_length=30, unique=True,
help_text=_('Required. 30 characters or fewer. Letters, digits and '
'@/./+/-/_ only.'),
validators=[
validators.RegexValidator(r'^[\w.@+-]+$',
_('Enter a valid username. '
'This value may contain only letters, numbers '
'and @/./+/-/_ characters.'), 'invalid'),
],
error_messages={
'unique': _("A user with that username already exists."),
})
first_name = models.CharField(_('first name'), max_length=30, blank=True)
last_name = models.CharField(_('last name'), max_length=30, blank=True)
email = models.EmailField(_('email address'), blank=True)
is_staff = models.BooleanField(_('staff status'), default=False,
help_text=_('Designates whether the user can log into this admin '
'site.'))
is_active = models.BooleanField(_('active'), default=True,
help_text=_('Designates whether this user should be treated as '
'active. Unselect this instead of deleting accounts.'))
date_joined = models.DateTimeField(_('date joined'), default=timezone.now)
objects = UserManager()
USERNAME_FIELD = 'username'
REQUIRED_FIELDS = ['email']
class Meta:
verbose_name = _('user')
verbose_name_plural = _('users')
abstract = True
def get_full_name(self):
"""
Returns the first_name plus the last_name, with a space in between.
"""
full_name = '%s %s' % (self.first_name, self.last_name)
return full_name.strip()
def get_short_name(self):
"Returns the short name for the user."
return self.first_name
def email_user(self, subject, message, from_email=None, **kwargs):
"""
Sends an email to this User.
"""
send_mail(subject, message, from_email, [self.email], **kwargs)
class User(AbstractUser):
"""
Users within the Django authentication system are represented by this
model.
Username, password and email are required. Other fields are optional.
"""
class Meta(AbstractUser.Meta):
swappable = 'AUTH_USER_MODEL'
@python_2_unicode_compatible
class AnonymousUser(object):
id = None
pk = None
username = ''
is_staff = False
is_active = False
is_superuser = False
_groups = EmptyManager(Group)
_user_permissions = EmptyManager(Permission)
def __init__(self):
pass
def __str__(self):
return 'AnonymousUser'
def __eq__(self, other):
return isinstance(other, self.__class__)
def __ne__(self, other):
return not self.__eq__(other)
def __hash__(self):
return 1 # instances always return the same hash value
def save(self):
raise NotImplementedError("Django doesn't provide a DB representation for AnonymousUser.")
def delete(self):
raise NotImplementedError("Django doesn't provide a DB representation for AnonymousUser.")
def set_password(self, raw_password):
raise NotImplementedError("Django doesn't provide a DB representation for AnonymousUser.")
def check_password(self, raw_password):
raise NotImplementedError("Django doesn't provide a DB representation for AnonymousUser.")
def _get_groups(self):
return self._groups
groups = property(_get_groups)
def _get_user_permissions(self):
return self._user_permissions
user_permissions = property(_get_user_permissions)
def get_group_permissions(self, obj=None):
return set()
def get_all_permissions(self, obj=None):
return _user_get_all_permissions(self, obj=obj)
def has_perm(self, perm, obj=None):
return _user_has_perm(self, perm, obj=obj)
def has_perms(self, perm_list, obj=None):
for perm in perm_list:
if not self.has_perm(perm, obj):
return False
return True
def has_module_perms(self, module):
return _user_has_module_perms(self, module)
def is_anonymous(self):
return True
def is_authenticated(self):
return False
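# A minimal sketch of the AnonymousUser contract defined above. Permission
# checks delegate to the configured auth backends, so this assumes a Django
# settings module is already configured.
def _demo_anonymous_user():
    user = AnonymousUser()
    assert user.is_anonymous() and not user.is_authenticated()
    assert not user.has_perm('auth.change_user')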
| pwmarcz/django | django/contrib/auth/models.py | Python | bsd-3-clause | 17,843 |
from __future__ import absolute_import, unicode_literals
from django.core.exceptions import PermissionDenied
from django.shortcuts import get_object_or_404
from wagtail.wagtailadmin.forms import PageViewRestrictionForm
from wagtail.wagtailadmin.modal_workflow import render_modal_workflow
from wagtail.wagtailcore.models import Page, PageViewRestriction
def set_privacy(request, page_id):
page = get_object_or_404(Page, id=page_id)
page_perms = page.permissions_for_user(request.user)
if not page_perms.can_set_view_restrictions():
raise PermissionDenied
# fetch restriction records in depth order so that ancestors appear first
restrictions = page.get_view_restrictions().order_by('page__depth')
if restrictions:
restriction = restrictions[0]
restriction_exists_on_ancestor = (restriction.page != page)
else:
restriction = None
restriction_exists_on_ancestor = False
if request.method == 'POST':
form = PageViewRestrictionForm(request.POST, instance=restriction)
if form.is_valid() and not restriction_exists_on_ancestor:
if form.cleaned_data['restriction_type'] == PageViewRestriction.NONE:
# remove any existing restriction
if restriction:
restriction.delete()
else:
restriction = form.save(commit=False)
restriction.page = page
form.save()
return render_modal_workflow(
request, None, 'wagtailadmin/page_privacy/set_privacy_done.js', {
'is_public': (form.cleaned_data['restriction_type'] == 'none')
}
)
else: # request is a GET
if not restriction_exists_on_ancestor:
if restriction:
form = PageViewRestrictionForm(instance=restriction)
else:
# no current view restrictions on this page
form = PageViewRestrictionForm(initial={
'restriction_type': 'none'
})
if restriction_exists_on_ancestor:
# display a message indicating that there is a restriction at ancestor level -
# do not provide the form for setting up new restrictions
return render_modal_workflow(
request, 'wagtailadmin/page_privacy/ancestor_privacy.html', None,
{
'page_with_restriction': restriction.page,
}
)
else:
# no restriction set at ancestor level - can set restrictions here
return render_modal_workflow(
request,
'wagtailadmin/page_privacy/set_privacy.html',
'wagtailadmin/page_privacy/set_privacy.js', {
'page': page,
'form': form,
}
)
| chrxr/wagtail | wagtail/wagtailadmin/views/page_privacy.py | Python | bsd-3-clause | 2,828 |
# -*- coding: utf-8 -*-
"""API Request cache tests."""
#
# (C) Pywikibot team, 2012-2014
#
# Distributed under the terms of the MIT license.
#
from __future__ import unicode_literals
__version__ = '$Id$'
#
from pywikibot.site import BaseSite
import scripts.maintenance.cache as cache
from tests import _cache_dir
from tests.aspects import unittest, TestCase
class RequestCacheTests(TestCase):
"""Validate cache entries."""
net = False
def _check_cache_entry(self, entry):
"""Assert validity of the cache entry."""
self.assertIsInstance(entry.site, BaseSite)
self.assertIsInstance(entry.site._loginstatus, int)
self.assertIsInstance(entry.site._username, list)
if entry.site._loginstatus >= 1:
self.assertIsNotNone(entry.site._username[0])
self.assertIsInstance(entry._params, dict)
self.assertIsNotNone(entry._params)
# TODO: more tests on entry._params, and possibly fixes needed
# to make it closely replicate the original object.
def test_cache(self):
"""Test the apicache by doing _check_cache_entry over each entry."""
cache.process_entries(_cache_dir, self._check_cache_entry)
if __name__ == '__main__':
unittest.main()
| valhallasw/pywikibot-core | tests/cache_tests.py | Python | mit | 1,258 |
from __future__ import print_function
import sys
def func():
print('{0}.{1}'.format(*sys.version_info[:2]))
print(repr(sys.argv[1:]))
print('Hello World')
return 0
| Teino1978-Corp/pre-commit | testing/resources/python3_hooks_repo/python3_hook/main.py | Python | mit | 183 |
# Image Patches Differential Optical Flow Rotation/Scale
#
# This example shows off using your OpenMV Cam to measure
# rotation/scale by comparing the current and the previous
# image against each other. Note that only rotation/scale is
# handled - not X and Y translation in this mode.
#
# However, this examples goes beyond doing optical flow on the whole
# image at once. Instead it breaks up the process by working on groups
# of pixels in the image. This gives you a "new" image of results.
#
# NOTE that surfaces need to have some type of "edge" on them for the
# algorithm to work. A featureless surface produces crazy results.
# NOTE: Unless you have a very nice test rig, the usefulness of this example is hard to see...
BLOCK_W = 16 # pow2
BLOCK_H = 16 # pow2
# To run this demo effectively please mount your OpenMV Cam on a steady
# base and SLOWLY rotate the camera around the lens and move the camera
# forward/backwards to see the numbers change.
# I.e. Z direction changes only.
import sensor, image, time, math
# NOTE!!! You have to use a small power of 2 resolution when using
# find_displacement(). This is because the algorithm is powered by
# something called phase correlation which does the image comparison
# using FFTs. A non-power of 2 resolution requires padding to a power
# of 2 which reduces the usefulness of the algorithm results. Please
# use a resolution like B128X128 or B128X64 (2x faster).
# Your OpenMV Cam supports power of 2 resolutions of 64x32, 64x64,
# 128x64, and 128x128. If you want a resolution of 32x32 you can create
# it by doing "img.pool(2, 2)" on a 64x64 image.
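# For example (a sketch only, not used below), a 32x32 input can be built
# from a 64x64 capture with pixel pooling:
#   sensor.set_framesize(sensor.B64X64)
#   small = sensor.snapshot().pool(2, 2) # 64x64 -> 32x32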
sensor.reset() # Reset and initialize the sensor.
sensor.set_pixformat(sensor.GRAYSCALE) # Set pixel format to GRAYSCALE (or RGB565)
sensor.set_framesize(sensor.B128X128) # Set frame size to 128x128... (or 128x64)...
sensor.skip_frames(time = 2000) # Wait for settings take effect.
clock = time.clock() # Create a clock object to track the FPS.
# Take from the main frame buffer's RAM to allocate a second frame buffer.
# There's a lot more RAM in the frame buffer than in the MicroPython heap.
# However, after doing this you have a lot less RAM for some algorithms...
# So, be aware that it's a lot easier to get out of RAM issues now.
extra_fb = sensor.alloc_extra_fb(sensor.width(), sensor.height(), sensor.GRAYSCALE)
extra_fb.replace(sensor.snapshot())
while(True):
clock.tick() # Track elapsed milliseconds between snapshots().
img = sensor.snapshot() # Take a picture and return the image.
for y in range(0, sensor.height(), BLOCK_H):
for x in range(0, sensor.width(), BLOCK_W):
displacement = extra_fb.find_displacement(img, logpolar=True, \
roi = (x, y, BLOCK_W, BLOCK_H), template_roi = (x, y, BLOCK_W, BLOCK_H))
# Below 0.1 or so (YMMV) and the results are just noise.
if(displacement.response() > 0.1):
rotation_change = displacement.rotation()
zoom_amount = 1.0 + displacement.scale()
pixel_x = x + (BLOCK_W//2) + int(math.sin(rotation_change) * zoom_amount * (BLOCK_W//4))
pixel_y = y + (BLOCK_H//2) + int(math.cos(rotation_change) * zoom_amount * (BLOCK_H//4))
img.draw_line((x + BLOCK_W//2, y + BLOCK_H//2, pixel_x, pixel_y), \
color = 255)
else:
img.draw_line((x + BLOCK_W//2, y + BLOCK_H//2, x + BLOCK_W//2, y + BLOCK_H//2), \
color = 0)
extra_fb.replace(img)
print(clock.fps())
| openmv/openmv | scripts/examples/OpenMV/22-Optical-Flow/image-patches-differential-rotation-scale.py | Python | mit | 3,596 |
"""
categories: Types,bytes
description: Bytes subscript with step != 1 is not implemented
cause: Unknown
workaround: Unknown
"""
print(b'123'[0:3:2])
| cwyark/micropython | tests/cpydiff/types_bytes_subscrstep.py | Python | mit | 145 |
## This file is part of Invenio.
## Copyright (C) 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011, 2012 CERN.
##
## Invenio is free software; you can redistribute it and/or
## modify it under the terms of the GNU General Public License as
## published by the Free Software Foundation; either version 2 of the
## License, or (at your option) any later version.
##
## Invenio is distributed in the hope that it will be useful, but
## WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
## General Public License for more details.
##
## You should have received a copy of the GNU General Public License
## along with Invenio; if not, write to the Free Software Foundation, Inc.,
## 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.
"""Invenio Access Control Config. """
__revision__ = \
"$Id$"
# pylint: disable=C0301
from invenio import config
from invenio.config import CFG_SITE_NAME, CFG_SITE_URL, CFG_SITE_LANG, \
CFG_SITE_SECURE_URL, CFG_SITE_SUPPORT_EMAIL, CFG_CERN_SITE, \
CFG_OPENAIRE_SITE, CFG_SITE_RECORD, CFG_INSPIRE_SITE, \
CFG_SITE_ADMIN_EMAIL
from invenio.messages import gettext_set_language
class InvenioWebAccessFireroleError(Exception):
"""Just an Exception to discover if it's a FireRole problem"""
pass
# VALUES TO BE EXPORTED
# CURRENTLY USED BY THE FILES access_control_engine.py access_control_admin.py webaccessadmin_lib.py
# name of the role giving superadmin rights
SUPERADMINROLE = 'superadmin'
# name of the webaccess webadmin role
WEBACCESSADMINROLE = 'webaccessadmin'
# name of the action allowing roles to access the web administrator interface
WEBACCESSACTION = 'cfgwebaccess'
# name of the action allowing roles to access the web administrator interface
VIEWRESTRCOLL = 'viewrestrcoll'
# name of the action allowing roles to delegate the rights to other roles
# ex: libraryadmin to delegate libraryworker
DELEGATEADDUSERROLE = 'accdelegaterole'
# max number of users to display in the drop down selects
MAXSELECTUSERS = 25
# max number of users to display in a page (mainly for user area)
MAXPAGEUSERS = 25
# default role definition, source:
CFG_ACC_EMPTY_ROLE_DEFINITION_SRC = 'deny all'
# default role definition, compiled:
CFG_ACC_EMPTY_ROLE_DEFINITION_OBJ = (False, ())
# default role definition, compiled and serialized:
CFG_ACC_EMPTY_ROLE_DEFINITION_SER = None
# List of tags containing (multiple) emails of users who should authorize
# to access the corresponding record regardless of collection restrictions.
if CFG_CERN_SITE:
CFG_ACC_GRANT_AUTHOR_RIGHTS_TO_EMAILS_IN_TAGS = ['859__f', '270__m']
else:
CFG_ACC_GRANT_AUTHOR_RIGHTS_TO_EMAILS_IN_TAGS = ['8560_f']
if CFG_CERN_SITE:
CFG_ACC_GRANT_VIEWER_RIGHTS_TO_EMAILS_IN_TAGS = ['506__m']
else:
CFG_ACC_GRANT_VIEWER_RIGHTS_TO_EMAILS_IN_TAGS = []
# Use external source for access control?
# CFG_EXTERNAL_AUTHENTICATION -- this is a dictionary with the enabled login method.
# The key is the name of the login method and the value is an instance of
# of the login method (see /help/admin/webaccess-admin-guide#5). Set the value
# to None if you wish to use the local Invenio authentication method.
# CFG_EXTERNAL_AUTH_DEFAULT -- set this to the key in CFG_EXTERNAL_AUTHENTICATION
# that should be considered as default login method
# CFG_EXTERNAL_AUTH_USING_SSO -- set this to the login method name of an SSO
# login method, if any, otherwise set this to None.
# CFG_EXTERNAL_AUTH_LOGOUT_SSO -- if CFG_EXTERNAL_AUTH_USING_SSO was not None
# set this to the URL that should be contacted to perform an SSO logout
from invenio.external_authentication_robot import ExternalAuthRobot
if CFG_CERN_SITE:
from invenio import external_authentication_sso as ea_sso
CFG_EXTERNAL_AUTH_USING_SSO = "CERN"
CFG_EXTERNAL_AUTH_DEFAULT = CFG_EXTERNAL_AUTH_USING_SSO
CFG_EXTERNAL_AUTH_LOGOUT_SSO = 'https://login.cern.ch/adfs/ls/?wa=wsignout1.0'
CFG_EXTERNAL_AUTHENTICATION = {
CFG_EXTERNAL_AUTH_USING_SSO : ea_sso.ExternalAuthSSO(),
}
elif CFG_OPENAIRE_SITE:
CFG_EXTERNAL_AUTH_DEFAULT = 'Local'
CFG_EXTERNAL_AUTH_USING_SSO = False
CFG_EXTERNAL_AUTH_LOGOUT_SSO = None
CFG_EXTERNAL_AUTHENTICATION = {
"Local": None,
"OpenAIRE": ExternalAuthRobot(enforce_external_nicknames=True, use_zlib=False, external_id_attribute_name="id"),
}
elif CFG_INSPIRE_SITE:
# INSPIRE specific robot configuration
CFG_EXTERNAL_AUTH_DEFAULT = 'Local'
CFG_EXTERNAL_AUTH_USING_SSO = False
CFG_EXTERNAL_AUTH_LOGOUT_SSO = None
CFG_EXTERNAL_AUTHENTICATION = {
"Local": None,
"Robot": ExternalAuthRobot(enforce_external_nicknames=True, use_zlib=False, check_user_ip=2, external_id_attribute_name='personid'),
"ZRobot": ExternalAuthRobot(enforce_external_nicknames=True, use_zlib=True, check_user_ip=2, external_id_attribute_name='personid')
}
else:
CFG_EXTERNAL_AUTH_DEFAULT = 'Local'
CFG_EXTERNAL_AUTH_USING_SSO = False
CFG_EXTERNAL_AUTH_LOGOUT_SSO = None
CFG_EXTERNAL_AUTHENTICATION = {
"Local": None,
"Robot": ExternalAuthRobot(enforce_external_nicknames=True, use_zlib=False),
"ZRobot": ExternalAuthRobot(enforce_external_nicknames=True, use_zlib=True)
}
# CFG_TEMP_EMAIL_ADDRESS
# Temporary email address for logging in with an OpenID/OAuth provider which
# doesn't supply email address
CFG_TEMP_EMAIL_ADDRESS = "%s@NOEMAIL"
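# For example (illustrative random string), CFG_TEMP_EMAIL_ADDRESS % "a1b2c3"
# yields "a1b2c3@NOEMAIL".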
# CFG_OPENID_PROVIDERS
# CFG_OAUTH1_PROVIDERS
# CFG_OAUTH2_PROVIDERS
# Choose which providers you want to use. Some providers don't supply e mail
# address, if you choose them, the users will be registered with an temporary
# email address like CFG_TEMP_EMAIL_ADDRESS % randomstring
#
# Order of the login buttons can be changed by CFG_EXTERNAL_LOGIN_BUTTON_ORDER
# in invenio.websession_config
CFG_OPENID_PROVIDERS = [
'google',
'yahoo',
'aol',
'wordpress',
'myvidoop',
'openid',
'verisign',
'myopenid',
'myspace',
'livejournal',
'blogger'
]
CFG_OAUTH1_PROVIDERS = [
'twitter',
'linkedin',
'flickr'
]
CFG_OAUTH2_PROVIDERS = [
'facebook',
'yammer',
'foursquare',
'googleoauth2',
'instagram',
'orcid'
]
# CFG_OPENID_CONFIGURATIONS
# identifier: (required) identifier url. {0} will be replaced by username (an
# input).
# trust_email: (optional, default: False) Some providers let their users
# change their emails on login page. If the provider doesn't let the user,
# set it True.
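# For example, with the input username "alice", the identifier template
# '{0}.myopenid.com' resolves to 'alice.myopenid.com'.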
CFG_OPENID_CONFIGURATIONS = {
'openid': {
'identifier': '{0}'
},
'myvidoop': {
'identifier': '{0}.myvidoop.com'
},
'google': {
'identifier': 'https://www.google.com/accounts/o8/id',
'trust_email': True
},
'wordpress': {
'identifier': '{0}.wordpress.com'
},
'aol': {
'identifier': 'openid.aol.com/{0}',
'trust_email': True
},
'myopenid': {
'identifier': '{0}.myopenid.com'
},
'yahoo': {
'identifier': 'yahoo.com',
'trust_email': True
},
'verisign': {
'identifier': '{0}.pip.verisignlabs.com'
},
'myspace': {
'identifier': 'www.myspace.com/{0}'
},
'livejournal': {
'identifier': '{0}.livejournal.com'
},
'blogger': {
'identifier': '{0}'
}
}
# CFG_OAUTH1_CONFIGURATIONS
#
# !!IMPORTANT!!
# While creating an app in the provider site, the callback uri (redirect uri)
# must be in the form of :
# CFG_SITE_SECURE_URL/youraccount/login?login_method=oauth1&provider=PROVIDERNAME
#
# consumer_key: required
# Consumer key taken from provider.
#
# consumer_secret: required
# Consumer secret taken from provider.
#
# authorize_url: required
# The url to redirect the user for authorization
#
# authorize_parameters: optional
# Additional parameters for authorize_url (ie. scope)
#
# request_token_url: required
# The url to get request token
#
# access_token_url: required
# The url to exchange the request token with the access token
#
# request_url: optional
# The url to gather the user information
#
# request_parameters: optional
# Additional parameters for request_url
#
# email, nickname: optional
# id: required
#      The location of these properties in the response returned from the
#      provider.
# example:
# if the response is:
# {
# 'user': {
# 'user_name': 'ABC',
# 'contact': [
# {
# 'email': 'abc@def.com'
# }
# ]
# },
# 'user_id': 'XXX',
# }
# then:
# email must be : ['user', 'contact', 0, 'email']
# id must be: ['user_id']
# nickname must be: ['user', 'user_name']
#
# debug: optional
# When debug key is set to 1, after login process, the json object
# returned from provider is displayed on the screen. It may be used
# for finding where the id, email or nickname is.
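# An illustrative helper (not part of Invenio's API) showing how a location
# list such as ['user', 'contact', 0, 'email'] can be resolved against a
# decoded JSON response:
def _example_resolve_response_path(response, path):
    value = response
    for key in path:
        value = value[key] # each key is a dict key or a list index
    return value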
CFG_OAUTH1_CONFIGURATIONS = {
'twitter': {
'consumer_key' : '',
'consumer_secret' : '',
'request_token_url' : 'https://api.twitter.com/oauth/request_token',
'access_token_url' : 'https://api.twitter.com/oauth/access_token',
'authorize_url' : 'https://api.twitter.com/oauth/authorize',
'id': ['user_id'],
'nickname': ['screen_name']
},
'flickr': {
'consumer_key' : '',
'consumer_secret' : '',
'request_token_url' : 'http://www.flickr.com/services/oauth/request_token',
'access_token_url' : 'http://www.flickr.com/services/oauth/access_token',
'authorize_url' : 'http://www.flickr.com/services/oauth/authorize',
'authorize_parameters': {
'perms': 'read'
},
'nickname': ['username'],
'id': ['user_nsid']
},
'linkedin': {
'consumer_key' : '',
'consumer_secret' : '',
'request_token_url' : 'https://api.linkedin.com/uas/oauth/requestToken',
'access_token_url' : 'https://api.linkedin.com/uas/oauth/accessToken',
'authorize_url' : 'https://www.linkedin.com/uas/oauth/authorize',
'request_url': 'http://api.linkedin.com/v1/people/~:(id)',
'request_parameters': {
'format': 'json'
},
'id': ['id']
}
}
# CFG_OAUTH2_CONFIGURATIONS
#
# !!IMPORTANT!!
# While creating an app in the provider site, the callback uri (redirect uri)
# must be in the form of :
# CFG_SITE_SECURE_URL/youraccount/login?login_method=oauth2&provider=PROVIDERNAME
#
# consumer_key: required
# Consumer key taken from provider.
#
# consumer_secret: required
# Consumer secret taken from provider.
#
# authorize_url: required
# The url to redirect the user for authorization
#
# authorize_parameters:
# Additional parameters for authorize_url (like scope)
#
# access_token_url: required
# The url to get the access token.
#
# request_url: required
# The url to gather the user information.
# {access_token} will be replaced by access token
#
# email, nickname: optional
# id: required
#    The location of these properties in the response returned from the
#    provider.
# !! See the example in CFG_OAUTH1_CONFIGURATIONS !!
#
# debug: optional
# When debug key is set to 1, after login process, the json object
# returned from provider is displayed on the screen. It may be used
# for finding where the id, email or nickname is.
CFG_OAUTH2_CONFIGURATIONS = {
'facebook': {
'consumer_key': '118319526393',
'consumer_secret': '8d675eb0ef89f2f8fbbe4ee56ab473c6',
'access_token_url': 'https://graph.facebook.com/oauth/access_token',
'authorize_url': 'https://www.facebook.com/dialog/oauth',
'authorize_parameters': {
'scope': 'email'
},
'request_url' : 'https://graph.facebook.com/me?access_token={access_token}',
'email': ['email'],
'id': ['id'],
'nickname': ['username']
},
'foursquare': {
'consumer_key': '',
'consumer_secret': '',
'access_token_url': 'https://foursquare.com/oauth2/access_token',
'authorize_url': 'https://foursquare.com/oauth2/authorize',
'request_url': 'https://api.foursquare.com/v2/users/self?oauth_token={access_token}',
'id': ['response', 'user', 'id'],
'email': ['response', 'user', 'contact' ,'email']
},
'yammer': {
'consumer_key': '',
'consumer_secret': '',
'access_token_url': 'https://www.yammer.com/oauth2/access_token.json',
'authorize_url': 'https://www.yammer.com/dialog/oauth',
'request_url': 'https://www.yammer.com/oauth2/access_token.json?access_token={access_token}',
'email':['user', 'contact', 'email_addresses', 0, 'address'],
'id': ['user', 'id'],
'nickname': ['user', 'name']
},
'googleoauth2': {
'consumer_key': '',
'consumer_secret': '',
'access_token_url': 'https://accounts.google.com/o/oauth2/token',
'authorize_url': 'https://accounts.google.com/o/oauth2/auth',
'authorize_parameters': {
'scope': 'https://www.googleapis.com/auth/userinfo.profile https://www.googleapis.com/auth/userinfo.email'
},
'request_url': 'https://www.googleapis.com/oauth2/v1/userinfo?access_token={access_token}',
'email':['email'],
'id': ['id']
},
'instagram': {
'consumer_key': '',
'consumer_secret': '',
'access_token_url': 'https://api.instagram.com/oauth/access_token',
'authorize_url': 'https://api.instagram.com/oauth/authorize/',
'authorize_parameters': {
'scope': 'basic'
},
'id': ['user', 'id'],
'nickname': ['user', 'username']
},
'orcid': {
'consumer_key': '',
'consumer_secret': '',
'authorize_url': 'http://sandbox-1.orcid.org/oauth/authorize',
'access_token_url': 'http://api.sandbox-1.orcid.org/oauth/token',
'request_url': 'http://api.sandbox-1.orcid.org/{id}/orcid-profile',
'authorize_parameters': {
'scope': '/orcid-profile/read-limited',
'response_type': 'code',
'access_type': 'offline',
},
'id': ['orcid'],
}
}
## Let's override OpenID/OAuth1/OAuth2 configuration from invenio(-local).conf
CFG_OPENID_PROVIDERS = config.CFG_OPENID_PROVIDERS
CFG_OAUTH1_PROVIDERS = config.CFG_OAUTH1_PROVIDERS
CFG_OAUTH2_PROVIDERS = config.CFG_OAUTH2_PROVIDERS
if config.CFG_OPENID_CONFIGURATIONS:
for provider, configuration in config.CFG_OPENID_CONFIGURATIONS.items():
if provider in CFG_OPENID_CONFIGURATIONS:
CFG_OPENID_CONFIGURATIONS[provider].update(configuration)
else:
CFG_OPENID_CONFIGURATIONS[provider] = configuration
if config.CFG_OAUTH1_CONFIGURATIONS:
for provider, configuration in config.CFG_OAUTH1_CONFIGURATIONS.items():
if provider in CFG_OAUTH1_CONFIGURATIONS:
CFG_OAUTH1_CONFIGURATIONS[provider].update(configuration)
else:
CFG_OAUTH1_CONFIGURATIONS[provider] = configuration
if config.CFG_OAUTH2_CONFIGURATIONS:
for provider, configuration in config.CFG_OAUTH2_CONFIGURATIONS.items():
if provider in CFG_OAUTH2_CONFIGURATIONS:
CFG_OAUTH2_CONFIGURATIONS[provider].update(configuration)
else:
CFG_OAUTH2_CONFIGURATIONS[provider] = configuration
# If OpenID authentication is enabled, add 'openid' to login methods
CFG_OPENID_AUTHENTICATION = bool(CFG_OPENID_PROVIDERS)
if CFG_OPENID_AUTHENTICATION:
from invenio.external_authentication_openid import ExternalOpenID
CFG_EXTERNAL_AUTHENTICATION['openid'] = ExternalOpenID(enforce_external_nicknames=True)
# If OAuth1 authentication is enabled, add 'oauth1' to login methods.
CFG_OAUTH1_AUTHENTICATION = bool(CFG_OAUTH1_PROVIDERS)
if CFG_OAUTH1_PROVIDERS:
from invenio.external_authentication_oauth1 import ExternalOAuth1
CFG_EXTERNAL_AUTHENTICATION['oauth1'] = ExternalOAuth1(enforce_external_nicknames=True)
# If OAuth2 authentication is enabled, add 'oauth2' to login methods.
CFG_OAUTH2_AUTHENTICATION = bool(CFG_OAUTH2_PROVIDERS)
if CFG_OAUTH2_AUTHENTICATION:
from invenio.external_authentication_oauth2 import ExternalOAuth2
CFG_EXTERNAL_AUTHENTICATION['oauth2'] = ExternalOAuth2(enforce_external_nicknames=True)
## If using SSO, this is the number of seconds after which the keep-alive
## SSO handler is pinged again to provide fresh SSO information.
CFG_EXTERNAL_AUTH_SSO_REFRESH = 600
# default data for the add_default_settings function
# Note: by default the definition is set to deny any. This won't be a problem
# because userid directly connected with roles will still be allowed.
# roles
# name description definition
DEF_ROLES = ((SUPERADMINROLE, 'superuser with all rights', 'deny any'),
(WEBACCESSADMINROLE, 'WebAccess administrator', 'deny any'),
('anyuser', 'Any user', 'allow any'),
('basketusers', 'Users who can use baskets', 'allow any'),
('loanusers', 'Users who can use loans', 'allow any'),
('groupusers', 'Users who can use groups', 'allow any'),
('alertusers', 'Users who can use alerts', 'allow any'),
('messageusers', 'Users who can use messages', 'allow any'),
('holdingsusers', 'Users who can view holdings', 'allow any'),
('statisticsusers', 'Users who can view statistics', 'allow any'),
('claimpaperusers', 'Users who can perform changes to their own paper attributions without the need for an operator\'s approval', 'allow any'),
('claimpaperoperators', 'Users who can perform changes to _all_ paper attributions without the need for an operator\'s approval', 'deny any'),
('paperclaimviewers', 'Users who can view "claim my paper" facilities.', 'allow all'),
('paperattributionviewers', 'Users who can view "attribute this paper" facilities', 'allow all'),
('paperattributionlinkviewers', 'Users who can see attribution links in the search', 'allow all'),
)
# Demo site roles
DEF_DEMO_ROLES = (('photocurator', 'Photo collection curator', 'deny any'),
('thesesviewer', 'Theses and Drafts viewer', 'allow group "Theses and Drafts viewers"'),
('ALEPHviewer', 'ALEPH viewer', 'allow group "ALEPH viewers"'),
                  ('ISOLDEnotesviewer', 'ISOLDE Internal Notes viewer', 'allow group "ISOLDE Internal Notes viewers"'),
                  ('thesescurator', 'Theses collection curator', 'deny any'),
('swordcurator', 'BibSword client curator', 'deny any'),
('referee_DEMOBOO_*', 'Book collection curator', 'deny any'),
('restrictedpicturesviewer', 'Restricted pictures viewer', 'deny any'),
('curator', 'Curator', 'deny any'),
('basketusers', 'Users who can use baskets', 'deny email "hyde@cds.cern.ch"\nallow any'),
('claimpaperusers', 'Users who can perform changes to their own paper attributions without the need for an operator\'s approval', 'deny email "hyde@cds.cern.ch"\nallow any'),
('submit_DEMOJRN_*', 'Users who can submit (and modify) "Atlantis Times" articles', 'deny all'),
('atlantiseditor', 'Users who can configure "Atlantis Times" journal', 'deny all'),
('commentmoderator', 'Users who can moderate comments', 'deny all'),
('poetrycommentreader', 'Users who can view comments in Poetry collection', 'deny all'))
DEF_DEMO_USER_ROLES = (('jekyll@cds.cern.ch', 'thesesviewer'),
('balthasar.montague@cds.cern.ch', 'ALEPHviewer'),
('dorian.gray@cds.cern.ch', 'ISOLDEnotesviewer'),
('jekyll@cds.cern.ch', 'swordcurator'),
('jekyll@cds.cern.ch', 'claimpaperusers'),
('dorian.gray@cds.cern.ch', 'referee_DEMOBOO_*'),
('balthasar.montague@cds.cern.ch', 'curator'),
('romeo.montague@cds.cern.ch', 'restrictedpicturesviewer'),
('romeo.montague@cds.cern.ch', 'swordcurator'),
('romeo.montague@cds.cern.ch', 'thesescurator'),
('juliet.capulet@cds.cern.ch', 'restrictedpicturesviewer'),
('juliet.capulet@cds.cern.ch', 'photocurator'),
('romeo.montague@cds.cern.ch', 'submit_DEMOJRN_*'),
('juliet.capulet@cds.cern.ch', 'submit_DEMOJRN_*'),
('balthasar.montague@cds.cern.ch', 'atlantiseditor'),
('romeo.montague@cds.cern.ch', 'poetrycommentreader'))
# users
# list of e-mail addresses
DEF_USERS = []
# actions
# name desc allowedkeywords optional
DEF_ACTIONS = (
('cfgwebsearch', 'configure WebSearch', '', 'no'),
('cfgbibformat', 'configure BibFormat', '', 'no'),
('cfgbibknowledge', 'configure BibKnowledge', '', 'no'),
('cfgwebsubmit', 'configure WebSubmit', '', 'no'),
('cfgbibrank', 'configure BibRank', '', 'no'),
('cfgwebcomment', 'configure WebComment', '', 'no'),
('cfgweblinkback', 'configure WebLinkback' , '', 'no'),
('cfgoaiharvest', 'configure OAI Harvest', '', 'no'),
('cfgoairepository', 'configure OAI Repository', '', 'no'),
('cfgbibindex', 'configure BibIndex', '', 'no'),
('cfgbibexport', 'configure BibExport', '', 'no'),
('cfgrobotkeys', 'configure Robot keys', 'login_method,robot', 'yes'),
('cfgbibsort', 'configure BibSort', '', 'no'),
('runbibindex', 'run BibIndex', '', 'no'),
('runbibupload', 'run BibUpload', '', 'no'),
('runwebcoll', 'run webcoll', 'collection', 'yes'),
('runbibformat', 'run BibFormat', 'format', 'yes'),
('runbibclassify', 'run BibClassify', 'taxonomy', 'yes'),
('runbibtaskex', 'run BibTaskEx example', '', 'no'),
('runbibrank', 'run BibRank', '', 'no'),
('runoaiharvest', 'run oaiharvest task', '', 'no'),
('runoairepository', 'run oairepositoryupdater task', '', 'no'),
('runbibedit', 'run Record Editor', 'collection', 'yes'),
('runbibeditmulti', 'run Multi-Record Editor', '', 'no'),
('runbibdocfile', 'run Document File Manager', '', 'no'),
('runbibmerge', 'run Record Merger', '', 'no'),
('runbibswordclient', 'run BibSword client', '', 'no'),
       ('runwebstatadmin', 'run WebStatAdmin', '', 'no'),
('runinveniogc', 'run InvenioGC', '', 'no'),
('runbibexport', 'run BibExport', '', 'no'),
('referee', 'referee document type doctype/category categ', 'doctype,categ', 'yes'),
('submit', 'use webSubmit', 'doctype,act,categ', 'yes'),
('viewrestrdoc', 'view restricted document', 'status', 'no'),
('viewrestrcomment', 'view restricted comment', 'status', 'no'),
(WEBACCESSACTION, 'configure WebAccess', '', 'no'),
(DELEGATEADDUSERROLE, 'delegate subroles inside WebAccess', 'role', 'no'),
(VIEWRESTRCOLL, 'view restricted collection', 'collection', 'no'),
('cfgwebjournal', 'configure WebJournal', 'name,with_editor_rights', 'no'),
('viewcomment', 'view comments', 'collection', 'no'),
('viewlinkbacks', 'view linkbacks', 'collection', 'no'),
('sendcomment', 'send comments', 'collection', 'no'),
('attachcommentfile', 'attach files to comments', 'collection', 'no'),
('attachsubmissionfile', 'upload files to drop box during submission', '', 'no'),
('usebaskets', 'use baskets', '', 'no'),
('useloans', 'use loans', '', 'no'),
('usegroups', 'use groups', '', 'no'),
('usealerts', 'use alerts', '', 'no'),
('usemessages', 'use messages', '', 'no'),
('viewholdings', 'view holdings', 'collection', 'yes'),
('viewstatistics', 'view statistics', 'collection', 'yes'),
('runbibcirculation', 'run BibCirculation', '', 'no'),
('moderatecomments', 'moderate comments', 'collection', 'no'),
('moderatelinkbacks', 'moderate linkbacks', 'collection', 'no'),
('runbatchuploader', 'run batchuploader', 'collection', 'yes'),
('runbibtasklet', 'run BibTaskLet', '', 'no'),
('claimpaper_view_pid_universe', 'View the Claim Paper interface', '', 'no'),
       ('claimpaper_claim_own_papers', 'Claim papers to his own person ID', '', 'no'),
('claimpaper_claim_others_papers', 'Claim papers for others', '', 'no'),
('claimpaper_change_own_data', 'Change data associated to his own person ID', '', 'no'),
('claimpaper_change_others_data', 'Change data of any person ID', '', 'no'),
('cfgbibsched', 'configure BibSched', '', 'no')
)
# Default authorizations
# role action arguments
DEF_AUTHS = (('basketusers', 'usebaskets', {}),
('loanusers', 'useloans', {}),
('groupusers', 'usegroups', {}),
('alertusers', 'usealerts', {}),
('messageusers', 'usemessages', {}),
('holdingsusers', 'viewholdings', {}),
('statisticsusers', 'viewstatistics', {}),
('claimpaperusers', 'claimpaper_view_pid_universe', {}),
('claimpaperoperators', 'claimpaper_view_pid_universe', {}),
('claimpaperusers', 'claimpaper_claim_own_papers', {}),
('claimpaperoperators', 'claimpaper_claim_own_papers', {}),
('claimpaperoperators', 'claimpaper_claim_others_papers', {}),
('claimpaperusers', 'claimpaper_change_own_data', {}),
('claimpaperoperators', 'claimpaper_change_own_data', {}),
('claimpaperoperators', 'claimpaper_change_others_data', {}),
)
# Demo site authorizations
# role action arguments
DEF_DEMO_AUTHS = (
('photocurator', 'runwebcoll', {'collection': 'Pictures'}),
('restrictedpicturesviewer', 'viewrestrdoc', {'status': 'restricted_picture'}),
('thesesviewer', VIEWRESTRCOLL, {'collection': 'Theses'}),
('thesesviewer', VIEWRESTRCOLL, {'collection': 'Drafts'}),
('ALEPHviewer', VIEWRESTRCOLL, {'collection': 'ALEPH Theses'}),
('ALEPHviewer', VIEWRESTRCOLL, {'collection': 'ALEPH Internal Notes'}),
('ISOLDEnotesviewer', VIEWRESTRCOLL, {'collection': 'ISOLDE Internal Notes'}),
('referee_DEMOBOO_*', 'referee', {'doctype': 'DEMOBOO', 'categ': '*'}),
('curator', 'cfgbibknowledge', {}),
('curator', 'runbibedit', {}),
('curator', 'runbibeditmulti', {}),
('curator', 'runbibmerge', {}),
('swordcurator', 'runbibswordclient', {}),
('thesescurator', 'runbibedit', {'collection': 'Theses'}),
('thesescurator', VIEWRESTRCOLL, {'collection': 'Theses'}),
('photocurator', 'runbibedit', {'collection': 'Pictures'}),
('referee_DEMOBOO_*', 'runbibedit', {'collection': 'Books'}),
('submit_DEMOJRN_*', 'submit', {'doctype': 'DEMOJRN', 'act': 'SBI', 'categ': '*'}),
('submit_DEMOJRN_*', 'submit', {'doctype': 'DEMOJRN', 'act': 'MBI', 'categ': '*'}),
('submit_DEMOJRN_*', 'cfgwebjournal', {'name': 'AtlantisTimes', 'with_editor_rights': 'no'}),
('atlantiseditor', 'cfgwebjournal', {'name': 'AtlantisTimes', 'with_editor_rights': 'yes'}),
('referee_DEMOBOO_*', 'runbatchuploader', {'collection': 'Books'}),
('poetrycommentreader', 'viewcomment', {'collection': 'Poetry'}),
('atlantiseditor', VIEWRESTRCOLL, {'collection': 'Atlantis Times Drafts'}),
('anyuser', 'submit', {'doctype': 'DEMOART', 'act': 'SBI', 'categ': 'ARTICLE'}),
)
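# Illustrative check (a sketch, not part of the original module): with the
# authorizations above, Invenio's access-control engine can be asked whether a
# user may perform an action. This assumes the classic acc_authorize_action
# API from invenio.access_control_engine:
#
#   from invenio.access_control_engine import acc_authorize_action
#   auth_code, auth_msg = acc_authorize_action(
#       user_info, 'viewrestrcoll', collection='Theses')
#   # auth_code == 0 means access is granted (e.g. via the 'thesesviewer' role)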
_ = gettext_set_language(CFG_SITE_LANG)
# Activities (i.e. actions) for which an administrative web interface exists.
CFG_ACC_ACTIVITIES_URLS = {
'runbibedit' : (_("Run Record Editor"), "%s/%s/edit/?ln=%%s" % (CFG_SITE_URL, CFG_SITE_RECORD)),
'runbibeditmulti' : (_("Run Multi-Record Editor"), "%s/%s/multiedit/?ln=%%s" % (CFG_SITE_URL, CFG_SITE_RECORD)),
'runbibdocfile' : (_("Run Document File Manager"), "%s/%s/managedocfiles?ln=%%s" % (CFG_SITE_URL, CFG_SITE_RECORD)),
'runbibmerge' : (_("Run Record Merger"), "%s/%s/merge/?ln=%%s" % (CFG_SITE_URL, CFG_SITE_RECORD)),
'runbibswordclient' : (_("Run BibSword client"), "%s/bibsword/?ln=%%s" % CFG_SITE_URL),
'cfgbibknowledge' : (_("Configure BibKnowledge"), "%s/kb?ln=%%s" % CFG_SITE_URL),
'cfgbibformat' : (_("Configure BibFormat"), "%s/admin/bibformat/bibformatadmin.py?ln=%%s" % CFG_SITE_URL),
'cfgoaiharvest' : (_("Configure OAI Harvest"), "%s/admin/oaiharvest/oaiharvestadmin.py?ln=%%s" % CFG_SITE_URL),
'cfgoairepository' : (_("Configure OAI Repository"), "%s/admin/oairepository/oairepositoryadmin.py?ln=%%s" % CFG_SITE_URL),
'cfgbibindex' : (_("Configure BibIndex"), "%s/admin/bibindex/bibindexadmin.py?ln=%%s" % CFG_SITE_URL),
'cfgbibrank' : (_("Configure BibRank"), "%s/admin/bibrank/bibrankadmin.py?ln=%%s" % CFG_SITE_URL),
'cfgwebaccess' : (_("Configure WebAccess"), "%s/admin/webaccess/webaccessadmin.py?ln=%%s" % CFG_SITE_URL),
'cfgwebcomment' : (_("Configure WebComment"), "%s/admin/webcomment/webcommentadmin.py?ln=%%s" % CFG_SITE_URL),
'cfgweblinkback' : (_("Configure WebLinkback"), "%s/admin/weblinkback/weblinkbackadmin.py?ln=%%s" % CFG_SITE_URL),
'cfgwebsearch' : (_("Configure WebSearch"), "%s/admin/websearch/websearchadmin.py?ln=%%s" % CFG_SITE_URL),
'cfgwebsubmit' : (_("Configure WebSubmit"), "%s/admin/websubmit/websubmitadmin.py?ln=%%s" % CFG_SITE_URL),
'cfgwebjournal' : (_("Configure WebJournal"), "%s/admin/webjournal/webjournaladmin.py?ln=%%s" % CFG_SITE_URL),
'cfgbibsort' : (_("Configure BibSort"), "%s/admin/bibsort/bibsortadmin.py?ln=%%s" % CFG_SITE_URL),
'runbibcirculation' : (_("Run BibCirculation"), "%s/admin/bibcirculation/bibcirculationadmin.py?ln=%%s" % CFG_SITE_URL),
'runbatchuploader' : (_("Run Batch Uploader"), "%s/batchuploader/metadata?ln=%%s" % CFG_SITE_URL),
'claimpaper_claim_others_papers' : (_("Run Person/Author Manager"), "%s/person/search?ln=%%s" % CFG_SITE_URL)
}
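# Example (illustrative): each URL template above escapes the language
# placeholder as '%%s' so it survives the first interpolation; e.g.
# CFG_ACC_ACTIVITIES_URLS['runbibedit'][1] still contains a literal '%s'
# and is only filled in later, as in url_tpl % 'en'.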
CFG_WEBACCESS_MSGS = {
0: 'Try to <a href="%s/youraccount/login?referer=%%s">login</a> with another account.' % (CFG_SITE_SECURE_URL),
1: '<br />If you think this is not correct, please contact: <a href="mailto:%s">%s</a>' % (CFG_SITE_SUPPORT_EMAIL, CFG_SITE_SUPPORT_EMAIL),
2: '<br />If you have any questions, please write to <a href="mailto:%s">%s</a>' % (CFG_SITE_SUPPORT_EMAIL, CFG_SITE_SUPPORT_EMAIL),
3: 'Guest users are not allowed, please <a href="%s/youraccount/login">login</a>.' % CFG_SITE_SECURE_URL,
4: 'The site is temporarily closed for maintenance. Please come back soon.',
5: 'Authorization failure',
6: '%s temporarily closed' % CFG_SITE_NAME,
7: 'This functionality is temporarily closed due to server maintenance. Please use only the search engine in the meantime.',
8: 'Functionality temporarily closed'
}
CFG_WEBACCESS_WARNING_MSGS = {
0: 'Authorization granted',
1: 'You are not authorized to perform this action.',
2: 'You are not authorized to perform any action.',
3: 'The action %s does not exist.',
4: 'Unexpected error occurred.',
5: 'Missing mandatory keyword argument(s) for this action.',
6: 'Guest accounts are not authorized to perform this action.',
7: 'Not enough arguments, user ID and action name required.',
8: 'Incorrect keyword argument(s) for this action.',
9: """Account '%s' is not yet activated.""",
10: """You were not authorized by the authentication method '%s'.""",
11: """The selected login method '%s' is not the default method for this account, please try another one.""",
12: """Selected login method '%s' does not exist.""",
13: """Could not register '%s' account.""",
14: """Could not login using '%s', because this user is unknown.""",
15: """Could not login using your '%s' account, because you have introduced a wrong password.""",
16: """External authentication troubles using '%s' (maybe temporary network problems).""",
17: """You have not yet confirmed the email address for the '%s' authentication method.""",
18: """The administrator has not yet activated your account for the '%s' authentication method.""",
19: """The site is having troubles in sending you an email for confirming your email address. The error has been logged and will be taken care of as soon as possible.""",
20: """No roles are authorized to perform action %s with the given parameters.""",
21: """Verification cancelled""",
22: """Verification failed. Please try again or use another provider to login""",
23: """Verification failed. It is probably because the configuration isn't set properly. Please contact with the <a href="mailto:%s">administator</a>""" % CFG_SITE_ADMIN_EMAIL
}
# There are three status keys that must be here: OK, REMOVED and REVOKED;
# the value doesn't matter at all.
CFG_WEB_API_KEY_STATUS = {
'OK':'OK',
'REMOVED':'REMOVED',
'REVOKED':'REVOKED',
'WARNING':'WARNING'
}
| AlbertoPeon/invenio | modules/webaccess/lib/access_control_config.py | Python | gpl-2.0 | 35,364 |
# (C) British Crown Copyright 2014 - 2015, Met Office
#
# This file is part of Iris.
#
# Iris is free software: you can redistribute it and/or modify it under
# the terms of the GNU Lesser General Public License as published by the
# Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Iris is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with Iris. If not, see <http://www.gnu.org/licenses/>.
"""Unit tests for the `iris.quickplot.points` function."""
from __future__ import (absolute_import, division, print_function)
from six.moves import (filter, input, map, range, zip) # noqa
# Import iris.tests first so that some things can be initialised before
# importing anything else.
import iris.tests as tests
from iris.tests.unit.plot import TestGraphicStringCoord
if tests.MPL_AVAILABLE:
import iris.quickplot as qplt
@tests.skip_plot
class TestStringCoordPlot(TestGraphicStringCoord):
def test_yaxis_labels(self):
qplt.points(self.cube, coords=('bar', 'str_coord'))
self.assertBoundsTickLabels('yaxis')
def test_xaxis_labels(self):
qplt.points(self.cube, coords=('str_coord', 'bar'))
self.assertBoundsTickLabels('xaxis')
if __name__ == "__main__":
tests.main()
| decvalts/iris | lib/iris/tests/unit/quickplot/test_points.py | Python | gpl-3.0 | 1,543 |
#
# Copyright 2006,2007 Free Software Foundation, Inc.
#
# This file is part of GNU Radio
#
# GNU Radio is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3, or (at your option)
# any later version.
#
# GNU Radio is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with GNU Radio; see the file COPYING. If not, write to
# the Free Software Foundation, Inc., 51 Franklin Street,
# Boston, MA 02110-1301, USA.
#
from gnuradio import gr, optfir
from gnuradio.blks2impl.fm_emph import fm_deemph
from math import pi
class fm_demod_cf(gr.hier_block2):
"""
Generalized FM demodulation block with deemphasis and audio
filtering.
This block demodulates a band-limited, complex down-converted FM
    channel into the original baseband signal, optionally applying
    deemphasis. Low pass filtering is done on the resultant signal. It
    produces an output float stream in the range of [-1.0, +1.0].
@param channel_rate: incoming sample rate of the FM baseband
    @type channel_rate: integer
@param deviation: maximum FM deviation (default = 5000)
@type deviation: float
@param audio_decim: input to output decimation rate
@type audio_decim: integer
@param audio_pass: audio low pass filter passband frequency
@type audio_pass: float
@param audio_stop: audio low pass filter stop frequency
@type audio_stop: float
@param gain: gain applied to audio output (default = 1.0)
@type gain: float
@param tau: deemphasis time constant (default = 75e-6), specify 'None'
to prevent deemphasis
"""
def __init__(self, channel_rate, audio_decim, deviation,
audio_pass, audio_stop, gain=1.0, tau=75e-6):
gr.hier_block2.__init__(self, "fm_demod_cf",
gr.io_signature(1, 1, gr.sizeof_gr_complex), # Input signature
gr.io_signature(1, 1, gr.sizeof_float)) # Output signature
k = channel_rate/(2*pi*deviation)
QUAD = gr.quadrature_demod_cf(k)
audio_taps = optfir.low_pass(gain, # Filter gain
channel_rate, # Sample rate
audio_pass, # Audio passband
audio_stop, # Audio stopband
0.1, # Passband ripple
60) # Stopband attenuation
LPF = gr.fir_filter_fff(audio_decim, audio_taps)
if tau is not None:
DEEMPH = fm_deemph(channel_rate, tau)
self.connect(self, QUAD, DEEMPH, LPF, self)
else:
self.connect(self, QUAD, LPF, self)
class demod_20k0f3e_cf(fm_demod_cf):
"""
NBFM demodulation block, 20 KHz channels
This block demodulates a complex, downconverted, narrowband FM
channel conforming to 20K0F3E emission standards, outputting
floats in the range [-1.0, +1.0].
    @param channel_rate: incoming sample rate of the FM baseband
    @type channel_rate: integer
@param audio_decim: input to output decimation rate
@type audio_decim: integer
"""
def __init__(self, channel_rate, audio_decim):
fm_demod_cf.__init__(self, channel_rate, audio_decim,
5000, # Deviation
3000, # Audio passband frequency
4500) # Audio stopband frequency
class demod_200kf3e_cf(fm_demod_cf):
"""
WFM demodulation block, mono.
This block demodulates a complex, downconverted, wideband FM
channel conforming to 200KF3E emission standards, outputting
floats in the range [-1.0, +1.0].
    @param channel_rate: incoming sample rate of the FM baseband
    @type channel_rate: integer
@param audio_decim: input to output decimation rate
@type audio_decim: integer
"""
def __init__(self, channel_rate, audio_decim):
fm_demod_cf.__init__(self, channel_rate, audio_decim,
75000, # Deviation
15000, # Audio passband
16000, # Audio stopband
20.0) # Audio gain
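# Usage sketch (illustrative only; assumes a GNU Radio 3.x environment, a
# complex FM-baseband source 'src' at 320 kS/s and a sound card sink):
#
#   from gnuradio import gr, audio
#
#   tb = gr.top_block()
#   demod = demod_200kf3e_cf(channel_rate=320000, audio_decim=10)  # 32 kS/s out
#   snk = audio.sink(32000)
#   tb.connect(src, demod, snk)
#   tb.run()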
| n4hy/gnuradio | gnuradio-core/src/python/gnuradio/blks2impl/fm_demod.py | Python | gpl-3.0 | 4,236 |
# -*- coding: utf-8 -*-
# Copyright (c) 2016, Frappe Technologies Pvt. Ltd. and contributors
# For license information, please see license.txt
from __future__ import unicode_literals
import frappe
from frappe import _
from frappe.utils import flt, add_months, cint, nowdate, getdate
from frappe.model.document import Document
from erpnext.accounts.doctype.purchase_invoice.purchase_invoice import get_fixed_asset_account
from erpnext.accounts.doctype.asset.depreciation \
import get_disposal_account_and_cost_center, get_depreciation_accounts
class Asset(Document):
def validate(self):
self.status = self.get_status()
self.validate_item()
self.set_missing_values()
self.validate_asset_values()
self.make_depreciation_schedule()
self.set_accumulated_depreciation()
if self.get("schedules"):
self.validate_expected_value_after_useful_life()
# Validate depreciation related accounts
get_depreciation_accounts(self)
def on_submit(self):
self.set_status()
def on_cancel(self):
self.validate_cancellation()
self.delete_depreciation_entries()
self.set_status()
def validate_item(self):
item = frappe.db.get_value("Item", self.item_code,
["is_fixed_asset", "is_stock_item", "disabled"], as_dict=1)
if not item:
frappe.throw(_("Item {0} does not exist").format(self.item_code))
elif item.disabled:
frappe.throw(_("Item {0} has been disabled").format(self.item_code))
elif not item.is_fixed_asset:
frappe.throw(_("Item {0} must be a Fixed Asset Item").format(self.item_code))
elif item.is_stock_item:
frappe.throw(_("Item {0} must be a non-stock item").format(self.item_code))
def set_missing_values(self):
if self.item_code:
item_details = get_item_details(self.item_code)
for field, value in item_details.items():
if not self.get(field):
self.set(field, value)
self.value_after_depreciation = (flt(self.gross_purchase_amount) -
flt(self.opening_accumulated_depreciation))
def validate_asset_values(self):
if flt(self.expected_value_after_useful_life) >= flt(self.gross_purchase_amount):
frappe.throw(_("Expected Value After Useful Life must be less than Gross Purchase Amount"))
if not flt(self.gross_purchase_amount):
frappe.throw(_("Gross Purchase Amount is mandatory"), frappe.MandatoryError)
if not self.is_existing_asset:
self.opening_accumulated_depreciation = 0
self.number_of_depreciations_booked = 0
if not self.next_depreciation_date:
frappe.throw(_("Next Depreciation Date is mandatory for new asset"))
else:
depreciable_amount = flt(self.gross_purchase_amount) - flt(self.expected_value_after_useful_life)
if flt(self.opening_accumulated_depreciation) > depreciable_amount:
frappe.throw(_("Opening Accumulated Depreciation must be less than equal to {0}")
.format(depreciable_amount))
if self.opening_accumulated_depreciation:
if not self.number_of_depreciations_booked:
frappe.throw(_("Please set Number of Depreciations Booked"))
else:
self.number_of_depreciations_booked = 0
if cint(self.number_of_depreciations_booked) > cint(self.total_number_of_depreciations):
frappe.throw(_("Number of Depreciations Booked cannot be greater than Total Number of Depreciations"))
if self.next_depreciation_date and getdate(self.next_depreciation_date) < getdate(nowdate()):
frappe.msgprint(_("Next Depreciation Date is entered as past date"), title=_('Warning'), indicator='red')
if self.next_depreciation_date and getdate(self.next_depreciation_date) < getdate(self.purchase_date):
frappe.throw(_("Next Depreciation Date cannot be before Purchase Date"))
if (flt(self.value_after_depreciation) > flt(self.expected_value_after_useful_life)
and not self.next_depreciation_date):
frappe.throw(_("Please set Next Depreciation Date"))
def make_depreciation_schedule(self):
if self.depreciation_method != 'Manual':
self.schedules = []
if not self.get("schedules") and self.next_depreciation_date:
value_after_depreciation = flt(self.value_after_depreciation)
number_of_pending_depreciations = cint(self.total_number_of_depreciations) - \
cint(self.number_of_depreciations_booked)
if number_of_pending_depreciations:
for n in xrange(number_of_pending_depreciations):
schedule_date = add_months(self.next_depreciation_date,
n * cint(self.frequency_of_depreciation))
depreciation_amount = self.get_depreciation_amount(value_after_depreciation)
value_after_depreciation -= flt(depreciation_amount)
self.append("schedules", {
"schedule_date": schedule_date,
"depreciation_amount": depreciation_amount
})
def set_accumulated_depreciation(self):
accumulated_depreciation = flt(self.opening_accumulated_depreciation)
value_after_depreciation = flt(self.value_after_depreciation)
for i, d in enumerate(self.get("schedules")):
depreciation_amount = flt(d.depreciation_amount, d.precision("depreciation_amount"))
value_after_depreciation -= flt(depreciation_amount)
if i==len(self.get("schedules"))-1 and self.depreciation_method == "Straight Line":
depreciation_amount += flt(value_after_depreciation - flt(self.expected_value_after_useful_life),
d.precision("depreciation_amount"))
d.depreciation_amount = depreciation_amount
accumulated_depreciation += d.depreciation_amount
d.accumulated_depreciation_amount = flt(accumulated_depreciation, d.precision("accumulated_depreciation_amount"))
def get_depreciation_amount(self, depreciable_value):
if self.depreciation_method in ("Straight Line", "Manual"):
depreciation_amount = (flt(self.value_after_depreciation) -
flt(self.expected_value_after_useful_life)) / (cint(self.total_number_of_depreciations) -
cint(self.number_of_depreciations_booked))
else:
factor = 200.0 / self.total_number_of_depreciations
depreciation_amount = flt(depreciable_value * factor / 100, 0)
value_after_depreciation = flt(depreciable_value) - depreciation_amount
if value_after_depreciation < flt(self.expected_value_after_useful_life):
depreciation_amount = flt(depreciable_value) - flt(self.expected_value_after_useful_life)
return depreciation_amount
def validate_expected_value_after_useful_life(self):
accumulated_depreciation_after_full_schedule = \
max([d.accumulated_depreciation_amount for d in self.get("schedules")])
asset_value_after_full_schedule = (flt(self.gross_purchase_amount) -
flt(accumulated_depreciation_after_full_schedule))
if self.expected_value_after_useful_life < asset_value_after_full_schedule:
frappe.throw(_("Expected value after useful life must be greater than or equal to {0}")
.format(asset_value_after_full_schedule))
def validate_cancellation(self):
if self.status not in ("Submitted", "Partially Depreciated", "Fully Depreciated"):
frappe.throw(_("Asset cannot be cancelled, as it is already {0}").format(self.status))
if self.purchase_invoice:
frappe.throw(_("Please cancel Purchase Invoice {0} first").format(self.purchase_invoice))
def delete_depreciation_entries(self):
for d in self.get("schedules"):
if d.journal_entry:
frappe.get_doc("Journal Entry", d.journal_entry).cancel()
d.db_set("journal_entry", None)
self.db_set("value_after_depreciation",
(flt(self.gross_purchase_amount) - flt(self.opening_accumulated_depreciation)))
def set_status(self, status=None):
'''Get and update status'''
if not status:
status = self.get_status()
self.db_set("status", status)
def get_status(self):
'''Returns status based on whether it is draft, submitted, scrapped or depreciated'''
if self.docstatus == 0:
status = "Draft"
elif self.docstatus == 1:
status = "Submitted"
if self.journal_entry_for_scrap:
status = "Scrapped"
elif flt(self.value_after_depreciation) <= flt(self.expected_value_after_useful_life):
status = "Fully Depreciated"
elif flt(self.value_after_depreciation) < flt(self.gross_purchase_amount):
status = 'Partially Depreciated'
elif self.docstatus == 2:
status = "Cancelled"
return status
@frappe.whitelist()
def make_purchase_invoice(asset, item_code, gross_purchase_amount, company, posting_date):
pi = frappe.new_doc("Purchase Invoice")
pi.company = company
pi.currency = frappe.db.get_value("Company", company, "default_currency")
pi.set_posting_time = 1
pi.posting_date = posting_date
pi.append("items", {
"item_code": item_code,
"is_fixed_asset": 1,
"asset": asset,
"expense_account": get_fixed_asset_account(asset),
"qty": 1,
"price_list_rate": gross_purchase_amount,
"rate": gross_purchase_amount
})
pi.set_missing_values()
return pi
@frappe.whitelist()
def make_sales_invoice(asset, item_code, company):
si = frappe.new_doc("Sales Invoice")
si.company = company
si.currency = frappe.db.get_value("Company", company, "default_currency")
disposal_account, depreciation_cost_center = get_disposal_account_and_cost_center(company)
si.append("items", {
"item_code": item_code,
"is_fixed_asset": 1,
"asset": asset,
"income_account": disposal_account,
"cost_center": depreciation_cost_center,
"qty": 1
})
si.set_missing_values()
return si
@frappe.whitelist()
def transfer_asset(args):
import json
args = json.loads(args)
movement_entry = frappe.new_doc("Asset Movement")
movement_entry.update(args)
movement_entry.insert()
movement_entry.submit()
frappe.db.commit()
frappe.msgprint(_("Asset Movement record {0} created").format("<a href='#Form/Asset Movement/{0}'>{0}</a>".format(movement_entry.name)))
@frappe.whitelist()
def get_item_details(item_code):
asset_category = frappe.db.get_value("Item", item_code, "asset_category")
if not asset_category:
frappe.throw(_("Please enter Asset Category in Item {0}").format(item_code))
ret = frappe.db.get_value("Asset Category", asset_category,
["depreciation_method", "total_number_of_depreciations", "frequency_of_depreciation"], as_dict=1)
ret.update({
"asset_category": asset_category
})
return ret
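# Worked example (illustrative, not part of the original module): for a
# straight-line schedule with gross_purchase_amount=10000,
# expected_value_after_useful_life=1000, total_number_of_depreciations=9 and
# nothing booked yet, get_depreciation_amount() yields
# (10000 - 1000) / 9 == 1000 per period, so make_depreciation_schedule()
# appends nine rows and value_after_depreciation falls from 10000 to 1000.
# With the double-declining method the per-period factor is 200/9 ~= 22.2%
# of the remaining value, clamped so the asset never drops below
# expected_value_after_useful_life.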
| emakis/erpnext | erpnext/accounts/doctype/asset/asset.py | Python | gpl-3.0 | 10,030 |
#!/usr/bin/python
# -*- coding: utf-8 -*-
# Copyright: (c) 2017, Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
# This is a virtual module that is entirely implemented as an action plugin and runs on the controller
from __future__ import absolute_import, division, print_function
__metaclass__ = type
DOCUMENTATION = r'''
---
module: fetch
short_description: Fetch files from remote nodes
description:
- This module works like M(copy), but in reverse.
- It is used for fetching files from remote machines and storing them locally in a file tree, organized by hostname.
- Files that already exist at I(dest) will be overwritten if they are different than the I(src).
- This module is also supported for Windows targets.
version_added: '0.2'
options:
src:
description:
- The file on the remote system to fetch.
- This I(must) be a file, not a directory.
- Recursive fetching may be supported in a later release.
required: yes
dest:
description:
- A directory to save the file into.
- For example, if the I(dest) directory is C(/backup) a I(src) file named C(/etc/profile) on host
C(host.example.com), would be saved into C(/backup/host.example.com/etc/profile).
The host name is based on the inventory name.
required: yes
fail_on_missing:
version_added: '1.1'
description:
- When set to C(yes), the task will fail if the remote file cannot be read for any reason.
- Prior to Ansible 2.5, setting this would only fail if the source file was missing.
- The default was changed to C(yes) in Ansible 2.5.
type: bool
default: yes
validate_checksum:
version_added: '1.4'
description:
- Verify that the source and destination checksums match after the files are fetched.
type: bool
default: yes
flat:
version_added: '1.2'
description:
- Allows you to override the default behavior of appending hostname/path/to/file to the destination.
- If C(dest) ends with '/', it will use the basename of the source file, similar to the copy module.
- This can be useful if working with a single host, or if retrieving files that are uniquely named per host.
- If using multiple hosts with the same filename, the file will be overwritten for each host.
type: bool
default: no
notes:
- When running fetch with C(become), the M(slurp) module will also be
used to fetch the contents of the file for determining the remote
checksum. This effectively doubles the transfer size, and
depending on the file size can consume all available memory on the
remote or local hosts causing a C(MemoryError). Due to this it is
advisable to run this module without C(become) whenever possible.
- Prior to Ansible 2.5 this module would not fail if reading the remote
file was impossible unless C(fail_on_missing) was set.
- In Ansible 2.5 or later, playbook authors are encouraged to use
C(fail_when) or C(ignore_errors) to get this ability. They may
also explicitly set C(fail_on_missing) to C(no) to get the
non-failing behaviour.
- This module is also supported for Windows targets.
seealso:
- module: copy
- module: slurp
author:
- Ansible Core Team
- Michael DeHaan
'''
EXAMPLES = r'''
- name: Store file into /tmp/fetched/host.example.com/tmp/somefile
fetch:
src: /tmp/somefile
dest: /tmp/fetched
- name: Specifying a path directly
fetch:
src: /tmp/somefile
dest: /tmp/prefix-{{ inventory_hostname }}
flat: yes
- name: Specifying a destination path
fetch:
src: /tmp/uniquefile
dest: /tmp/special/
flat: yes
- name: Storing in a path relative to the playbook
fetch:
src: /tmp/uniquefile
dest: special/prefix-{{ inventory_hostname }}
flat: yes
'''
| indrajitr/ansible | lib/ansible/modules/fetch.py | Python | gpl-3.0 | 3,790 |
# encoding: utf-8
#
#
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this file,
# You can obtain one at http://mozilla.org/MPL/2.0/.
#
# Author: Kyle Lahnakoski (kyle@lahnakoski.com)
#
from __future__ import unicode_literals
from __future__ import division
from __future__ import absolute_import
import smtplib
import sys
from email.mime.multipart import MIMEMultipart
from email.mime.text import MIMEText
from mo_logs import Log
from mo_dots import listwrap
from mo_dots import coalesce
from mo_kwargs import override
class Emailer:
@override
def __init__(
self,
from_address,
to_address,
host,
username,
password,
subject="catchy title",
port=465,
use_ssl=1,
kwargs=None
):
self.settings = kwargs
self.server = None
def __enter__(self):
if self.server is not None:
Log.error("Got a problem")
if self.settings.use_ssl:
self.server = smtplib.SMTP_SSL(self.settings.host, self.settings.port)
else:
self.server = smtplib.SMTP(self.settings.host, self.settings.port)
if self.settings.username and self.settings.password:
self.server.login(self.settings.username, self.settings.password)
return self
def __exit__(self, type, value, traceback):
try:
self.server.quit()
except Exception as e:
Log.warning("Problem with smtp server quit(), ignoring problem", e)
self.server = None
def send_email(self,
from_address=None,
to_address=None,
subject=None,
text_data=None,
html_data=None
):
"""Sends an email.
        from_addr is an email address; to_addrs is a list of email addresses.
Addresses can be plain (e.g. "jsmith@example.com") or with real names
(e.g. "John Smith <jsmith@example.com>").
text_data and html_data are both strings. You can specify one or both.
If you specify both, the email will be sent as a MIME multipart
alternative, i.e., the recipient will see the HTML content if his
viewer supports it; otherwise he'll see the text content.
"""
settings = self.settings
from_address = coalesce(from_address, settings["from"], settings.from_address)
to_address = listwrap(coalesce(to_address, settings.to_address, settings.to_addrs))
if not from_address or not to_address:
raise Exception("Both from_addr and to_addrs must be specified")
if not text_data and not html_data:
raise Exception("Must specify either text_data or html_data")
if not html_data:
msg = MIMEText(text_data)
elif not text_data:
msg = MIMEText(html_data, 'html')
else:
msg = MIMEMultipart('alternative')
msg.attach(MIMEText(text_data, 'plain'))
msg.attach(MIMEText(html_data, 'html'))
msg['Subject'] = coalesce(subject, settings.subject)
msg['From'] = from_address
msg['To'] = ', '.join(to_address)
if self.server:
# CALL AS PART OF A SMTP SESSION
self.server.sendmail(from_address, to_address, msg.as_string())
else:
# CALL AS STAND-ALONE
with self:
self.server.sendmail(from_address, to_address, msg.as_string())
if sys.hexversion < 0x020603f0:
# versions earlier than 2.6.3 have a bug in smtplib when sending over SSL:
# http://bugs.python.org/issue4066
# Unfortunately the stock version of Python in Snow Leopard is 2.6.1, so
# we patch it here to avoid having to install an updated Python version.
import socket
import ssl
def _get_socket_fixed(self, host, port, timeout):
if self.debuglevel > 0:
print>> sys.stderr, 'connect:', (host, port)
new_socket = socket.create_connection((host, port), timeout)
new_socket = ssl.wrap_socket(new_socket, self.keyfile, self.certfile)
self.file = smtplib.SSLFakeFile(new_socket)
return new_socket
smtplib.SMTP_SSL._get_socket = _get_socket_fixed
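# Minimal usage sketch (hypothetical settings, not part of the original module):
#
#   emailer = Emailer(
#       from_address="alerts@example.com",
#       to_address=["ops@example.com"],
#       host="smtp.example.com",
#       username="alerts",
#       password="secret",
#       subject="nightly report",
#   )
#   with emailer as mail:
#       mail.send_email(text_data="all systems nominal")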
| klahnakoski/Bugzilla-ETL | vendor/pyLibrary/env/emailer.py | Python | mpl-2.0 | 4,306 |
# -*- coding: utf-8 -*-
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('catalog', '0001_initial'),
]
operations = [
migrations.AddField(
model_name='catalogintegration',
name='service_username',
field=models.CharField(default=u'lms_catalog_service_user', help_text='Username created for Course Catalog Integration, e.g. lms_catalog_service_user.', max_length=100),
),
]
| edx-solutions/edx-platform | openedx/core/djangoapps/catalog/migrations/0002_catalogintegration_username.py | Python | agpl-3.0 | 503 |
from __future__ import unicode_literals
def execute():
"""Make standard print formats readonly for system manager"""
import webnotes.model.doc
new_perms = [
{
'parent': 'Print Format',
'parentfield': 'permissions',
'parenttype': 'DocType',
'role': 'System Manager',
'permlevel': 1,
'read': 1,
},
{
'parent': 'Print Format',
'parentfield': 'permissions',
'parenttype': 'DocType',
'role': 'Administrator',
'permlevel': 1,
'read': 1,
'write': 1
},
]
for perms in new_perms:
doc = webnotes.model.doc.Document('DocPerm')
doc.fields.update(perms)
doc.save()
webnotes.conn.commit()
webnotes.conn.begin()
webnotes.reload_doc('core', 'doctype', 'print_format') | gangadhar-kadam/mtn-erpnext | patches/may_2012/std_pf_readonly.py | Python | agpl-3.0 | 718 |
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from keystoneclient import exceptions
from heat.common import exception
from heat.common import heat_keystoneclient as hkc
from heat.engine.clients import client_plugin
from heat.engine import constraints
class KeystoneClientPlugin(client_plugin.ClientPlugin):
exceptions_module = exceptions
service_types = [IDENTITY] = ['identity']
def _create(self):
return hkc.KeystoneClient(self.context)
def is_not_found(self, ex):
return isinstance(ex, exceptions.NotFound)
def is_over_limit(self, ex):
return isinstance(ex, exceptions.RequestEntityTooLarge)
def is_conflict(self, ex):
return isinstance(ex, exceptions.Conflict)
def get_role_id(self, role):
try:
role_obj = self.client().client.roles.get(role)
return role_obj.id
except exceptions.NotFound:
role_list = self.client().client.roles.list(name=role)
for role_obj in role_list:
if role_obj.name == role:
return role_obj.id
raise exception.EntityNotFound(entity='KeystoneRole', name=role)
def get_project_id(self, project):
try:
project_obj = self.client().client.projects.get(project)
return project_obj.id
except exceptions.NotFound:
project_list = self.client().client.projects.list(name=project)
for project_obj in project_list:
if project_obj.name == project:
return project_obj.id
raise exception.EntityNotFound(entity='KeystoneProject',
name=project)
def get_domain_id(self, domain):
try:
domain_obj = self.client().client.domains.get(domain)
return domain_obj.id
except exceptions.NotFound:
domain_list = self.client().client.domains.list(name=domain)
for domain_obj in domain_list:
if domain_obj.name == domain:
return domain_obj.id
raise exception.EntityNotFound(entity='KeystoneDomain', name=domain)
def get_group_id(self, group):
try:
group_obj = self.client().client.groups.get(group)
return group_obj.id
except exceptions.NotFound:
group_list = self.client().client.groups.list(name=group)
for group_obj in group_list:
if group_obj.name == group:
return group_obj.id
raise exception.EntityNotFound(entity='KeystoneGroup', name=group)
def get_service_id(self, service):
try:
service_obj = self.client().client.services.get(service)
return service_obj.id
except exceptions.NotFound:
service_list = self.client().client.services.list(name=service)
if len(service_list) == 1:
return service_list[0].id
elif len(service_list) > 1:
raise exception.KeystoneServiceNameConflict(service=service)
else:
raise exception.EntityNotFound(entity='KeystoneService',
name=service)
def get_user_id(self, user):
try:
user_obj = self.client().client.users.get(user)
return user_obj.id
except exceptions.NotFound:
user_list = self.client().client.users.list(name=user)
for user_obj in user_list:
if user_obj.name == user:
return user_obj.id
raise exception.EntityNotFound(entity='KeystoneUser', name=user)
class KeystoneRoleConstraint(constraints.BaseCustomConstraint):
expected_exceptions = (exception.EntityNotFound,)
def validate_with_client(self, client, role):
client.client_plugin('keystone').get_role_id(role)
class KeystoneDomainConstraint(constraints.BaseCustomConstraint):
expected_exceptions = (exception.EntityNotFound,)
def validate_with_client(self, client, domain):
client.client_plugin('keystone').get_domain_id(domain)
class KeystoneProjectConstraint(constraints.BaseCustomConstraint):
expected_exceptions = (exception.EntityNotFound,)
def validate_with_client(self, client, project):
client.client_plugin('keystone').get_project_id(project)
class KeystoneGroupConstraint(constraints.BaseCustomConstraint):
expected_exceptions = (exception.EntityNotFound,)
def validate_with_client(self, client, group):
client.client_plugin('keystone').get_group_id(group)
class KeystoneServiceConstraint(constraints.BaseCustomConstraint):
expected_exceptions = (exception.EntityNotFound,
exception.KeystoneServiceNameConflict,)
def validate_with_client(self, client, service):
client.client_plugin('keystone').get_service_id(service)
class KeystoneUserConstraint(constraints.BaseCustomConstraint):
expected_exceptions = (exception.EntityNotFound,)
def validate_with_client(self, client, user):
client.client_plugin('keystone').get_user_id(user)
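# Illustrative lookup (a sketch, not part of the original module; in Heat the
# plugin is normally obtained through a clients object rather than constructed
# directly, and the role name below is hypothetical):
#
#   plugin = clients.client_plugin('keystone')
#   role_id = plugin.get_role_id('heat_stack_user')
#   # get_role_id() falls back to a lookup by name and raises
#   # exception.EntityNotFound when nothing matches.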
| cryptickp/heat | heat/engine/clients/os/keystone.py | Python | apache-2.0 | 5,676 |
from threading import Timer
class RepeatedTimer(object):
def __init__(self, interval, function, *args, **kwargs):
self._timer = None
self.interval = interval
self.function = function
self.args = args
self.kwargs = kwargs
self.is_running = False
self.start()
def _run(self):
self.is_running = False
self.start()
self.function(*self.args, **self.kwargs)
def start(self):
if not self.is_running:
self._timer = Timer(self.interval, self._run)
self._timer.start()
self.is_running = True
def stop(self):
self._timer.cancel()
self.is_running = False
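# Usage sketch (illustrative): emit a heartbeat every 5 seconds until stopped.
# Note that _run() re-arms the timer *before* invoking the callback, so a slow
# callback does not delay the next tick.
#
#   import time
#
#   def heartbeat(msg):
#       print(msg)
#
#   rt = RepeatedTimer(5, heartbeat, "tick")  # starts ticking on construction
#   try:
#       time.sleep(20)
#   finally:
#       rt.stop()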
| tecdct2941/nxos_dashboard | repeated_timer.py | Python | apache-2.0 | 721 |
# -*- coding: utf-8 -*-
"""
Covenant Add-on
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
import re
import urlparse
from resources.lib.modules import cleantitle
from resources.lib.modules import client
from resources.lib.modules import dom_parser
from resources.lib.modules import source_utils
class source:
def __init__(self):
self.priority = 1
self.language = ['de']
self.genre_filter = ['horror']
self.domains = ['horrorkino.do.am']
self.base_link = 'http://horrorkino.do.am/'
self.search_link = 'video/shv'
def movie(self, imdb, title, localtitle, aliases, year):
try:
url = self.__search([localtitle] + source_utils.aliases_to_array(aliases), year)
if not url and title != localtitle: url = self.__search([title] + source_utils.aliases_to_array(aliases), year)
return url
except:
return
def sources(self, url, hostDict, hostprDict):
sources = []
try:
if not url:
return sources
r = client.request(urlparse.urljoin(self.base_link, url))
r = re.findall('''vicode\s*=\s*["'](.*?)["'];''', r)[0].decode('string_escape')
r = dom_parser.parse_dom(r, 'iframe', req='src')
r = [i.attrs['src'] for i in r]
for i in r:
valid, host = source_utils.is_host_valid(i, hostDict)
if not valid: continue
sources.append({'source': host, 'quality': 'SD', 'language': 'de', 'url': i, 'direct': False, 'debridonly': False, 'checkquality': True})
return sources
except:
return sources
def resolve(self, url):
return url
def __search(self, titles, year):
try:
t = [cleantitle.get(i) for i in set(titles) if i]
y = ['%s' % str(year), '%s' % str(int(year) + 1), '%s' % str(int(year) - 1), '0']
r = client.request(urlparse.urljoin(self.base_link, self.search_link), post={'query': cleantitle.query(titles[0])})
r = dom_parser.parse_dom(r, 'li', attrs={'class': 'entTd'})
r = dom_parser.parse_dom(r, 'div', attrs={'class': 've-screen'}, req='title')
r = [(dom_parser.parse_dom(i, 'a', req='href'), i.attrs['title'].split(' - ')[0]) for i in r]
r = [(i[0][0].attrs['href'], i[1], re.findall('(.+?) \(*(\d{4})', i[1])) for i in r]
r = [(i[0], i[2][0][0] if len(i[2]) > 0 else i[1], i[2][0][1] if len(i[2]) > 0 else '0') for i in r]
r = sorted(r, key=lambda i: int(i[2]), reverse=True) # with year > no year
r = [i[0] for i in r if cleantitle.get(i[1]) in t and i[2] in y][0]
return source_utils.strip_domain(r)
except:
return
| TheWardoctor/Wardoctors-repo | script.module.uncoded/lib/resources/lib/sources/de/horrorkino.py | Python | apache-2.0 | 3,418 |
"""Sensor to collect the reference daily prices of electricity ('PVPC') in Spain."""
import logging
from random import randint
from typing import Optional
from aiopvpc import PVPCData
from homeassistant import config_entries
from homeassistant.const import CONF_NAME, ENERGY_KILO_WATT_HOUR
from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers.aiohttp_client import async_get_clientsession
from homeassistant.helpers.event import async_call_later, async_track_time_change
from homeassistant.helpers.restore_state import RestoreEntity
import homeassistant.util.dt as dt_util
from .const import ATTR_TARIFF
_LOGGER = logging.getLogger(__name__)
ATTR_PRICE = "price"
ICON = "mdi:currency-eur"
UNIT = f"€/{ENERGY_KILO_WATT_HOUR}"
_DEFAULT_TIMEOUT = 10
async def async_setup_entry(
hass: HomeAssistant, config_entry: config_entries.ConfigEntry, async_add_entities
):
"""Set up the electricity price sensor from config_entry."""
name = config_entry.data[CONF_NAME]
pvpc_data_handler = PVPCData(
tariff=config_entry.data[ATTR_TARIFF],
local_timezone=hass.config.time_zone,
websession=async_get_clientsession(hass),
logger=_LOGGER,
timeout=_DEFAULT_TIMEOUT,
)
async_add_entities(
[ElecPriceSensor(name, config_entry.unique_id, pvpc_data_handler)], False
)
class ElecPriceSensor(RestoreEntity):
"""Class to hold the prices of electricity as a sensor."""
unit_of_measurement = UNIT
icon = ICON
should_poll = False
def __init__(self, name, unique_id, pvpc_data_handler):
"""Initialize the sensor object."""
self._name = name
self._unique_id = unique_id
self._pvpc_data = pvpc_data_handler
self._num_retries = 0
self._hourly_tracker = None
self._price_tracker = None
async def async_will_remove_from_hass(self) -> None:
"""Cancel listeners for sensor updates."""
self._hourly_tracker()
self._price_tracker()
async def async_added_to_hass(self):
"""Handle entity which will be added."""
await super().async_added_to_hass()
state = await self.async_get_last_state()
if state:
self._pvpc_data.state = state.state
# Update 'state' value in hour changes
self._hourly_tracker = async_track_time_change(
self.hass, self.update_current_price, second=[0], minute=[0]
)
# Update prices at random time, 2 times/hour (don't want to upset API)
random_minute = randint(1, 29)
mins_update = [random_minute, random_minute + 30]
self._price_tracker = async_track_time_change(
self.hass, self.async_update_prices, second=[0], minute=mins_update
)
_LOGGER.debug(
"Setup of price sensor %s (%s) with tariff '%s', "
"updating prices each hour at %s min",
self.name,
self.entity_id,
self._pvpc_data.tariff,
mins_update,
)
await self.async_update_prices(dt_util.utcnow())
self.update_current_price(dt_util.utcnow())
@property
def unique_id(self) -> Optional[str]:
"""Return a unique ID."""
return self._unique_id
@property
def name(self):
"""Return the name of the sensor."""
return self._name
@property
def state(self):
"""Return the state of the sensor."""
return self._pvpc_data.state
@property
def available(self) -> bool:
"""Return True if entity is available."""
return self._pvpc_data.state_available
@property
def device_state_attributes(self):
"""Return the state attributes."""
return self._pvpc_data.attributes
@callback
def update_current_price(self, now):
"""Update the sensor state, by selecting the current price for this hour."""
self._pvpc_data.process_state_and_attributes(now)
self.async_write_ha_state()
async def async_update_prices(self, now):
"""Update electricity prices from the ESIOS API."""
prices = await self._pvpc_data.async_update_prices(now)
if not prices and self._pvpc_data.source_available:
self._num_retries += 1
if self._num_retries > 2:
_LOGGER.warning(
"%s: repeated bad data update, mark component as unavailable source",
self.entity_id,
)
self._pvpc_data.source_available = False
return
retry_delay = 2 * self._num_retries * self._pvpc_data.timeout
_LOGGER.debug(
"%s: Bad update[retry:%d], will try again in %d s",
self.entity_id,
self._num_retries,
retry_delay,
)
async_call_later(self.hass, retry_delay, self.async_update_prices)
return
if not prices:
_LOGGER.debug("%s: data source is not yet available", self.entity_id)
return
self._num_retries = 0
if not self._pvpc_data.source_available:
self._pvpc_data.source_available = True
_LOGGER.warning("%s: component has recovered data access", self.entity_id)
self.update_current_price(now)
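# Scheduling note (illustrative): if random_minute were 17, the sensor state
# would refresh at HH:00:00 via the hourly tracker, while prices would be
# re-downloaded at HH:17:00 and HH:47:00 -- twice per hour at a per-install
# random offset, to avoid hammering the ESIOS API at round minutes.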
| nkgilley/home-assistant | homeassistant/components/pvpc_hourly_pricing/sensor.py | Python | apache-2.0 | 5,339 |
# Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Training helper that checkpoints models and creates session."""
import time
import numpy as np
from tensorflow.python.client import session
from tensorflow.python.distribute import distribution_strategy_context
from tensorflow.python.framework import errors
from tensorflow.python.framework import ops
from tensorflow.python.platform import tf_logging as logging
from tensorflow.python.training import checkpoint_management
from tensorflow.python.util.tf_export import tf_export
def _maybe_name(obj):
"""Returns object name if it has one, or a message otherwise.
  This is useful for names that appear in error messages.
Args:
obj: Object to get the name of.
Returns:
name, "None", or a "no name" message.
"""
if obj is None:
return "None"
elif hasattr(obj, "name"):
return obj.name
else:
return "<no name for %s>" % type(obj)
def _restore_checkpoint_and_maybe_run_saved_model_initializers(
sess, saver, path):
"""Restores checkpoint values and SavedModel initializers if found."""
# NOTE: All references to SavedModel refer to SavedModels loaded from the
# load_v2 API (which does not require the `sess` argument).
# If the graph contains resources loaded from a SavedModel, they are not
# restored when calling `saver.restore`. Thus, the SavedModel initializer must
# be called with `saver.restore` to properly initialize the model.
# The SavedModel init is stored in the "saved_model_initializers" collection.
# This collection is part of the MetaGraph's default_init_op, so it is already
# called by MonitoredSession as long as the saver doesn't restore any
# checkpoints from the working dir.
saved_model_init_ops = ops.get_collection("saved_model_initializers")
if saved_model_init_ops:
sess.run(saved_model_init_ops)
# The saver must be called *after* the SavedModel init, because the SavedModel
# init will restore the variables from the SavedModel variables directory.
# Initializing/restoring twice is not ideal but there's no other way to do it.
saver.restore(sess, path)
@tf_export(v1=["train.SessionManager"])
class SessionManager(object):
"""Training helper that restores from checkpoint and creates session.
This class is a small wrapper that takes care of session creation and
checkpoint recovery. It also provides functions that to facilitate
coordination among multiple training threads or processes.
* Checkpointing trained variables as the training progresses.
* Initializing variables on startup, restoring them from the most recent
checkpoint after a crash, or wait for checkpoints to become available.
### Usage:
```python
with tf.Graph().as_default():
...add operations to the graph...
# Create a SessionManager that will checkpoint the model in '/tmp/mydir'.
sm = SessionManager()
sess = sm.prepare_session(master, init_op, saver, checkpoint_dir)
# Use the session to train the graph.
while True:
sess.run(<my_train_op>)
```
`prepare_session()` initializes or restores a model. It requires `init_op`
  and `saver` as arguments.
A second process could wait for the model to be ready by doing the following:
```python
with tf.Graph().as_default():
...add operations to the graph...
# Create a SessionManager that will wait for the model to become ready.
sm = SessionManager()
sess = sm.wait_for_session(master)
# Use the session to train the graph.
while True:
sess.run(<my_train_op>)
```
`wait_for_session()` waits for a model to be initialized by other processes.
"""
def __init__(self,
local_init_op=None,
ready_op=None,
ready_for_local_init_op=None,
graph=None,
recovery_wait_secs=30,
local_init_run_options=None,
local_init_feed_dict=None):
"""Creates a SessionManager.
The `local_init_op` is an `Operation` that is run always after a new session
was created. If `None`, this step is skipped.
The `ready_op` is an `Operation` used to check if the model is ready. The
model is considered ready if that operation returns an empty 1D string
tensor. If the operation returns a non empty 1D string tensor, the elements
are concatenated and used to indicate to the user why the model is not
ready.
The `ready_for_local_init_op` is an `Operation` used to check if the model
is ready to run local_init_op. The model is considered ready if that
operation returns an empty 1D string tensor. If the operation returns a non
empty 1D string tensor, the elements are concatenated and used to indicate
to the user why the model is not ready.
If `ready_op` is `None`, the model is not checked for readiness.
`recovery_wait_secs` is the number of seconds between checks that
the model is ready. It is used by processes to wait for a model to
be initialized or restored. Defaults to 30 seconds.
Args:
local_init_op: An `Operation` run immediately after session creation.
Usually used to initialize tables and local variables.
ready_op: An `Operation` to check if the model is initialized.
ready_for_local_init_op: An `Operation` to check if the model is ready
to run local_init_op.
graph: The `Graph` that the model will use.
recovery_wait_secs: Seconds between checks for the model to be ready.
local_init_run_options: RunOptions to be passed to session.run when
executing the local_init_op.
local_init_feed_dict: Optional session feed dictionary to use when running
the local_init_op.
Raises:
ValueError: If ready_for_local_init_op is not None but local_init_op is
None
"""
# Sets default values of arguments.
if graph is None:
graph = ops.get_default_graph()
self._local_init_op = local_init_op
self._ready_op = ready_op
self._ready_for_local_init_op = ready_for_local_init_op
self._graph = graph
self._recovery_wait_secs = recovery_wait_secs
self._target = None
self._local_init_run_options = local_init_run_options
self._local_init_feed_dict = local_init_feed_dict
if ready_for_local_init_op is not None and local_init_op is None:
raise ValueError("If you pass a ready_for_local_init_op "
"you must also pass a local_init_op "
", ready_for_local_init_op [%s]" %
ready_for_local_init_op)
def _restore_checkpoint(self,
master,
saver=None,
checkpoint_dir=None,
checkpoint_filename_with_path=None,
wait_for_checkpoint=False,
max_wait_secs=7200,
config=None):
"""Creates a `Session`, and tries to restore a checkpoint.
Args:
master: `String` representation of the TensorFlow master to use.
saver: A `Saver` object used to restore a model.
checkpoint_dir: Path to the checkpoint files. The latest checkpoint in the
dir will be used to restore.
checkpoint_filename_with_path: Full file name path to the checkpoint file.
wait_for_checkpoint: Whether to wait for checkpoint to become available.
max_wait_secs: Maximum time to wait for checkpoints to become available.
config: Optional `ConfigProto` proto used to configure the session.
Returns:
A pair (sess, is_restored) where 'is_restored' is `True` if
the session could be restored, `False` otherwise.
Raises:
ValueError: If both checkpoint_dir and checkpoint_filename_with_path are
set.
"""
self._target = master
# This is required to so that we initialize the TPU device before
# restoring from checkpoint since we'll be placing variables on the device
# and TPUInitialize wipes out the memory of the device.
strategy = distribution_strategy_context.get_strategy()
if strategy and hasattr(strategy.extended,
"_experimental_initialize_system"):
strategy.extended._experimental_initialize_system() # pylint: disable=protected-access
sess = session.Session(self._target, graph=self._graph, config=config)
if checkpoint_dir and checkpoint_filename_with_path:
raise ValueError("Can not provide both checkpoint_dir and "
"checkpoint_filename_with_path.")
# If either saver or checkpoint_* is not specified, cannot restore. Just
# return.
if not saver or not (checkpoint_dir or checkpoint_filename_with_path):
return sess, False
if checkpoint_filename_with_path:
_restore_checkpoint_and_maybe_run_saved_model_initializers(
sess, saver, checkpoint_filename_with_path)
return sess, True
# Waits up until max_wait_secs for checkpoint to become available.
wait_time = 0
ckpt = checkpoint_management.get_checkpoint_state(checkpoint_dir)
while not ckpt or not ckpt.model_checkpoint_path:
if wait_for_checkpoint and wait_time < max_wait_secs:
logging.info("Waiting for checkpoint to be available.")
time.sleep(self._recovery_wait_secs)
wait_time += self._recovery_wait_secs
ckpt = checkpoint_management.get_checkpoint_state(checkpoint_dir)
else:
return sess, False
# Loads the checkpoint.
_restore_checkpoint_and_maybe_run_saved_model_initializers(
sess, saver, ckpt.model_checkpoint_path)
saver.recover_last_checkpoints(ckpt.all_model_checkpoint_paths)
return sess, True
def prepare_session(self,
master,
init_op=None,
saver=None,
checkpoint_dir=None,
checkpoint_filename_with_path=None,
wait_for_checkpoint=False,
max_wait_secs=7200,
config=None,
init_feed_dict=None,
init_fn=None):
"""Creates a `Session`. Makes sure the model is ready to be used.
Creates a `Session` on 'master'. If a `saver` object is passed in, and
`checkpoint_dir` points to a directory containing valid checkpoint
files, then it will try to recover the model from checkpoint. If
no checkpoint files are available, and `wait_for_checkpoint` is
`True`, then the process would check every `recovery_wait_secs`,
up to `max_wait_secs`, for recovery to succeed.
If the model cannot be recovered successfully then it is initialized by
running the `init_op` and calling `init_fn` if they are provided.
The `local_init_op` is also run after init_op and init_fn, regardless of
whether the model was recovered successfully, but only if
`ready_for_local_init_op` passes.
If the model is recovered from a checkpoint it is assumed that all
global variables have been initialized, in particular neither `init_op`
nor `init_fn` will be executed.
It is an error if the model cannot be recovered and no `init_op`
or `init_fn` or `local_init_op` are passed.
Args:
master: `String` representation of the TensorFlow master to use.
init_op: Optional `Operation` used to initialize the model.
saver: A `Saver` object used to restore a model.
checkpoint_dir: Path to the checkpoint files. The latest checkpoint in the
dir will be used to restore.
checkpoint_filename_with_path: Full file name path to the checkpoint file.
wait_for_checkpoint: Whether to wait for checkpoint to become available.
max_wait_secs: Maximum time to wait for checkpoints to become available.
config: Optional `ConfigProto` proto used to configure the session.
init_feed_dict: Optional dictionary that maps `Tensor` objects to feed
values. This feed dictionary is passed to the session `run()` call when
running the init op.
init_fn: Optional callable used to initialize the model. Called after the
optional `init_op` is called. The callable must accept one argument,
the session being initialized.
Returns:
A `Session` object that can be used to drive the model.
Raises:
RuntimeError: If the model cannot be initialized or recovered.
ValueError: If both checkpoint_dir and checkpoint_filename_with_path are
set.
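
    Example (an illustrative sketch only; `my_graph`, `my_init_op`,
    `my_saver` and the checkpoint path are placeholder names, not part of
    this module):

    ```python
    sm = SessionManager(graph=my_graph)
    sess = sm.prepare_session(
        "", init_op=my_init_op, saver=my_saver,
        checkpoint_dir="/tmp/my-train-dir")
    ```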
"""
sess, is_loaded_from_checkpoint = self._restore_checkpoint(
master,
saver,
checkpoint_dir=checkpoint_dir,
checkpoint_filename_with_path=checkpoint_filename_with_path,
wait_for_checkpoint=wait_for_checkpoint,
max_wait_secs=max_wait_secs,
config=config)
if not is_loaded_from_checkpoint:
if init_op is None and not init_fn and self._local_init_op is None:
raise RuntimeError("Model is not initialized and no init_op or "
"init_fn or local_init_op was given")
if init_op is not None:
sess.run(init_op, feed_dict=init_feed_dict)
if init_fn:
init_fn(sess)
local_init_success, msg = self._try_run_local_init_op(sess)
if not local_init_success:
raise RuntimeError(
"Init operations did not make model ready for local_init. "
"Init op: %s, init fn: %s, error: %s" % (_maybe_name(init_op),
init_fn,
msg))
is_ready, msg = self._model_ready(sess)
if not is_ready:
raise RuntimeError(
"Init operations did not make model ready. "
"Init op: %s, init fn: %s, local_init_op: %s, error: %s" %
(_maybe_name(init_op), init_fn, self._local_init_op, msg))
return sess
def recover_session(self,
master,
saver=None,
checkpoint_dir=None,
checkpoint_filename_with_path=None,
wait_for_checkpoint=False,
max_wait_secs=7200,
config=None):
"""Creates a `Session`, recovering if possible.
Creates a new session on 'master'. If the session is not initialized
and can be recovered from a checkpoint, recover it.
Args:
master: `String` representation of the TensorFlow master to use.
saver: A `Saver` object used to restore a model.
checkpoint_dir: Path to the checkpoint files. The latest checkpoint in the
dir will be used to restore.
checkpoint_filename_with_path: Full file name path to the checkpoint file.
wait_for_checkpoint: Whether to wait for checkpoint to become available.
max_wait_secs: Maximum time to wait for checkpoints to become available.
config: Optional `ConfigProto` proto used to configure the session.
Returns:
A pair (sess, initialized) where 'initialized' is `True` if
the session could be recovered and initialized, `False` otherwise.
Raises:
ValueError: If both checkpoint_dir and checkpoint_filename_with_path are
set.
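
    Example (an illustrative sketch; names are placeholders):

    ```python
    sm = SessionManager(graph=my_graph)
    sess, initialized = sm.recover_session(
        "", saver=my_saver, checkpoint_dir="/tmp/my-train-dir")
    if not initialized:
      sess.run(my_init_op)
    ```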
"""
sess, is_loaded_from_checkpoint = self._restore_checkpoint(
master,
saver,
checkpoint_dir=checkpoint_dir,
checkpoint_filename_with_path=checkpoint_filename_with_path,
wait_for_checkpoint=wait_for_checkpoint,
max_wait_secs=max_wait_secs,
config=config)
# Always try to run local_init_op
local_init_success, msg = self._try_run_local_init_op(sess)
if not is_loaded_from_checkpoint:
# Do not need to run checks for readiness
return sess, False
restoring_file = checkpoint_dir or checkpoint_filename_with_path
if not local_init_success:
logging.info(
"Restoring model from %s did not make model ready for local init:"
" %s", restoring_file, msg)
return sess, False
is_ready, msg = self._model_ready(sess)
if not is_ready:
logging.info("Restoring model from %s did not make model ready: %s",
restoring_file, msg)
return sess, False
logging.info("Restored model from %s", restoring_file)
return sess, is_loaded_from_checkpoint
def wait_for_session(self, master, config=None, max_wait_secs=float("Inf")):
"""Creates a new `Session` and waits for model to be ready.
Creates a new `Session` on 'master'. Waits for the model to be
    initialized or recovered from a checkpoint. It's expected that
    another thread or process will make the model ready. This method is
    intended for threads/processes that participate in a distributed
    training configuration where a different thread/process is
    responsible for initializing or recovering the model being trained.
NB: The amount of time this method waits for the session is bounded
by max_wait_secs. By default, this function will wait indefinitely.
Args:
master: `String` representation of the TensorFlow master to use.
config: Optional ConfigProto proto used to configure the session.
max_wait_secs: Maximum time to wait for the session to become available.
Returns:
A `Session`. May be None if the operation exceeds the timeout
specified by config.operation_timeout_in_ms.
Raises:
tf.DeadlineExceededError: if the session is not available after
max_wait_secs.
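
    Example (an illustrative sketch; assumes some other process runs the
    initialization, e.g. via `prepare_session` above):

    ```python
    sm = SessionManager(graph=my_graph)
    sess = sm.wait_for_session(master="", max_wait_secs=600)
    ```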
"""
self._target = master
if max_wait_secs is None:
max_wait_secs = float("Inf")
timer = _CountDownTimer(max_wait_secs)
while True:
sess = session.Session(self._target, graph=self._graph, config=config)
not_ready_msg = None
not_ready_local_msg = None
local_init_success, not_ready_local_msg = self._try_run_local_init_op(
sess)
if local_init_success:
# Successful if local_init_op is None, or ready_for_local_init_op passes
is_ready, not_ready_msg = self._model_ready(sess)
if is_ready:
return sess
self._safe_close(sess)
# Do we have enough time left to try again?
      remaining_secs_after_wait = (
          timer.secs_remaining() - self._recovery_wait_secs)
      if remaining_secs_after_wait < 0:
raise errors.DeadlineExceededError(
None, None,
"Session was not ready after waiting %d secs." % (max_wait_secs,))
logging.info("Waiting for model to be ready. "
"Ready_for_local_init_op: %s, ready: %s",
not_ready_local_msg, not_ready_msg)
time.sleep(self._recovery_wait_secs)
def _safe_close(self, sess):
"""Closes a session without raising an exception.
Just like sess.close() but ignores exceptions.
Args:
sess: A `Session`.
"""
# pylint: disable=broad-except
try:
sess.close()
except Exception:
# Intentionally not logging to avoid user complaints that
# they get cryptic errors. We really do not care that Close
# fails.
pass
# pylint: enable=broad-except
def _model_ready(self, sess):
"""Checks if the model is ready or not.
Args:
sess: A `Session`.
Returns:
A tuple (is_ready, msg), where is_ready is True if ready and False
otherwise, and msg is `None` if the model is ready, a `String` with the
reason why it is not ready otherwise.
"""
return _ready(self._ready_op, sess, "Model not ready")
def _model_ready_for_local_init(self, sess):
"""Checks if the model is ready to run local_init_op.
Args:
sess: A `Session`.
Returns:
A tuple (is_ready, msg), where is_ready is True if ready to run
local_init_op and False otherwise, and msg is `None` if the model is
ready to run local_init_op, a `String` with the reason why it is not ready
otherwise.
"""
return _ready(self._ready_for_local_init_op, sess,
"Model not ready for local init")
def _try_run_local_init_op(self, sess):
"""Tries to run _local_init_op, if not None, and is ready for local init.
Args:
sess: A `Session`.
Returns:
A tuple (is_successful, msg), where is_successful is True if
_local_init_op is None, or we ran _local_init_op, and False otherwise;
and msg is a `String` with the reason why the model was not ready to run
local init.
"""
if self._local_init_op is not None:
is_ready_for_local_init, msg = self._model_ready_for_local_init(sess)
if is_ready_for_local_init:
logging.info("Running local_init_op.")
sess.run(self._local_init_op, feed_dict=self._local_init_feed_dict,
options=self._local_init_run_options)
logging.info("Done running local_init_op.")
return True, None
else:
return False, msg
return True, None
def _ready(op, sess, msg):
"""Checks if the model is ready or not, as determined by op.
Args:
op: An op, either _ready_op or _ready_for_local_init_op, which defines the
readiness of the model.
sess: A `Session`.
    msg: A message to log as a warning if the model is not ready.
Returns:
A tuple (is_ready, msg), where is_ready is True if ready and False
otherwise, and msg is `None` if the model is ready, a `String` with the
reason why it is not ready otherwise.
"""
if op is None:
return True, None
else:
try:
ready_value = sess.run(op)
# The model is considered ready if ready_op returns an empty 1-D tensor.
# Also compare to `None` and dtype being int32 for backward
# compatibility.
if (ready_value is None or ready_value.dtype == np.int32 or
ready_value.size == 0):
return True, None
else:
# TODO(sherrym): If a custom ready_op returns other types of tensor,
# or strings other than variable names, this message could be
# confusing.
non_initialized_varnames = ", ".join(
[i.decode("utf-8") for i in ready_value])
return False, "Variables not initialized: " + non_initialized_varnames
except errors.FailedPreconditionError as e:
if "uninitialized" not in str(e):
logging.warning("%s : error [%s]", msg, str(e))
raise e
return False, str(e)
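# A typical `ready_op` (an illustrative sketch, not part of this module's
# API) is built from `tf.compat.v1.report_uninitialized_variables()`, which
# returns an empty 1-D string tensor once every variable is initialized --
# exactly the "empty tensor means ready" convention `_ready` checks above:
#
#   ready_op = tf.compat.v1.report_uninitialized_variables()
#   sm = SessionManager(ready_op=ready_op)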
class _CountDownTimer(object):
__slots__ = ["_start_time_secs", "_duration_secs"]
def __init__(self, duration_secs):
self._start_time_secs = time.time()
self._duration_secs = duration_secs
def secs_remaining(self):
diff = self._duration_secs - (time.time() - self._start_time_secs)
return max(0, diff)
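# Minimal usage sketch for the countdown timer above (illustrative only):
#
#   timer = _CountDownTimer(30.0)
#   while timer.secs_remaining() > 0:
#     pass  # retry some operation, sleeping between attempts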
| tensorflow/tensorflow | tensorflow/python/training/session_manager.py | Python | apache-2.0 | 23,320 |
# Copyright 2019 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for RNN cell wrapper v2 implementation."""
from absl.testing import parameterized
import numpy as np
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import ops
from tensorflow.python.keras import combinations
from tensorflow.python.keras import layers
from tensorflow.python.keras.layers import rnn_cell_wrapper_v2
from tensorflow.python.keras.layers.legacy_rnn import rnn_cell_impl
from tensorflow.python.keras.legacy_tf_layers import base as legacy_base_layer
from tensorflow.python.keras.utils import generic_utils
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import init_ops
from tensorflow.python.ops import variables as variables_lib
from tensorflow.python.platform import test
@combinations.generate(combinations.combine(mode=["graph", "eager"]))
class RNNCellWrapperTest(test.TestCase, parameterized.TestCase):
def testResidualWrapper(self):
wrapper_type = rnn_cell_wrapper_v2.ResidualWrapper
x = ops.convert_to_tensor_v2_with_dispatch(
np.array([[1., 1., 1.]]), dtype="float32")
m = ops.convert_to_tensor_v2_with_dispatch(
np.array([[0.1, 0.1, 0.1]]), dtype="float32")
base_cell = rnn_cell_impl.GRUCell(
3, kernel_initializer=init_ops.constant_initializer(0.5),
bias_initializer=init_ops.constant_initializer(0.5))
g, m_new = base_cell(x, m)
wrapper_object = wrapper_type(base_cell)
children = wrapper_object._trackable_children()
wrapper_object.get_config() # Should not throw an error
self.assertIn("cell", children)
self.assertIs(children["cell"], base_cell)
g_res, m_new_res = wrapper_object(x, m)
self.evaluate([variables_lib.global_variables_initializer()])
res = self.evaluate([g, g_res, m_new, m_new_res])
# Residual connections
self.assertAllClose(res[1], res[0] + [1., 1., 1.])
# States are left untouched
self.assertAllClose(res[2], res[3])
def testResidualWrapperWithSlice(self):
wrapper_type = rnn_cell_wrapper_v2.ResidualWrapper
x = ops.convert_to_tensor_v2_with_dispatch(
np.array([[1., 1., 1., 1., 1.]]), dtype="float32")
m = ops.convert_to_tensor_v2_with_dispatch(
np.array([[0.1, 0.1, 0.1]]), dtype="float32")
base_cell = rnn_cell_impl.GRUCell(
3, kernel_initializer=init_ops.constant_initializer(0.5),
bias_initializer=init_ops.constant_initializer(0.5))
g, m_new = base_cell(x, m)
def residual_with_slice_fn(inp, out):
inp_sliced = array_ops.slice(inp, [0, 0], [-1, 3])
return inp_sliced + out
g_res, m_new_res = wrapper_type(
base_cell, residual_with_slice_fn)(x, m)
self.evaluate([variables_lib.global_variables_initializer()])
res_g, res_g_res, res_m_new, res_m_new_res = self.evaluate(
[g, g_res, m_new, m_new_res])
# Residual connections
self.assertAllClose(res_g_res, res_g + [1., 1., 1.])
# States are left untouched
self.assertAllClose(res_m_new, res_m_new_res)
def testDeviceWrapper(self):
wrapper_type = rnn_cell_wrapper_v2.DeviceWrapper
x = array_ops.zeros([1, 3])
m = array_ops.zeros([1, 3])
cell = rnn_cell_impl.GRUCell(3)
wrapped_cell = wrapper_type(cell, "/cpu:0")
children = wrapped_cell._trackable_children()
wrapped_cell.get_config() # Should not throw an error
self.assertIn("cell", children)
self.assertIs(children["cell"], cell)
outputs, _ = wrapped_cell(x, m)
self.assertIn("cpu:0", outputs.device.lower())
@parameterized.parameters(
[[rnn_cell_impl.DropoutWrapper, rnn_cell_wrapper_v2.DropoutWrapper],
[rnn_cell_impl.ResidualWrapper, rnn_cell_wrapper_v2.ResidualWrapper]])
def testWrapperKerasStyle(self, wrapper, wrapper_v2):
"""Tests if wrapper cell is instantiated in keras style scope."""
wrapped_cell_v2 = wrapper_v2(rnn_cell_impl.BasicRNNCell(1))
self.assertIsNone(getattr(wrapped_cell_v2, "_keras_style", None))
wrapped_cell = wrapper(rnn_cell_impl.BasicRNNCell(1))
self.assertFalse(wrapped_cell._keras_style)
@parameterized.parameters(
[rnn_cell_wrapper_v2.DropoutWrapper, rnn_cell_wrapper_v2.ResidualWrapper])
def testWrapperWeights(self, wrapper):
"""Tests that wrapper weights contain wrapped cells weights."""
base_cell = layers.SimpleRNNCell(1, name="basic_rnn_cell")
rnn_cell = wrapper(base_cell)
rnn_layer = layers.RNN(rnn_cell)
inputs = ops.convert_to_tensor_v2_with_dispatch([[[1]]],
dtype=dtypes.float32)
rnn_layer(inputs)
wrapper_name = generic_utils.to_snake_case(wrapper.__name__)
expected_weights = ["rnn/" + wrapper_name + "/" + var for var in
("kernel:0", "recurrent_kernel:0", "bias:0")]
self.assertLen(rnn_cell.weights, 3)
self.assertCountEqual([v.name for v in rnn_cell.weights], expected_weights)
self.assertCountEqual([v.name for v in rnn_cell.trainable_variables],
expected_weights)
self.assertCountEqual([v.name for v in rnn_cell.non_trainable_variables],
[])
self.assertCountEqual([v.name for v in rnn_cell.cell.weights],
expected_weights)
@parameterized.parameters(
[rnn_cell_wrapper_v2.DropoutWrapper, rnn_cell_wrapper_v2.ResidualWrapper])
def testWrapperV2Caller(self, wrapper):
"""Tests that wrapper V2 is using the LayerRNNCell's caller."""
with legacy_base_layer.keras_style_scope():
base_cell = rnn_cell_impl.MultiRNNCell(
[rnn_cell_impl.BasicRNNCell(1) for _ in range(2)])
rnn_cell = wrapper(base_cell)
inputs = ops.convert_to_tensor_v2_with_dispatch([[1]], dtype=dtypes.float32)
state = ops.convert_to_tensor_v2_with_dispatch([[1]], dtype=dtypes.float32)
_ = rnn_cell(inputs, [state, state])
weights = base_cell._cells[0].weights
self.assertLen(weights, expected_len=2)
self.assertTrue(all("_wrapper" in v.name for v in weights))
@parameterized.parameters(
[rnn_cell_wrapper_v2.DropoutWrapper, rnn_cell_wrapper_v2.ResidualWrapper])
def testWrapperV2Build(self, wrapper):
cell = rnn_cell_impl.LSTMCell(10)
wrapper = wrapper(cell)
wrapper.build((1,))
self.assertTrue(cell.built)
def testDeviceWrapperSerialization(self):
wrapper_cls = rnn_cell_wrapper_v2.DeviceWrapper
cell = layers.LSTMCell(10)
wrapper = wrapper_cls(cell, "/cpu:0")
config = wrapper.get_config()
reconstructed_wrapper = wrapper_cls.from_config(config)
self.assertDictEqual(config, reconstructed_wrapper.get_config())
self.assertIsInstance(reconstructed_wrapper, wrapper_cls)
def testResidualWrapperSerialization(self):
wrapper_cls = rnn_cell_wrapper_v2.ResidualWrapper
cell = layers.LSTMCell(10)
wrapper = wrapper_cls(cell)
config = wrapper.get_config()
reconstructed_wrapper = wrapper_cls.from_config(config)
self.assertDictEqual(config, reconstructed_wrapper.get_config())
self.assertIsInstance(reconstructed_wrapper, wrapper_cls)
wrapper = wrapper_cls(cell, residual_fn=lambda i, o: i + i + o)
config = wrapper.get_config()
reconstructed_wrapper = wrapper_cls.from_config(config)
# Assert the reconstructed function will perform the math correctly.
self.assertEqual(reconstructed_wrapper._residual_fn(1, 2), 4)
def residual_fn(inputs, outputs):
return inputs * 3 + outputs
wrapper = wrapper_cls(cell, residual_fn=residual_fn)
config = wrapper.get_config()
reconstructed_wrapper = wrapper_cls.from_config(config)
# Assert the reconstructed function will perform the math correctly.
self.assertEqual(reconstructed_wrapper._residual_fn(1, 2), 5)
def testDropoutWrapperSerialization(self):
wrapper_cls = rnn_cell_wrapper_v2.DropoutWrapper
cell = layers.GRUCell(10)
wrapper = wrapper_cls(cell)
config = wrapper.get_config()
reconstructed_wrapper = wrapper_cls.from_config(config)
self.assertDictEqual(config, reconstructed_wrapper.get_config())
self.assertIsInstance(reconstructed_wrapper, wrapper_cls)
wrapper = wrapper_cls(cell, dropout_state_filter_visitor=lambda s: True)
config = wrapper.get_config()
reconstructed_wrapper = wrapper_cls.from_config(config)
self.assertTrue(reconstructed_wrapper._dropout_state_filter(None))
def dropout_state_filter_visitor(unused_state):
return False
wrapper = wrapper_cls(
cell, dropout_state_filter_visitor=dropout_state_filter_visitor)
config = wrapper.get_config()
reconstructed_wrapper = wrapper_cls.from_config(config)
self.assertFalse(reconstructed_wrapper._dropout_state_filter(None))
def testDropoutWrapperWithKerasLSTMCell(self):
wrapper_cls = rnn_cell_wrapper_v2.DropoutWrapper
cell = layers.LSTMCell(10)
with self.assertRaisesRegex(ValueError, "does not work with "):
wrapper_cls(cell)
cell = layers.LSTMCellV2(10)
with self.assertRaisesRegex(ValueError, "does not work with "):
wrapper_cls(cell)
if __name__ == "__main__":
test.main()
| tensorflow/tensorflow | tensorflow/python/keras/layers/rnn_cell_wrapper_v2_test.py | Python | apache-2.0 | 9,770 |
#!/usr/bin/env python
# example checkbutton.py
import pygtk
pygtk.require('2.0')
import gtk
class CheckButton:
# Our callback.
# The data passed to this method is printed to stdout
def callback(self, widget, data=None):
print "%s was toggled %s" % (data, ("OFF", "ON")[widget.get_active()])
# This callback quits the program
def delete_event(self, widget, event, data=None):
gtk.main_quit()
return False
def __init__(self):
# Create a new window
self.window = gtk.Window(gtk.WINDOW_TOPLEVEL)
# Set the window title
self.window.set_title("Check Button")
# Set a handler for delete_event that immediately
# exits GTK.
self.window.connect("delete_event", self.delete_event)
# Sets the border width of the window.
self.window.set_border_width(20)
# Create a vertical box
vbox = gtk.VBox(True, 2)
# Put the vbox in the main window
self.window.add(vbox)
# Create first button
button = gtk.CheckButton("check button 1")
# When the button is toggled, we call the "callback" method
# with a pointer to "button" as its argument
button.connect("toggled", self.callback, "check button 1")
# Insert button 1
vbox.pack_start(button, True, True, 2)
button.show()
# Create second button
button = gtk.CheckButton("check button 2")
# When the button is toggled, we call the "callback" method
# with a pointer to "button 2" as its argument
button.connect("toggled", self.callback, "check button 2")
# Insert button 2
vbox.pack_start(button, True, True, 2)
button.show()
# Create "Quit" button
button = gtk.Button("Quit")
# When the button is clicked, we call the mainquit function
# and the program exits
button.connect("clicked", lambda wid: gtk.main_quit())
# Insert the quit button
vbox.pack_start(button, True, True, 2)
button.show()
vbox.show()
self.window.show()
def main():
gtk.main()
return 0
if __name__ == "__main__":
CheckButton()
main()
| spaceone/pyjs | pygtkweb/demos/checkbutton.py | Python | apache-2.0 | 2,231 |
"""Tkinker gui for pylint"""
from Tkinter import Tk, Frame, Listbox, Entry, Label, Button, Scrollbar
from Tkinter import TOP, LEFT, RIGHT, BOTTOM, END, X, Y, BOTH
import os
import sys
if sys.platform.startswith('win'):
PYLINT = 'pylint.bat'
else:
PYLINT = 'pylint'
class LintGui:
"""Build and control a window to interact with pylint"""
def __init__(self, root=None):
self.root = root or Tk()
self.root.title('Pylint')
top_frame = Frame(self.root)
res_frame = Frame(self.root)
btn_frame = Frame(self.root)
top_frame.pack(side=TOP, fill=X)
res_frame.pack(side=TOP, fill=BOTH, expand=True)
btn_frame.pack(side=TOP, fill=X)
Label(top_frame, text='Module or package').pack(side=LEFT)
self.txtModule = Entry(top_frame, background='white')
self.txtModule.bind('<Return>', self.run_lint)
self.txtModule.pack(side=LEFT, expand=True, fill=X)
Button(top_frame, text='Run', command=self.run_lint).pack(side=LEFT)
scrl = Scrollbar(res_frame)
self.results = Listbox(res_frame,
background='white',
font='fixedsys',
selectmode='browse',
yscrollcommand=scrl.set)
scrl.configure(command=self.results.yview)
self.results.pack(side=LEFT, expand=True, fill=BOTH)
scrl.pack(side=RIGHT, fill=Y)
Button(btn_frame, text='Quit', command=self.quit).pack(side=BOTTOM)
#self.root.bind('<ctrl-q>', self.quit)
self.txtModule.focus_set()
def mainloop(self):
"""launch the mainloop of the application"""
self.root.mainloop()
def quit(self, _=None):
"""quit the application"""
self.root.quit()
def run_lint(self, _=None):
"""launches pylint"""
        colors = {'W:': 'red3', 'E:': 'red4', '**': 'navy'}
self.root.configure(cursor='watch')
self.results.focus_set()
self.results.delete(0, END)
self.results.update()
module = self.txtModule.get()
pout = os.popen('%s %s' % (PYLINT, module), 'r')
for line in pout.xreadlines():
line = line.rstrip()
self.results.insert(END, line)
fg_color = colors.get(line[:2], 'black')
self.results.itemconfigure(END, fg=fg_color)
self.results.update()
self.root.configure(cursor='')
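# `os.popen` is deprecated; an equivalent line-oriented invocation with
# `subprocess` would look like this (a sketch under the same assumptions,
# not part of the original module):
#
#   import subprocess
#   proc = subprocess.Popen([PYLINT, module], stdout=subprocess.PIPE)
#   for line in proc.stdout:
#       line = line.rstrip()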
def Run(args):
"""launch pylint gui from args"""
if args:
print 'USAGE: pylint-gui\n launch a simple pylint gui using Tk'
return
gui = LintGui()
gui.mainloop()
if __name__ == '__main__':
Run(sys.argv[1:])
| dbbhattacharya/kitsune | vendor/packages/pylint/gui.py | Python | bsd-3-clause | 2,790 |
# Generated by Django 2.2.6 on 2019-10-23 09:06
from django.db import migrations, models
import django.db.models.deletion
import django.utils.timezone
import olympia.amo.models
class Migration(migrations.Migration):
dependencies = [
('scanners', '0008_auto_20191021_1718'),
]
operations = [
migrations.CreateModel(
name='ScannerMatch',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('created', models.DateTimeField(blank=True, default=django.utils.timezone.now, editable=False)),
('modified', models.DateTimeField(auto_now=True)),
('result', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='scanners.ScannerResult')),
('rule', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='scanners.ScannerRule')),
],
options={
'get_latest_by': 'created',
'abstract': False,
'base_manager_name': 'objects',
},
bases=(olympia.amo.models.SearchMixin, olympia.amo.models.SaveUpdateMixin, models.Model),
),
migrations.AddField(
model_name='scannerresult',
name='matched_rules',
field=models.ManyToManyField(through='scanners.ScannerMatch', to='scanners.ScannerRule'),
),
]
| bqbn/addons-server | src/olympia/scanners/migrations/0009_auto_20191023_0906.py | Python | bsd-3-clause | 1,450 |
from django.utils.six.moves import http_client
from django.core.urlresolvers import reverse
from django.contrib.auth.models import Permission
from django_webtest import WebTest
from purl import URL
from oscar.core.compat import get_user_model
User = get_user_model()
def add_permissions(user, permissions):
"""
Grant permissions to the passed user
:param permissions: e.g. ['partner.dashboard_access']
"""
for permission in permissions:
app_label, __, codename = permission.partition('.')
perm = Permission.objects.get(content_type__app_label=app_label,
codename=codename)
user.user_permissions.add(perm)
class WebTestCase(WebTest):
is_staff = False
is_anonymous = False
is_superuser = False
username = 'testuser'
email = 'testuser@buymore.com'
password = 'somefancypassword'
permissions = []
def setUp(self):
self.user = None
if not self.is_anonymous:
self.user = self.create_user(
self.username, self.email, self.password)
self.user.is_staff = self.is_staff
add_permissions(self.user, self.permissions)
self.user.save()
def create_user(self, username=None, email=None, password=None):
"""
Create a user for use in a test.
        As usernames are optional in newer versions of Django, the username
        is only set if the User model defines that field.
"""
kwargs = {'email': email, 'password': password}
if 'username' in User._meta.get_all_field_names():
kwargs['username'] = username
return User.objects.create_user(**kwargs)
def get(self, url, **kwargs):
kwargs.setdefault('user', self.user)
return self.app.get(url, **kwargs)
def post(self, url, **kwargs):
kwargs.setdefault('user', self.user)
return self.app.post(url, **kwargs)
# Custom assertions
def assertIsRedirect(self, response, expected_url=None):
self.assertTrue(response.status_code in (
http_client.FOUND, http_client.MOVED_PERMANENTLY))
if expected_url:
location = URL.from_string(response['Location'])
self.assertEqual(expected_url, location.path())
def assertRedirectsTo(self, response, url_name):
self.assertTrue(str(response.status_code).startswith('3'))
location = response.headers['Location']
redirect_path = location.replace('http://localhost:80', '')
self.assertEqual(reverse(url_name), redirect_path)
def assertNoAccess(self, response):
self.assertContext(response)
self.assertTrue(response.status_code in (http_client.NOT_FOUND,
http_client.FORBIDDEN))
def assertRedirectUrlName(self, response, name, kwargs=None):
self.assertIsRedirect(response)
location = response['Location'].replace('http://testserver', '')
self.assertEqual(location, reverse(name, kwargs=kwargs))
def assertIsOk(self, response):
self.assertEqual(http_client.OK, response.status_code)
def assertContext(self, response):
self.assertTrue(response.context is not None,
'No context was returned')
def assertInContext(self, response, key):
self.assertContext(response)
self.assertTrue(key in response.context,
"Context should contain a variable '%s'" % key)
| jinnykoo/christmas | src/oscar/test/testcases.py | Python | bsd-3-clause | 3,477 |
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from msrest.serialization import Model
class ConnectivitySource(Model):
"""Parameters that define the source of the connection.
All required parameters must be populated in order to send to Azure.
:param resource_id: Required. The ID of the resource from which a
connectivity check will be initiated.
:type resource_id: str
:param port: The source port from which a connectivity check will be
performed.
:type port: int
"""
_validation = {
'resource_id': {'required': True},
}
_attribute_map = {
'resource_id': {'key': 'resourceId', 'type': 'str'},
'port': {'key': 'port', 'type': 'int'},
}
def __init__(self, *, resource_id: str, port: int=None, **kwargs) -> None:
super(ConnectivitySource, self).__init__(**kwargs)
self.resource_id = resource_id
self.port = port
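# Usage sketch (illustrative; the resource ID below is a placeholder):
#
#   source = ConnectivitySource(
#       resource_id='/subscriptions/<sub-id>/resourceGroups/<rg>/providers/'
#                   'Microsoft.Compute/virtualMachines/<vm-name>',
#       port=3389)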
| lmazuel/azure-sdk-for-python | azure-mgmt-network/azure/mgmt/network/v2018_01_01/models/connectivity_source_py3.py | Python | mit | 1,352 |
import bpy
camera = bpy.context.edit_movieclip.tracking.camera
camera.sensor_width = 23.6
camera.units = 'MILLIMETERS'
camera.pixel_aspect = 1
camera.k1 = 0.0
camera.k2 = 0.0
camera.k3 = 0.0
| Microvellum/Fluid-Designer | win64-vc/2.78/scripts/presets/tracking_camera/Nikon_DX.py | Python | gpl-3.0 | 192 |
"""
Tools and data structures for working with genomic intervals (or sets of
regions on a line in general) efficiently.
"""
# For compatibility with existing stuff
from bx.intervals.intersection import *
| dnanexus/rseqc | rseqc/lib/bx/intervals/__init__.py | Python | gpl-3.0 | 204 |
# -*- coding: utf-8 -*-
from ..internal.DeadCrypter import DeadCrypter
class Movie2KTo(DeadCrypter):
__name__ = "Movie2KTo"
__type__ = "crypter"
__version__ = "0.56"
__status__ = "stable"
__pattern__ = r'http://(?:www\.)?movie2k\.to/(.+)\.html'
__config__ = [("activated", "bool", "Activated", True)]
__description__ = """Movie2k.to decrypter plugin"""
__license__ = "GPLv3"
__authors__ = [("4Christopher", "4Christopher@gmx.de")]
| TheBraveWarrior/pyload | module/plugins/crypter/Movie2KTo.py | Python | gpl-3.0 | 472 |
#!/usr/bin/python
# -*- coding: utf-8 -*-
#
# Copyright: (c) 2018, F5 Networks Inc.
# GNU General Public License v3.0 (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'certified'}
DOCUMENTATION = r'''
---
module: bigip_ike_peer
short_description: Manage IPSec IKE Peer configuration on BIG-IP
description:
- Manage IPSec IKE Peer configuration on BIG-IP.
version_added: 2.8
options:
name:
description:
- Specifies the name of the IKE peer.
required: True
description:
description:
- Description of the IKE peer.
version:
description:
- Specifies which version of IKE is used.
- If the system you are configuring is the IPsec initiator, and you select
both versions, the system tries using IKEv2 for negotiation. If the remote
peer does not support IKEv2, the IPsec tunnel fails. To use IKEv1 in this
case, you must deselect Version 2 and try again.
- If the system you are configuring is the IPsec responder, and you select
both versions, the IPsec initiator system determines which IKE version to use.
- When creating a new IKE peer, this value is required.
choices:
- v1
- v2
presented_id_type:
description:
- Specifies the identifier type that the local system uses to identify
itself to the peer during IKE Phase 1 negotiations.
choices:
- address
- asn1dn
- fqdn
- keyid-tag
- user-fqdn
- override
presented_id_value:
description:
      - This is a required value when C(version) includes C(v2).
- Specifies a value for the identity when using a C(presented_id_type) of
C(override).
verified_id_type:
description:
- Specifies the identifier type that the local system uses to identify
the peer during IKE Phase 1 negotiation.
      - This is a required value when C(version) includes C(v2).
      - When C(user-fqdn), the value of C(verified_id_value) must be in the
        form of a 'User @ DNS domain' string.
choices:
- address
- asn1dn
- fqdn
- keyid-tag
- user-fqdn
- override
verified_id_value:
description:
      - This is a required value when C(version) includes C(v2).
- Specifies a value for the identity when using a C(verified_id_type) of
C(override).
phase1_auth_method:
description:
- Specifies the authentication method for phase 1 negotiation.
- When creating a new IKE peer, if this value is not specified, the default is
C(rsa-signature).
choices:
- pre-shared-key
- rsa-signature
phase1_cert:
description:
- Specifies the digital certificate to use for the RSA signature.
- When creating a new IKE peer, if this value is not specified, and
C(phase1_auth_method) is C(rsa-signature), the default is C(default.crt).
- This parameter is invalid when C(phase1_auth_method) is C(pre-shared-key).
phase1_key:
description:
- Specifies the public key that the digital certificate contains.
- When creating a new IKE peer, if this value is not specified, and
C(phase1_auth_method) is C(rsa-signature), the default is C(default.key).
- This parameter is invalid when C(phase1_auth_method) is C(pre-shared-key).
phase1_verify_peer_cert:
description:
- In IKEv2, specifies whether the certificate sent by the IKE peer is verified
using the Trusted Certificate Authorities, a CRL, and/or a peer certificate.
      - In IKEv1, specifies whether the identifier sent by the peer is verified
        with the credentials in the certificate, in the following manner -
        ASN1DN specifies that the entire certificate subject name is compared
        with the identifier; Address, FQDN, or User FQDN specifies that the
        certificate's subjectAltName is compared with the identifier. If the
        two do not match, the negotiation fails.
- When creating a new IKE peer, if this value is not specified, and
C(phase1_auth_method) is C(rsa-signature), the default is C(no).
- This parameter is invalid when C(phase1_auth_method) is C(pre-shared-key).
type: bool
preshared_key:
description:
- Specifies a string that the IKE peers share for authenticating each other.
- This parameter is only relevant when C(phase1_auth_method) is C(pre-shared-key).
- This parameter is invalid when C(phase1_auth_method) is C(rsa-signature).
remote_address:
description:
- Displays the IP address of the BIG-IP system that is remote to the system
you are configuring.
phase1_encryption_algorithm:
description:
- Specifies the algorithm to use for IKE encryption.
- IKE C(version) C(v2) does not support C(blowfish), C(camellia), or C(cast128).
choices:
- 3des
- des
- blowfish
- cast128
- aes128
- aes192
- aes256
- camellia
phase1_hash_algorithm:
description:
- Specifies the algorithm to use for IKE authentication.
choices:
- sha1
- md5
- sha256
- sha384
- sha512
phase1_perfect_forward_secrecy:
description:
- Specifies the Diffie-Hellman group to use for IKE Phase 1 and Phase 2 negotiations.
choices:
- ecp256
- ecp384
- ecp521
- modp768
- modp1024
- modp1536
- modp2048
- modp3072
- modp4096
- modp6144
- modp8192
update_password:
description:
      - C(always) allows the password to be updated if the user chooses to do so.
        C(on_create) will only set the password for newly created IKE peers.
default: always
choices:
- always
- on_create
partition:
description:
- Device partition to manage resources on.
default: Common
state:
description:
- When C(present), ensures that the resource exists.
- When C(absent), ensures the resource is removed.
default: present
choices:
- present
- absent
extends_documentation_fragment: f5
author:
- Tim Rupp (@caphrim007)
- Wojciech Wypior (@wojtek0806)
'''
EXAMPLES = r'''
- name: Create a ...
bigip_ike_peer:
name: foo
provider:
password: secret
server: lb.mydomain.com
user: admin
delegate_to: localhost
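
# A fuller illustrative example; every value below is a placeholder.
- name: Create an IKE peer that authenticates with a pre-shared key
  bigip_ike_peer:
    name: bar
    remote_address: 1.2.3.4
    version:
      - v2
    phase1_auth_method: pre-shared-key
    preshared_key: secret_key
    presented_id_type: address
    verified_id_type: address
    state: present
    provider:
      password: secret
      server: lb.mydomain.com
      user: admin
  delegate_to: localhost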
'''
RETURN = r'''
presented_id_type:
description: The new Presented ID Type value of the resource.
returned: changed
type: string
sample: address
verified_id_type:
description: The new Verified ID Type value of the resource.
returned: changed
type: string
sample: address
phase1_auth_method:
description: The new IKE Phase 1 Credentials Authentication Method value of the resource.
returned: changed
type: string
sample: rsa-signature
remote_address:
description: The new Remote Address value of the resource.
returned: changed
type: string
sample: 1.2.2.1
version:
description: The new list of IKE versions.
returned: changed
type: list
sample: ['v1', 'v2']
phase1_encryption_algorithm:
description: The new IKE Phase 1 Encryption Algorithm.
returned: changed
type: string
sample: 3des
phase1_hash_algorithm:
description: The new IKE Phase 1 Authentication Algorithm.
returned: changed
type: string
sample: sha256
phase1_perfect_forward_secrecy:
description: The new IKE Phase 1 Perfect Forward Secrecy.
returned: changed
type: string
sample: modp1024
phase1_cert:
description: The new IKE Phase 1 Certificate Credentials.
returned: changed
type: string
sample: /Common/cert1.crt
phase1_key:
description: The new IKE Phase 1 Key Credentials.
returned: changed
type: string
sample: /Common/cert1.key
phase1_verify_peer_cert:
description: The new IKE Phase 1 Key Verify Peer Certificate setting.
returned: changed
type: bool
sample: yes
verified_id_value:
description: The new Verified ID Value setting for the Verified ID Type.
returned: changed
type: string
sample: 1.2.3.1
presented_id_value:
description: The new Presented ID Value setting for the Presented ID Type.
returned: changed
type: string
sample: 1.2.3.1
'''
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.basic import env_fallback
try:
from library.module_utils.network.f5.bigip import F5RestClient
from library.module_utils.network.f5.common import F5ModuleError
from library.module_utils.network.f5.common import AnsibleF5Parameters
from library.module_utils.network.f5.common import cleanup_tokens
from library.module_utils.network.f5.common import fq_name
from library.module_utils.network.f5.common import f5_argument_spec
from library.module_utils.network.f5.common import exit_json
from library.module_utils.network.f5.common import fail_json
from library.module_utils.network.f5.common import transform_name
from library.module_utils.network.f5.common import flatten_boolean
from library.module_utils.network.f5.compare import cmp_str_with_none
except ImportError:
from ansible.module_utils.network.f5.bigip import F5RestClient
from ansible.module_utils.network.f5.common import F5ModuleError
from ansible.module_utils.network.f5.common import AnsibleF5Parameters
from ansible.module_utils.network.f5.common import cleanup_tokens
from ansible.module_utils.network.f5.common import fq_name
from ansible.module_utils.network.f5.common import f5_argument_spec
from ansible.module_utils.network.f5.common import exit_json
from ansible.module_utils.network.f5.common import fail_json
from ansible.module_utils.network.f5.common import transform_name
from ansible.module_utils.network.f5.common import flatten_boolean
from ansible.module_utils.network.f5.compare import cmp_str_with_none
class Parameters(AnsibleF5Parameters):
api_map = {
'myIdType': 'presented_id_type',
'peersIdType': 'verified_id_type',
'phase1AuthMethod': 'phase1_auth_method',
'presharedKeyEncrypted': 'preshared_key',
'remoteAddress': 'remote_address',
'version': 'version',
'phase1EncryptAlgorithm': 'phase1_encryption_algorithm',
'phase1HashAlgorithm': 'phase1_hash_algorithm',
'phase1PerfectForwardSecrecy': 'phase1_perfect_forward_secrecy',
'myCertFile': 'phase1_cert',
'myCertKeyFile': 'phase1_key',
'verifyCert': 'phase1_verify_peer_cert',
'peersIdValue': 'verified_id_value',
'myIdValue': 'presented_id_value',
}
api_attributes = [
'myIdType',
'peersIdType',
'phase1AuthMethod',
'presharedKeyEncrypted',
'remoteAddress',
'version',
'phase1EncryptAlgorithm',
'phase1HashAlgorithm',
'phase1PerfectForwardSecrecy',
'myCertFile',
'myCertKeyFile',
'verifyCert',
'peersIdValue',
'myIdValue',
'description',
]
returnables = [
'presented_id_type',
'verified_id_type',
'phase1_auth_method',
'preshared_key',
'remote_address',
'version',
'phase1_encryption_algorithm',
'phase1_hash_algorithm',
'phase1_perfect_forward_secrecy',
'phase1_cert',
'phase1_key',
'phase1_verify_peer_cert',
'verified_id_value',
'presented_id_value',
'description',
]
updatables = [
'presented_id_type',
'verified_id_type',
'phase1_auth_method',
'preshared_key',
'remote_address',
'version',
'phase1_encryption_algorithm',
'phase1_hash_algorithm',
'phase1_perfect_forward_secrecy',
'phase1_cert',
'phase1_key',
'phase1_verify_peer_cert',
'verified_id_value',
'presented_id_value',
'description',
]
@property
def phase1_verify_peer_cert(self):
return flatten_boolean(self._values['phase1_verify_peer_cert'])
class ApiParameters(Parameters):
@property
def description(self):
if self._values['description'] in [None, 'none']:
return None
return self._values['description']
class ModuleParameters(Parameters):
@property
def phase1_cert(self):
if self._values['phase1_cert'] is None:
return None
if self._values['phase1_cert'] in ['', 'none']:
return ''
return fq_name(self.partition, self._values['phase1_cert'])
@property
def phase1_key(self):
if self._values['phase1_key'] is None:
return None
if self._values['phase1_key'] in ['', 'none']:
return ''
return fq_name(self.partition, self._values['phase1_key'])
@property
def description(self):
if self._values['description'] is None:
return None
elif self._values['description'] in ['none', '']:
return ''
return self._values['description']
class Changes(Parameters):
def to_return(self):
result = {}
try:
for returnable in self.returnables:
result[returnable] = getattr(self, returnable)
result = self._filter_params(result)
except Exception:
pass
return result
class UsableChanges(Changes):
@property
def phase1_verify_peer_cert(self):
if self._values['phase1_verify_peer_cert'] is None:
return None
elif self._values['phase1_verify_peer_cert'] == 'yes':
return 'true'
else:
return 'false'
class ReportableChanges(Changes):
@property
def phase1_verify_peer_cert(self):
return flatten_boolean(self._values['phase1_verify_peer_cert'])
@property
def preshared_key(self):
return None
class Difference(object):
def __init__(self, want, have=None):
self.want = want
self.have = have
def compare(self, param):
try:
result = getattr(self, param)
return result
except AttributeError:
return self.__default(param)
def __default(self, param):
attr1 = getattr(self.want, param)
try:
attr2 = getattr(self.have, param)
if attr1 != attr2:
return attr1
except AttributeError:
return attr1
@property
def description(self):
return cmp_str_with_none(self.want.description, self.have.description)
class ModuleManager(object):
def __init__(self, *args, **kwargs):
self.module = kwargs.get('module', None)
self.client = kwargs.get('client', None)
self.want = ModuleParameters(params=self.module.params)
self.have = ApiParameters()
self.changes = UsableChanges()
def _set_changed_options(self):
changed = {}
for key in Parameters.returnables:
if getattr(self.want, key) is not None:
changed[key] = getattr(self.want, key)
if changed:
self.changes = UsableChanges(params=changed)
def _update_changed_options(self):
diff = Difference(self.want, self.have)
updatables = Parameters.updatables
changed = dict()
for k in updatables:
change = diff.compare(k)
if change is None:
continue
else:
if isinstance(change, dict):
changed.update(change)
else:
changed[k] = change
if changed:
self.changes = UsableChanges(params=changed)
return True
return False
def should_update(self):
result = self._update_changed_options()
if result:
return True
return False
def exec_module(self):
changed = False
result = dict()
state = self.want.state
if state == "present":
changed = self.present()
elif state == "absent":
changed = self.absent()
reportable = ReportableChanges(params=self.changes.to_return())
changes = reportable.to_return()
result.update(**changes)
result.update(dict(changed=changed))
self._announce_deprecations(result)
return result
def _announce_deprecations(self, result):
warnings = result.pop('__warnings', [])
for warning in warnings:
self.client.module.deprecate(
msg=warning['msg'],
version=warning['version']
)
def present(self):
if self.exists():
return self.update()
else:
return self.create()
def exists(self):
uri = "https://{0}:{1}/mgmt/tm/net/ipsec/ike-peer/{2}".format(
self.client.provider['server'],
self.client.provider['server_port'],
transform_name(self.want.partition, self.want.name)
)
resp = self.client.api.get(uri)
try:
response = resp.json()
except ValueError:
return False
if resp.status == 404 or 'code' in response and response['code'] == 404:
return False
return True
def update(self):
self.have = self.read_current_from_device()
if self.changes.version is not None and len(self.changes.version) == 0:
raise F5ModuleError(
"At least one version value must be specified."
)
if self.changes.phase1_auth_method == 'pre-shared-key':
if self.changes.preshared_key is None and self.have.preshared_key is None:
raise F5ModuleError(
"A 'preshared_key' must be specified when changing 'phase1_auth_method' "
"to 'pre-shared-key'."
)
if self.want.update_password == 'always':
self.want.update({'preshared_key': self.want.preshared_key})
else:
if self.want.preshared_key:
del self.want._values['preshared_key']
if not self.should_update():
return False
if self.module.check_mode:
return True
self.update_on_device()
return True
def remove(self):
if self.module.check_mode:
return True
self.remove_from_device()
if self.exists():
raise F5ModuleError("Failed to delete the resource.")
return True
def create(self):
self._set_changed_options()
if self.changes.version is None:
raise F5ModuleError(
"The 'version' parameter is required when creating a new IKE peer."
)
if self.changes.phase1_auth_method is None:
self.changes.update({'phase1_auth_method': 'rsa-signature'})
if self.changes.phase1_cert is None:
self.changes.update({'phase1_cert': 'default.crt'})
if self.changes.phase1_key is None:
self.changes.update({'phase1_key': 'default.key'})
if self.module.check_mode:
return True
self.create_on_device()
return True
def create_on_device(self):
params = self.changes.api_params()
params['name'] = self.want.name
params['partition'] = self.want.partition
uri = "https://{0}:{1}/mgmt/tm/net/ipsec/ike-peer/".format(
self.client.provider['server'],
self.client.provider['server_port']
)
resp = self.client.api.post(uri, json=params)
try:
response = resp.json()
except ValueError as ex:
raise F5ModuleError(str(ex))
if 'code' in response and response['code'] in [400, 403]:
if 'message' in response:
raise F5ModuleError(response['message'])
else:
raise F5ModuleError(resp.content)
def update_on_device(self):
params = self.changes.api_params()
uri = "https://{0}:{1}/mgmt/tm/net/ipsec/ike-peer/{2}".format(
self.client.provider['server'],
self.client.provider['server_port'],
transform_name(self.want.partition, self.want.name)
)
resp = self.client.api.patch(uri, json=params)
try:
response = resp.json()
except ValueError as ex:
raise F5ModuleError(str(ex))
if 'code' in response and response['code'] == 400:
if 'message' in response:
raise F5ModuleError(response['message'])
else:
raise F5ModuleError(resp.content)
def absent(self):
if self.exists():
return self.remove()
return False
def remove_from_device(self):
uri = "https://{0}:{1}/mgmt/tm/net/ipsec/ike-peer/{2}".format(
self.client.provider['server'],
self.client.provider['server_port'],
transform_name(self.want.partition, self.want.name)
)
resp = self.client.api.delete(uri)
if resp.status == 200:
return True
def read_current_from_device(self):
uri = "https://{0}:{1}/mgmt/tm/net/ipsec/ike-peer/{2}".format(
self.client.provider['server'],
self.client.provider['server_port'],
transform_name(self.want.partition, self.want.name)
)
resp = self.client.api.get(uri)
try:
response = resp.json()
except ValueError as ex:
raise F5ModuleError(str(ex))
if 'code' in response and response['code'] == 400:
if 'message' in response:
raise F5ModuleError(response['message'])
else:
raise F5ModuleError(resp.content)
return ApiParameters(params=response)
class ArgumentSpec(object):
def __init__(self):
self.supports_check_mode = True
argument_spec = dict(
name=dict(required=True),
presented_id_type=dict(
choices=['address', 'asn1dn', 'fqdn', 'keyid-tag', 'user-fqdn', 'override']
),
presented_id_value=dict(),
verified_id_type=dict(
choices=['address', 'asn1dn', 'fqdn', 'keyid-tag', 'user-fqdn', 'override']
),
verified_id_value=dict(),
phase1_auth_method=dict(
choices=[
'pre-shared-key', 'rsa-signature'
]
),
preshared_key=dict(no_log=True),
remote_address=dict(),
version=dict(
type='list',
choices=['v1', 'v2']
),
phase1_encryption_algorithm=dict(
choices=[
'3des', 'des', 'blowfish', 'cast128', 'aes128', 'aes192',
'aes256', 'camellia'
]
),
phase1_hash_algorithm=dict(
choices=[
'sha1', 'md5', 'sha256', 'sha384', 'sha512'
]
),
phase1_perfect_forward_secrecy=dict(
choices=[
'ecp256', 'ecp384', 'ecp521', 'modp768', 'modp1024', 'modp1536',
'modp2048', 'modp3072', 'modp4096', 'modp6144', 'modp8192'
]
),
phase1_cert=dict(),
phase1_key=dict(),
phase1_verify_peer_cert=dict(type='bool'),
update_password=dict(
default='always',
choices=['always', 'on_create']
),
description=dict(),
state=dict(default='present', choices=['absent', 'present']),
partition=dict(
default='Common',
fallback=(env_fallback, ['F5_PARTITION'])
)
)
self.argument_spec = {}
self.argument_spec.update(f5_argument_spec)
self.argument_spec.update(argument_spec)
self.required_if = [
['presented_id_type', 'fqdn', ['presented_id_value']],
['presented_id_type', 'keyid-tag', ['presented_id_value']],
['presented_id_type', 'user-fqdn', ['presented_id_value']],
['presented_id_type', 'override', ['presented_id_value']],
['verified_id_type', 'fqdn', ['verified_id_value']],
['verified_id_type', 'keyid-tag', ['verified_id_value']],
['verified_id_type', 'user-fqdn', ['verified_id_value']],
['verified_id_type', 'override', ['verified_id_value']],
]
self.required_together = [
['phase1_cert', 'phase1_key']
]
def main():
spec = ArgumentSpec()
module = AnsibleModule(
argument_spec=spec.argument_spec,
supports_check_mode=spec.supports_check_mode,
required_if=spec.required_if,
required_together=spec.required_together,
)
client = F5RestClient(**module.params)
try:
mm = ModuleManager(module=module, client=client)
results = mm.exec_module()
cleanup_tokens(client)
exit_json(module, results, client)
except F5ModuleError as ex:
cleanup_tokens(client)
fail_json(module, ex, client)
if __name__ == '__main__':
main()
| yfried/ansible | lib/ansible/modules/network/f5/bigip_ike_peer.py | Python | gpl-3.0 | 25,483 |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
from django.conf import settings
class Migration(migrations.Migration):
dependencies = [
('contenttypes', '0002_remove_content_type_name'),
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
('sites', '0001_initial'),
]
operations = [
migrations.CreateModel(
name='Comment',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('object_pk', models.TextField(verbose_name='object ID')),
('user_name', models.CharField(max_length=50, verbose_name="user's name", blank=True)),
('user_email', models.EmailField(max_length=254, verbose_name="user's email address", blank=True)),
('user_url', models.URLField(verbose_name="user's URL", blank=True)),
('comment', models.TextField(max_length=3000, verbose_name='comment')),
('submit_date', models.DateTimeField(default=None, verbose_name='date/time submitted', db_index=True)),
('ip_address', models.GenericIPAddressField(unpack_ipv4=True, null=True, verbose_name='IP address', blank=True)),
('is_public', models.BooleanField(default=True, help_text='Uncheck this box to make the comment effectively disappear from the site.', verbose_name='is public')),
('is_removed', models.BooleanField(default=False, help_text='Check this box if the comment is inappropriate. A "This comment has been removed" message will be displayed instead.', verbose_name='is removed')),
('content_type', models.ForeignKey(related_name='content_type_set_for_comment', verbose_name='content type', to='contenttypes.ContentType', on_delete=models.CASCADE)),
('site', models.ForeignKey(to='sites.Site', on_delete=models.CASCADE)),
('user', models.ForeignKey(related_name='comment_comments', verbose_name='user', blank=True, to=settings.AUTH_USER_MODEL, null=True, on_delete=models.CASCADE)),
],
options={
'ordering': ('submit_date',),
'abstract': False,
'verbose_name': 'comment',
'verbose_name_plural': 'comments',
'permissions': [('can_moderate', 'Can moderate comments')],
},
),
]
| claudep/pootle | pootle/apps/pootle_comment/migrations/0001_initial.py | Python | gpl-3.0 | 2,477 |
########################################################################
# $HeadURL$
# File: ReqProxyHandler.py
# Author: Krzysztof.Ciba@NOSPAMgmail.com
# Date: 2013/06/04 13:18:41
########################################################################
"""
:mod: ReqProxyHandler
.. module: ReqProxyHandler
:synopsis: ReqProxy service
.. moduleauthor:: Krzysztof.Ciba@NOSPAMgmail.com
Careful with that axe, Eugene! Some 'transfer' requests use the local fs
and should never be forwarded to the central RequestManager.
"""
__RCSID__ = "$Id$"
# #
# @file ReqProxyHandler.py
# @author Krzysztof.Ciba@NOSPAMgmail.com
# @date 2012/07/20 13:18:58
# @brief Definition of ReqProxyHandler class.
# # imports
import os
from types import DictType
try:
from hashlib import md5
except ImportError:
from md5 import md5
# # from DIRAC
from DIRAC import S_OK, S_ERROR, gLogger
from DIRAC.Core.DISET.RequestHandler import RequestHandler
from DIRAC.Core.DISET.RPCClient import RPCClient
from DIRAC.RequestManagementSystem.Client.Request import Request
from DIRAC.Core.Utilities.ThreadScheduler import gThreadScheduler
def initializeReqProxyHandler( serviceInfo ):
""" init RequestProxy handler
:param serviceInfo: whatever
"""
gLogger.info( "Initalizing ReqProxyHandler" )
gThreadScheduler.addPeriodicTask( 120, ReqProxyHandler.sweeper )
return S_OK()
########################################################################
class ReqProxyHandler( RequestHandler ):
"""
.. class:: ReqProxyHandler
:param RPCCLient requestManager: a RPCClient to RequestManager
:param str cacheDir: os.path.join( workDir, "requestCache" )
"""
__requestManager = None
__cacheDir = None
def initialize( self ):
""" service initialization
:param self: self reference
"""
gLogger.notice( "CacheDirectory: %s" % self.cacheDir() )
return S_OK()
@classmethod
def requestManager( cls ):
""" get request manager """
if not cls.__requestManager:
cls.__requestManager = RPCClient( "RequestManagement/ReqManager" )
return cls.__requestManager
@classmethod
def cacheDir( cls ):
""" get cache dir """
if not cls.__cacheDir:
cls.__cacheDir = os.path.abspath( "requestCache" )
if not os.path.exists( cls.__cacheDir ):
os.mkdir( cls.__cacheDir )
return cls.__cacheDir
@classmethod
def sweeper( cls ):
""" move cached request to the central request manager
:param self: self reference
"""
cacheDir = cls.cacheDir()
# # cache dir empty?
if not os.listdir( cacheDir ):
gLogger.always( "sweeper: CacheDir %s is empty, nothing to do" % cacheDir )
return S_OK()
else:
# # read 10 cache dir files, the oldest first
cachedRequests = [ os.path.abspath( requestFile ) for requestFile in
sorted( filter( os.path.isfile,
[ os.path.join( cacheDir, requestName )
for requestName in os.listdir( cacheDir ) ] ),
key = os.path.getctime ) ][:10]
# # set cached requests to the central RequestManager
for cachedFile in cachedRequests:
# # break if something went wrong last time
try:
requestString = "".join( open( cachedFile, "r" ).readlines() )
cachedRequest = eval( requestString )
cachedName = cachedRequest.get( "RequestName", "***UNKNOWN***" )
setRequest = cls.requestManager().putRequest( cachedRequest )
if not setRequest["OK"]:
gLogger.error( "sweeper: unable to set request %s @ ReqManager: %s" % ( cachedName,
setRequest["Message"] ) )
continue
gLogger.info( "sweeper: successfully set request '%s' @ ReqManager" % cachedName )
os.unlink( cachedFile )
except Exception, error:
gLogger.exception( "sweeper: hit by exception %s" % str( error ) )
return S_ERROR( "sweeper: hit by exception: %s" % str( error ) )
return S_OK()
def __saveRequest( self, requestName, requestJSON ):
""" save request string to the working dir cache
:param self: self reference
:param str requestName: request name
:param str requestJSON: request serialized to JSON format
"""
try:
requestFile = os.path.join( self.cacheDir(), md5( str( requestJSON ) ).hexdigest() )
request = open( requestFile, "w+" )
request.write( str( requestJSON ) )
request.close()
return S_OK( requestFile )
except OSError, error:
err = "unable to dump %s to cache file: %s" % ( requestName, str( error ) )
gLogger.exception( err )
return S_ERROR( err )
types_getStatus = []
def export_getStatus( self ):
""" get number of requests in cache """
try:
cachedRequests = len( os.listdir( self.cacheDir() ) )
except OSError, error:
err = "getStatus: unable to list cache dir contents: %s" % str( error )
gLogger.exception( err )
return S_ERROR( err )
return S_OK( cachedRequests )
types_putRequest = [ DictType ]
def export_putRequest( self, requestJSON ):
""" forward request from local RequestDB to central RequestManager
:param self: self reference
    :param dict requestJSON: request serialized to JSON format
"""
requestName = requestJSON.get( "RequestName", "***UNKNOWN***" )
gLogger.info( "setRequest: got request '%s'" % requestName )
forwardable = self.__forwardable( requestJSON )
if not forwardable["OK"]:
gLogger.warn( "setRequest: %s" % forwardable["Message"] )
setRequest = self.requestManager().putRequest( requestJSON )
if not setRequest["OK"]:
gLogger.error( "setReqeuest: unable to set request '%s' @ RequestManager: %s" % ( requestName,
setRequest["Message"] ) )
# # put request to the request file cache
save = self.__saveRequest( requestName, requestJSON )
if not save["OK"]:
gLogger.error( "setRequest: unable to save request to the cache: %s" % save["Message"] )
return save
gLogger.info( "setRequest: %s is saved to %s file" % ( requestName, save["Value"] ) )
return S_OK( { "set" : False, "saved" : True } )
gLogger.info( "setRequest: request '%s' has been set to the ReqManager" % ( requestName ) )
return S_OK( { "set" : True, "saved" : False } )
@staticmethod
def __forwardable( requestJSON ):
""" check if request if forwardable
The sub-request of type transfer:putAndRegister, removal:physicalRemoval and removal:reTransfer are
definitely not, they should be executed locally, as they are using local fs.
:param str requestJSON: serialized request
"""
operations = requestJSON.get( "Operations", [] )
for operationDict in operations:
if operationDict.get( "Type", "" ) in ( "PutAndRegister", "PhysicalRemoval", "ReTransfer" ):
return S_ERROR( "found operation '%s' that cannot be forwarded" % operationDict.get( "Type", "" ) )
return S_OK()
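# --- Hedged usage sketch (editor addition, not part of DIRAC) ---
# How a client might push a request through this proxy. The service path
# "RequestManagement/ReqProxy" and the request dict shape are assumptions
# inferred from export_putRequest() above (types_putRequest = [ DictType ]).
if __name__ == "__main__":
  from DIRAC.Core.Base import Script
  Script.parseCommandLine()
  proxy = RPCClient( "RequestManagement/ReqProxy" )
  print proxy.putRequest( { "RequestName" : "demoRequest", "Operations" : [] } )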
| Sbalbp/DIRAC | RequestManagementSystem/Service/ReqProxyHandler.py | Python | gpl-3.0 | 7,200 |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
from django.conf import settings
class Migration(migrations.Migration):
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
('trans', '0045_auto_20150916_1007'),
]
operations = [
migrations.CreateModel(
name='Billing',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
],
),
migrations.CreateModel(
name='Plan',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('name', models.CharField(unique=True, max_length=100)),
('price', models.IntegerField()),
('limit_strings', models.IntegerField()),
('limit_languages', models.IntegerField()),
('limit_repositories', models.IntegerField()),
('limit_projects', models.IntegerField()),
],
options={
'ordering': ['name'],
},
),
migrations.AddField(
model_name='billing',
name='plan',
field=models.ForeignKey(to='billing.Plan'),
),
migrations.AddField(
model_name='billing',
name='projects',
field=models.ManyToManyField(to='trans.Project', blank=True),
),
migrations.AddField(
model_name='billing',
name='user',
field=models.OneToOneField(to=settings.AUTH_USER_MODEL),
),
]
| miumok98/weblate | weblate/billing/migrations/0001_initial.py | Python | gpl-3.0 | 1,726 |
from __future__ import unicode_literals
import re
from .common import InfoExtractor
from ..utils import (
HEADRequest,
sanitized_Request,
urlencode_postdata,
)
class GDCVaultIE(InfoExtractor):
_VALID_URL = r'https?://(?:www\.)?gdcvault\.com/play/(?P<id>\d+)/(?P<name>(\w|-)+)?'
_NETRC_MACHINE = 'gdcvault'
_TESTS = [
{
'url': 'http://www.gdcvault.com/play/1019721/Doki-Doki-Universe-Sweet-Simple',
'md5': '7ce8388f544c88b7ac11c7ab1b593704',
'info_dict': {
'id': '1019721',
'display_id': 'Doki-Doki-Universe-Sweet-Simple',
'ext': 'mp4',
'title': 'Doki-Doki Universe: Sweet, Simple and Genuine (GDC Next 10)'
}
},
{
'url': 'http://www.gdcvault.com/play/1015683/Embracing-the-Dark-Art-of',
'info_dict': {
'id': '1015683',
'display_id': 'Embracing-the-Dark-Art-of',
'ext': 'flv',
'title': 'Embracing the Dark Art of Mathematical Modeling in AI'
},
'params': {
'skip_download': True, # Requires rtmpdump
}
},
{
'url': 'http://www.gdcvault.com/play/1015301/Thexder-Meets-Windows-95-or',
'md5': 'a5eb77996ef82118afbbe8e48731b98e',
'info_dict': {
'id': '1015301',
'display_id': 'Thexder-Meets-Windows-95-or',
'ext': 'flv',
'title': 'Thexder Meets Windows 95, or Writing Great Games in the Windows 95 Environment',
},
'skip': 'Requires login',
},
{
'url': 'http://gdcvault.com/play/1020791/',
'only_matching': True,
},
{
# Hard-coded hostname
'url': 'http://gdcvault.com/play/1023460/Tenacious-Design-and-The-Interface',
'md5': 'a8efb6c31ed06ca8739294960b2dbabd',
'info_dict': {
'id': '1023460',
'ext': 'mp4',
'display_id': 'Tenacious-Design-and-The-Interface',
'title': 'Tenacious Design and The Interface of \'Destiny\'',
},
},
{
# Multiple audios
'url': 'http://www.gdcvault.com/play/1014631/Classic-Game-Postmortem-PAC',
'info_dict': {
'id': '1014631',
'ext': 'flv',
'title': 'How to Create a Good Game - From My Experience of Designing Pac-Man',
},
'params': {
'skip_download': True, # Requires rtmpdump
'format': 'jp', # The japanese audio
}
},
{
# gdc-player.html
'url': 'http://www.gdcvault.com/play/1435/An-American-engine-in-Tokyo',
'info_dict': {
'id': '1435',
'display_id': 'An-American-engine-in-Tokyo',
'ext': 'flv',
'title': 'An American Engine in Tokyo:/nThe collaboration of Epic Games and Square Enix/nFor THE LAST REMINANT',
},
'params': {
'skip_download': True, # Requires rtmpdump
},
},
]
def _login(self, webpage_url, display_id):
username, password = self._get_login_info()
if username is None or password is None:
self.report_warning('It looks like ' + webpage_url + ' requires a login. Try specifying a username and password and try again.')
return None
mobj = re.match(r'(?P<root_url>https?://.*?/).*', webpage_url)
login_url = mobj.group('root_url') + 'api/login.php'
logout_url = mobj.group('root_url') + 'logout'
login_form = {
'email': username,
'password': password,
}
request = sanitized_Request(login_url, urlencode_postdata(login_form))
request.add_header('Content-Type', 'application/x-www-form-urlencoded')
self._download_webpage(request, display_id, 'Logging in')
start_page = self._download_webpage(webpage_url, display_id, 'Getting authenticated video page')
self._download_webpage(logout_url, display_id, 'Logging out')
return start_page
def _real_extract(self, url):
mobj = re.match(self._VALID_URL, url)
video_id = mobj.group('id')
display_id = mobj.group('name') or video_id
webpage_url = 'http://www.gdcvault.com/play/' + video_id
start_page = self._download_webpage(webpage_url, display_id)
direct_url = self._search_regex(
r's1\.addVariable\("file",\s*encodeURIComponent\("(/[^"]+)"\)\);',
start_page, 'url', default=None)
if direct_url:
title = self._html_search_regex(
r'<td><strong>Session Name</strong></td>\s*<td>(.*?)</td>',
start_page, 'title')
video_url = 'http://www.gdcvault.com' + direct_url
# resolve the url so that we can detect the correct extension
head = self._request_webpage(HEADRequest(video_url), video_id)
video_url = head.geturl()
return {
'id': video_id,
'display_id': display_id,
'url': video_url,
'title': title,
}
PLAYER_REGEX = r'<iframe src="(?P<xml_root>.+?)/(?:gdc-)?player.*?\.html.*?".*?</iframe>'
xml_root = self._html_search_regex(
PLAYER_REGEX, start_page, 'xml root', default=None)
if xml_root is None:
# Probably need to authenticate
login_res = self._login(webpage_url, display_id)
if login_res is None:
self.report_warning('Could not login.')
else:
start_page = login_res
# Grab the url from the authenticated page
xml_root = self._html_search_regex(
PLAYER_REGEX, start_page, 'xml root')
xml_name = self._html_search_regex(
r'<iframe src=".*?\?xml=(.+?\.xml).*?".*?</iframe>',
start_page, 'xml filename', default=None)
if xml_name is None:
# Fallback to the older format
xml_name = self._html_search_regex(
r'<iframe src=".*?\?xmlURL=xml/(?P<xml_file>.+?\.xml).*?".*?</iframe>',
start_page, 'xml filename')
return {
'_type': 'url_transparent',
'id': video_id,
'display_id': display_id,
'url': '%s/xml/%s' % (xml_root, xml_name),
'ie_key': 'DigitallySpeaking',
}
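# --- Hedged usage sketch (editor addition, not part of youtube-dl) ---
# The extractor is registered automatically once youtube-dl is installed;
# from a separate script one might exercise it like this (URL taken from
# the _TESTS list above; option names are standard YoutubeDL options):
#
#     import youtube_dl
#     with youtube_dl.YoutubeDL({'skip_download': True}) as ydl:
#         info = ydl.extract_info(
#             'http://www.gdcvault.com/play/1019721/Doki-Doki-Universe-Sweet-Simple')
#         print(info['title'])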
| stannynuytkens/youtube-dl | youtube_dl/extractor/gdcvault.py | Python | unlicense | 6,690 |
#!/usr/bin/python2
# Copyright (c) 2017 The WebRTC project authors. All Rights Reserved.
#
# Use of this source code is governed by a BSD-style license
# that can be found in the LICENSE file in the root of the source
# tree. An additional intellectual property rights grant can be found
# in the file PATENTS. All contributing project authors may
# be found in the AUTHORS file in the root of the source tree.
# To run this script, first copy "out/<build_name>/pyproto/webrtc/modules/
# audio_coding/audio_network_adaptor/debug_dump_pb2.py" to this folder.
# Then you can run this script with:
# "python parse_ana_dump.py -m uplink_bandwidth_bps -f dump_file.dat"
# You can add as many metrics or decisions to the plot as you like.
# For more information call:
# "python parse_ana_dump.py --help"
import struct
from optparse import OptionParser
import matplotlib.pyplot as plt
import debug_dump_pb2
def GetNextMessageSize(file_to_parse):
data = file_to_parse.read(4)
if data == '':
return 0
return struct.unpack('<I', data)[0]
def GetNextMessageFromFile(file_to_parse):
message_size = GetNextMessageSize(file_to_parse)
if message_size == 0:
return None
try:
event = debug_dump_pb2.Event()
event.ParseFromString(file_to_parse.read(message_size))
except IOError:
print 'Invalid message in file'
return None
return event
def InitMetrics():
metrics = {}
event = debug_dump_pb2.Event()
for metric in event.network_metrics.DESCRIPTOR.fields:
metrics[metric.name] = {'time': [], 'value': []}
return metrics
def InitDecisions():
decisions = {}
event = debug_dump_pb2.Event()
for decision in event.encoder_runtime_config.DESCRIPTOR.fields:
decisions[decision.name] = {'time': [], 'value': []}
return decisions
def ParseAnaDump(dump_file_to_parse):
with open(dump_file_to_parse, 'rb') as file_to_parse:
metrics = InitMetrics()
decisions = InitDecisions()
first_time_stamp = None
while True:
event = GetNextMessageFromFile(file_to_parse)
if event == None:
break
if first_time_stamp == None:
first_time_stamp = event.timestamp
if event.type == debug_dump_pb2.Event.ENCODER_RUNTIME_CONFIG:
for decision in event.encoder_runtime_config.DESCRIPTOR.fields:
if event.encoder_runtime_config.HasField(decision.name):
decisions[decision.name]['time'].append(event.timestamp -
first_time_stamp)
decisions[decision.name]['value'].append(
getattr(event.encoder_runtime_config, decision.name))
if event.type == debug_dump_pb2.Event.NETWORK_METRICS:
for metric in event.network_metrics.DESCRIPTOR.fields:
if event.network_metrics.HasField(metric.name):
metrics[metric.name]['time'].append(event.timestamp -
first_time_stamp)
metrics[metric.name]['value'].append(
getattr(event.network_metrics, metric.name))
return (metrics, decisions)
def main():
parser = OptionParser()
parser.add_option(
"-f", "--dump_file", dest="dump_file_to_parse", help="dump file to parse")
parser.add_option(
'-m',
'--metric_plot',
default=[],
type=str,
help='metric key (name of the metric) to plot',
dest='metric_keys',
action='append')
parser.add_option(
'-d',
'--decision_plot',
default=[],
type=str,
help='decision key (name of the decision) to plot',
dest='decision_keys',
action='append')
options = parser.parse_args()[0]
if options.dump_file_to_parse == None:
print "No dump file to parse is set.\n"
parser.print_help()
exit()
(metrics, decisions) = ParseAnaDump(options.dump_file_to_parse)
metric_keys = options.metric_keys
decision_keys = options.decision_keys
plot_count = len(metric_keys) + len(decision_keys)
if plot_count == 0:
print "You have to set at least one metric or decision to plot.\n"
parser.print_help()
exit()
plots = []
if plot_count == 1:
f, mp_plot = plt.subplots()
plots.append(mp_plot)
else:
f, mp_plots = plt.subplots(plot_count, sharex=True)
plots.extend(mp_plots.tolist())
for key in metric_keys:
plot = plots.pop()
plot.grid(True)
plot.set_title(key + " (metric)")
plot.plot(metrics[key]['time'], metrics[key]['value'])
for key in decision_keys:
plot = plots.pop()
plot.grid(True)
plot.set_title(key + " (decision)")
plot.plot(decisions[key]['time'], decisions[key]['value'])
f.subplots_adjust(hspace=0.3)
plt.show()
if __name__ == "__main__":
main()
| wangcy6/storm_app | frame/c++/webrtc-master/modules/audio_coding/audio_network_adaptor/parse_ana_dump.py | Python | apache-2.0 | 4,718 |
"""Support for Verisure Smartplugs."""
import logging
from time import monotonic
from homeassistant.components.switch import SwitchEntity
from . import CONF_SMARTPLUGS, HUB as hub
_LOGGER = logging.getLogger(__name__)
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up the Verisure switch platform."""
if not int(hub.config.get(CONF_SMARTPLUGS, 1)):
return False
hub.update_overview()
switches = []
switches.extend(
[
VerisureSmartplug(device_label)
for device_label in hub.get("$.smartPlugs[*].deviceLabel")
]
)
add_entities(switches)
class VerisureSmartplug(SwitchEntity):
"""Representation of a Verisure smartplug."""
def __init__(self, device_id):
"""Initialize the Verisure device."""
self._device_label = device_id
self._change_timestamp = 0
self._state = False
@property
def name(self):
"""Return the name or location of the smartplug."""
return hub.get_first(
"$.smartPlugs[?(@.deviceLabel == '%s')].area", self._device_label
)
@property
def is_on(self):
"""Return true if on."""
if monotonic() - self._change_timestamp < 10:
return self._state
self._state = (
hub.get_first(
"$.smartPlugs[?(@.deviceLabel == '%s')].currentState",
self._device_label,
)
== "ON"
)
return self._state
@property
def available(self):
"""Return True if entity is available."""
return (
hub.get_first("$.smartPlugs[?(@.deviceLabel == '%s')]", self._device_label)
is not None
)
def turn_on(self, **kwargs):
"""Set smartplug status on."""
hub.session.set_smartplug_state(self._device_label, True)
self._state = True
self._change_timestamp = monotonic()
def turn_off(self, **kwargs):
"""Set smartplug status off."""
hub.session.set_smartplug_state(self._device_label, False)
self._state = False
self._change_timestamp = monotonic()
# pylint: disable=no-self-use
def update(self):
"""Get the latest date of the smartplug."""
hub.update_overview()
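# --- Hedged usage note (editor addition, not part of Home Assistant) ---
# This platform is loaded by the main `verisure` integration rather than
# configured directly; a minimal configuration.yaml might look like the
# sketch below (key names are assumptions based on CONF_SMARTPLUGS above):
#
#   verisure:
#     username: user@example.com
#     password: !secret verisure_password
#     smartplugs: true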
| nkgilley/home-assistant | homeassistant/components/verisure/switch.py | Python | apache-2.0 | 2,311 |
# Copyright 2013 IBM Corporation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from tempest.api.compute import base
from tempest import test
class HypervisorAdminTestJSON(base.BaseV2ComputeAdminTest):
"""Tests Hypervisors API that require admin privileges"""
@classmethod
def setup_clients(cls):
super(HypervisorAdminTestJSON, cls).setup_clients()
cls.client = cls.os_adm.hypervisor_client
def _list_hypervisors(self):
# List of hypervisors
hypers = self.client.list_hypervisors()['hypervisors']
return hypers
def assertHypervisors(self, hypers):
self.assertTrue(len(hypers) > 0, "No hypervisors found: %s" % hypers)
@test.idempotent_id('7f0ceacd-c64d-4e96-b8ee-d02943142cc5')
def test_get_hypervisor_list(self):
# List of hypervisor and available hypervisors hostname
hypers = self._list_hypervisors()
self.assertHypervisors(hypers)
@test.idempotent_id('1e7fdac2-b672-4ad1-97a4-bad0e3030118')
def test_get_hypervisor_list_details(self):
# Display the details of the all hypervisor
hypers = self.client.list_hypervisors(detail=True)['hypervisors']
self.assertHypervisors(hypers)
@test.idempotent_id('94ff9eae-a183-428e-9cdb-79fde71211cc')
def test_get_hypervisor_show_details(self):
# Display the details of the specified hypervisor
hypers = self._list_hypervisors()
self.assertHypervisors(hypers)
details = self.client.show_hypervisor(hypers[0]['id'])['hypervisor']
self.assertTrue(len(details) > 0)
self.assertEqual(details['hypervisor_hostname'],
hypers[0]['hypervisor_hostname'])
@test.idempotent_id('e81bba3f-6215-4e39-a286-d52d2f906862')
def test_get_hypervisor_show_servers(self):
# Show instances about the specific hypervisors
hypers = self._list_hypervisors()
self.assertHypervisors(hypers)
hostname = hypers[0]['hypervisor_hostname']
hypervisors = (self.client.list_servers_on_hypervisor(hostname)
['hypervisors'])
self.assertTrue(len(hypervisors) > 0)
@test.idempotent_id('797e4f28-b6e0-454d-a548-80cc77c00816')
def test_get_hypervisor_stats(self):
# Verify the stats of the all hypervisor
stats = (self.client.show_hypervisor_statistics()
['hypervisor_statistics'])
self.assertTrue(len(stats) > 0)
@test.idempotent_id('91a50d7d-1c2b-4f24-b55a-a1fe20efca70')
def test_get_hypervisor_uptime(self):
# Verify that GET shows the specified hypervisor uptime
hypers = self._list_hypervisors()
# Ironic will register each baremetal node as a 'hypervisor',
# so the hypervisor list can contain many hypervisors of type
# 'ironic'. If they are ALL ironic, skip this test since ironic
# doesn't support hypervisor uptime. Otherwise, remove them
# from the list of hypervisors to test.
ironic_only = True
hypers_without_ironic = []
for hyper in hypers:
      details = (self.client.show_hypervisor(hyper['id'])
                 ['hypervisor'])
if details['hypervisor_type'] != 'ironic':
hypers_without_ironic.append(hyper)
ironic_only = False
if ironic_only:
raise self.skipException(
"Ironic does not support hypervisor uptime")
has_valid_uptime = False
for hyper in hypers_without_ironic:
# because hypervisors might be disabled, this loops looking
# for any good hit.
try:
uptime = (self.client.show_hypervisor_uptime(hyper['id'])
['hypervisor'])
if len(uptime) > 0:
has_valid_uptime = True
break
except Exception:
pass
self.assertTrue(
has_valid_uptime,
"None of the hypervisors had a valid uptime: %s" % hypers)
@test.idempotent_id('d7e1805b-3b14-4a3b-b6fd-50ec6d9f361f')
def test_search_hypervisor(self):
hypers = self._list_hypervisors()
self.assertHypervisors(hypers)
hypers = self.client.search_hypervisor(
hypers[0]['hypervisor_hostname'])['hypervisors']
self.assertHypervisors(hypers)
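# --- Hedged usage note (editor addition, not part of tempest) ---
# These tests require admin credentials and are normally run through
# tempest's own runner rather than invoked directly, along the lines of
# (invocation assumed from the usual tempest/testr workflow of this era):
#
#   testr run tempest.api.compute.admin.test_hypervisor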
| zsoltdudas/lis-tempest | tempest/api/compute/admin/test_hypervisor.py | Python | apache-2.0 | 4,953 |
#!/usr/bin/python
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
import os, sys, subprocess, socket, fcntl, struct
from socket import gethostname
from xml.dom.minidom import parseString
from xmlrpclib import ServerProxy, Error
def get_ip_address(ifname):
s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
return socket.inet_ntoa(fcntl.ioctl(
s.fileno(),
0x8915, # SIOCGIFADDR
struct.pack('256s', ifname[:15])
)[20:24])
def is_it_up(host, port):
try:
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
s.settimeout(1)
s.connect((host, port))
s.close()
except:
print "host: %s:%s DOWN" % (host, port)
return False
print "host: %s:%s UP" % (host, port)
return True
# hmm master actions don't apply to a slave
master = "192.168.1.161"
port = 8899
user = "oracle"
password = "*******"
auth = "%s:%s" % (user, password)
server = ServerProxy("http://%s:%s" % ("localhost", port))
mserver = ServerProxy("http://%s@%s:%s" % (auth, master, port))
poolNode = True
interface = "c0a80100"
role = 'xen,utility'
hostname = gethostname()
ip = get_ip_address(interface)
poolMembers = []
xserver = server
print "setting up password"
server.update_agent_password(user, password)
if (is_it_up(master, port)):
print "master seems to be up, slaving"
xserver = mserver
else:
print "no master yet, will become master"
# another mechanism must be used to make the interfaces equal...
try:
# pooling related same as primary storage!
poolalias = "Pool 0"
poolid = "0004fb0000020000ba9aaf00ae5e2d73"
poolfsnfsbaseuuid = "7718562d-872f-47a7-b454-8f9cac4ffa3a"
pooluuid = poolid
poolfsuuid = poolid
clusterid = "ba9aaf00ae5e2d72"
mgr = "d1a749d4295041fb99854f52ea4dea97"
poolmvip = master
poolfsnfsbaseuuid = "6824e646-5908-48c9-ba44-bb1a8a778084"
repoid = "6824e646590848c9ba44bb1a8a778084"
poolid = repoid
repo = "/OVS/Repositories/%s" % (repoid)
repomount = "cs-mgmt:/volumes/cs-data/secondary"
# primary
primuuid = "7718562d872f47a7b4548f9cac4ffa3a"
ssuuid = "7718562d-872f-47a7-b454-8f9cac4ffa3a"
fshost = "cs-mgmt"
fstarget = "/volumes/cs-data/primary"
fstype = "nfs"
fsname = "Primary storage"
fsmntpoint = "%s:%s" % (fshost, fstarget)
fsmnt = "/nfsmnt/%s" % (ssuuid)
fsplugin = "oracle.generic.NFSPlugin.GenericNFSPlugin"
# set the basics we require to "operate"
print server.take_ownership(mgr, '')
print server.update_server_roles(role,)
# if we're pooling pool...
if (poolNode == True):
poolCount = 0
pooled = False
# check pooling
try:
poolDom = parseString(xserver.discover_server_pool())
print xserver.discover_server_pool()
for node in poolDom.getElementsByTagName('Server_Pool'):
id = node.getElementsByTagName('Unique_Id')[0].firstChild.nodeValue
alias = node.getElementsByTagName('Pool_Alias')[0].firstChild.nodeValue
mvip = node.getElementsByTagName('Master_Virtual_Ip')[0].firstChild.nodeValue
print "pool: %s, %s, %s" % (id, mvip, alias)
members = node.getElementsByTagName('Member')
for member in members:
poolCount = poolCount + 1
mip = member.getElementsByTagName('Registered_IP')[0].firstChild.nodeValue
print "member: %s" % (mip)
if mip == ip:
pooled = True
else:
poolMembers.append(mip)
except Error, v:
print "no master will become master, %s" % v
if (pooled == False):
# setup the repository
print "setup repo"
print server.mount_repository_fs(repomount, repo)
try:
print "adding repo"
print server.add_repository(repomount, repo)
except Error, v:
print "will create the repo, as it's not there", v
print server.create_repository(repomount, repo, repoid, "repo")
print "not pooled!"
if (poolCount == 0):
print "no pool yet, create it"
# check if a pool exists already if not create
# pool if so add us to the pool
print "create pool fs"
print server.create_pool_filesystem(
fstype,
"%s/VirtualMachines/" % repomount,
clusterid,
poolfsuuid,
poolfsnfsbaseuuid,
mgr,
pooluuid
)
print "create pool"
print server.create_server_pool(poolalias,
pooluuid,
poolmvip,
poolCount,
hostname,
ip,
role
)
else:
print "join the pool"
print server.join_server_pool(poolalias,
pooluuid,
poolmvip,
poolCount,
hostname,
ip,
role
)
# add member to ip list ?
poolMembers.append(ip)
print "mambers for pool: %s" % poolMembers
print xserver.set_pool_member_ip_list(poolMembers)
print server.discover_server_pool()
except Error, v:
print "ERROR", v
| MissionCriticalCloud/cosmic-plugin-hypervisor-ovm3 | src/test/resources/scripts/repo_pool.py | Python | apache-2.0 | 6,325 |
#!/usr/bin/env python
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
# pylint: skip-file
import sys
sys.path.insert(0, "../../python/")
import mxnet as mx
kv = mx.kv.create('dist_async')
my_rank = kv.rank
nworker = kv.num_workers
def test_gluon_trainer_type():
def check_trainer_kv_update(weight_stype, update_on_kv):
x = mx.gluon.Parameter('x', shape=(10,1), lr_mult=1.0, stype=weight_stype)
x.initialize(ctx=[mx.cpu(0), mx.cpu(1)], init='zeros')
try:
trainer = mx.gluon.Trainer([x], 'sgd', {'learning_rate': 0.1},
kvstore=kv, update_on_kvstore=update_on_kv)
trainer._init_kvstore()
assert trainer._kv_initialized
assert trainer._update_on_kvstore is True
except ValueError:
assert update_on_kv is False
check_trainer_kv_update('default', False)
check_trainer_kv_update('default', True)
check_trainer_kv_update('default', None)
check_trainer_kv_update('row_sparse', False)
check_trainer_kv_update('row_sparse', True)
check_trainer_kv_update('row_sparse', None)
print('worker ' + str(my_rank) + ' passed test_gluon_trainer_type')
if __name__ == "__main__":
test_gluon_trainer_type()
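# --- Hedged usage note (editor addition, not part of MXNet) ---
# A 'dist_async' kvstore needs a scheduler, server(s) and worker(s); this
# test is normally started through MXNet's launcher, along the lines of
# (command assumed from MXNet's distributed-training docs; adjust paths):
#
#   python ../../tools/launch.py -n 2 --launcher local \
#       python dist_async_kvstore.py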
| szha/mxnet | tests/nightly/dist_async_kvstore.py | Python | apache-2.0 | 1,994 |
# Copyright 2007 Google, Inc. All Rights Reserved.
# Licensed to PSF under a Contributor Agreement.
"""Abstract Base Classes (ABCs) for collections, according to PEP 3119.
DON'T USE THIS MODULE DIRECTLY! The classes here should be imported
via collections; they are defined here only to alleviate certain
bootstrapping issues. Unit tests are in test_collections.
"""
from abc import ABCMeta, abstractmethod
import sys
__all__ = ["Hashable", "Iterable", "Iterator",
"Sized", "Container", "Callable",
"Set", "MutableSet",
"Mapping", "MutableMapping",
"MappingView", "KeysView", "ItemsView", "ValuesView",
"Sequence", "MutableSequence",
]
### ONE-TRICK PONIES ###
class Hashable:
__metaclass__ = ABCMeta
@abstractmethod
def __hash__(self):
return 0
@classmethod
def __subclasshook__(cls, C):
if cls is Hashable:
for B in C.__mro__:
if "__hash__" in B.__dict__:
if B.__dict__["__hash__"]:
return True
break
return NotImplemented
class Iterable:
__metaclass__ = ABCMeta
@abstractmethod
def __iter__(self):
while False:
yield None
@classmethod
def __subclasshook__(cls, C):
if cls is Iterable:
if any("__iter__" in B.__dict__ for B in C.__mro__):
return True
return NotImplemented
Iterable.register(str)
class Iterator(Iterable):
@abstractmethod
    def next(self):
raise StopIteration
def __iter__(self):
return self
@classmethod
def __subclasshook__(cls, C):
if cls is Iterator:
if any("next" in B.__dict__ for B in C.__mro__):
return True
return NotImplemented
class Sized:
__metaclass__ = ABCMeta
@abstractmethod
def __len__(self):
return 0
@classmethod
def __subclasshook__(cls, C):
if cls is Sized:
if any("__len__" in B.__dict__ for B in C.__mro__):
return True
return NotImplemented
class Container:
__metaclass__ = ABCMeta
@abstractmethod
def __contains__(self, x):
return False
@classmethod
def __subclasshook__(cls, C):
if cls is Container:
if any("__contains__" in B.__dict__ for B in C.__mro__):
return True
return NotImplemented
class Callable:
__metaclass__ = ABCMeta
@abstractmethod
def __call__(self, *args, **kwds):
return False
@classmethod
def __subclasshook__(cls, C):
if cls is Callable:
if any("__call__" in B.__dict__ for B in C.__mro__):
return True
return NotImplemented
### SETS ###
class Set(Sized, Iterable, Container):
"""A set is a finite, iterable container.
This class provides concrete generic implementations of all
methods except for __contains__, __iter__ and __len__.
To override the comparisons (presumably for speed, as the
semantics are fixed), all you have to do is redefine __le__ and
then the other operations will automatically follow suit.
"""
def __le__(self, other):
if not isinstance(other, Set):
return NotImplemented
if len(self) > len(other):
return False
for elem in self:
if elem not in other:
return False
return True
def __lt__(self, other):
if not isinstance(other, Set):
return NotImplemented
return len(self) < len(other) and self.__le__(other)
def __gt__(self, other):
if not isinstance(other, Set):
return NotImplemented
return other < self
def __ge__(self, other):
if not isinstance(other, Set):
return NotImplemented
return other <= self
def __eq__(self, other):
if not isinstance(other, Set):
return NotImplemented
return len(self) == len(other) and self.__le__(other)
def __ne__(self, other):
return not (self == other)
@classmethod
def _from_iterable(cls, it):
'''Construct an instance of the class from any iterable input.
Must override this method if the class constructor signature
does not accept an iterable for an input.
'''
return cls(it)
def __and__(self, other):
if not isinstance(other, Iterable):
return NotImplemented
return self._from_iterable(value for value in other if value in self)
def isdisjoint(self, other):
for value in other:
if value in self:
return False
return True
def __or__(self, other):
if not isinstance(other, Iterable):
return NotImplemented
chain = (e for s in (self, other) for e in s)
return self._from_iterable(chain)
def __sub__(self, other):
if not isinstance(other, Set):
if not isinstance(other, Iterable):
return NotImplemented
other = self._from_iterable(other)
return self._from_iterable(value for value in self
if value not in other)
def __xor__(self, other):
if not isinstance(other, Set):
if not isinstance(other, Iterable):
return NotImplemented
other = self._from_iterable(other)
return (self - other) | (other - self)
# Sets are not hashable by default, but subclasses can change this
__hash__ = None
def _hash(self):
"""Compute the hash value of a set.
Note that we don't define __hash__: not all sets are hashable.
But if you define a hashable set type, its __hash__ should
call this function.
        This must be compatible with __eq__.
All sets ought to compare equal if they contain the same
elements, regardless of how they are implemented, and
regardless of the order of the elements; so there's not much
freedom for __eq__ or __hash__. We match the algorithm used
by the built-in frozenset type.
"""
MAX = sys.maxint
MASK = 2 * MAX + 1
n = len(self)
h = 1927868237 * (n + 1)
h &= MASK
for x in self:
hx = hash(x)
h ^= (hx ^ (hx << 16) ^ 89869747) * 3644798167
h &= MASK
h = h * 69069 + 907133923
h &= MASK
if h > MAX:
h -= MASK + 1
if h == -1:
h = 590923713
return h
Set.register(frozenset)
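# --- Hedged illustration (editor addition, adapted from the library docs;
# not part of the original module). A minimal concrete Set: only
# __contains__, __iter__ and __len__ are supplied, and every comparison and
# arithmetic operator comes from the mixin methods defined above.
class ListBasedSet(Set):
    """Alternate set implementation favoring space over speed, which also
    works for unhashable elements."""
    def __init__(self, iterable):
        self.elements = lst = []
        for value in iterable:
            if value not in lst:
                lst.append(value)
    def __iter__(self):
        return iter(self.elements)
    def __contains__(self, value):
        return value in self.elements
    def __len__(self):
        return len(self.elements)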
class MutableSet(Set):
@abstractmethod
def add(self, value):
"""Return True if it was added, False if already there."""
raise NotImplementedError
@abstractmethod
def discard(self, value):
"""Return True if it was deleted, False if not there."""
raise NotImplementedError
def remove(self, value):
"""Remove an element. If not a member, raise a KeyError."""
if value not in self:
raise KeyError(value)
self.discard(value)
def pop(self):
"""Return the popped value. Raise KeyError if empty."""
it = iter(self)
try:
            value = it.next()
except StopIteration:
raise KeyError
self.discard(value)
return value
def clear(self):
"""This is slow (creates N new iterators!) but effective."""
try:
while True:
self.pop()
except KeyError:
pass
def __ior__(self, it):
for value in it:
self.add(value)
return self
def __iand__(self, c):
for value in self:
if value not in c:
self.discard(value)
return self
def __ixor__(self, it):
if not isinstance(it, Set):
it = self._from_iterable(it)
for value in it:
if value in self:
self.discard(value)
else:
self.add(value)
return self
def __isub__(self, it):
for value in it:
self.discard(value)
return self
MutableSet.register(set)
### MAPPINGS ###
class Mapping(Sized, Iterable, Container):
@abstractmethod
def __getitem__(self, key):
raise KeyError
def get(self, key, default=None):
try:
return self[key]
except KeyError:
return default
def __contains__(self, key):
try:
self[key]
except KeyError:
return False
else:
return True
def iterkeys(self):
return iter(self)
def itervalues(self):
for key in self:
yield self[key]
def iteritems(self):
for key in self:
yield (key, self[key])
def keys(self):
return list(self)
def items(self):
return [(key, self[key]) for key in self]
def values(self):
return [self[key] for key in self]
# Mappings are not hashable by default, but subclasses can change this
__hash__ = None
def __eq__(self, other):
return isinstance(other, Mapping) and \
dict(self.items()) == dict(other.items())
def __ne__(self, other):
return not (self == other)
class MappingView(Sized):
def __init__(self, mapping):
self._mapping = mapping
def __len__(self):
return len(self._mapping)
class KeysView(MappingView, Set):
def __contains__(self, key):
return key in self._mapping
def __iter__(self):
for key in self._mapping:
yield key
class ItemsView(MappingView, Set):
def __contains__(self, item):
key, value = item
try:
v = self._mapping[key]
except KeyError:
return False
else:
return v == value
def __iter__(self):
for key in self._mapping:
yield (key, self._mapping[key])
class ValuesView(MappingView):
def __contains__(self, value):
for key in self._mapping:
if value == self._mapping[key]:
return True
return False
def __iter__(self):
for key in self._mapping:
yield self._mapping[key]
class MutableMapping(Mapping):
@abstractmethod
def __setitem__(self, key, value):
raise KeyError
@abstractmethod
def __delitem__(self, key):
raise KeyError
__marker = object()
def pop(self, key, default=__marker):
try:
value = self[key]
except KeyError:
if default is self.__marker:
raise
return default
else:
del self[key]
return value
def popitem(self):
try:
key = next(iter(self))
except StopIteration:
raise KeyError
value = self[key]
del self[key]
return key, value
def clear(self):
try:
while True:
self.popitem()
except KeyError:
pass
def update(self, other=(), **kwds):
if isinstance(other, Mapping):
for key in other:
self[key] = other[key]
elif hasattr(other, "keys"):
for key in other.keys():
self[key] = other[key]
else:
for key, value in other:
self[key] = value
for key, value in kwds.items():
self[key] = value
def setdefault(self, key, default=None):
try:
return self[key]
except KeyError:
self[key] = default
return default
MutableMapping.register(dict)
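# --- Hedged illustration (editor addition, not part of the original
# module). A minimal concrete MutableMapping over a plain dict: the five
# methods below satisfy the abstract interface, while get/pop/setdefault/
# update/clear are all inherited from the mixins above.
class _DictBackedMapping(MutableMapping):
    def __init__(self, *args, **kwds):
        self._store = dict(*args, **kwds)
    def __getitem__(self, key):
        return self._store[key]
    def __setitem__(self, key, value):
        self._store[key] = value
    def __delitem__(self, key):
        del self._store[key]
    def __iter__(self):
        return iter(self._store)
    def __len__(self):
        return len(self._store)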
### SEQUENCES ###
class Sequence(Sized, Iterable, Container):
"""All the operations on a read-only sequence.
Concrete subclasses must override __new__ or __init__,
__getitem__, and __len__.
"""
@abstractmethod
def __getitem__(self, index):
raise IndexError
def __iter__(self):
i = 0
try:
while True:
v = self[i]
yield v
i += 1
except IndexError:
return
def __contains__(self, value):
for v in self:
if v == value:
return True
return False
def __reversed__(self):
for i in reversed(range(len(self))):
yield self[i]
def index(self, value):
for i, v in enumerate(self):
if v == value:
return i
raise ValueError
def count(self, value):
return sum(1 for v in self if v == value)
Sequence.register(tuple)
Sequence.register(basestring)
Sequence.register(buffer)
class MutableSequence(Sequence):
@abstractmethod
def __setitem__(self, index, value):
raise IndexError
@abstractmethod
def __delitem__(self, index):
raise IndexError
@abstractmethod
def insert(self, index, value):
raise IndexError
def append(self, value):
self.insert(len(self), value)
def reverse(self):
n = len(self)
for i in range(n//2):
self[i], self[n-i-1] = self[n-i-1], self[i]
def extend(self, values):
for v in values:
self.append(v)
def pop(self, index=-1):
v = self[index]
del self[index]
return v
def remove(self, value):
del self[self.index(value)]
    def __iadd__(self, values):
        self.extend(values)
        return self
MutableSequence.register(list)
| tempbottle/restcommander | play-1.2.4/python/Lib/_abcoll.py | Python | apache-2.0 | 13,666 |
#!/usr/bin/env python
# example setselection.py
import pygtk
pygtk.require('2.0')
import gtk
import time
class SetSelectionExample:
# Callback when the user toggles the selection
def selection_toggled(self, widget, window):
if widget.get_active():
self.have_selection = window.selection_owner_set("PRIMARY")
# if claiming the selection failed, we return the button to
# the out state
if not self.have_selection:
widget.set_active(False)
else:
if self.have_selection:
# Not possible to release the selection in PyGTK
# just mark that we don't have it
self.have_selection = False
return
# Called when another application claims the selection
def selection_clear(self, widget, event):
self.have_selection = False
widget.set_active(False)
return True
# Supplies the current time as the selection.
def selection_handle(self, widget, selection_data, info, time_stamp):
current_time = time.time()
timestr = time.asctime(time.localtime(current_time))
# When we return a single string, it should not be null terminated.
# That will be done for us
selection_data.set_text(timestr, len(timestr))
return
def __init__(self):
self.have_selection = False
# Create the toplevel window
window = gtk.Window(gtk.WINDOW_TOPLEVEL)
window.set_title("Set Selection")
window.set_border_width(10)
window.connect("destroy", lambda w: gtk.main_quit())
self.window = window
# Create an eventbox to hold the button since it no longer has
# a GdkWindow
eventbox = gtk.EventBox()
eventbox.show()
window.add(eventbox)
# Create a toggle button to act as the selection
selection_button = gtk.ToggleButton("Claim Selection")
eventbox.add(selection_button)
selection_button.connect("toggled", self.selection_toggled, eventbox)
eventbox.connect_object("selection_clear_event", self.selection_clear,
selection_button)
eventbox.selection_add_target("PRIMARY", "STRING", 1)
eventbox.selection_add_target("PRIMARY", "COMPOUND_TEXT", 1)
eventbox.connect("selection_get", self.selection_handle)
selection_button.show()
window.show()
def main():
gtk.main()
return 0
if __name__ == "__main__":
SetSelectionExample()
main()
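# --- Hedged usage note (editor addition) ---
# To try it: run this script under X11 with PyGTK 2.0 installed, toggle
# "Claim Selection", then paste the PRIMARY selection (middle-click) in
# another application; the current time, as served by selection_handle(),
# should be inserted.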
| certik/pyjamas | pygtkweb/demos/065-setselection.py | Python | apache-2.0 | 2,570 |
# class generated by DeVIDE::createDeVIDEModuleFromVTKObject
from module_kits.vtk_kit.mixins import SimpleVTKClassModuleBase
import vtk
class vtkRuledSurfaceFilter(SimpleVTKClassModuleBase):
def __init__(self, module_manager):
SimpleVTKClassModuleBase.__init__(
self, module_manager,
vtk.vtkRuledSurfaceFilter(), 'Processing.',
('vtkPolyData',), ('vtkPolyData',),
replaceDoc=True,
inputFunctions=None, outputFunctions=None)
| nagyistoce/devide | modules/vtk_basic/vtkRuledSurfaceFilter.py | Python | bsd-3-clause | 497 |
# Copyright 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from telemetry.internal.app import possible_app
class PossibleBrowser(possible_app.PossibleApp):
"""A browser that can be controlled.
  Call Create() to launch the browser and begin manipulating it.
"""
def __init__(self, browser_type, target_os, supports_tab_control):
super(PossibleBrowser, self).__init__(app_type=browser_type,
target_os=target_os)
self._supports_tab_control = supports_tab_control
self._credentials_path = None
def __repr__(self):
return 'PossibleBrowser(app_type=%s)' % self.app_type
@property
def browser_type(self):
return self.app_type
@property
def supports_tab_control(self):
return self._supports_tab_control
def _InitPlatformIfNeeded(self):
raise NotImplementedError()
def Create(self, finder_options):
raise NotImplementedError()
def SupportsOptions(self, browser_options):
"""Tests for extension support."""
raise NotImplementedError()
def IsRemote(self):
return False
def RunRemote(self):
pass
def UpdateExecutableIfNeeded(self):
pass
def last_modification_time(self):
return -1
def SetCredentialsPath(self, credentials_path):
self._credentials_path = credentials_path
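# --- Hedged illustration (editor addition, not part of telemetry) ---
# Concrete backends subclass PossibleBrowser and fill in the
# NotImplementedError methods; a hypothetical skeleton:
#
#   class PossibleTrivialBrowser(PossibleBrowser):
#     def __init__(self):
#       super(PossibleTrivialBrowser, self).__init__(
#           'trivial', 'linux', supports_tab_control=False)
#     def _InitPlatformIfNeeded(self):
#       pass
#     def Create(self, finder_options):
#       ...  # launch the browser and return a Browser instance
#     def SupportsOptions(self, browser_options):
#       return True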
| catapult-project/catapult-csm | telemetry/telemetry/internal/browser/possible_browser.py | Python | bsd-3-clause | 1,414 |
#!/usr/bin/env python
# Copyright 2015 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Implements a standard mechanism for Chrome Infra Python environment setup.
This library provides a central location to define Chrome Infra environment
setup. It also provides several faculties to install this environment.
Within a cooperating script, the environment can be setup by importing this
module and running its 'Install' method:
# Install Chrome-Infra environment (replaces 'sys.path').
sys.path.insert(0,
os.path.join(os.path.dirname(__file__), os.pardir, ...))
# (/path/to/build/scripts)
import common.env
common.env.Install()
When attempting to export the Chrome Infra path to external scripts, this
script can be invoked as an executable with various subcommands to emit a valid
PYTHONPATH clause.
In addition, this module has several functions to construct the path.
The goal is to deploy this module universally among Chrome-Infra scripts,
BuildBot configurations, tool invocations, and tests to ensure that they all
execute with the same centrally-defined environment.
"""
import argparse
import collections
import contextlib
import imp
import itertools
import os
import sys
import traceback
# Export for bootstrapping.
__all__ = [
'Install',
'PythonPath',
]
# Name of the environment extension file to seek.
ENV_EXTENSION_NAME = 'environment.cfg.py'
# Standard directories (based on this file's location in the <build> tree).
def path_if(*args):
if not all(args):
return None
path = os.path.abspath(os.path.join(*args))
  return path if os.path.exists(path) else None
# The path to the <build> directory in which this script resides.
Build = path_if(os.path.dirname(__file__), os.pardir, os.pardir)
# The path to the <build_internal> directory.
BuildInternal = path_if(Build, os.pardir, 'build_internal')
def SetPythonPathEnv(value):
"""Sets the system's PYTHONPATH environemnt variable.
Args:
value (str): The value to use. If this is empty/None, the system's
PYTHONPATH will be cleared.
"""
# Since we can't assign None to the environment "dictionary", we have to
# either set or delete the key depending on the original value.
if value is not None:
os.environ['PYTHONPATH'] = str(value)
else:
os.environ.pop('PYTHONPATH', None)
def Install(**kwargs):
"""Replaces the current 'sys.path' with a hermetic Chrome-Infra path.
Args:
kwargs (dict): See GetInfraPythonPath arguments.
Returns (PythonPath): The PythonPath object that was installed.
"""
infra_python_path = GetInfraPythonPath(**kwargs)
infra_python_path.Install()
return infra_python_path
def SplitPath(path):
"""Returns (list): A list of path elements.
Splits a path into path elements. For example (assuming '/' is the local
system path separator):
>>> print SplitPath('/a/b/c/d')
['/', 'a', 'b', 'c', 'd']
>>> print SplitPath('a/b/c')
  ['a', 'b', 'c']
"""
parts = []
while True:
path, component = os.path.split(path)
if not component:
if path:
parts.append(path)
break
parts.append(component)
parts.reverse()
return parts
def ExtendPath(base, root_dir):
"""Returns (PythonPath): The extended python path.
This method looks for the ENV_EXTENSION_NAME file within "root_dir". If
present, it will be loaded as a Python module and have its "Extend" method
called.
If no extension is found, the base PythonPath will be returned.
Args:
base (PythonPath): The base python path.
root_dir (str): The path to check for an extension.
"""
extension_path = os.path.join(root_dir, ENV_EXTENSION_NAME)
if not os.path.isfile(extension_path):
return base
with open(extension_path, 'r') as fd:
extension = fd.read()
extension_module = imp.new_module('env-extension')
  # Execute the environment extension.
try:
exec extension in extension_module.__dict__
extend_func = getattr(extension_module, 'Extend', None)
assert extend_func, (
"The environment extension module is missing the 'Extend()' method.")
base = extend_func(base, root_dir)
if not isinstance(base, PythonPath):
raise TypeError("Extension module returned non-PythonPath object (%s)" % (
type(base).__name__,))
except Exception:
# Re-raise the exception, but include the configuration file name.
tb = traceback.format_exc()
raise RuntimeError("Environment extension [%s] raised exception: %s" % (
extension_path, tb))
return base
def IsSystemPythonPath(path):
"""Returns (bool): If a python path is user-installed.
Paths that are known to be user-installed paths can be ignored when setting
up a hermetic Python path environment to avoid user libraries that would not
be present in other environments falsely affecting code.
This function can be updated as-needed to exclude other non-system paths
encountered on bots and in the wild.
"""
components = SplitPath(path)
for component in components:
if component in ('dist-packages', 'site-packages'):
return False
return True
class PythonPath(collections.Sequence):
"""An immutable set of Python path elements.
All paths represented in this structure are absolute. If a relative path
is passed into this structure, it will be converted to absolute based on
the current working directory (via os.path.abspath).
"""
def __init__(self, components=None):
"""Initializes a new PythonPath instance.
Args:
components (list): A list of path component strings.
"""
seen = set()
self._components = []
for component in (components or ()):
component = os.path.abspath(component)
assert isinstance(component, basestring), (
"Path component '%s' is not a string (%s)" % (
component, type(component).__name__))
if component in seen:
continue
seen.add(component)
self._components.append(component)
def __getitem__(self, value):
return self._components[value]
def __len__(self):
return len(self._components)
def __iadd__(self, other):
return self.Append(other)
def __repr__(self):
return self.pathstr
def __eq__(self, other):
assert isinstance(other, type(self))
return self._components == other._components
@classmethod
def Flatten(cls, *paths):
"""Returns (list): A single-level list containing flattened path elements.
>>> print PythonPath.Flatten('a', ['b', ['c', 'd']])
['a', 'b', 'c', 'd']
"""
result = []
for path in paths:
if not isinstance(path, basestring):
# Assume it's an iterable of paths.
result += cls.Flatten(*path)
else:
result.append(path)
return result
@classmethod
def FromPaths(cls, *paths):
"""Returns (PythonPath): A PythonPath instantiated from path elements.
Args:
paths (tuple): A tuple of path elements or iterables containing path
elements (e.g., PythonPath instances).
"""
return cls(cls.Flatten(*paths))
@classmethod
def FromPathStr(cls, pathstr):
"""Returns (PythonPath): A PythonPath instantiated from the path string.
Args:
pathstr (str): An os.pathsep()-delimited path string.
"""
return cls(pathstr.split(os.pathsep))
@property
def pathstr(self):
"""Returns (str): A path string for the instance's path elements."""
return os.pathsep.join(self)
def IsHermetic(self):
"""Returns (bool): True if this instance contains only system paths."""
return all(IsSystemPythonPath(p) for p in self)
def GetHermetic(self):
"""Returns (PythonPath): derivative PythonPath containing only system paths.
"""
return type(self).FromPaths(*(p for p in self if IsSystemPythonPath(p)))
def Append(self, *paths):
"""Returns (PythonPath): derivative PythonPath with paths added to the end.
Args:
paths (tuple): A tuple of path elements to append to the current instance.
"""
return type(self)(itertools.chain(self, self.FromPaths(*paths)))
def Override(self, *paths):
"""Returns (PythonPath): derivative PythonPath with paths prepended.
Args:
paths (tuple): A tuple of path elements to prepend to the current
instance.
"""
return self.FromPaths(*paths).Append(self)
def Install(self):
"""Overwrites Python runtime variables based on the current instance.
Performs the following operations:
- Replaces sys.path with the current instance's path.
- Replaces os.environ['PYTHONPATH'] with the current instance's path
string.
"""
sys.path = list(self)
SetPythonPathEnv(self.pathstr)
@contextlib.contextmanager
def Enter(self):
"""Context manager wrapper for Install.
On exit, the context manager will restore the original environment.
"""
orig_sys_path = sys.path[:]
orig_pythonpath = os.environ.get('PYTHONPATH')
try:
self.Install()
yield
finally:
sys.path = orig_sys_path
SetPythonPathEnv(orig_pythonpath)
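# --- Hedged usage sketch (editor addition) ---
# PythonPath instances are immutable; each operation returns a new one:
#
#   >>> p = PythonPath.FromPaths('/a', '/b')
#   >>> p = p.Append('/c')      # /a:/b:/c
#   >>> p = p.Override('/z')    # /z:/a:/b:/c
#   >>> with p.Enter():
#   ...   pass                  # sys.path / PYTHONPATH temporarily replaced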
def GetSysPythonPath(hermetic=True):
"""Returns (PythonPath): A path based on 'sys.path'.
Args:
hermetic (bool): If True, prune any non-system path.
"""
path = PythonPath.FromPaths(*sys.path)
if hermetic:
path = path.GetHermetic()
return path
def GetEnvPythonPath():
"""Returns (PythonPath): A path based on the PYTHONPATH environment variable.
"""
pythonpath = os.environ.get('PYTHONPATH')
if not pythonpath:
return PythonPath.FromPaths()
return PythonPath.FromPathStr(pythonpath)
def GetMasterPythonPath(master_dir):
"""Returns (PythonPath): A path including a BuildBot master's directory.
Args:
master_dir (str): The BuildBot master root directory.
"""
return PythonPath.FromPaths(master_dir)
def GetBuildPythonPath():
"""Returns (PythonPath): The Chrome Infra build path."""
build_path = PythonPath.FromPaths()
for extension_dir in (
Build,
BuildInternal,
):
if extension_dir:
build_path = ExtendPath(build_path, extension_dir)
return build_path
def GetInfraPythonPath(hermetic=True, master_dir=None):
"""Returns (PythonPath): The full working Chrome Infra utility path.
This path is consistent for master, slave, and tool usage. It includes (in
this order):
- Any environment PYTHONPATH overrides.
- If 'master_dir' is supplied, the master's python path component.
- The Chrome Infra build path.
- The system python path.
Args:
hermetic (bool): True, prune any non-system path from the system path.
master_dir (str): If not None, include a master path component.
"""
path = GetEnvPythonPath()
if master_dir:
path += GetMasterPythonPath(master_dir)
path += GetBuildPythonPath()
path += GetSysPythonPath(hermetic=hermetic)
return path
def _InfraPathFromArgs(args):
"""Returns (PythonPath): A PythonPath populated from command-line arguments.
Args:
args (argparse.Namespace): The command-line arguments constructed by 'main'.
"""
return GetInfraPythonPath(
master_dir=args.master_dir,
)
def _Command_Echo(args, path):
"""Returns (int): Return code.
Command function for the 'echo' subcommand. Outputs the path string for
'path'.
Args:
args (argparse.Namespace): The command-line arguments constructed by 'main'.
path (PythonPath): The python path to use.
"""
args.output.write(path.pathstr)
return 0
def _Command_Print(args, path):
"""Returns (int): Return code.
Command function for the 'print' subcommand. Outputs each path component in
path on a separate line.
Args:
args (argparse.Namespace): The command-line arguments constructed by 'main'.
path (PythonPath): The python path to use.
"""
for component in path:
print >>args.output, component
return 0
def main():
"""Main execution function."""
parser = argparse.ArgumentParser()
parser.add_argument('-M', '--master_dir',
help="Augment the path with the master's directory.")
parser.add_argument('-o', '--output', metavar='PATH',
type=argparse.FileType('w'), default='-',
help="File to output to (use '-' for STDOUT).")
subparsers = parser.add_subparsers()
# 'echo'
subparser = subparsers.add_parser('echo')
subparser.set_defaults(func=_Command_Echo)
# 'print'
subparser = subparsers.add_parser('print')
subparser.set_defaults(func=_Command_Print)
# Parse
args = parser.parse_args()
# Execute our subcommand function, which will return the exit code.
path = _InfraPathFromArgs(args)
return args.func(args, path)
if __name__ == '__main__':
sys.exit(main())
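# --- Hedged usage sketch (editor addition) ---
# The module doubles as a CLI for exporting the computed path; both
# subcommands are defined above (exact invocation path is assumed):
#
#   python common/env.py echo    # one os.pathsep-joined PYTHONPATH string
#   python common/env.py print   # one path component per line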
| hgl888/chromium-crosswalk | infra/scripts/legacy/scripts/common/env.py | Python | bsd-3-clause | 12,777 |
#!/usr/bin/env python
from translate.convert import xliff2po
from translate.misc import wStringIO
from translate.storage.test_base import headerless_len, first_translatable
class TestXLIFF2PO:
xliffskeleton = '''<?xml version="1.0" ?>
<xliff version="1.1" xmlns="urn:oasis:names:tc:xliff:document:1.1">
<file original="filename.po" source-language="en-US" datatype="po">
<body>
%s
</body>
</file>
</xliff>'''
def xliff2po(self, xliffsource):
"""helper that converts xliff source to po source without requiring files"""
inputfile = wStringIO.StringIO(xliffsource)
convertor = xliff2po.xliff2po()
outputpo = convertor.convertstore(inputfile)
print "The generated po:"
print type(outputpo)
print str(outputpo)
return outputpo
def test_minimal(self):
minixlf = self.xliffskeleton % '''<trans-unit>
<source>red</source>
<target>rooi</target>
</trans-unit>'''
pofile = self.xliff2po(minixlf)
assert headerless_len(pofile.units) == 1
assert pofile.translate("red") == "rooi"
assert pofile.translate("bla") is None
def test_basic(self):
headertext = '''Project-Id-Version: program 2.1-branch
Report-Msgid-Bugs-To:
POT-Creation-Date: 2006-01-09 07:15+0100
PO-Revision-Date: 2004-03-30 17:02+0200
Last-Translator: Zuza Software Foundation <xxx@translate.org.za>
Language-Team: Afrikaans <translate-discuss-xxx@lists.sourceforge.net>
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit'''
minixlf = (self.xliffskeleton % '''<trans-unit id="1" restype="x-gettext-domain-header" approved="no" xml:space="preserve">
<source>%s</source>
<target>%s</target>
<note from="po-translator">Zulu translation of program ABC</note>
</trans-unit>
<trans-unit>
<source>gras</source>
<target>utshani</target>
</trans-unit>''') % (headertext, headertext)
print minixlf
pofile = self.xliff2po(minixlf)
assert pofile.translate("gras") == "utshani"
assert pofile.translate("bla") is None
potext = str(pofile)
assert potext.index('# Zulu translation of program ABC') == 0
assert potext.index('msgid "gras"\n')
assert potext.index('msgstr "utshani"\n')
assert potext.index('MIME-Version: 1.0\\n')
def test_translatorcomments(self):
"""Tests translator comments"""
minixlf = self.xliffskeleton % '''<trans-unit>
<source>nonsense</source>
<target>matlhapolosa</target>
<context-group name="po-entry" purpose="information">
<context context-type="x-po-trancomment">Couldn't do
it</context>
</context-group>
<note from="po-translator">Couldn't do
it</note>
</trans-unit>'''
pofile = self.xliff2po(minixlf)
assert pofile.translate("nonsense") == "matlhapolosa"
assert pofile.translate("bla") is None
unit = first_translatable(pofile)
assert unit.getnotes("translator") == "Couldn't do it"
potext = str(pofile)
assert potext.index("# Couldn't do it\n") >= 0
minixlf = self.xliffskeleton % '''<trans-unit xml:space="preserve">
<source>nonsense</source>
<target>matlhapolosa</target>
<context-group name="po-entry" purpose="information">
<context context-type="x-po-trancomment">Couldn't do
it</context>
</context-group>
<note from="po-translator">Couldn't do
it</note>
</trans-unit>'''
pofile = self.xliff2po(minixlf)
assert pofile.translate("nonsense") == "matlhapolosa"
assert pofile.translate("bla") is None
unit = first_translatable(pofile)
assert unit.getnotes("translator") == "Couldn't do\nit"
potext = str(pofile)
assert potext.index("# Couldn't do\n# it\n") >= 0
def test_autocomment(self):
"""Tests automatic comments"""
minixlf = self.xliffskeleton % '''<trans-unit>
<source>nonsense</source>
<target>matlhapolosa</target>
<context-group name="po-entry" purpose="information">
<context context-type="x-po-autocomment">Note that this is
garbage</context>
</context-group>
<note from="developer">Note that this is
garbage</note>
</trans-unit>'''
pofile = self.xliff2po(minixlf)
assert pofile.translate("nonsense") == "matlhapolosa"
assert pofile.translate("bla") is None
unit = first_translatable(pofile)
assert unit.getnotes("developer") == "Note that this is garbage"
potext = str(pofile)
assert potext.index("#. Note that this is garbage\n") >= 0
minixlf = self.xliffskeleton % '''<trans-unit xml:space="preserve">
<source>nonsense</source>
<target>matlhapolosa</target>
<context-group name="po-entry" purpose="information">
<context context-type="x-po-autocomment">Note that this is
garbage</context>
</context-group>
<note from="developer">Note that this is
garbage</note>
</trans-unit>'''
pofile = self.xliff2po(minixlf)
assert pofile.translate("nonsense") == "matlhapolosa"
assert pofile.translate("bla") is None
unit = first_translatable(pofile)
assert unit.getnotes("developer") == "Note that this is\ngarbage"
potext = str(pofile)
assert potext.index("#. Note that this is\n#. garbage\n") >= 0
def test_locations(self):
"""Tests location comments (#:)"""
minixlf = self.xliffskeleton % '''<trans-unit id="1">
<source>nonsense</source>
<target>matlhapolosa</target>
<context-group name="po-reference" purpose="location">
<context context-type="sourcefile">example.c</context>
<context context-type="linenumber">123</context>
</context-group>
<context-group name="po-reference" purpose="location">
<context context-type="sourcefile">place.py</context>
</context-group>
</trans-unit>'''
pofile = self.xliff2po(minixlf)
assert pofile.translate("nonsense") == "matlhapolosa"
assert pofile.translate("bla") is None
unit = first_translatable(pofile)
locations = unit.getlocations()
assert len(locations) == 2
assert "example.c:123" in locations
assert "place.py" in locations
def test_fuzzy(self):
"""Tests fuzzyness"""
minixlf = self.xliffskeleton % '''<trans-unit approved="no">
<source>book</source>
</trans-unit>
<trans-unit id="2" approved="yes">
<source>nonsense</source>
<target>matlhapolosa</target>
</trans-unit>
<trans-unit id="2" approved="no">
<source>verb</source>
<target state="needs-review-translation">lediri</target>
</trans-unit>'''
pofile = self.xliff2po(minixlf)
assert pofile.translate("nonsense") == "matlhapolosa"
assert pofile.translate("verb") == "lediri"
assert pofile.translate("book") is None
assert pofile.translate("bla") is None
assert headerless_len(pofile.units) == 3
#TODO: decide if this one should be fuzzy:
#assert pofile.units[0].isfuzzy()
assert not pofile.units[2].isfuzzy()
assert pofile.units[3].isfuzzy()
def test_plurals(self):
"""Tests fuzzyness"""
minixlf = self.xliffskeleton % '''<group id="1" restype="x-gettext-plurals">
<trans-unit id="1[0]" xml:space="preserve">
<source>cow</source>
<target>inkomo</target>
</trans-unit>
<trans-unit id="1[1]" xml:space="preserve">
<source>cows</source>
<target>iinkomo</target>
</trans-unit>
</group>'''
pofile = self.xliff2po(minixlf)
print str(pofile)
potext = str(pofile)
assert headerless_len(pofile.units) == 1
assert potext.index('msgid_plural "cows"')
assert potext.index('msgstr[0] "inkomo"')
assert potext.index('msgstr[1] "iinkomo"')
class TestBasicXLIFF2PO(TestXLIFF2PO):
"""This tests a basic XLIFF file without xmlns attribute"""
xliffskeleton = '''<?xml version="1.0" ?>
<xliff version="1.1">
<file original="filename.po" source-language="en-US" datatype="po">
<body>
%s
</body>
</file>
</xliff>'''
| dbbhattacharya/kitsune | vendor/packages/translate-toolkit/translate/convert/test_xliff2po.py | Python | bsd-3-clause | 8,448 |
######################## BEGIN LICENSE BLOCK ########################
# The Original Code is mozilla.org code.
#
# The Initial Developer of the Original Code is
# Netscape Communications Corporation.
# Portions created by the Initial Developer are Copyright (C) 1998
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
# Mark Pilgrim - port to Python
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301 USA
######################### END LICENSE BLOCK #########################
from .mbcharsetprober import MultiByteCharSetProber
from .codingstatemachine import CodingStateMachine
from .chardistribution import EUCTWDistributionAnalysis
from .mbcssm import EUCTW_SM_MODEL
class EUCTWProber(MultiByteCharSetProber):
def __init__(self):
super(EUCTWProber, self).__init__()
self.coding_sm = CodingStateMachine(EUCTW_SM_MODEL)
self.distribution_analyzer = EUCTWDistributionAnalysis()
self.reset()
@property
def charset_name(self):
return "EUC-TW"
@property
def language(self):
return "Taiwan"
| ncos/lisa | src/lisa_drive/scripts/venv/lib/python3.5/site-packages/pip-10.0.1-py3.5.egg/pip/_vendor/chardet/euctwprober.py | Python | mit | 1,793 |
"""The tests for the heat control thermostat."""
import unittest
from homeassistant.bootstrap import _setup_component
from homeassistant.const import (
ATTR_UNIT_OF_MEASUREMENT,
SERVICE_TURN_OFF,
SERVICE_TURN_ON,
STATE_ON,
STATE_OFF,
TEMP_CELSIUS,
)
from homeassistant.components import thermostat
from tests.common import get_test_home_assistant
ENTITY = 'thermostat.test'
ENT_SENSOR = 'sensor.test'
ENT_SWITCH = 'switch.test'
MIN_TEMP = 3.0
MAX_TEMP = 65.0
TARGET_TEMP = 42.0
class TestSetupThermostatHeatControl(unittest.TestCase):
"""Test the Heat Control thermostat with custom config."""
def setUp(self): # pylint: disable=invalid-name
"""Setup things to be run when tests are started."""
self.hass = get_test_home_assistant()
def tearDown(self): # pylint: disable=invalid-name
"""Stop down everything that was started."""
self.hass.stop()
def test_setup_missing_conf(self):
"""Test set up heat_control with missing config values."""
config = {
'name': 'test',
'target_sensor': ENT_SENSOR
}
self.assertFalse(_setup_component(self.hass, 'thermostat', {
'thermostat': config}))
def test_valid_conf(self):
"""Test set up heat_control with valid config values."""
self.assertTrue(_setup_component(self.hass, 'thermostat',
{'thermostat': {
'platform': 'heat_control',
'name': 'test',
'heater': ENT_SWITCH,
'target_sensor': ENT_SENSOR}}))
def test_setup_with_sensor(self):
"""Test set up heat_control with sensor to trigger update at init."""
self.hass.states.set(ENT_SENSOR, 22.0, {
ATTR_UNIT_OF_MEASUREMENT: TEMP_CELSIUS
})
thermostat.setup(self.hass, {'thermostat': {
'platform': 'heat_control',
'name': 'test',
'heater': ENT_SWITCH,
'target_sensor': ENT_SENSOR
}})
state = self.hass.states.get(ENTITY)
self.assertEqual(
TEMP_CELSIUS, state.attributes.get('unit_of_measurement'))
self.assertEqual(22.0, state.attributes.get('current_temperature'))
class TestThermostatHeatControl(unittest.TestCase):
"""Test the Heat Control thermostat."""
def setUp(self): # pylint: disable=invalid-name
"""Setup things to be run when tests are started."""
self.hass = get_test_home_assistant()
self.hass.config.temperature_unit = TEMP_CELSIUS
thermostat.setup(self.hass, {'thermostat': {
'platform': 'heat_control',
'name': 'test',
'heater': ENT_SWITCH,
'target_sensor': ENT_SENSOR
}})
def tearDown(self): # pylint: disable=invalid-name
"""Stop down everything that was started."""
self.hass.stop()
def test_setup_defaults_to_unknown(self):
"""Test the setting of defaults to unknown."""
self.assertEqual('unknown', self.hass.states.get(ENTITY).state)
def test_default_setup_params(self):
"""Test the setup with default parameters."""
state = self.hass.states.get(ENTITY)
self.assertEqual(7, state.attributes.get('min_temp'))
self.assertEqual(35, state.attributes.get('max_temp'))
self.assertEqual(None, state.attributes.get('temperature'))
def test_custom_setup_params(self):
"""Test the setup with custom parameters."""
thermostat.setup(self.hass, {'thermostat': {
'platform': 'heat_control',
'name': 'test',
'heater': ENT_SWITCH,
'target_sensor': ENT_SENSOR,
'min_temp': MIN_TEMP,
'max_temp': MAX_TEMP,
'target_temp': TARGET_TEMP
}})
state = self.hass.states.get(ENTITY)
self.assertEqual(MIN_TEMP, state.attributes.get('min_temp'))
self.assertEqual(MAX_TEMP, state.attributes.get('max_temp'))
self.assertEqual(TARGET_TEMP, state.attributes.get('temperature'))
self.assertEqual(str(TARGET_TEMP), self.hass.states.get(ENTITY).state)
def test_set_target_temp(self):
"""Test the setting of the target temperature."""
thermostat.set_temperature(self.hass, 30)
self.hass.pool.block_till_done()
self.assertEqual('30.0', self.hass.states.get(ENTITY).state)
def test_sensor_bad_unit(self):
"""Test sensor that have bad unit."""
self._setup_sensor(22.0, unit='bad_unit')
self.hass.pool.block_till_done()
state = self.hass.states.get(ENTITY)
self.assertEqual(None, state.attributes.get('unit_of_measurement'))
self.assertEqual(None, state.attributes.get('current_temperature'))
def test_sensor_bad_value(self):
"""Test sensor that have None as state."""
self._setup_sensor(None)
self.hass.pool.block_till_done()
state = self.hass.states.get(ENTITY)
self.assertEqual(None, state.attributes.get('unit_of_measurement'))
self.assertEqual(None, state.attributes.get('current_temperature'))
def test_set_target_temp_heater_on(self):
"""Test if target temperature turn heater on."""
self._setup_switch(False)
self._setup_sensor(25)
self.hass.pool.block_till_done()
thermostat.set_temperature(self.hass, 30)
self.hass.pool.block_till_done()
self.assertEqual(1, len(self.calls))
call = self.calls[0]
self.assertEqual('switch', call.domain)
self.assertEqual(SERVICE_TURN_ON, call.service)
self.assertEqual(ENT_SWITCH, call.data['entity_id'])
def test_set_target_temp_heater_off(self):
"""Test if target temperature turn heater off."""
self._setup_switch(True)
self._setup_sensor(30)
self.hass.pool.block_till_done()
thermostat.set_temperature(self.hass, 25)
self.hass.pool.block_till_done()
self.assertEqual(1, len(self.calls))
call = self.calls[0]
self.assertEqual('switch', call.domain)
self.assertEqual(SERVICE_TURN_OFF, call.service)
self.assertEqual(ENT_SWITCH, call.data['entity_id'])
def test_set_temp_change_heater_on(self):
"""Test if temperature change turn heater on."""
self._setup_switch(False)
thermostat.set_temperature(self.hass, 30)
self.hass.pool.block_till_done()
self._setup_sensor(25)
self.hass.pool.block_till_done()
self.assertEqual(1, len(self.calls))
call = self.calls[0]
self.assertEqual('switch', call.domain)
self.assertEqual(SERVICE_TURN_ON, call.service)
self.assertEqual(ENT_SWITCH, call.data['entity_id'])
def test_temp_change_heater_off(self):
"""Test if temperature change turn heater off."""
self._setup_switch(True)
thermostat.set_temperature(self.hass, 25)
self.hass.pool.block_till_done()
self._setup_sensor(30)
self.hass.pool.block_till_done()
self.assertEqual(1, len(self.calls))
call = self.calls[0]
self.assertEqual('switch', call.domain)
self.assertEqual(SERVICE_TURN_OFF, call.service)
self.assertEqual(ENT_SWITCH, call.data['entity_id'])
def _setup_sensor(self, temp, unit=TEMP_CELSIUS):
"""Setup the test sensor."""
self.hass.states.set(ENT_SENSOR, temp, {
ATTR_UNIT_OF_MEASUREMENT: unit
})
def _setup_switch(self, is_on):
"""Setup the test switch."""
self.hass.states.set(ENT_SWITCH, STATE_ON if is_on else STATE_OFF)
self.calls = []
def log_call(call):
"""Log service calls."""
self.calls.append(call)
self.hass.services.register('switch', SERVICE_TURN_ON, log_call)
self.hass.services.register('switch', SERVICE_TURN_OFF, log_call)
| deisi/home-assistant | tests/components/thermostat/test_heat_control.py | Python | mit | 7,976 |
#!/usr/bin/python
# coding=utf-8
################################################################################
from test import CollectorTestCase
from test import get_collector_config
from test import unittest
from mock import Mock
from mock import patch
try:
from cStringIO import StringIO
StringIO # workaround for pyflakes issue #13
except ImportError:
from StringIO import StringIO
from diamond.collector import Collector
from filestat import FilestatCollector
################################################################################
class TestFilestatCollector(CollectorTestCase):
def setUp(self):
config = get_collector_config('FilestatCollector', {
'interval': 10
})
self.collector = FilestatCollector(config, None)
def test_import(self):
self.assertTrue(FilestatCollector)
@patch('__builtin__.open')
@patch('os.access', Mock(return_value=True))
@patch.object(Collector, 'publish')
def test_should_open_proc_sys_fs_file_nr(self, publish_mock, open_mock):
open_mock.return_value = StringIO('')
self.collector.collect()
open_mock.assert_called_once_with('/proc/sys/fs/file-nr')
@patch.object(Collector, 'publish')
def test_should_work_with_real_data(self, publish_mock):
FilestatCollector.PROC = self.getFixturePath('proc_sys_fs_file-nr')
self.collector.collect()
metrics = {
'assigned': 576,
'unused': 0,
'max': 4835852
}
self.setDocExample(collector=self.collector.__class__.__name__,
metrics=metrics,
defaultpath=self.collector.config['path'])
self.assertPublishedMany(publish_mock, metrics)
################################################################################
if __name__ == "__main__":
unittest.main()
| datafiniti/Diamond | src/collectors/filestat/test/testfilestat.py | Python | mit | 1,903 |
import chainer
from chainer import backend
from chainer import utils
def sign(x):
"""Elementwise sign function.
For a given input :math:`x`, this function returns :math:`sgn(x)`
defined as
.. math::
sgn(x) = \\left \\{ \\begin{array}{cc}
-1 & {\\rm if~x < 0} \\\\
0 & {\\rm if~x = 0} \\\\
1 & {\\rm if~x > 0} \\\\
\\end{array} \\right.
.. note::
The gradient of this function is ``None`` everywhere and therefore
unchains the computational graph.
Args:
x (:class:`~chainer.Variable` or :ref:`ndarray`):
Input variable for which the sign is computed.
Returns:
~chainer.Variable: Output variable.
"""
if isinstance(x, chainer.variable.Variable):
x = x.array
xp = backend.get_array_module(x)
return chainer.as_variable(utils.force_array(xp.sign(x)))
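
# Minimal usage sketch (added for illustration; ordinarily this function is
# reached through chainer.functions). NumPy is assumed for the input array.
#
#     import numpy as np
#
#     x = np.array([-2.5, 0.0, 3.0], dtype=np.float32)
#     y = sign(x)      # a chainer.Variable; the graph is unchained here
#     print(y.array)   # -> [-1.  0.  1.]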
| okuta/chainer | chainer/functions/math/sign.py | Python | mit | 893 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Manager of RPC calls from plugins.
"""
from golismero.api.config import Config
__license__ = """
GoLismero 2.0 - The web knife - Copyright (C) 2011-2014
Golismero project site: https://github.com/golismero
Golismero project mail: contact@golismero-project.com
This program is free software; you can redistribute it and/or
modify it under the terms of the GNU General Public License
as published by the Free Software Foundation; either version 2
of the License, or (at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
"""
__all__ = ["RPCManager"]
from ..common import pickle
from ..messaging.codes import MessageCode, MSG_RPC_CODES
from ..messaging.manager import MessageManager
from functools import partial
from threading import Thread
import sys
import traceback
#------------------------------------------------------------------------------
# Decorators to automatically register RPC implementors at import time.
# Global map of RPC codes to implementors.
# dict( int -> tuple(callable, bool) )
rpcMap = {}
def implementor(rpc_code, blocking=False):
"""
RPC implementation function.
"""
return partial(_add_implementor, rpc_code, blocking)
def _add_implementor(rpc_code, blocking, fn):
# Validate the argument types.
if type(rpc_code) is not int:
raise TypeError("Expected int, got %r instead" % type(rpc_code))
if type(blocking) is not bool:
raise TypeError("Expected bool, got %r instead" % type(blocking))
if not callable(fn):
raise TypeError("Expected callable, got %r instead" % type(fn))
# Validate the RPC code.
if rpc_code in rpcMap:
try:
msg = "Duplicated RPC implementors for code %d: %s and %s"
msg %= (rpc_code, rpcMap[rpc_code][0].__name__, fn.__name__)
except Exception:
msg = "Duplicated RPC implementors for code: %d" % rpc_code
raise SyntaxError(msg)
# TODO: use introspection to validate the function signature
# Register the implementor.
rpcMap[rpc_code] = (fn, blocking)
# Return the implementor. No wrapping is needed! :)
return fn
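
# Example registration (illustration only, not part of the original file).
# The RPC code and the Orchestrator call below are hypothetical; implementors
# always receive the Orchestrator and the audit name before the caller's own
# arguments.
#
#     @implementor(MessageCode.MSG_RPC_AUDIT_COUNT)   # hypothetical code
#     def rpc_audit_count(orchestrator, audit_name):
#         return orchestrator.auditManager.get_audit_count()  # hypothetical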
#------------------------------------------------------------------------------
# Implementor for the special MSG_RPC_BULK code for bulk RPC calls.
@implementor(MessageCode.MSG_RPC_BULK)
def rpc_bulk(orchestrator, audit_name, rpc_code, *arguments):
# Get the implementor for the RPC code.
# Raise NotImplementedError if it's not defined.
try:
method, blocking = rpcMap[rpc_code]
except KeyError:
raise NotImplementedError("RPC code not implemented: %r" % rpc_code)
# This can't be done with blocking implementors!
if blocking:
raise NotImplementedError(
"Cannot run blocking RPC calls in bulk. Code: %r" % rpc_code)
# Prepare a partial function call to the implementor.
caller = partial(method, orchestrator, audit_name)
# Use the built-in map() function to issue all the calls.
# This ensures we support the exact same interface and functionality.
return map(caller, *arguments)
#------------------------------------------------------------------------------
# Ensures the message is received by the Orchestrator.
@implementor(MessageCode.MSG_RPC_SEND_MESSAGE)
def rpc_send_message(orchestrator, audit_name, message):
# Enqueue the ACK message.
orchestrator.enqueue_msg(message)
#------------------------------------------------------------------------------
class RPCManager (object):
"""
Executes remote procedure calls from plugins.
"""
#--------------------------------------------------------------------------
def __init__(self, orchestrator):
"""
:param orchestrator: Orchestrator instance.
:type orchestrator: Orchestrator
"""
# Keep a reference to the Orchestrator.
self.__orchestrator = orchestrator
# Keep a reference to the global RPC map (it's faster this way).
self.__rpcMap = rpcMap
# Check all RPC messages have been mapped at this point.
missing = MSG_RPC_CODES.difference(self.__rpcMap.keys())
if missing:
msg = "Missing RPC implementors for codes: %s"
msg %= ", ".join(str(x) for x in sorted(missing))
raise SyntaxError(msg)
#--------------------------------------------------------------------------
@property
def orchestrator(self):
"""
:returns: Orchestrator instance.
:rtype: Orchestrator
"""
return self.__orchestrator
#--------------------------------------------------------------------------
def execute_rpc(self, audit_name, rpc_code, response_queue, args, kwargs):
"""
Honor a remote procedure call request from a plugin.
:param audit_name: Name of the audit requesting the call.
:type audit_name: str
:param rpc_code: RPC code.
:type rpc_code: int
:param response_queue: Response queue identity.
:type response_queue: str
:param args: Positional arguments to the call.
:type args: tuple
:param kwargs: Keyword arguments to the call.
:type kwargs: dict
"""
try:
# Get the implementor for the RPC code.
# Raise NotImplementedError if it's not defined.
try:
target, blocking = self.__rpcMap[rpc_code]
except KeyError:
raise NotImplementedError(
"RPC code not implemented: %r" % rpc_code)
# If it's a blocking call...
if blocking:
# Run the implementor in a new thread.
thread = Thread(
target = self._execute_rpc_implementor_background,
args = (
Config._context,
audit_name,
target,
response_queue,
args, kwargs),
)
thread.daemon = True
thread.start()
# If it's a non-blocking call...
else:
# Call the implementor directly.
self.execute_rpc_implementor(
audit_name, target, response_queue, args, kwargs)
# Catch exceptions and send them back.
except Exception:
if response_queue:
error = self.prepare_exception(*sys.exc_info())
try:
self.orchestrator.messageManager.send(
response_queue, (False, error))
except IOError:
import warnings
warnings.warn("RPC caller died!")
#--------------------------------------------------------------------------
def _execute_rpc_implementor_background(self, context, audit_name, target,
response_queue, args, kwargs):
"""
Honor a remote procedure call request from a plugin,
from a background thread. Must only be used as the entry
point for said background thread!
:param context: Plugin execution context.
:type context: PluginContext
:param audit_name: Name of the audit requesting the call.
:type audit_name: str
:param target: RPC implementor function.
:type target: callable
:param response_queue: Response queue identity.
:type response_queue: str
:param args: Positional arguments to the call.
:type args: tuple
:param kwargs: Keyword arguments to the call.
:type kwargs: dict
"""
Config._context = context
self.execute_rpc_implementor(
audit_name, target, response_queue, args, kwargs)
#--------------------------------------------------------------------------
def execute_rpc_implementor(self, audit_name, target, response_queue,
args, kwargs):
"""
Honor a remote procedure call request from a plugin.
:param audit_name: Name of the audit requesting the call.
:type audit_name: str
:param target: RPC implementor function.
:type target: callable
:param response_queue: Response queue identity.
:type response_queue: str
:param args: Positional arguments to the call.
:type args: tuple
:param kwargs: Keyword arguments to the call.
:type kwargs: dict
"""
try:
# Call the implementor and get the response.
response = target(self.orchestrator, audit_name, *args, **kwargs)
success = True
# Catch exceptions and prepare them for sending.
except Exception:
if response_queue:
response = self.prepare_exception(*sys.exc_info())
success = False
# If the call was synchronous,
# send the response/error back to the plugin.
if response_queue:
self.orchestrator.messageManager.send(
response_queue, (success, response))
#--------------------------------------------------------------------------
@staticmethod
def prepare_exception(exc_type, exc_value, exc_traceback):
"""
Prepare an exception for sending back to the plugins.
:param exc_type: Exception type.
:type exc_type: class
:param exc_value: Exception value.
        :type exc_value: object

        :param exc_traceback: Exception traceback.
        :type exc_traceback: traceback
:returns: Exception type, exception value
and formatted traceback. The exception value may be formatted too
and the exception type replaced by Exception if it's not possible
to serialize it for sending.
:rtype: tuple(class, object, str)
"""
try:
pickle.dumps(exc_value, -1)
except Exception:
exc_value = traceback.format_exception_only(exc_type, exc_value)
try:
pickle.dumps(exc_type, -1)
except Exception:
exc_type = Exception
exc_traceback = traceback.extract_tb(exc_traceback)
return exc_type, exc_value, exc_traceback
| golismero/golismero | golismero/managers/rpcmanager.py | Python | gpl-2.0 | 10,792 |
""" Encoding Aliases Support
This module is used by the encodings package search function to
map encodings names to module names.
Note that the search function converts the encoding names to lower
case and replaces hyphens with underscores *before* performing the
lookup.
"""
aliases = {
# Latin-1
'latin': 'latin_1',
'latin1': 'latin_1',
# UTF-7
'utf7': 'utf_7',
'u7': 'utf_7',
# UTF-8
'utf': 'utf_8',
'utf8': 'utf_8',
'u8': 'utf_8',
'utf8@ucs2': 'utf_8',
'utf8@ucs4': 'utf_8',
# UTF-16
'utf16': 'utf_16',
'u16': 'utf_16',
'utf_16be': 'utf_16_be',
'utf_16le': 'utf_16_le',
'unicodebigunmarked': 'utf_16_be',
'unicodelittleunmarked': 'utf_16_le',
# ASCII
'us_ascii': 'ascii',
'ansi_x3.4_1968': 'ascii', # used on Linux
'ansi_x3_4_1968': 'ascii', # used on BSD?
'646': 'ascii', # used on Solaris
# EBCDIC
'ebcdic_cp_us': 'cp037',
'ibm039': 'cp037',
'ibm1140': 'cp1140',
# ISO
'8859': 'latin_1',
'iso8859': 'latin_1',
'iso8859_1': 'latin_1',
'iso_8859_1': 'latin_1',
'iso_8859_10': 'iso8859_10',
'iso_8859_13': 'iso8859_13',
'iso_8859_14': 'iso8859_14',
'iso_8859_15': 'iso8859_15',
'iso_8859_2': 'iso8859_2',
'iso_8859_3': 'iso8859_3',
'iso_8859_4': 'iso8859_4',
'iso_8859_5': 'iso8859_5',
'iso_8859_6': 'iso8859_6',
'iso_8859_7': 'iso8859_7',
'iso_8859_8': 'iso8859_8',
'iso_8859_9': 'iso8859_9',
# Mac
'maclatin2': 'mac_latin2',
'maccentraleurope': 'mac_latin2',
'maccyrillic': 'mac_cyrillic',
'macgreek': 'mac_greek',
'maciceland': 'mac_iceland',
'macroman': 'mac_roman',
'macturkish': 'mac_turkish',
# Windows
'windows_1251': 'cp1251',
'windows_1252': 'cp1252',
'windows_1254': 'cp1254',
'windows_1255': 'cp1255',
'windows_1256': 'cp1256',
'windows_1257': 'cp1257',
'windows_1258': 'cp1258',
# MBCS
'dbcs': 'mbcs',
# Code pages
'437': 'cp437',
# CJK
#
# The codecs for these encodings are not distributed with the
# Python core, but are included here for reference, since the
# locale module relies on having these aliases available.
#
'jis_7': 'jis_7',
'iso_2022_jp': 'jis_7',
'ujis': 'euc_jp',
'ajec': 'euc_jp',
'eucjp': 'euc_jp',
'tis260': 'tactis',
'sjis': 'shift_jis',
# Content transfer/compression encodings
'rot13': 'rot_13',
'base64': 'base64_codec',
'base_64': 'base64_codec',
'zlib': 'zlib_codec',
'zip': 'zlib_codec',
'hex': 'hex_codec',
'uu': 'uu_codec',
'quopri': 'quopri_codec',
'quotedprintable': 'quopri_codec',
'quoted_printable': 'quopri_codec',
}
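
# Example of the normalization the encodings search function applies before
# consulting this table (illustrative sketch, not part of the module):
#
#     def _normalize(encoding):
#         return encoding.lower().replace('-', '_')
#
#     aliases.get(_normalize('ISO-8859-1'))   # -> 'latin_1'
#     aliases.get(_normalize('US-ASCII'))     # -> 'ascii'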
| remybaranx/qtaste | tools/jython/lib/Lib/encodings/aliases.py | Python | gpl-3.0 | 2,790 |
# Copyright 2013 The Servo Project Developers. See the COPYRIGHT
# file at the top-level directory of this distribution.
#
# Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
# http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
# <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
# option. This file may not be copied, modified, or distributed
# except according to those terms.
from __future__ import absolute_import, print_function, unicode_literals
import base64
import json
import os
import os.path as path
import re
import shutil
import sys
import urllib2
from mach.decorators import (
CommandArgument,
CommandProvider,
Command,
)
import servo.bootstrap as bootstrap
from servo.command_base import CommandBase, BIN_SUFFIX
from servo.util import download_bytes, download_file, extract, host_triple
@CommandProvider
class MachCommands(CommandBase):
@Command('env',
description='Print environment setup commands',
category='bootstrap')
def env(self):
env = self.build_env()
print("export PATH=%s" % env["PATH"])
if sys.platform == "darwin":
print("export DYLD_LIBRARY_PATH=%s" % env["DYLD_LIBRARY_PATH"])
else:
print("export LD_LIBRARY_PATH=%s" % env["LD_LIBRARY_PATH"])
@Command('bootstrap',
description='Install required packages for building.',
category='bootstrap')
@CommandArgument('--force', '-f',
action='store_true',
                     help='Bootstrap without confirmation')
def bootstrap(self, force=False):
return bootstrap.bootstrap(self.context, force=force)
@Command('bootstrap-rust',
description='Download the Rust compiler',
category='bootstrap')
@CommandArgument('--force', '-f',
action='store_true',
help='Force download even if a copy already exists')
@CommandArgument('--target',
action='append',
default=[],
help='Download rust stdlib for specified target')
@CommandArgument('--stable',
action='store_true',
help='Use stable rustc version')
def bootstrap_rustc(self, force=False, target=[], stable=False):
self.set_use_stable_rust(stable)
version = self.rust_version()
rust_path = self.rust_path()
rust_dir = path.join(self.context.sharedir, "rust", rust_path)
install_dir = path.join(self.context.sharedir, "rust", version)
if not self.config["build"]["llvm-assertions"]:
install_dir += "-alt"
if not force and path.exists(path.join(rust_dir, "rustc", "bin", "rustc" + BIN_SUFFIX)):
print("Rust compiler already downloaded.", end=" ")
print("Use |bootstrap-rust --force| to download again.")
else:
if path.isdir(rust_dir):
shutil.rmtree(rust_dir)
os.makedirs(rust_dir)
# The nightly Rust compiler is hosted on the nightly server under the date with a name
# rustc-nightly-HOST-TRIPLE.tar.gz, whereas the stable compiler is named
# rustc-VERSION-HOST-TRIPLE.tar.gz. We just need to pull down and extract it,
# giving a directory name that will be the same as the tarball name (rustc is
# in that directory).
if stable:
tarball = "rustc-%s-%s.tar.gz" % (version, host_triple())
rustc_url = "https://static-rust-lang-org.s3.amazonaws.com/dist/" + tarball
else:
tarball = "%s/rustc-nightly-%s.tar.gz" % (version, host_triple())
base_url = "https://s3.amazonaws.com/rust-lang-ci/rustc-builds"
if not self.config["build"]["llvm-assertions"]:
base_url += "-alt"
rustc_url = base_url + "/" + tarball
tgz_file = rust_dir + '-rustc.tar.gz'
download_file("Rust compiler", rustc_url, tgz_file)
print("Extracting Rust compiler...")
extract(tgz_file, install_dir)
print("Rust compiler ready.")
# Each Rust stdlib has a name of the form `rust-std-nightly-TRIPLE.tar.gz` for the nightly
# releases, or rust-std-VERSION-TRIPLE.tar.gz for stable releases, with
# a directory of the name `rust-std-TRIPLE` inside and then a `lib` directory.
# This `lib` directory needs to be extracted and merged with the `rustc/lib`
# directory from the host compiler above.
nightly_suffix = "" if stable else "-nightly"
stable_version = "-{}".format(version) if stable else ""
lib_dir = path.join(install_dir,
"rustc{}{}-{}".format(nightly_suffix, stable_version, host_triple()),
"rustc", "lib", "rustlib")
# ensure that the libs for the host's target is downloaded
host_target = host_triple()
if host_target not in target:
target.append(host_target)
for target_triple in target:
target_lib_dir = path.join(lib_dir, target_triple)
if path.exists(target_lib_dir):
# No need to check for force. If --force the directory is already deleted
print("Rust lib for target {} already downloaded.".format(target_triple), end=" ")
print("Use |bootstrap-rust --force| to download again.")
continue
if self.use_stable_rust():
std_url = ("https://static-rust-lang-org.s3.amazonaws.com/dist/rust-std-%s-%s.tar.gz"
% (version, target_triple))
tgz_file = install_dir + ('rust-std-%s-%s.tar.gz' % (version, target_triple))
else:
std_url = ("https://s3.amazonaws.com/rust-lang-ci/rustc-builds/%s/rust-std-nightly-%s.tar.gz"
% (version, target_triple))
tgz_file = install_dir + ('rust-std-nightly-%s.tar.gz' % target_triple)
download_file("Host rust library for target %s" % target_triple, std_url, tgz_file)
print("Extracting Rust stdlib for target %s..." % target_triple)
extract(tgz_file, install_dir)
shutil.copytree(path.join(install_dir,
"rust-std%s%s-%s" % (nightly_suffix, stable_version, target_triple),
"rust-std-%s" % target_triple, "lib", "rustlib", target_triple),
path.join(install_dir,
"rustc%s%s-%s" % (nightly_suffix, stable_version, host_triple()),
"rustc", "lib", "rustlib", target_triple))
shutil.rmtree(path.join(install_dir,
"rust-std%s%s-%s" % (nightly_suffix, stable_version, target_triple)))
print("Rust {} libs ready.".format(target_triple))
@Command('bootstrap-rust-docs',
description='Download the Rust documentation',
category='bootstrap')
@CommandArgument('--force', '-f',
action='store_true',
help='Force download even if docs already exist')
def bootstrap_rustc_docs(self, force=False):
self.ensure_bootstrapped()
rust_root = self.config["tools"]["rust-root"]
docs_dir = path.join(rust_root, "doc")
if not force and path.exists(docs_dir):
print("Rust docs already downloaded.", end=" ")
print("Use |bootstrap-rust-docs --force| to download again.")
return
if path.isdir(docs_dir):
shutil.rmtree(docs_dir)
docs_name = self.rust_path().replace("rustc-", "rust-docs-")
docs_url = ("https://static-rust-lang-org.s3.amazonaws.com/dist/rust-docs-nightly-%s.tar.gz"
% host_triple())
tgz_file = path.join(rust_root, 'doc.tar.gz')
download_file("Rust docs", docs_url, tgz_file)
print("Extracting Rust docs...")
temp_dir = path.join(rust_root, "temp_docs")
if path.isdir(temp_dir):
shutil.rmtree(temp_dir)
extract(tgz_file, temp_dir)
shutil.move(path.join(temp_dir, docs_name.split("/")[1],
"rust-docs", "share", "doc", "rust", "html"),
docs_dir)
shutil.rmtree(temp_dir)
print("Rust docs ready.")
@Command('bootstrap-cargo',
description='Download the Cargo build tool',
category='bootstrap')
@CommandArgument('--force', '-f',
action='store_true',
help='Force download even if cargo already exists')
def bootstrap_cargo(self, force=False):
cargo_dir = path.join(self.context.sharedir, "cargo",
self.cargo_build_id())
if not force and path.exists(path.join(cargo_dir, "cargo", "bin", "cargo" + BIN_SUFFIX)):
print("Cargo already downloaded.", end=" ")
print("Use |bootstrap-cargo --force| to download again.")
return
if path.isdir(cargo_dir):
shutil.rmtree(cargo_dir)
os.makedirs(cargo_dir)
tgz_file = "cargo-nightly-%s.tar.gz" % host_triple()
nightly_url = "https://s3.amazonaws.com/rust-lang-ci/cargo-builds/%s/%s" % \
(self.cargo_build_id(), tgz_file)
download_file("Cargo nightly", nightly_url, tgz_file)
print("Extracting Cargo nightly...")
nightly_dir = path.join(cargo_dir,
path.basename(tgz_file).replace(".tar.gz", ""))
extract(tgz_file, cargo_dir, movedir=nightly_dir)
print("Cargo ready.")
@Command('update-hsts-preload',
description='Download the HSTS preload list',
category='bootstrap')
def bootstrap_hsts_preload(self, force=False):
preload_filename = "hsts_preload.json"
preload_path = path.join(self.context.topdir, "resources")
chromium_hsts_url = "https://chromium.googlesource.com/chromium/src" + \
"/net/+/master/http/transport_security_state_static.json?format=TEXT"
try:
content_base64 = download_bytes("Chromium HSTS preload list", chromium_hsts_url)
except urllib2.URLError:
print("Unable to download chromium HSTS preload list; are you connected to the internet?")
sys.exit(1)
content_decoded = base64.b64decode(content_base64)
# The chromium "json" has single line comments in it which, of course,
# are non-standard/non-valid json. Simply strip them out before parsing
content_json = re.sub(r'(^|\s+)//.*$', '', content_decoded, flags=re.MULTILINE)
try:
pins_and_static_preloads = json.loads(content_json)
entries = {
"entries": [
{
"host": e["name"],
"include_subdomains": e.get("include_subdomains", False)
}
for e in pins_and_static_preloads["entries"]
]
}
with open(path.join(preload_path, preload_filename), 'w') as fd:
json.dump(entries, fd, indent=4)
        except ValueError:
print("Unable to parse chromium HSTS preload list, has the format changed?")
sys.exit(1)
@Command('update-pub-domains',
description='Download the public domains list and update resources/public_domains.txt',
category='bootstrap')
def bootstrap_pub_suffix(self, force=False):
list_url = "https://publicsuffix.org/list/public_suffix_list.dat"
dst_filename = path.join(self.context.topdir, "resources", "public_domains.txt")
not_implemented_case = re.compile(r'^[^*]+\*')
try:
content = download_bytes("Public suffix list", list_url)
except urllib2.URLError:
print("Unable to download the public suffix list; are you connected to the internet?")
sys.exit(1)
lines = [l.strip() for l in content.decode("utf8").split("\n")]
suffixes = [l for l in lines if not l.startswith("//") and not l == ""]
with open(dst_filename, "wb") as fo:
for suffix in suffixes:
if not_implemented_case.match(suffix):
print("Warning: the new list contains a case that servo can't handle: %s" % suffix)
fo.write(suffix.encode("idna") + "\n")
@Command('clean-nightlies',
description='Clean unused nightly builds of Rust and Cargo',
category='bootstrap')
@CommandArgument('--force', '-f',
action='store_true',
help='Actually remove stuff')
def clean_nightlies(self, force=False):
rust_current = self.rust_path().split('/')[0]
cargo_current = self.cargo_build_id()
print("Current Rust version: " + rust_current)
print("Current Cargo version: " + cargo_current)
removing_anything = False
for current, base in [(rust_current, "rust"), (cargo_current, "cargo")]:
base = path.join(self.context.sharedir, base)
for name in os.listdir(base):
if name != current:
removing_anything = True
name = path.join(base, name)
if force:
print("Removing " + name)
if os.path.isdir(name):
shutil.rmtree(name)
else:
os.remove(name)
else:
print("Would remove " + name)
if not removing_anything:
print("Nothing to remove.")
elif not force:
print("Nothing done. "
"Run `./mach clean-nightlies -f` to actually remove.")
| ddrmanxbxfr/servo | python/servo/bootstrap_commands.py | Python | mpl-2.0 | 14,058 |
from click_plugins import with_plugins
from pkg_resources import iter_entry_points
import click
@with_plugins(iter_entry_points('girder.cli_plugins'))
@click.group(help='Girder: data management platform for the web.',
context_settings=dict(help_option_names=['-h', '--help']))
@click.version_option(message='%(version)s')
def main():
pass
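
# Illustration (not part of the original file): third-party packages can hook
# subcommands into this group by exposing a click command through the
# "girder.cli_plugins" entry point, e.g. in their setup.py (the module and
# command names below are hypothetical):
#
#     setup(
#         ...
#         entry_points={
#             'girder.cli_plugins': [
#                 'serve = girder_plugin.cli:serve',
#             ]
#         },
#     )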
| manthey/girder | girder/cli/__init__.py | Python | apache-2.0 | 358 |
#!/usr/bin/env python3
class UniqueIndexViolationCheck:
unique_indexes_query = """
select table_oid, index_name, table_name, array_agg(attname) as column_names
from pg_attribute, (
select pg_index.indrelid as table_oid, index_class.relname as index_name, table_class.relname as table_name, unnest(pg_index.indkey) as column_index
from pg_index, pg_class index_class, pg_class table_class
where pg_index.indisunique='t'
and index_class.relnamespace = (select oid from pg_namespace where nspname = 'pg_catalog')
and index_class.relkind = 'i'
and index_class.oid = pg_index.indexrelid
and table_class.oid = pg_index.indrelid
) as unique_catalog_index_columns
where attnum = column_index
and attrelid = table_oid
group by table_oid, index_name, table_name;
"""
def __init__(self):
self.violated_segments_query = """
select distinct(gp_segment_id) from (
(select gp_segment_id, %s
from gp_dist_random('%s')
where (%s) is not null
group by gp_segment_id, %s
having count(*) > 1)
union
(select gp_segment_id, %s
from %s
where (%s) is not null
group by gp_segment_id, %s
having count(*) > 1)
) as violations
"""
def runCheck(self, db_connection):
unique_indexes = db_connection.query(self.unique_indexes_query).getresult()
violations = []
for (table_oid, index_name, table_name, column_names) in unique_indexes:
column_names = ",".join(column_names)
sql = self.get_violated_segments_query(table_name, column_names)
violated_segments = db_connection.query(sql).getresult()
if violated_segments:
violations.append(dict(table_oid=table_oid,
table_name=table_name,
index_name=index_name,
column_names=column_names,
violated_segments=[row[0] for row in violated_segments]))
return violations
def get_violated_segments_query(self, table_name, column_names):
return self.violated_segments_query % (
column_names, table_name, column_names, column_names, column_names, table_name, column_names, column_names
)
| 50wu/gpdb | gpMgmt/bin/gpcheckcat_modules/unique_index_violation_check.py | Python | apache-2.0 | 2,547 |
###############################################################################
#
# Tests for XlsxWriter.
#
# Copyright (c), 2013-2015, John McNamara, jmcnamara@cpan.org
#
import unittest
from ...compatibility import StringIO
from ...worksheet import Worksheet
class TestWriteWorksheet(unittest.TestCase):
"""
Test the Worksheet _write_worksheet() method.
"""
def setUp(self):
self.fh = StringIO()
self.worksheet = Worksheet()
self.worksheet._set_filehandle(self.fh)
def test_write_worksheet(self):
"""Test the _write_worksheet() method"""
self.worksheet._write_worksheet()
exp = """<worksheet xmlns="http://schemas.openxmlformats.org/spreadsheetml/2006/main" xmlns:r="http://schemas.openxmlformats.org/officeDocument/2006/relationships">"""
got = self.fh.getvalue()
self.assertEqual(got, exp)
| jvrsantacruz/XlsxWriter | xlsxwriter/test/worksheet/test_write_worksheet.py | Python | bsd-2-clause | 887 |
VERSION = (1, 3, 0, 'alpha', 1)
def get_version():
version = '%s.%s' % (VERSION[0], VERSION[1])
if VERSION[2]:
version = '%s.%s' % (version, VERSION[2])
if VERSION[3:] == ('alpha', 0):
version = '%s pre-alpha' % version
else:
if VERSION[3] != 'final':
version = '%s %s %s' % (version, VERSION[3], VERSION[4])
from django.utils.version import get_svn_revision
svn_rev = get_svn_revision()
if svn_rev != u'SVN-unknown':
version = "%s %s" % (version, svn_rev)
return version
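
# For example, with VERSION = (1, 3, 0, 'alpha', 1) as above this returns
# "1.3 alpha 1", with the SVN revision appended (e.g. "1.3 alpha 1 SVN-13000",
# a made-up revision) when running from a Subversion checkout.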
| hunch/hunch-gift-app | django/__init__.py | Python | mit | 565 |
# -*- coding: utf-8 -*-
import re
import django, logging, warnings
from django import forms
from django.conf import settings
from django.core.urlresolvers import reverse
from django.forms.models import formset_factory
from django.middleware.csrf import _get_new_csrf_key
from django.template import (
loader, TemplateSyntaxError, Context
)
from django.utils.translation import ugettext_lazy as _
from .base import CrispyTestCase
from .forms import TestForm
from crispy_forms.bootstrap import (
FieldWithButtons, PrependedAppendedText, AppendedText, PrependedText,
StrictButton
)
from crispy_forms.compatibility import text_type
from crispy_forms.helper import FormHelper, FormHelpersException
from crispy_forms.layout import (
Layout, Submit, Reset, Hidden, Button, MultiField, Field
)
from crispy_forms.utils import render_crispy_form
from crispy_forms.templatetags.crispy_forms_tags import CrispyFormNode
class TestFormHelper(CrispyTestCase):
urls = 'crispy_forms.tests.urls'
def test_inputs(self):
form_helper = FormHelper()
form_helper.add_input(Submit('my-submit', 'Submit', css_class="button white"))
form_helper.add_input(Reset('my-reset', 'Reset'))
form_helper.add_input(Hidden('my-hidden', 'Hidden'))
form_helper.add_input(Button('my-button', 'Button'))
template = loader.get_template_from_string(u"""
{% load crispy_forms_tags %}
{% crispy form form_helper %}
""")
c = Context({'form': TestForm(), 'form_helper': form_helper})
html = template.render(c)
self.assertTrue('button white' in html)
self.assertTrue('id="submit-id-my-submit"' in html)
self.assertTrue('id="reset-id-my-reset"' in html)
self.assertTrue('name="my-hidden"' in html)
self.assertTrue('id="button-id-my-button"' in html)
if self.current_template_pack == 'uni_form':
self.assertTrue('submit submitButton' in html)
self.assertTrue('reset resetButton' in html)
self.assertTrue('class="button"' in html)
else:
self.assertTrue('class="btn"' in html)
self.assertTrue('btn btn-primary' in html)
self.assertTrue('btn btn-inverse' in html)
self.assertEqual(len(re.findall(r'<input[^>]+> <', html)), 8)
def test_invalid_form_method(self):
form_helper = FormHelper()
try:
form_helper.form_method = "superPost"
self.fail("Setting an invalid form_method within the helper should raise an Exception")
except FormHelpersException:
pass
def test_form_with_helper_without_layout(self):
form_helper = FormHelper()
form_helper.form_id = 'this-form-rocks'
form_helper.form_class = 'forms-that-rock'
form_helper.form_method = 'GET'
form_helper.form_action = 'simpleAction'
form_helper.form_error_title = 'ERRORS'
template = loader.get_template_from_string(u"""
{% load crispy_forms_tags %}
{% crispy testForm form_helper %}
""")
# now we render it, with errors
form = TestForm({'password1': 'wargame','password2': 'god'})
form.is_valid()
c = Context({'testForm': form, 'form_helper': form_helper})
html = template.render(c)
# Lets make sure everything loads right
self.assertTrue(html.count('<form'), 1)
self.assertTrue('forms-that-rock' in html)
self.assertTrue('method="get"' in html)
self.assertTrue('id="this-form-rocks"' in html)
self.assertTrue('action="%s"' % reverse('simpleAction') in html)
if (self.current_template_pack == 'uni_form'):
self.assertTrue('class="uniForm' in html)
self.assertTrue("ERRORS" in html)
self.assertTrue("<li>Passwords dont match</li>" in html)
# now lets remove the form tag and render it again. All the True items above
# should now be false because the form tag is removed.
form_helper.form_tag = False
html = template.render(c)
self.assertFalse('<form' in html)
self.assertFalse('forms-that-rock' in html)
self.assertFalse('method="get"' in html)
self.assertFalse('id="this-form-rocks"' in html)
def test_form_show_errors_non_field_errors(self):
form = TestForm({'password1': 'wargame', 'password2': 'god'})
form.helper = FormHelper()
form.helper.form_show_errors = True
form.is_valid()
template = loader.get_template_from_string(u"""
{% load crispy_forms_tags %}
{% crispy testForm %}
""")
# First we render with errors
c = Context({'testForm': form})
html = template.render(c)
# Ensure those errors were rendered
self.assertTrue('<li>Passwords dont match</li>' in html)
self.assertTrue(text_type(_('This field is required.')) in html)
self.assertTrue('error' in html)
# Now we render without errors
form.helper.form_show_errors = False
c = Context({'testForm': form})
html = template.render(c)
# Ensure errors were not rendered
self.assertFalse('<li>Passwords dont match</li>' in html)
self.assertFalse(text_type(_('This field is required.')) in html)
self.assertFalse('error' in html)
def test_html5_required(self):
form = TestForm()
form.helper = FormHelper()
form.helper.html5_required = True
html = render_crispy_form(form)
# 6 out of 7 fields are required and an extra one for the SplitDateTimeWidget makes 7.
self.assertEqual(html.count('required="required"'), 7)
form = TestForm()
form.helper = FormHelper()
form.helper.html5_required = False
html = render_crispy_form(form)
def test_attrs(self):
form = TestForm()
form.helper = FormHelper()
form.helper.attrs = {'id': 'TestIdForm', 'autocomplete': "off"}
html = render_crispy_form(form)
self.assertTrue('autocomplete="off"' in html)
self.assertTrue('id="TestIdForm"' in html)
def test_template_context(self):
helper = FormHelper()
helper.attrs = {
'id': 'test-form',
'class': 'test-forms',
'action': 'submit/test/form',
'autocomplete': 'off',
}
node = CrispyFormNode('form', 'helper')
context = node.get_response_dict(helper, {}, False)
self.assertEqual(context['form_id'], "test-form")
self.assertEqual(context['form_attrs']['id'], "test-form")
self.assertTrue("test-forms" in context['form_class'])
self.assertTrue("test-forms" in context['form_attrs']['class'])
self.assertEqual(context['form_action'], "submit/test/form")
self.assertEqual(context['form_attrs']['action'], "submit/test/form")
self.assertEqual(context['form_attrs']['autocomplete'], "off")
def test_template_context_using_form_attrs(self):
helper = FormHelper()
helper.form_id = 'test-form'
helper.form_class = 'test-forms'
helper.form_action = 'submit/test/form'
node = CrispyFormNode('form', 'helper')
context = node.get_response_dict(helper, {}, False)
self.assertEqual(context['form_id'], "test-form")
self.assertEqual(context['form_attrs']['id'], "test-form")
self.assertTrue("test-forms" in context['form_class'])
self.assertTrue("test-forms" in context['form_attrs']['class'])
self.assertEqual(context['form_action'], "submit/test/form")
self.assertEqual(context['form_attrs']['action'], "submit/test/form")
def test_template_helper_access(self):
helper = FormHelper()
helper.form_id = 'test-form'
self.assertEqual(helper['form_id'], 'test-form')
def test_without_helper(self):
template = loader.get_template_from_string(u"""
{% load crispy_forms_tags %}
{% crispy form %}
""")
c = Context({'form': TestForm()})
html = template.render(c)
# Lets make sure everything loads right
self.assertTrue('<form' in html)
self.assertTrue('method="post"' in html)
self.assertFalse('action' in html)
if (self.current_template_pack == 'uni_form'):
self.assertTrue('uniForm' in html)
def test_template_pack_override_compact(self):
current_pack = self.current_template_pack
override_pack = current_pack == 'uni_form' and 'bootstrap' or 'uni_form'
# {% crispy form 'template_pack_name' %}
template = loader.get_template_from_string(u"""
{%% load crispy_forms_tags %%}
{%% crispy form "%s" %%}
""" % override_pack)
c = Context({'form': TestForm()})
html = template.render(c)
if (current_pack == 'uni_form'):
self.assertTrue('control-group' in html)
else:
self.assertTrue('uniForm' in html)
def test_template_pack_override_verbose(self):
current_pack = self.current_template_pack
override_pack = current_pack == 'uni_form' and 'bootstrap' or 'uni_form'
# {% crispy form helper 'template_pack_name' %}
template = loader.get_template_from_string(u"""
{%% load crispy_forms_tags %%}
{%% crispy form form_helper "%s" %%}
""" % override_pack)
c = Context({'form': TestForm(), 'form_helper': FormHelper()})
html = template.render(c)
if (current_pack == 'uni_form'):
self.assertTrue('control-group' in html)
else:
self.assertTrue('uniForm' in html)
def test_template_pack_override_wrong(self):
try:
loader.get_template_from_string(u"""
{% load crispy_forms_tags %}
{% crispy form 'foo' %}
""")
except TemplateSyntaxError:
pass
def test_invalid_helper(self):
template = loader.get_template_from_string(u"""
{% load crispy_forms_tags %}
{% crispy form form_helper %}
""")
c = Context({'form': TestForm(), 'form_helper': "invalid"})
settings.CRISPY_FAIL_SILENTLY = False
# Django >= 1.4 is not wrapping exceptions in TEMPLATE_DEBUG mode
if settings.TEMPLATE_DEBUG and django.VERSION < (1, 4):
self.assertRaises(TemplateSyntaxError, lambda:template.render(c))
else:
self.assertRaises(TypeError, lambda:template.render(c))
del settings.CRISPY_FAIL_SILENTLY
def test_formset_with_helper_without_layout(self):
template = loader.get_template_from_string(u"""
{% load crispy_forms_tags %}
{% crispy testFormSet formset_helper %}
""")
form_helper = FormHelper()
form_helper.form_id = 'thisFormsetRocks'
form_helper.form_class = 'formsets-that-rock'
form_helper.form_method = 'POST'
form_helper.form_action = 'simpleAction'
TestFormSet = formset_factory(TestForm, extra = 3)
testFormSet = TestFormSet()
c = Context({'testFormSet': testFormSet, 'formset_helper': form_helper, 'csrf_token': _get_new_csrf_key()})
html = template.render(c)
self.assertEqual(html.count('<form'), 1)
self.assertEqual(html.count("<input type='hidden' name='csrfmiddlewaretoken'"), 1)
# Check formset management form
self.assertTrue('form-TOTAL_FORMS' in html)
self.assertTrue('form-INITIAL_FORMS' in html)
self.assertTrue('form-MAX_NUM_FORMS' in html)
self.assertTrue('formsets-that-rock' in html)
self.assertTrue('method="post"' in html)
self.assertTrue('id="thisFormsetRocks"' in html)
self.assertTrue('action="%s"' % reverse('simpleAction') in html)
if (self.current_template_pack == 'uni_form'):
self.assertTrue('class="uniForm' in html)
def test_CSRF_token_POST_form(self):
form_helper = FormHelper()
template = loader.get_template_from_string(u"""
{% load crispy_forms_tags %}
{% crispy form form_helper %}
""")
# The middleware only initializes the CSRF token when processing a real request
# So using RequestContext or csrf(request) here does not work.
# Instead I set the key `csrf_token` to a CSRF token manually, which `csrf_token` tag uses
c = Context({'form': TestForm(), 'form_helper': form_helper, 'csrf_token': _get_new_csrf_key()})
html = template.render(c)
self.assertTrue("<input type='hidden' name='csrfmiddlewaretoken'" in html)
def test_CSRF_token_GET_form(self):
form_helper = FormHelper()
form_helper.form_method = 'GET'
template = loader.get_template_from_string(u"""
{% load crispy_forms_tags %}
{% crispy form form_helper %}
""")
c = Context({'form': TestForm(), 'form_helper': form_helper, 'csrf_token': _get_new_csrf_key()})
html = template.render(c)
self.assertFalse("<input type='hidden' name='csrfmiddlewaretoken'" in html)
def test_disable_csrf(self):
form = TestForm()
helper = FormHelper()
helper.disable_csrf = True
html = render_crispy_form(form, helper, {'csrf_token': _get_new_csrf_key()})
self.assertFalse('csrf' in html)
def test_render_hidden_fields(self):
test_form = TestForm()
test_form.helper = FormHelper()
test_form.helper.layout = Layout(
'email'
)
test_form.helper.render_hidden_fields = True
html = render_crispy_form(test_form)
self.assertEqual(html.count('<input'), 1)
# Now hide a couple of fields
for field in ('password1', 'password2'):
test_form.fields[field].widget = forms.HiddenInput()
html = render_crispy_form(test_form)
self.assertEqual(html.count('<input'), 3)
self.assertEqual(html.count('hidden'), 2)
if django.VERSION < (1, 5):
self.assertEqual(html.count('type="hidden" name="password1"'), 1)
self.assertEqual(html.count('type="hidden" name="password2"'), 1)
else:
self.assertEqual(html.count('name="password1" type="hidden"'), 1)
self.assertEqual(html.count('name="password2" type="hidden"'), 1)
def test_render_required_fields(self):
test_form = TestForm()
test_form.helper = FormHelper()
test_form.helper.layout = Layout(
'email'
)
test_form.helper.render_required_fields = True
html = render_crispy_form(test_form)
self.assertEqual(html.count('<input'), 7)
def test_helper_custom_template(self):
form = TestForm()
form.helper = FormHelper()
form.helper.template = 'custom_form_template.html'
html = render_crispy_form(form)
self.assertTrue("<h1>Special custom form</h1>" in html)
def test_helper_custom_field_template(self):
form = TestForm()
form.helper = FormHelper()
form.helper.layout = Layout(
'password1',
'password2',
)
form.helper.field_template = 'custom_field_template.html'
html = render_crispy_form(form)
self.assertEqual(html.count("<h1>Special custom field</h1>"), 2)
class TestUniformFormHelper(TestFormHelper):
def test_form_show_errors(self):
if settings.CRISPY_TEMPLATE_PACK != 'uni_form':
warnings.warn('skipping uniform tests with CRISPY_TEMPLATE_PACK=%s' % settings.CRISPY_TEMPLATE_PACK)
return
form = TestForm({
'email': 'invalidemail',
'first_name': 'first_name_too_long',
'last_name': 'last_name_too_long',
'password1': 'yes',
'password2': 'yes',
})
form.helper = FormHelper()
form.helper.layout = Layout(
Field('email'),
Field('first_name'),
Field('last_name'),
Field('password1'),
Field('password2'),
)
form.is_valid()
form.helper.form_show_errors = True
html = render_crispy_form(form)
self.assertEqual(html.count('error'), 9)
form.helper.form_show_errors = False
html = render_crispy_form(form)
self.assertEqual(html.count('error'), 0)
def test_multifield_errors(self):
if settings.CRISPY_TEMPLATE_PACK != 'uni_form':
warnings.warn('skipping uniform tests with CRISPY_TEMPLATE_PACK=%s' % settings.CRISPY_TEMPLATE_PACK)
return
form = TestForm({
'email': 'invalidemail',
'password1': 'yes',
'password2': 'yes',
})
form.helper = FormHelper()
form.helper.layout = Layout(
MultiField('legend', 'email')
)
form.is_valid()
form.helper.form_show_errors = True
html = render_crispy_form(form)
self.assertEqual(html.count('error'), 3)
# Reset layout for avoiding side effects
form.helper.layout = Layout(
MultiField('legend', 'email')
)
form.helper.form_show_errors = False
html = render_crispy_form(form)
self.assertEqual(html.count('error'), 0)
class TestBootstrapFormHelper(TestFormHelper):
def test_form_show_errors(self):
form = TestForm({
'email': 'invalidemail',
'first_name': 'first_name_too_long',
'last_name': 'last_name_too_long',
'password1': 'yes',
'password2': 'yes',
})
form.helper = FormHelper()
form.helper.layout = Layout(
AppendedText('email', 'whatever'),
PrependedText('first_name', 'blabla'),
PrependedAppendedText('last_name', 'foo', 'bar'),
AppendedText('password1', 'whatever'),
PrependedText('password2', 'blabla'),
)
form.is_valid()
form.helper.form_show_errors = True
html = render_crispy_form(form)
self.assertEqual(html.count('error'), 6)
form.helper.form_show_errors = False
html = render_crispy_form(form)
self.assertEqual(html.count('error'), 0)
def test_error_text_inline(self):
form = TestForm({'email': 'invalidemail'})
form.helper = FormHelper()
layout = Layout(
AppendedText('first_name', 'wat'),
PrependedText('email', '@'),
PrependedAppendedText('last_name', '@', 'wat'),
)
form.helper.layout = layout
form.is_valid()
html = render_crispy_form(form)
help_class = 'help-inline'
if self.current_template_pack == 'bootstrap3':
help_class = 'help-block'
matches = re.findall(
'<span id="error_\d_\w*" class="%s"' % help_class, html, re.MULTILINE
)
self.assertEqual(len(matches), 3)
form = TestForm({'email': 'invalidemail'})
form.helper = FormHelper()
form.helper.layout = layout
form.helper.error_text_inline = False
html = render_crispy_form(form)
matches = re.findall('<p id="error_\d_\w*" class="help-block"', html, re.MULTILINE)
self.assertEqual(len(matches), 3)
def test_error_and_help_inline(self):
form = TestForm({'email': 'invalidemail'})
form.helper = FormHelper()
form.helper.error_text_inline = False
form.helper.help_text_inline = True
form.helper.layout = Layout('email')
form.is_valid()
html = render_crispy_form(form)
# Check that help goes before error, otherwise CSS won't work
help_position = html.find('<span id="hint_id_email" class="help-inline">')
error_position = html.find('<p id="error_1_id_email" class="help-block">')
self.assertTrue(help_position < error_position)
# Vice versa
form = TestForm({'email': 'invalidemail'})
form.helper = FormHelper()
form.helper.error_text_inline = True
form.helper.help_text_inline = False
form.helper.layout = Layout('email')
form.is_valid()
html = render_crispy_form(form)
# Check that error goes before help, otherwise CSS won't work
error_position = html.find('<span id="error_1_id_email" class="help-inline">')
help_position = html.find('<p id="hint_id_email" class="help-block">')
self.assertTrue(error_position < help_position)
def test_form_show_labels(self):
form = TestForm()
form.helper = FormHelper()
form.helper.layout = Layout(
'password1',
FieldWithButtons(
'password2',
StrictButton("Confirm")
),
PrependedText(
'first_name',
'Mr.'
),
AppendedText(
'last_name',
'@'
),
PrependedAppendedText(
'datetime_field',
'on',
'secs'
)
)
form.helper.form_show_labels = False
html = render_crispy_form(form)
self.assertEqual(html.count("<label"), 0)
class TestBootstrap3FormHelper(TestFormHelper):
def test_label_class_and_field_class(self):
if settings.CRISPY_TEMPLATE_PACK != 'bootstrap3':
warnings.warn('skipping bootstrap3 tests with CRISPY_TEMPLATE_PACK=%s' % settings.CRISPY_TEMPLATE_PACK)
return
form = TestForm()
form.helper = FormHelper()
form.helper.label_class = 'col-lg-2'
form.helper.field_class = 'col-lg-8'
html = render_crispy_form(form)
self.assertTrue('<div class="form-group"> <div class="controls col-lg-offset-2 col-lg-8"> <div id="div_id_is_company" class="checkbox"> <label for="id_is_company" class=""> <input class="checkboxinput checkbox" id="id_is_company" name="is_company" type="checkbox" />')
self.assertEqual(html.count('col-lg-8'), 7)
form.helper.label_class = 'col-sm-3'
form.helper.field_class = 'col-sm-8'
html = render_crispy_form(form)
self.assertTrue('<div class="form-group"> <div class="controls col-sm-offset-3 col-sm-8"> <div id="div_id_is_company" class="checkbox"> <label for="id_is_company" class=""> <input class="checkboxinput checkbox" id="id_is_company" name="is_company" type="checkbox" />')
self.assertEqual(html.count('col-sm-8'), 7)
def test_template_pack(self):
if settings.CRISPY_TEMPLATE_PACK != 'bootstrap3':
warnings.warn('skipping bootstrap3 tests with CRISPY_TEMPLATE_PACK=%s' % settings.CRISPY_TEMPLATE_PACK)
return
form = TestForm()
form.helper = FormHelper()
form.helper.template_pack = 'uni_form'
html = render_crispy_form(form)
self.assertFalse('form-control' in html)
self.assertTrue('ctrlHolder' in html)
| zixan/django-crispy-forms | crispy_forms/tests/test_form_helper.py | Python | mit | 23,197 |
# Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.
"""Implement standard (and unused) TCP protocols.
These protocols are either provided by inetd, or are not provided at all.
"""
from __future__ import absolute_import, division
import time
import struct
from zope.interface import implementer
from twisted.internet import protocol, interfaces
from twisted.python.compat import _PY3
class Echo(protocol.Protocol):
"""As soon as any data is received, write it back (RFC 862)"""
def dataReceived(self, data):
self.transport.write(data)
class Discard(protocol.Protocol):
"""Discard any received data (RFC 863)"""
def dataReceived(self, data):
# I'm ignoring you, nyah-nyah
pass
@implementer(interfaces.IProducer)
class Chargen(protocol.Protocol):
"""Generate repeating noise (RFC 864)"""
noise = r'@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~ !"#$%&?'
def connectionMade(self):
self.transport.registerProducer(self, 0)
def resumeProducing(self):
self.transport.write(self.noise)
def pauseProducing(self):
pass
def stopProducing(self):
pass
class QOTD(protocol.Protocol):
"""Return a quote of the day (RFC 865)"""
def connectionMade(self):
self.transport.write(self.getQuote())
self.transport.loseConnection()
def getQuote(self):
"""Return a quote. May be overrriden in subclasses."""
return "An apple a day keeps the doctor away.\r\n"
class Who(protocol.Protocol):
"""Return list of active users (RFC 866)"""
def connectionMade(self):
self.transport.write(self.getUsers())
self.transport.loseConnection()
def getUsers(self):
"""Return active users. Override in subclasses."""
return "root\r\n"
class Daytime(protocol.Protocol):
"""Send back the daytime in ASCII form (RFC 867)"""
def connectionMade(self):
self.transport.write(time.asctime(time.gmtime(time.time())) + '\r\n')
self.transport.loseConnection()
class Time(protocol.Protocol):
"""Send back the time in machine readable form (RFC 868)"""
def connectionMade(self):
# is this correct only for 32-bit machines?
result = struct.pack("!i", int(time.time()))
self.transport.write(result)
self.transport.loseConnection()
__all__ = ["Echo", "Discard", "Chargen", "QOTD", "Who", "Daytime", "Time"]
if _PY3:
__all3__ = ["Echo"]
for name in __all__[:]:
if name not in __all3__:
__all__.remove(name)
del globals()[name]
del name, __all3__
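# --- Illustrative sketch added by the editor; not part of the original module. ---
# Minimal example of serving Echo (the one class the block above keeps on
# Python 3), using the standard twisted.internet reactor/factory APIs. The
# port is arbitrary: RFC 862 assigns port 7, which needs root, so a high
# port is used here.
if __name__ == '__main__':
    from twisted.internet import reactor
    factory = protocol.ServerFactory()
    factory.protocol = Echo  # echo each received chunk back to the client
    reactor.listenTCP(8007, factory)
    reactor.run()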
| Architektor/PySnip | venv/lib/python2.7/site-packages/twisted/protocols/wire.py | Python | gpl-3.0 | 2,659 |
# -*- coding: utf-8 -*-
# Copyright(C) 2014 Laurent Bachelier
#
# This file is part of weboob.
#
# weboob is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# weboob is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with weboob. If not, see <http://www.gnu.org/licenses/>.
import requests.cookies
try:
import cookielib
except ImportError:
import http.cookiejar as cookielib
__all__ = ['WeboobCookieJar']
class WeboobCookieJar(requests.cookies.RequestsCookieJar):
@classmethod
def from_cookiejar(klass, cj):
"""
Create a WeboobCookieJar from another CookieJar instance.
"""
return requests.cookies.merge_cookies(klass(), cj)
def export(self, filename):
"""
Export all cookies to a file, regardless of expiration, etc.
"""
cj = requests.cookies.merge_cookies(cookielib.LWPCookieJar(), self)
cj.save(filename, ignore_discard=True, ignore_expires=True)
def _cookies_from_attrs_set(self, attrs_set, request):
for tup in self._normalized_cookie_tuples(attrs_set):
cookie = self._cookie_from_cookie_tuple(tup, request)
if cookie:
yield cookie
def make_cookies(self, response, request):
"""Return sequence of Cookie objects extracted from response object."""
# get cookie-attributes for RFC 2965 and Netscape protocols
headers = response.info()
rfc2965_hdrs = headers.getheaders("Set-Cookie2")
ns_hdrs = headers.getheaders("Set-Cookie")
rfc2965 = self._policy.rfc2965
netscape = self._policy.netscape
if netscape:
for cookie in self._cookies_from_attrs_set(cookielib.parse_ns_headers(ns_hdrs), request):
self._process_rfc2109_cookies([cookie])
yield cookie
if rfc2965:
for cookie in self._cookies_from_attrs_set(cookielib.split_header_words(rfc2965_hdrs), request):
yield cookie
def copy(self):
new_cj = type(self)()
new_cj.update(self)
return new_cj
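# --- Illustrative sketch added by the editor; not part of the original module. ---
# Hypothetical usage: capture a requests session's cookies and persist them
# regardless of expiry. The URL and file path below are placeholders.
if __name__ == '__main__':
    import requests
    session = requests.Session()
    session.get('http://httpbin.org/cookies/set?session=demo')
    # Wrap the session's jar, then write every cookie to an LWP-format file.
    jar = WeboobCookieJar.from_cookiejar(session.cookies)
    jar.export('/tmp/demo-cookies.lwp')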
| Boussadia/weboob | weboob/tools/browser2/cookies.py | Python | agpl-3.0 | 2,534 |
#!/usr/bin/env python
# Line too long - pylint: disable=C0301
# Invalid name - pylint: disable=C0103
"""
parseutils.py
Routines to parse "flexible" configuration files for tools like
gpaddmirrors, gprecoverseg, gpexpand, etc.
Copyright (c) EMC/Greenplum Inc 2011. All Rights Reserved.
"""
import sys
from gppylib.mainUtils import ExceptionNoStackTraceNeeded
from gppylib.gplog import get_default_logger, logging_is_verbose
logger = get_default_logger()
def caller():
"Return name of calling function"
if logging_is_verbose():
return sys._getframe(1).f_code.co_name + '()'
return ''
def canonicalize_address(addr):
"""
Encases addresses in [ ] per RFC 2732. Generally used to deal with ':'
characters which are also often used as delimiters.
Returns the addr string if it doesn't contain any ':' characters.
If addr contains ':' and also contains a '[' then the addr string is
simply returned under the assumption that it is already escaped as needed.
Otherwise return a new string from addr by adding '[' prefix and ']' suffix.
Examples
--------
>>> canonicalize_address('myhost')
'myhost'
>>> canonicalize_address('127.0.0.1')
'127.0.0.1'
>>> canonicalize_address('::1')
'[::1]'
>>> canonicalize_address('[::1]')
'[::1]'
>>> canonicalize_address('2620:0:170:610::13')
'[2620:0:170:610::13]'
>>> canonicalize_address('[2620:0:170:610::13]')
'[2620:0:170:610::13]'
@param addr: the address to possibly encase in [ ]
@returns: the addresss, encased in [] if necessary
"""
if ':' not in addr: return addr
if '[' in addr: return addr
return '[' + addr + ']'
#
# line parsing
#
def consume_to(delimiter, rest):
"""
Consume characters from rest string until we encounter the delimiter.
Returns (None, after, None) where after are the characters after delimiter
or (None, rest, 'does not contain '+delimiter) when delimiter is not encountered.
Examples
--------
>>> consume_to('=', 'abc=def:ghi')
(None, 'def:ghi', None)
@param delimiter: the delimiter string
@param rest: the string to read such as 'abc:def:ghi'
@returns: (None, after, None) tuple such as (None, 'def:ghi', None)
"""
p = rest.find(delimiter)
if p < 0:
return None, rest, 'does not contain '+delimiter
return None, rest[p+1:], None
def read_to(delimiter, rest):
"""
Read characters from rest string until we encounter the delimiter.
Separate the string into characters 'before' and 'after' the delimiter.
If no delimiter is found, assign entire string to 'before' and None to 'after'.
Examples
--------
>>> read_to(':', 'abc:def:ghi')
('abc', 'def:ghi', None)
>>> read_to(':', 'abc:def')
('abc', 'def', None)
>>> read_to(':', 'abc')
('abc', None, None)
>>> read_to(':', '')
('', None, None)
Note this returns a 3-tuple for compatibility with other routines
which use the third element as an error message
@param delimiter: the delimiter string
@param rest: the string to read such as 'abc:def:ghi'
@returns: (before, after, None) tuple such as ('abc', 'def:ghi', None)
"""
p = rest.find(delimiter)
if p < 0:
return rest, None, None
return rest[0:p], rest[p+1:], None
def read_to_bracketed(delimiter, rest):
"""
Read characters from rest string which is expected to start with a '['.
If rest does not start with '[', return a tuple (None, rest, 'does not begin with [').
If rest string starts with a '[', then read until we find ']'.
If no ']' is found, return a tuple (None, rest, 'does not contain ending ]').
Otherwise separate the string into 'before' representing characters between
'[' and ']' and 'after' representing characters after the ']' and then check
that the first character found after the ']' is the delimiter.
If there are no characters after the ']', return a tuple (before, None, None)
where before contains the characters between '[' and ']'.
If there are characters after ']' other than the delimiter, return a tuple
(None, rest, 'characters not allowed after ending ]')
Otherwise return a tuple (before, after, None) where before contains the
characters between '[' and ']' and after contains the characters after the ']:'.
This function avoids raising Exceptions for these particular cases of
malformed input since they are easier to report in the calling function.
Examples
--------
>>> read_to_bracketed(':', '[abc:def]')
('abc:def', None, None)
>>> read_to_bracketed(':', '[abc]:def:ghi')
('abc', 'def:ghi', None)
>>> read_to_bracketed(':', '[abc:def]:ghi:jkl')
('abc:def', 'ghi:jkl', None)
>>> read_to_bracketed(':', 'abc:def:ghi:jkl')
(None, 'abc:def:ghi:jkl', 'does not begin with [')
>>> read_to_bracketed(':', '[abc:def:ghi:jkl')
(None, '[abc:def:ghi:jkl', 'does not contain ending ]')
>>> read_to_bracketed(':', '[abc]extra:def:ghi:jkl')
(None, '[abc]extra:def:ghi:jkl', 'characters not allowed after ending ]')
@param delimiter: the delimiter string
@param rest: the string to read such as '[abc:def]:ghi'
@returns: (before, after, reason) tuple such as ('abc:def', 'ghi', None)
"""
if not rest.startswith('['):
return None, rest, 'does not begin with ['
p = rest.find(']')
if p < 0:
return None, rest, 'does not contain ending ]'
if len(rest[p+1:]) < 1:
return rest[1:p], None, None
if rest[p+1] != delimiter:
return None, rest, 'characters not allowed after ending ]'
return rest[1:p], rest[p+2:], None
def read_to_possibly_bracketed(delimiter, rest):
"""
Behave as read_to_bracketed above when rest starts with a '[',
otherwise as read_to. This is intended to support fields
which may contain an IPv6 address, an IPv4 address or a hostname.
Examples
--------
>>> read_to_possibly_bracketed(':', 'abc:def:ghi')
('abc', 'def:ghi', None)
>>> read_to_possibly_bracketed(':', '[abc]:def:ghi')
('abc', 'def:ghi', None)
>>> read_to_possibly_bracketed(':', '[abc:def]:ghi')
('abc:def', 'ghi', None)
>>> read_to_possibly_bracketed(':', '[]:ghi')
('', 'ghi', None)
>>> read_to_possibly_bracketed(':', ':ghi')
('', 'ghi', None)
>>> read_to_possibly_bracketed(':', '[ghi]')
('ghi', None, None)
>>> read_to_possibly_bracketed(':', '[]')
('', None, None)
>>> read_to_possibly_bracketed(':', '')
('', None, None)
@param delimiter: the delimiter string
@param rest: the string to read such as '[abc:def]:ghi'
@returns: (before, after, reason) tuple such as ('abc:def', 'ghi', None)
"""
if rest.startswith('['):
return read_to_bracketed(delimiter, rest)
return read_to(delimiter, rest)
class LineParser:
"""
Manage state to parse a single line, generally from a configuration
file with fields delimited by colons.
"""
def __init__(self, caller, filename, lineno, line):
"Initialize"
(self.caller, self.filename, self.lineno, self.line, self.rest, self.error) = (caller, filename, lineno, line, line, None)
self.logger = logger
if logging_is_verbose():
self.logger.debug("%s:%s" % (filename, lineno))
def ensure_more_to_process(self, name):
"Raise an exception if we've exhausted the input line"
if self.rest is None:
msg = "out of values (reading %s)" % name
raise ExceptionNoStackTraceNeeded("%s:%s:%s LINE >>%s\n%s" % (self.filename, self.lineno, self.caller, self.line, msg))
def read_delimited_field(self, delimiter, name="next field", reader=read_to):
"""
Attempts to read the next field in the line up to the specified delimiter
using the specified reading method, raising any error encountered as an
exception. Returns the read field when successful.
"""
self.ensure_more_to_process(name)
value, self.rest, error = reader(delimiter, self.rest)
if error is not None:
msg = "%s (reading %s) >>%s" % (error, name, self.rest)
raise ExceptionNoStackTraceNeeded("%s:%s:%s LINE >>%s\n%s" % (self.filename, self.lineno, self.caller, self.line, msg))
if logging_is_verbose():
self.logger.debug(" name=%-30s delimiter='%s' value=%s" % (name, delimiter, value))
return value
def does_starts_with(self, expected):
"Returns true if line starts with expected value, or return false"
return self.line.startswith(expected)
def ensure_starts_with(self, expected):
"Returns true if line starts with expected value, or raise an exception otherwise"
if not self.does_starts_with(expected):
msg = "does not start with %s" % expected
raise ExceptionNoStackTraceNeeded("%s:%s:%s LINE >>%s\n%s" % (self.filename, self.lineno, self.caller, self.line, msg))
self.rest = self.rest[len(expected):]
def handle_field(self, name, dst=None, delimiter=':', stripchars=None):
"""
Attempts to read the next field up to a given delimiter.
Names starting with '[' indicate that the field should use the bracketed parsing logic.
If dst is not none, also assigns the value to dst[name].
If stripchars is not none, value is first stripped of leading and trailing stripchars.
"""
if name[0] == '[':
name = name.strip('[]')
value = self.read_delimited_field(delimiter, name, read_to_possibly_bracketed)
else:
value = self.read_delimited_field(delimiter, name)
if stripchars is not None:
value = value.strip(stripchars)
if dst is not None:
dst[name] = value
return value
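# --- Illustrative sketch added by the editor; not part of the original module. ---
# Walk-through of LineParser on a made-up host:port:path line. Field names
# wrapped in '[...]' select the bracket-aware reader, so IPv6 addresses and
# values containing ':' survive intact.
def _demo_line_parser():
    p = LineParser('demo', 'example.conf', 1, '[::1]:5432:/data/seg0')
    host = p.handle_field('[host]')   # '::1'  (brackets stripped)
    port = p.handle_field('port')     # '5432'
    path = p.handle_field('[path]')   # '/data/seg0'
    return host, port, path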
#
# file parsing
#
def line_reader(f):
"""
Read the contents of the given input, generating the non-blank non-comment
lines found within as a series of tuples of the form (line number, line).
>>> [l for l in line_reader(['', '# test', 'abc:def'])]
[(3, 'abc:def')]
"""
for offset, line in enumerate(f):
line = line.strip()
if len(line) < 1 or line[0] == '#':
continue
yield offset+1, line
################
# gpfilespace format
#
# First line in file is the filespace name, remaining lines are
# specify hostname, dbid, and a path:
#
# filespace:name
# hostname:dbid:path
# ...
################
def parse_fspacename(filename, lineno, line):
"""
Parse the filespace: line which appears at the beginning of the gpfilespace configuration file.
>>> parse_fspacename('file', 1, 'filespace:blah')
'blah'
"""
p = LineParser(caller(), filename, lineno, line)
p.ensure_starts_with('filespace:')
fspacename = p.read_delimited_field(':')
if p.rest is not None:
msg = "unexpected characters after filespace name >>%s" % p.rest
raise ExceptionNoStackTraceNeeded("%s:%s:%s LINE >>%s\n%s" % (filename, lineno, caller(), line, msg))
return fspacename
def parse_dfs_url(filename, lineno, line):
"""
Parse the dfs_url:: line which appears near the beginning of the gpfilespace configuration file.
>>> parse_dfs_url('file', 1, 'dfs_url::localhost:9000/gpsql')
'localhost:9000/gpsql'
"""
p = LineParser(caller(), filename, lineno, line)
p.ensure_starts_with('dfs_url::')
dfs_url = p.read_delimited_field('::')
if p.rest is not None:
msg = "unexpected characters after filespace name >>%s" % p.rest
raise ExceptionNoStackTraceNeeded("%s:%s:%s LINE >>%s\n%s" % (filename, lineno, caller(), line, msg))
return dfs_url
def parse_fspacesys(filename, lineno, line):
"""
Parse the filesystem name: the optional second line in the gpfilespace configuration file.
>>> parse_fspacesys('file', 2, 'fsysname:local|filesystem_name')
'local|filesystem_name'
"""
p = LineParser(caller(), filename, lineno, line)
if not p.does_starts_with('fsysname:'):
return None
p.ensure_starts_with('fsysname:')
fsysname = p.read_delimited_field(':')
if p.rest is not None:
msg = "unexpected characters after filespace type >>%s" % p.rest
raise ExceptionNoStackTraceNeeded("%s:%s:%s LINE >>%s\n%s" % (filename, lineno, caller(), line, msg))
return fsysname
def parse_fspacereplica(filename, lineno, line):
"""
Parse the filespace replica: the optional third line in the gpfilespace configuration file.
>>> parse_fspacereplica('file', 3, 'fsreplica:repnum')
'repnum'
"""
p = LineParser(caller(), filename, lineno, line)
if not p.does_starts_with('fsreplica:'):
return None
p.ensure_starts_with('fsreplica:')
fsreplica = p.read_delimited_field(':')
if p.rest is not None:
msg = "unexpected characters after filespace replica >>%s" % p.rest
raise ExceptionNoStackTraceNeeded("%s:%s:%s LINE >>%s\n%s" % (filename, lineno, caller(), line, msg))
return fsreplica
def parse_gpfilespace_line(filename, lineno, line):
"""
Parse a line of the gpfilespace configuration file other than the first.
>>> parse_gpfilespace_line('file', 1, '[::1]:dbid:path')
('::1', 'dbid', 'path')
>>> parse_gpfilespace_line('file', 1, 'host:dbid:path')
('host', 'dbid', 'path')
"""
p = LineParser(caller(), filename, lineno, line)
host = p.handle_field('[host]') # [host] indicates possible IPv6 address
dbid = p.handle_field('dbid')
path = p.handle_field('[path]') # url contains the ':'.
if p.rest is not None:
msg = "unexpected characters after path name >>%s" % p.rest
raise ExceptionNoStackTraceNeeded("%s:%s:%s LINE >>%s\n%s" % (filename, lineno, caller(), line, msg))
return host, dbid, path
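# --- Illustrative sketch added by the editor; not part of the original module. ---
# Putting the gpfilespace pieces together on a made-up configuration that
# follows the layout documented above. A real caller would pass an open
# file object to line_reader() rather than an inline list.
def _demo_parse_gpfilespace():
    lines = [
        'filespace:fast_disks',
        'sdw1:2:/data/fs1',
        '[::1]:3:/data/fs2',
    ]
    reader = line_reader(lines)
    lineno, line = next(reader)
    name = parse_fspacename('demo', lineno, line)       # 'fast_disks'
    rows = [parse_gpfilespace_line('demo', n, l) for n, l in reader]
    # rows == [('sdw1', '2', '/data/fs1'), ('::1', '3', '/data/fs2')]
    return name, rows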
################
# gpexpand segment file format:
#
# Form of file is hostname:address:port:dtadir:dbid:contentId:role[:replicationPort]
################
def parse_gpexpand_segment_line(filename, lineno, line):
"""
Parse a line of the gpexpand configuration file.
>>> parse_gpexpand_segment_line('file', 1, "localhost:[::1]:40001:/Users/ctaylor/data/p2/gpseg1:4:1:p")
('localhost', '::1', '40001', '/Users/ctaylor/data/p2/gpseg1', '4', '1', 'p', None)
>>> parse_gpexpand_segment_line('file', 1, "localhost:[::1]:40001:/Users/ctaylor/data/p2/gpseg1:4:1:p:41001")
('localhost', '::1', '40001', '/Users/ctaylor/data/p2/gpseg1', '4', '1', 'p', '41001')
"""
p = LineParser(caller(), filename, lineno, line)
hostname = p.handle_field('[host]') # [host] indicates possible IPv6 address
address = p.handle_field('[address]') # [address] indicates possible IPv6 address
port = p.handle_field('port')
datadir = p.handle_field('datadir')
dbid = p.handle_field('dbid')
contentId = p.handle_field('contentId')
role = p.handle_field('role')
replicationPort = None
if p.rest is not None:
replicationPort = p.handle_field('replicationPort')
if p.rest is not None:
msg = "unexpected characters after replicationPort >>%s" % p.rest
raise ExceptionNoStackTraceNeeded("%s:%s:%s LINE >>%s\n%s" % (filename, lineno, caller(), line, msg))
return hostname, address, port, datadir, dbid, contentId, role, replicationPort
################
# gpaddmirrors format:
#
# filespaceOrder=[filespace1_fsname[:filespace2_fsname:...]]
# mirror[content]=content:address:port:mir_replication_port:pri_replication_port:fselocation[:fselocation:...]
#
################
def parse_filespace_order(filename, lineno, line):
"""
Parse the filespaceOrder= line appearing at the beginning of the gpaddmirrors,
gpmovemirrors and gprecoverseg configuration files.
>>> parse_filespace_order('file', 1, "filespaceOrder=fs1:fs2:fs3")
['fs1', 'fs2', 'fs3']
>>> parse_filespace_order('file', 1, "filespaceOrder=")
[]
"""
p = LineParser(caller(), filename, lineno, line)
p.ensure_starts_with('filespaceOrder=')
fslist = []
while p.rest:
fslist.append( p.read_delimited_field(':', 'next filespace') )
return fslist
def parse_gpaddmirrors_line(filename, lineno, line, fslist):
"""
Parse a line in the gpaddmirrors configuration file other than the first.
>>> line = "mirror0=0:[::1]:40001:50001:60001:/Users/ctaylor/data/p2/gpseg1"
>>> fixed, flex = parse_gpaddmirrors_line('file', 1, line, [])
>>> fixed["address"], fixed["contentId"], fixed["dataDirectory"]
('::1', '0', '/Users/ctaylor/data/p2/gpseg1')
"""
fixed = {}
flexible = {}
p = LineParser(caller(), filename, lineno, line)
p.ensure_starts_with('mirror')
p.read_delimited_field('=', 'content id', consume_to)
# [address] indicates possible IPv6 address
for field in [ 'contentId', '[address]', 'port', 'replicationPort', 'primarySegmentReplicationPort', 'dataDirectory' ]:
p.handle_field(field, fixed)
for fsname in fslist:
p.handle_field('[' + fsname + ']', flexible)
return fixed, flexible
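# --- Illustrative sketch added by the editor; not part of the original module. ---
# How the filespaceOrder header feeds the per-mirror lines: the filespace
# names parsed from the first line become the trailing flexible fields of
# every mirror line. All values below are made up.
def _demo_parse_gpaddmirrors():
    fslist = parse_filespace_order('demo', 1, 'filespaceOrder=fs1')
    line = 'mirror0=0:[::1]:40001:50001:60001:/data/m0/gpseg0:/fs1/m0'
    fixed, flexible = parse_gpaddmirrors_line('demo', 2, line, fslist)
    # fixed['address'] == '::1', flexible == {'fs1': '/fs1/m0'}
    return fixed, flexible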
################
# gpmovemirrors format:
#
# This is basically the same as the gprecoverseg format (since gpmovemirrors ultimately just
# passes the input file after validating it) but the field names are slightly different.
#
# filespaceOrder=[filespace1_fsname[:filespace2_fsname:...]]
# old_address:port:datadir new_address:port:replication_port:datadir[:fselocation:...]
# ^
# note space
################
def parse_gpmovemirrors_line(filename, lineno, line, fslist):
"""
Parse a line in the gpmovemirrors configuration file other than the first.
>>> line = "[::1]:40001:/Users/ctaylor/data/m2/gpseg1 [::2]:40101:50101:/Users/ctaylor/data/m2/gpseg1:/fs1"
>>> fixed, flex = parse_gpmovemirrors_line('file', 1, line, ['fs1'])
>>> fixed["oldAddress"], fixed["newAddress"]
('::1', '::2')
>>> flex
{'fs1': '/fs1'}
"""
groups = len(line.split())
if groups != 2:
msg = "need two groups of fields delimited by a space for old and new mirror, not %d" % groups
raise ExceptionNoStackTraceNeeded("%s:%s:%s LINE >>%s\n%s" % (filename, lineno, caller(), line, msg))
fixed = {}
flexible = {}
p = LineParser(caller(), filename, lineno, line)
p.handle_field('[oldAddress]', fixed) # [oldAddress] indicates possible IPv6 address
p.handle_field('oldPort', fixed)
p.handle_field('oldDataDirectory', fixed, delimiter=' ', stripchars=' \t') # MPP-15675 note stripchars here and next line
p.handle_field('[newAddress]', fixed, stripchars=' \t') # [newAddress] indicates possible IPv6 address
p.handle_field('newPort', fixed)
p.handle_field('newReplicationPort', fixed)
p.handle_field('newDataDirectory', fixed)
for fsname in fslist:
p.handle_field(fsname, flexible)
if p.rest is not None:
msg = "unexpected characters after mirror fields >>%s" % p.rest
raise ExceptionNoStackTraceNeeded("%s:%s:%s LINE >>%s\n%s" % (filename, lineno, caller(), line, msg))
return fixed, flexible
################
# gprecoverseg format:
#
# filespaceOrder=[filespace1_fsname[:filespace2_fsname:...]]
# failed_host_address:port:datadir [recovery_host_address:port:replication_port:datadir[:fselocation:...]]
# ^
# note space
#
# filespace locations are only present at the end of the other fields when there
# are two groups of fields separated by a space. If there is only one group of
# fields then we assume the entire line is only three fields as below with no
# filespace locations:
#
# failed_host_address:port:datadir
################
def parse_gprecoverseg_line(filename, lineno, line, fslist):
"""
Parse a line in the gprecoverseg configuration file other than the first.
>>> line = "[::1]:40001:/Users/ctaylor/data/m2/gpseg1"
>>> fixed, flex = parse_gprecoverseg_line('file', 1, line, [])
>>> fixed["failedAddress"], fixed["failedPort"], fixed["failedDataDirectory"]
('::1', '40001', '/Users/ctaylor/data/m2/gpseg1')
>>> line = "[::1]:40001:/Users/ctaylor/data/m2/gpseg1 [::2]:40101:50101:/Users/ctaylor/data/m2/gpseg1:/fs1"
>>> fixed, flex = parse_gprecoverseg_line('file', 1, line, ['fs1'])
>>> fixed["newAddress"], fixed["newPort"], fixed["newReplicationPort"], fixed["newDataDirectory"]
('::2', '40101', '50101', '/Users/ctaylor/data/m2/gpseg1')
>>> flex
{'fs1': '/fs1'}
"""
groups = len(line.split())
if groups not in [1, 2]:
msg = "only one or two groups of fields delimited by a space, not %d" % groups
raise ExceptionNoStackTraceNeeded("%s:%s:%s LINE >>%s\n%s" % (filename, lineno, caller(), line, msg))
fixed = {}
flexible = {}
p = LineParser(caller(), filename, lineno, line)
p.handle_field('[failedAddress]', fixed) # [failedAddress] indicates possible IPv6 address
p.handle_field('failedPort', fixed)
if groups == 1:
p.handle_field('failedDataDirectory', fixed)
else:
p.handle_field('failedDataDirectory', fixed, delimiter=' ', stripchars=' \t') # MPP-15675 note stripchars here and next line
p.handle_field('[newAddress]', fixed, stripchars=' \t') # [newAddress] indicates possible IPv6 address
p.handle_field('newPort', fixed)
p.handle_field('newReplicationPort', fixed)
p.handle_field('newDataDirectory', fixed)
for fsname in fslist:
p.handle_field('[' + fsname + ']', flexible)
return fixed, flexible
if __name__ == '__main__':
import doctest
doctest.testmod()
| PGer/incubator-hawq | tools/bin/gppylib/parseutils.py | Python | apache-2.0 | 22,013 |
from collections import OrderedDict
import copy
import operator
from functools import partial, reduce, update_wrapper
import warnings
from django import forms
from django.conf import settings
from django.contrib import messages
from django.contrib.admin import widgets, helpers
from django.contrib.admin import validation
from django.contrib.admin.checks import (BaseModelAdminChecks, ModelAdminChecks,
InlineModelAdminChecks)
from django.contrib.admin.exceptions import DisallowedModelAdminToField
from django.contrib.admin.utils import (quote, unquote, flatten_fieldsets,
get_deleted_objects, model_format_dict, NestedObjects,
lookup_needs_distinct)
from django.contrib.admin.templatetags.admin_static import static
from django.contrib.admin.templatetags.admin_urls import add_preserved_filters
from django.contrib.auth import get_permission_codename
from django.core import checks
from django.core.exceptions import (PermissionDenied, ValidationError,
FieldError, ImproperlyConfigured)
from django.core.paginator import Paginator
from django.core.urlresolvers import reverse
from django.db import models, transaction, router
from django.db.models.constants import LOOKUP_SEP
from django.db.models.related import RelatedObject
from django.db.models.fields import BLANK_CHOICE_DASH, FieldDoesNotExist
from django.db.models.sql.constants import QUERY_TERMS
from django.forms.formsets import all_valid, DELETION_FIELD_NAME
from django.forms.models import (modelform_factory, modelformset_factory,
inlineformset_factory, BaseInlineFormSet, modelform_defines_fields)
from django.http import Http404, HttpResponseRedirect
from django.http.response import HttpResponseBase
from django.shortcuts import get_object_or_404
from django.template.response import SimpleTemplateResponse, TemplateResponse
from django.utils import six
from django.utils.decorators import method_decorator
from django.utils.deprecation import (RenameMethodsBase,
RemovedInDjango18Warning, RemovedInDjango19Warning)
from django.utils.encoding import force_text, python_2_unicode_compatible
from django.utils.html import escape, escapejs
from django.utils.http import urlencode
from django.utils.text import capfirst, get_text_list
from django.utils.translation import ugettext as _
from django.utils.translation import ungettext
from django.utils.safestring import mark_safe
from django.views.decorators.csrf import csrf_protect
IS_POPUP_VAR = '_popup'
TO_FIELD_VAR = '_to_field'
HORIZONTAL, VERTICAL = 1, 2
def get_content_type_for_model(obj):
# Since this module gets imported in the application's root package,
# it cannot import models from other applications at the module level.
from django.contrib.contenttypes.models import ContentType
return ContentType.objects.get_for_model(obj, for_concrete_model=False)
def get_ul_class(radio_style):
return 'radiolist' if radio_style == VERTICAL else 'radiolist inline'
class IncorrectLookupParameters(Exception):
pass
# Defaults for formfield_overrides. ModelAdmin subclasses can change this
# by adding to ModelAdmin.formfield_overrides.
FORMFIELD_FOR_DBFIELD_DEFAULTS = {
models.DateTimeField: {
'form_class': forms.SplitDateTimeField,
'widget': widgets.AdminSplitDateTime
},
models.DateField: {'widget': widgets.AdminDateWidget},
models.TimeField: {'widget': widgets.AdminTimeWidget},
models.TextField: {'widget': widgets.AdminTextareaWidget},
models.URLField: {'widget': widgets.AdminURLFieldWidget},
models.IntegerField: {'widget': widgets.AdminIntegerFieldWidget},
models.BigIntegerField: {'widget': widgets.AdminBigIntegerFieldWidget},
models.CharField: {'widget': widgets.AdminTextInputWidget},
models.ImageField: {'widget': widgets.AdminFileWidget},
models.FileField: {'widget': widgets.AdminFileWidget},
models.EmailField: {'widget': widgets.AdminEmailInputWidget},
}
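# --- Illustrative note added by the editor; not part of the original module. ---
# A ModelAdmin subclass merges its own formfield_overrides on top of these
# defaults (see BaseModelAdmin.__init__ below). Sketch, assuming a
# hypothetical RichTextWidget:
#
#     class ArticleAdmin(admin.ModelAdmin):
#         formfield_overrides = {
#             models.TextField: {'widget': RichTextWidget},
#         }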
csrf_protect_m = method_decorator(csrf_protect)
class RenameBaseModelAdminMethods(forms.MediaDefiningClass, RenameMethodsBase):
renamed_methods = (
('queryset', 'get_queryset', RemovedInDjango18Warning),
)
class BaseModelAdmin(six.with_metaclass(RenameBaseModelAdminMethods)):
"""Functionality common to both ModelAdmin and InlineAdmin."""
raw_id_fields = ()
fields = None
exclude = None
fieldsets = None
form = forms.ModelForm
filter_vertical = ()
filter_horizontal = ()
radio_fields = {}
prepopulated_fields = {}
formfield_overrides = {}
readonly_fields = ()
ordering = None
view_on_site = True
# Validation of ModelAdmin definitions
# Old, deprecated style:
validator_class = None
default_validator_class = validation.BaseValidator
# New style:
checks_class = BaseModelAdminChecks
@classmethod
def validate(cls, model):
warnings.warn(
'ModelAdmin.validate() is deprecated. Use "check()" instead.',
RemovedInDjango19Warning)
if cls.validator_class:
validator = cls.validator_class()
else:
validator = cls.default_validator_class()
validator.validate(cls, model)
@classmethod
def check(cls, model, **kwargs):
if cls.validator_class:
warnings.warn(
'ModelAdmin.validator_class is deprecated. '
'ModelAdmin validators must be converted to use '
'the system check framework.',
RemovedInDjango19Warning)
validator = cls.validator_class()
try:
validator.validate(cls, model)
except ImproperlyConfigured as e:
return [checks.Error(e.args[0], hint=None, obj=cls)]
else:
return []
else:
return cls.checks_class().check(cls, model, **kwargs)
def __init__(self):
overrides = FORMFIELD_FOR_DBFIELD_DEFAULTS.copy()
overrides.update(self.formfield_overrides)
self.formfield_overrides = overrides
def formfield_for_dbfield(self, db_field, **kwargs):
"""
Hook for specifying the form Field instance for a given database Field
instance.
If kwargs are given, they're passed to the form Field's constructor.
"""
request = kwargs.pop("request", None)
# If the field specifies choices, we don't need to look for special
# admin widgets - we just need to use a select widget of some kind.
if db_field.choices:
return self.formfield_for_choice_field(db_field, request, **kwargs)
# ForeignKey or ManyToManyFields
if isinstance(db_field, (models.ForeignKey, models.ManyToManyField)):
# Combine the field kwargs with any options for formfield_overrides.
# Make sure the passed in **kwargs override anything in
# formfield_overrides because **kwargs is more specific, and should
# always win.
if db_field.__class__ in self.formfield_overrides:
kwargs = dict(self.formfield_overrides[db_field.__class__], **kwargs)
# Get the correct formfield.
if isinstance(db_field, models.ForeignKey):
formfield = self.formfield_for_foreignkey(db_field, request, **kwargs)
elif isinstance(db_field, models.ManyToManyField):
formfield = self.formfield_for_manytomany(db_field, request, **kwargs)
# For non-raw_id fields, wrap the widget with a wrapper that adds
# extra HTML -- the "add other" interface -- to the end of the
# rendered output. formfield can be None if it came from a
# OneToOneField with parent_link=True or a M2M intermediary.
if formfield and db_field.name not in self.raw_id_fields:
related_modeladmin = self.admin_site._registry.get(db_field.rel.to)
can_add_related = bool(related_modeladmin and
related_modeladmin.has_add_permission(request))
formfield.widget = widgets.RelatedFieldWidgetWrapper(
formfield.widget, db_field.rel, self.admin_site,
can_add_related=can_add_related)
return formfield
# If we've got overrides for the formfield defined, use 'em. **kwargs
# passed to formfield_for_dbfield override the defaults.
for klass in db_field.__class__.mro():
if klass in self.formfield_overrides:
kwargs = dict(copy.deepcopy(self.formfield_overrides[klass]), **kwargs)
return db_field.formfield(**kwargs)
# For any other type of field, just call its formfield() method.
return db_field.formfield(**kwargs)
def formfield_for_choice_field(self, db_field, request=None, **kwargs):
"""
Get a form Field for a database Field that has declared choices.
"""
# If the field is named as a radio_field, use a RadioSelect
if db_field.name in self.radio_fields:
# Avoid stomping on custom widget/choices arguments.
if 'widget' not in kwargs:
kwargs['widget'] = widgets.AdminRadioSelect(attrs={
'class': get_ul_class(self.radio_fields[db_field.name]),
})
if 'choices' not in kwargs:
kwargs['choices'] = db_field.get_choices(
include_blank=db_field.blank,
blank_choice=[('', _('None'))]
)
return db_field.formfield(**kwargs)
def get_field_queryset(self, db, db_field, request):
"""
If the ModelAdmin specifies ordering, the queryset should respect that
ordering. Otherwise don't specify the queryset, let the field decide
(returns None in that case).
"""
related_admin = self.admin_site._registry.get(db_field.rel.to, None)
if related_admin is not None:
ordering = related_admin.get_ordering(request)
if ordering is not None and ordering != ():
return db_field.rel.to._default_manager.using(db).order_by(*ordering)
return None
def formfield_for_foreignkey(self, db_field, request=None, **kwargs):
"""
Get a form Field for a ForeignKey.
"""
db = kwargs.get('using')
if db_field.name in self.raw_id_fields:
kwargs['widget'] = widgets.ForeignKeyRawIdWidget(db_field.rel,
self.admin_site, using=db)
elif db_field.name in self.radio_fields:
kwargs['widget'] = widgets.AdminRadioSelect(attrs={
'class': get_ul_class(self.radio_fields[db_field.name]),
})
kwargs['empty_label'] = _('None') if db_field.blank else None
if 'queryset' not in kwargs:
queryset = self.get_field_queryset(db, db_field, request)
if queryset is not None:
kwargs['queryset'] = queryset
return db_field.formfield(**kwargs)
def formfield_for_manytomany(self, db_field, request=None, **kwargs):
"""
Get a form Field for a ManyToManyField.
"""
# If it uses an intermediary model that isn't auto created, don't show
# a field in admin.
if not db_field.rel.through._meta.auto_created:
return None
db = kwargs.get('using')
if db_field.name in self.raw_id_fields:
kwargs['widget'] = widgets.ManyToManyRawIdWidget(db_field.rel,
self.admin_site, using=db)
kwargs['help_text'] = ''
elif db_field.name in (list(self.filter_vertical) + list(self.filter_horizontal)):
kwargs['widget'] = widgets.FilteredSelectMultiple(db_field.verbose_name, (db_field.name in self.filter_vertical))
if 'queryset' not in kwargs:
queryset = self.get_field_queryset(db, db_field, request)
if queryset is not None:
kwargs['queryset'] = queryset
return db_field.formfield(**kwargs)
def get_view_on_site_url(self, obj=None):
if obj is None or not self.view_on_site:
return None
if callable(self.view_on_site):
return self.view_on_site(obj)
elif self.view_on_site and hasattr(obj, 'get_absolute_url'):
# use the ContentType lookup if view_on_site is True
return reverse('admin:view_on_site', kwargs={
'content_type_id': get_content_type_for_model(obj).pk,
'object_id': obj.pk
})
@property
def declared_fieldsets(self):
warnings.warn(
"ModelAdmin.declared_fieldsets is deprecated and "
"will be removed in Django 1.9.",
RemovedInDjango19Warning, stacklevel=2
)
if self.fieldsets:
return self.fieldsets
elif self.fields:
return [(None, {'fields': self.fields})]
return None
def get_fields(self, request, obj=None):
"""
Hook for specifying fields.
"""
return self.fields
def get_fieldsets(self, request, obj=None):
"""
Hook for specifying fieldsets.
"""
# We access the property and check if it triggers a warning.
# If it does, then it's ours and we can safely ignore it, but if
# it doesn't then it has been overridden so we must warn about the
# deprecation.
with warnings.catch_warnings(record=True) as w:
warnings.simplefilter("always")
declared_fieldsets = self.declared_fieldsets
if len(w) != 1 or not issubclass(w[0].category, RemovedInDjango19Warning):
warnings.warn(
"ModelAdmin.declared_fieldsets is deprecated and "
"will be removed in Django 1.9.",
RemovedInDjango19Warning
)
if declared_fieldsets:
return declared_fieldsets
if self.fieldsets:
return self.fieldsets
return [(None, {'fields': self.get_fields(request, obj)})]
def get_ordering(self, request):
"""
Hook for specifying field ordering.
"""
return self.ordering or () # otherwise we might try to *None, which is bad ;)
def get_readonly_fields(self, request, obj=None):
"""
Hook for specifying custom readonly fields.
"""
return self.readonly_fields
def get_prepopulated_fields(self, request, obj=None):
"""
Hook for specifying custom prepopulated fields.
"""
return self.prepopulated_fields
def get_queryset(self, request):
"""
Returns a QuerySet of all model instances that can be edited by the
admin site. This is used by changelist_view.
"""
qs = self.model._default_manager.get_queryset()
# TODO: this should be handled by some parameter to the ChangeList.
ordering = self.get_ordering(request)
if ordering:
qs = qs.order_by(*ordering)
return qs
def lookup_allowed(self, lookup, value):
from django.contrib.admin.filters import SimpleListFilter
model = self.model
# Check FKey lookups that are allowed, so that popups produced by
# ForeignKeyRawIdWidget, on the basis of ForeignKey.limit_choices_to,
# are allowed to work.
for l in model._meta.related_fkey_lookups:
# As ``limit_choices_to`` can be a callable, invoke it here.
if callable(l):
l = l()
for k, v in widgets.url_params_from_lookup_dict(l).items():
if k == lookup and v == value:
return True
parts = lookup.split(LOOKUP_SEP)
# Last term in lookup is a query term (__exact, __startswith etc)
# This term can be ignored.
if len(parts) > 1 and parts[-1] in QUERY_TERMS:
parts.pop()
# Special case -- foo__id__exact and foo__id queries are implied
# if foo has been specifically included in the lookup list; so
# drop __id if it is the last part. However, first we need to find
# the pk attribute name.
rel_name = None
for part in parts[:-1]:
try:
field, _, _, _ = model._meta.get_field_by_name(part)
except FieldDoesNotExist:
# Lookups on non-existent fields are ok, since they're ignored
# later.
return True
if hasattr(field, 'rel'):
if field.rel is None:
# This property or relation doesn't exist, but it's allowed
# since it's ignored in ChangeList.get_filters().
return True
model = field.rel.to
if hasattr(field.rel, 'get_related_field'):
rel_name = field.rel.get_related_field().name
else:
rel_name = None
elif isinstance(field, RelatedObject):
model = field.model
rel_name = model._meta.pk.name
else:
rel_name = None
if rel_name and len(parts) > 1 and parts[-1] == rel_name:
parts.pop()
if len(parts) == 1:
return True
clean_lookup = LOOKUP_SEP.join(parts)
valid_lookups = [self.date_hierarchy]
for filter_item in self.list_filter:
if isinstance(filter_item, type) and issubclass(filter_item, SimpleListFilter):
valid_lookups.append(filter_item.parameter_name)
elif isinstance(filter_item, (list, tuple)):
valid_lookups.append(filter_item[0])
else:
valid_lookups.append(filter_item)
return clean_lookup in valid_lookups
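# --- Illustrative note added by the editor; not part of the original module. ---
# e.g. lookup_allowed('category__id__exact', '5') first drops the '__exact'
# query term, then the implied '__id' pk reference, leaving the single part
# 'category', which is always permitted. A deeper traversal such as
# 'category__name' survives the trimming and must appear in date_hierarchy
# or list_filter to be accepted.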
def to_field_allowed(self, request, to_field):
"""
Returns True if the model associated with this admin should be
allowed to be referenced by the specified field.
"""
opts = self.model._meta
try:
field = opts.get_field(to_field)
except FieldDoesNotExist:
return False
# Check whether this model is the origin of a M2M relationship
# in which case to_field has to be the pk on this model.
if opts.many_to_many and field.primary_key:
return True
# Make sure at least one of the models registered for this site
# references this field through a FK or a M2M relationship.
registered_models = set()
for model, admin in self.admin_site._registry.items():
registered_models.add(model)
for inline in admin.inlines:
registered_models.add(inline.model)
for related_object in (opts.get_all_related_objects(include_hidden=True) +
opts.get_all_related_many_to_many_objects()):
related_model = related_object.model
if (any(issubclass(model, related_model) for model in registered_models) and
related_object.field.rel.get_related_field() == field):
return True
return False
def has_add_permission(self, request):
"""
Returns True if the given request has permission to add an object.
Can be overridden by the user in subclasses.
"""
opts = self.opts
codename = get_permission_codename('add', opts)
return request.user.has_perm("%s.%s" % (opts.app_label, codename))
def has_change_permission(self, request, obj=None):
"""
Returns True if the given request has permission to change the given
Django model instance; the default implementation doesn't examine the
`obj` parameter.
Can be overridden by the user in subclasses. In such case it should
return True if the given request has permission to change the `obj`
model instance. If `obj` is None, this should return True if the given
request has permission to change *any* object of the given type.
"""
opts = self.opts
codename = get_permission_codename('change', opts)
return request.user.has_perm("%s.%s" % (opts.app_label, codename))
def has_delete_permission(self, request, obj=None):
"""
Returns True if the given request has permission to delete the given
Django model instance; the default implementation doesn't examine the
`obj` parameter.
Can be overridden by the user in subclasses. In such case it should
return True if the given request has permission to delete the `obj`
model instance. If `obj` is None, this should return True if the given
request has permission to delete *any* object of the given type.
"""
opts = self.opts
codename = get_permission_codename('delete', opts)
return request.user.has_perm("%s.%s" % (opts.app_label, codename))
@python_2_unicode_compatible
class ModelAdmin(BaseModelAdmin):
"Encapsulates all admin options and functionality for a given model."
list_display = ('__str__',)
list_display_links = ()
list_filter = ()
list_select_related = False
list_per_page = 100
list_max_show_all = 200
list_editable = ()
search_fields = ()
date_hierarchy = None
save_as = False
save_on_top = False
paginator = Paginator
preserve_filters = True
inlines = []
# Custom templates (designed to be over-ridden in subclasses)
add_form_template = None
change_form_template = None
change_list_template = None
delete_confirmation_template = None
delete_selected_confirmation_template = None
object_history_template = None
# Actions
actions = []
action_form = helpers.ActionForm
actions_on_top = True
actions_on_bottom = False
actions_selection_counter = True
# validation
# Old, deprecated style:
default_validator_class = validation.ModelAdminValidator
# New style:
checks_class = ModelAdminChecks
def __init__(self, model, admin_site):
self.model = model
self.opts = model._meta
self.admin_site = admin_site
super(ModelAdmin, self).__init__()
def __str__(self):
return "%s.%s" % (self.model._meta.app_label, self.__class__.__name__)
def get_inline_instances(self, request, obj=None):
inline_instances = []
for inline_class in self.inlines:
inline = inline_class(self.model, self.admin_site)
if request:
if not (inline.has_add_permission(request) or
inline.has_change_permission(request, obj) or
inline.has_delete_permission(request, obj)):
continue
if not inline.has_add_permission(request):
inline.max_num = 0
inline_instances.append(inline)
return inline_instances
def get_urls(self):
from django.conf.urls import patterns, url
def wrap(view):
def wrapper(*args, **kwargs):
return self.admin_site.admin_view(view)(*args, **kwargs)
return update_wrapper(wrapper, view)
info = self.model._meta.app_label, self.model._meta.model_name
urlpatterns = patterns('',
url(r'^$', wrap(self.changelist_view), name='%s_%s_changelist' % info),
url(r'^add/$', wrap(self.add_view), name='%s_%s_add' % info),
url(r'^(.+)/history/$', wrap(self.history_view), name='%s_%s_history' % info),
url(r'^(.+)/delete/$', wrap(self.delete_view), name='%s_%s_delete' % info),
url(r'^(.+)/$', wrap(self.change_view), name='%s_%s_change' % info),
)
return urlpatterns
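# --- Illustrative note added by the editor; not part of the original module. ---
# For a model with app_label 'blog' and model_name 'post', the patterns
# above are reversible (under the default 'admin' namespace) as:
#     admin:blog_post_changelist, admin:blog_post_add,
#     admin:blog_post_history, admin:blog_post_delete, admin:blog_post_change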
def urls(self):
return self.get_urls()
urls = property(urls)
@property
def media(self):
extra = '' if settings.DEBUG else '.min'
js = [
'core.js',
'admin/RelatedObjectLookups.js',
'jquery%s.js' % extra,
'jquery.init.js'
]
if self.actions is not None:
js.append('actions%s.js' % extra)
if self.prepopulated_fields:
js.extend(['urlify.js', 'prepopulate%s.js' % extra])
return forms.Media(js=[static('admin/js/%s' % url) for url in js])
def get_model_perms(self, request):
"""
Returns a dict of all perms for this model. This dict has the keys
``add``, ``change``, and ``delete`` mapping to the True/False for each
of those actions.
"""
return {
'add': self.has_add_permission(request),
'change': self.has_change_permission(request),
'delete': self.has_delete_permission(request),
}
def get_fields(self, request, obj=None):
if self.fields:
return self.fields
form = self.get_form(request, obj, fields=None)
return list(form.base_fields) + list(self.get_readonly_fields(request, obj))
def get_form(self, request, obj=None, **kwargs):
"""
Returns a Form class for use in the admin add view. This is used by
add_view and change_view.
"""
if 'fields' in kwargs:
fields = kwargs.pop('fields')
else:
fields = flatten_fieldsets(self.get_fieldsets(request, obj))
if self.exclude is None:
exclude = []
else:
exclude = list(self.exclude)
exclude.extend(self.get_readonly_fields(request, obj))
if self.exclude is None and hasattr(self.form, '_meta') and self.form._meta.exclude:
# Take the custom ModelForm's Meta.exclude into account only if the
# ModelAdmin doesn't define its own.
exclude.extend(self.form._meta.exclude)
# if exclude is an empty list we pass None to be consistent with the
# default on modelform_factory
exclude = exclude or None
defaults = {
"form": self.form,
"fields": fields,
"exclude": exclude,
"formfield_callback": partial(self.formfield_for_dbfield, request=request),
}
defaults.update(kwargs)
if defaults['fields'] is None and not modelform_defines_fields(defaults['form']):
defaults['fields'] = forms.ALL_FIELDS
try:
return modelform_factory(self.model, **defaults)
except FieldError as e:
raise FieldError('%s. Check fields/fieldsets/exclude attributes of class %s.'
% (e, self.__class__.__name__))
def get_changelist(self, request, **kwargs):
"""
Returns the ChangeList class for use on the changelist page.
"""
from django.contrib.admin.views.main import ChangeList
return ChangeList
def get_object(self, request, object_id):
"""
Returns an instance matching the primary key provided. ``None`` is
returned if no match is found (or the object_id failed validation
against the primary key field).
"""
queryset = self.get_queryset(request)
model = queryset.model
try:
object_id = model._meta.pk.to_python(object_id)
return queryset.get(pk=object_id)
except (model.DoesNotExist, ValidationError, ValueError):
return None
def get_changelist_form(self, request, **kwargs):
"""
Returns a Form class for use in the Formset on the changelist page.
"""
defaults = {
"formfield_callback": partial(self.formfield_for_dbfield, request=request),
}
defaults.update(kwargs)
if (defaults.get('fields') is None
and not modelform_defines_fields(defaults.get('form'))):
defaults['fields'] = forms.ALL_FIELDS
return modelform_factory(self.model, **defaults)
def get_changelist_formset(self, request, **kwargs):
"""
Returns a FormSet class for use on the changelist page if list_editable
is used.
"""
defaults = {
"formfield_callback": partial(self.formfield_for_dbfield, request=request),
}
defaults.update(kwargs)
return modelformset_factory(self.model,
self.get_changelist_form(request), extra=0,
fields=self.list_editable, **defaults)
def _get_formsets(self, request, obj):
"""
Helper function that exists to allow the deprecation warning to be
executed while this function continues to return a generator.
"""
for inline in self.get_inline_instances(request, obj):
yield inline.get_formset(request, obj)
def get_formsets(self, request, obj=None):
warnings.warn(
"ModelAdmin.get_formsets() is deprecated and will be removed in "
"Django 1.9. Use ModelAdmin.get_formsets_with_inlines() instead.",
RemovedInDjango19Warning, stacklevel=2
)
return self._get_formsets(request, obj)
def get_formsets_with_inlines(self, request, obj=None):
"""
Yields formsets and the corresponding inlines.
"""
# We call get_formsets() [deprecated] and check if it triggers a
# warning. If it does, then it's ours and we can safely ignore it, but
# if it doesn't then it has been overridden so we must warn about the
# deprecation.
with warnings.catch_warnings(record=True) as w:
warnings.simplefilter("always")
formsets = self.get_formsets(request, obj)
if len(w) != 1 or not issubclass(w[0].category, RemovedInDjango19Warning):
warnings.warn(
"ModelAdmin.get_formsets() is deprecated and will be removed in "
"Django 1.9. Use ModelAdmin.get_formsets_with_inlines() instead.",
RemovedInDjango19Warning
)
if formsets:
zipped = zip(formsets, self.get_inline_instances(request, None))
for formset, inline in zipped:
yield formset, inline
else:
for inline in self.get_inline_instances(request, obj):
yield inline.get_formset(request, obj), inline
def get_paginator(self, request, queryset, per_page, orphans=0, allow_empty_first_page=True):
return self.paginator(queryset, per_page, orphans, allow_empty_first_page)
def log_addition(self, request, object):
"""
Log that an object has been successfully added.
The default implementation creates an admin LogEntry object.
"""
from django.contrib.admin.models import LogEntry, ADDITION
LogEntry.objects.log_action(
user_id=request.user.pk,
content_type_id=get_content_type_for_model(object).pk,
object_id=object.pk,
object_repr=force_text(object),
action_flag=ADDITION
)
def log_change(self, request, object, message):
"""
Log that an object has been successfully changed.
The default implementation creates an admin LogEntry object.
"""
from django.contrib.admin.models import LogEntry, CHANGE
LogEntry.objects.log_action(
user_id=request.user.pk,
content_type_id=get_content_type_for_model(object).pk,
object_id=object.pk,
object_repr=force_text(object),
action_flag=CHANGE,
change_message=message
)
def log_deletion(self, request, object, object_repr):
"""
Log that an object will be deleted. Note that this method must be
called before the deletion.
The default implementation creates an admin LogEntry object.
"""
from django.contrib.admin.models import LogEntry, DELETION
LogEntry.objects.log_action(
user_id=request.user.pk,
content_type_id=get_content_type_for_model(object).pk,
object_id=object.pk,
object_repr=object_repr,
action_flag=DELETION
)
def action_checkbox(self, obj):
"""
A list_display column containing a checkbox widget.
"""
return helpers.checkbox.render(helpers.ACTION_CHECKBOX_NAME, force_text(obj.pk))
action_checkbox.short_description = mark_safe('<input type="checkbox" id="action-toggle" />')
action_checkbox.allow_tags = True
def get_actions(self, request):
"""
Return a dictionary mapping the names of all actions for this
ModelAdmin to a tuple of (callable, name, description) for each action.
"""
# If self.actions is explicitly set to None that means that we don't
# want *any* actions enabled on this page.
from django.contrib.admin.views.main import _is_changelist_popup
if self.actions is None or _is_changelist_popup(request):
return OrderedDict()
actions = []
# Gather actions from the admin site first
for (name, func) in self.admin_site.actions:
description = getattr(func, 'short_description', name.replace('_', ' '))
actions.append((func, name, description))
# Then gather them from the model admin and all parent classes,
# starting with self and working back up.
for klass in self.__class__.mro()[::-1]:
class_actions = getattr(klass, 'actions', [])
# Avoid trying to iterate over None
if not class_actions:
continue
actions.extend(self.get_action(action) for action in class_actions)
# get_action might have returned None, so filter any of those out.
actions = filter(None, actions)
# Convert the actions into an OrderedDict keyed by name.
actions = OrderedDict(
(name, (func, name, desc))
for func, name, desc in actions
)
return actions
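# --- Illustrative note added by the editor; not part of the original module. ---
# e.g. with actions = ['make_published'] and the site-wide delete_selected
# action registered by default, get_actions() returns an OrderedDict
# roughly like:
#     {'delete_selected': (delete_selected, 'delete_selected',
#                          'Delete selected %(verbose_name_plural)s'),
#      'make_published': (<function>, 'make_published', 'Make published')}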
def get_action_choices(self, request, default_choices=BLANK_CHOICE_DASH):
"""
Return a list of choices for use in a form object. Each choice is a
tuple (name, description).
"""
choices = [] + default_choices
for func, name, description in six.itervalues(self.get_actions(request)):
choice = (name, description % model_format_dict(self.opts))
choices.append(choice)
return choices
def get_action(self, action):
"""
Return a given action from a parameter, which can either be a callable,
or the name of a method on the ModelAdmin. Return is a tuple of
(callable, name, description).
"""
# If the action is a callable, just use it.
if callable(action):
func = action
action = action.__name__
# Next, look for a method. Grab it off self.__class__ to get an unbound
# method instead of a bound one; this ensures that the calling
# conventions are the same for functions and methods.
elif hasattr(self.__class__, action):
func = getattr(self.__class__, action)
# Finally, look for a named method on the admin site
else:
try:
func = self.admin_site.get_action(action)
except KeyError:
return None
if hasattr(func, 'short_description'):
description = func.short_description
else:
description = capfirst(action.replace('_', ' '))
return func, action, description
def get_list_display(self, request):
"""
Return a sequence containing the fields to be displayed on the
changelist.
"""
return self.list_display
def get_list_display_links(self, request, list_display):
"""
Return a sequence containing the fields to be displayed as links
on the changelist. The list_display parameter is the list of fields
returned by get_list_display().
"""
if self.list_display_links or self.list_display_links is None or not list_display:
return self.list_display_links
else:
# Use only the first item in list_display as link
return list(list_display)[:1]
def get_list_filter(self, request):
"""
Returns a sequence containing the fields to be displayed as filters in
the right sidebar of the changelist page.
"""
return self.list_filter
def get_search_fields(self, request):
"""
Returns a sequence containing the fields to be searched whenever
somebody submits a search query.
"""
return self.search_fields
def get_search_results(self, request, queryset, search_term):
"""
Returns a tuple containing a queryset to implement the search,
and a boolean indicating if the results may contain duplicates.
"""
# Apply keyword searches.
def construct_search(field_name):
if field_name.startswith('^'):
return "%s__istartswith" % field_name[1:]
elif field_name.startswith('='):
return "%s__iexact" % field_name[1:]
elif field_name.startswith('@'):
return "%s__search" % field_name[1:]
else:
return "%s__icontains" % field_name
use_distinct = False
search_fields = self.get_search_fields(request)
if search_fields and search_term:
orm_lookups = [construct_search(str(search_field))
for search_field in search_fields]
for bit in search_term.split():
or_queries = [models.Q(**{orm_lookup: bit})
for orm_lookup in orm_lookups]
queryset = queryset.filter(reduce(operator.or_, or_queries))
if not use_distinct:
for search_spec in orm_lookups:
if lookup_needs_distinct(self.opts, search_spec):
use_distinct = True
break
return queryset, use_distinct
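    # Illustrative sketch (field names hypothetical): how construct_search()
    # maps search_fields prefixes onto ORM lookups.
    #
    #     search_fields = ('^first_name', '=email', '@bio', 'city')
    #
    # searches with first_name__istartswith, email__iexact, bio__search
    # (database full-text, where supported) and city__icontains, one OR-ed
    # group of Q objects per whitespace-separated term.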
def get_preserved_filters(self, request):
"""
Returns the preserved filters querystring.
"""
match = request.resolver_match
if self.preserve_filters and match:
opts = self.model._meta
current_url = '%s:%s' % (match.app_name, match.url_name)
changelist_url = 'admin:%s_%s_changelist' % (opts.app_label, opts.model_name)
if current_url == changelist_url:
preserved_filters = request.GET.urlencode()
else:
preserved_filters = request.GET.get('_changelist_filters')
if preserved_filters:
return urlencode({'_changelist_filters': preserved_filters})
return ''
def construct_change_message(self, request, form, formsets):
"""
Construct a change message from a changed object.
"""
change_message = []
if form.changed_data:
change_message.append(_('Changed %s.') % get_text_list(form.changed_data, _('and')))
if formsets:
for formset in formsets:
for added_object in formset.new_objects:
change_message.append(_('Added %(name)s "%(object)s".')
% {'name': force_text(added_object._meta.verbose_name),
'object': force_text(added_object)})
for changed_object, changed_fields in formset.changed_objects:
change_message.append(_('Changed %(list)s for %(name)s "%(object)s".')
% {'list': get_text_list(changed_fields, _('and')),
'name': force_text(changed_object._meta.verbose_name),
'object': force_text(changed_object)})
for deleted_object in formset.deleted_objects:
change_message.append(_('Deleted %(name)s "%(object)s".')
% {'name': force_text(deleted_object._meta.verbose_name),
'object': force_text(deleted_object)})
change_message = ' '.join(change_message)
return change_message or _('No fields changed.')
def message_user(self, request, message, level=messages.INFO, extra_tags='',
fail_silently=False):
"""
Send a message to the user. The default implementation
posts a message using the django.contrib.messages backend.
Exposes almost the same API as messages.add_message(), but accepts the
positional arguments in a different order to maintain backwards
compatibility. For convenience, it accepts the `level` argument as
a string rather than the usual level number.
"""
if not isinstance(level, int):
# attempt to get the level if passed a string
try:
level = getattr(messages.constants, level.upper())
except AttributeError:
levels = messages.constants.DEFAULT_TAGS.values()
levels_repr = ', '.join('`%s`' % l for l in levels)
raise ValueError('Bad message level string: `%s`. '
'Possible values are: %s' % (level, levels_repr))
messages.add_message(request, level, message, extra_tags=extra_tags,
fail_silently=fail_silently)
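    # Illustrative sketch: the level may be given as a string and is resolved
    # against messages.constants, so (message text hypothetical)
    #
    #     self.message_user(request, "Quota nearly exhausted.", level='WARNING')
    #
    # is equivalent to passing messages.WARNING.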
def save_form(self, request, form, change):
"""
Given a ModelForm return an unsaved instance. ``change`` is True if
the object is being changed, and False if it's being added.
"""
return form.save(commit=False)
def save_model(self, request, obj, form, change):
"""
Given a model instance save it to the database.
"""
obj.save()
def delete_model(self, request, obj):
"""
Given a model instance delete it from the database.
"""
obj.delete()
def save_formset(self, request, form, formset, change):
"""
Given an inline formset save it to the database.
"""
formset.save()
def save_related(self, request, form, formsets, change):
"""
Given the ``HttpRequest``, the parent ``ModelForm`` instance, the
list of inline formsets and a boolean value based on whether the
parent is being added or changed, save the related objects to the
database. Note that at this point save_form() and save_model() have
already been called.
"""
form.save_m2m()
for formset in formsets:
self.save_formset(request, form, formset, change=change)
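    # Illustrative sketch (admin class and attribute name hypothetical):
    # save_form(), save_model() and save_related() are the supported override
    # points, e.g. stamping the requesting user before the object is written:
    #
    #     def save_model(self, request, obj, form, change):
    #         obj.last_edited_by = request.user
    #         super(StoryAdmin, self).save_model(request, obj, form, change)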
def render_change_form(self, request, context, add=False, change=False, form_url='', obj=None):
opts = self.model._meta
app_label = opts.app_label
preserved_filters = self.get_preserved_filters(request)
form_url = add_preserved_filters({'preserved_filters': preserved_filters, 'opts': opts}, form_url)
view_on_site_url = self.get_view_on_site_url(obj)
context.update({
'add': add,
'change': change,
'has_add_permission': self.has_add_permission(request),
'has_change_permission': self.has_change_permission(request, obj),
'has_delete_permission': self.has_delete_permission(request, obj),
'has_file_field': True, # FIXME - this should check if form or formsets have a FileField,
'has_absolute_url': view_on_site_url is not None,
'absolute_url': view_on_site_url,
'form_url': form_url,
'opts': opts,
'content_type_id': get_content_type_for_model(self.model).pk,
'save_as': self.save_as,
'save_on_top': self.save_on_top,
'to_field_var': TO_FIELD_VAR,
'is_popup_var': IS_POPUP_VAR,
'app_label': app_label,
})
if add and self.add_form_template is not None:
form_template = self.add_form_template
else:
form_template = self.change_form_template
return TemplateResponse(request, form_template or [
"admin/%s/%s/change_form.html" % (app_label, opts.model_name),
"admin/%s/change_form.html" % app_label,
"admin/change_form.html"
], context, current_app=self.admin_site.name)
def response_add(self, request, obj, post_url_continue=None):
"""
Determines the HttpResponse for the add_view stage.
"""
opts = obj._meta
pk_value = obj._get_pk_val()
preserved_filters = self.get_preserved_filters(request)
msg_dict = {'name': force_text(opts.verbose_name), 'obj': force_text(obj)}
# Here, we distinguish between different save types by checking for
# the presence of keys in request.POST.
if IS_POPUP_VAR in request.POST:
to_field = request.POST.get(TO_FIELD_VAR)
if to_field:
attr = str(to_field)
else:
attr = obj._meta.pk.attname
value = obj.serializable_value(attr)
return SimpleTemplateResponse('admin/popup_response.html', {
'pk_value': escape(pk_value), # for possible backwards-compatibility
'value': escape(value),
'obj': escapejs(obj)
})
elif "_continue" in request.POST:
msg = _('The %(name)s "%(obj)s" was added successfully. You may edit it again below.') % msg_dict
self.message_user(request, msg, messages.SUCCESS)
if post_url_continue is None:
post_url_continue = reverse('admin:%s_%s_change' %
(opts.app_label, opts.model_name),
args=(quote(pk_value),),
current_app=self.admin_site.name)
post_url_continue = add_preserved_filters({'preserved_filters': preserved_filters, 'opts': opts}, post_url_continue)
return HttpResponseRedirect(post_url_continue)
elif "_addanother" in request.POST:
msg = _('The %(name)s "%(obj)s" was added successfully. You may add another %(name)s below.') % msg_dict
self.message_user(request, msg, messages.SUCCESS)
redirect_url = request.path
redirect_url = add_preserved_filters({'preserved_filters': preserved_filters, 'opts': opts}, redirect_url)
return HttpResponseRedirect(redirect_url)
else:
msg = _('The %(name)s "%(obj)s" was added successfully.') % msg_dict
self.message_user(request, msg, messages.SUCCESS)
return self.response_post_save_add(request, obj)
def response_change(self, request, obj):
"""
Determines the HttpResponse for the change_view stage.
"""
opts = self.model._meta
pk_value = obj._get_pk_val()
preserved_filters = self.get_preserved_filters(request)
msg_dict = {'name': force_text(opts.verbose_name), 'obj': force_text(obj)}
if "_continue" in request.POST:
msg = _('The %(name)s "%(obj)s" was changed successfully. You may edit it again below.') % msg_dict
self.message_user(request, msg, messages.SUCCESS)
redirect_url = request.path
redirect_url = add_preserved_filters({'preserved_filters': preserved_filters, 'opts': opts}, redirect_url)
return HttpResponseRedirect(redirect_url)
elif "_saveasnew" in request.POST:
msg = _('The %(name)s "%(obj)s" was added successfully. You may edit it again below.') % msg_dict
self.message_user(request, msg, messages.SUCCESS)
redirect_url = reverse('admin:%s_%s_change' %
(opts.app_label, opts.model_name),
args=(pk_value,),
current_app=self.admin_site.name)
redirect_url = add_preserved_filters({'preserved_filters': preserved_filters, 'opts': opts}, redirect_url)
return HttpResponseRedirect(redirect_url)
elif "_addanother" in request.POST:
msg = _('The %(name)s "%(obj)s" was changed successfully. You may add another %(name)s below.') % msg_dict
self.message_user(request, msg, messages.SUCCESS)
redirect_url = reverse('admin:%s_%s_add' %
(opts.app_label, opts.model_name),
current_app=self.admin_site.name)
redirect_url = add_preserved_filters({'preserved_filters': preserved_filters, 'opts': opts}, redirect_url)
return HttpResponseRedirect(redirect_url)
else:
msg = _('The %(name)s "%(obj)s" was changed successfully.') % msg_dict
self.message_user(request, msg, messages.SUCCESS)
return self.response_post_save_change(request, obj)
def response_post_save_add(self, request, obj):
"""
Figure out where to redirect after the 'Save' button has been pressed
when adding a new object.
"""
opts = self.model._meta
if self.has_change_permission(request, None):
post_url = reverse('admin:%s_%s_changelist' %
(opts.app_label, opts.model_name),
current_app=self.admin_site.name)
preserved_filters = self.get_preserved_filters(request)
post_url = add_preserved_filters({'preserved_filters': preserved_filters, 'opts': opts}, post_url)
else:
post_url = reverse('admin:index',
current_app=self.admin_site.name)
return HttpResponseRedirect(post_url)
def response_post_save_change(self, request, obj):
"""
Figure out where to redirect after the 'Save' button has been pressed
when editing an existing object.
"""
opts = self.model._meta
if self.has_change_permission(request, None):
post_url = reverse('admin:%s_%s_changelist' %
(opts.app_label, opts.model_name),
current_app=self.admin_site.name)
preserved_filters = self.get_preserved_filters(request)
post_url = add_preserved_filters({'preserved_filters': preserved_filters, 'opts': opts}, post_url)
else:
post_url = reverse('admin:index',
current_app=self.admin_site.name)
return HttpResponseRedirect(post_url)
def response_action(self, request, queryset):
"""
Handle an admin action. This is called if a request is POSTed to the
changelist; it returns an HttpResponse if the action was handled, and
None otherwise.
"""
# There can be multiple action forms on the page (at the top
# and bottom of the change list, for example). Get the action
# whose button was pushed.
try:
action_index = int(request.POST.get('index', 0))
except ValueError:
action_index = 0
# Construct the action form.
data = request.POST.copy()
data.pop(helpers.ACTION_CHECKBOX_NAME, None)
data.pop("index", None)
# Use the action whose button was pushed
try:
data.update({'action': data.getlist('action')[action_index]})
except IndexError:
            # If we didn't get an action from the chosen form, the POST data
            # is invalid; leaving 'action' unset will fail the validation
            # check below, so there is no need to do anything here.
pass
action_form = self.action_form(data, auto_id=None)
action_form.fields['action'].choices = self.get_action_choices(request)
# If the form's valid we can handle the action.
if action_form.is_valid():
action = action_form.cleaned_data['action']
select_across = action_form.cleaned_data['select_across']
func = self.get_actions(request)[action][0]
            # Get the list of selected PKs. If nothing's selected, we can't
            # perform an action on it, so bail -- unless the user has asked
            # to run the action across all objects (select_across).
selected = request.POST.getlist(helpers.ACTION_CHECKBOX_NAME)
if not selected and not select_across:
# Reminder that something needs to be selected or nothing will happen
msg = _("Items must be selected in order to perform "
"actions on them. No items have been changed.")
self.message_user(request, msg, messages.WARNING)
return None
if not select_across:
# Perform the action only on the selected objects
queryset = queryset.filter(pk__in=selected)
response = func(self, request, queryset)
# Actions may return an HttpResponse-like object, which will be
# used as the response from the POST. If not, we'll be a good
# little HTTP citizen and redirect back to the changelist page.
if isinstance(response, HttpResponseBase):
return response
else:
return HttpResponseRedirect(request.get_full_path())
else:
msg = _("No action selected.")
self.message_user(request, msg, messages.WARNING)
return None
def response_delete(self, request, obj_display):
"""
Determines the HttpResponse for the delete_view stage.
"""
opts = self.model._meta
self.message_user(request,
_('The %(name)s "%(obj)s" was deleted successfully.') % {
'name': force_text(opts.verbose_name),
'obj': force_text(obj_display)
}, messages.SUCCESS)
if self.has_change_permission(request, None):
post_url = reverse('admin:%s_%s_changelist' %
(opts.app_label, opts.model_name),
current_app=self.admin_site.name)
preserved_filters = self.get_preserved_filters(request)
post_url = add_preserved_filters(
{'preserved_filters': preserved_filters, 'opts': opts}, post_url
)
else:
post_url = reverse('admin:index',
current_app=self.admin_site.name)
return HttpResponseRedirect(post_url)
def render_delete_form(self, request, context):
opts = self.model._meta
app_label = opts.app_label
return TemplateResponse(request,
self.delete_confirmation_template or [
"admin/{}/{}/delete_confirmation.html".format(app_label, opts.model_name),
"admin/{}/delete_confirmation.html".format(app_label),
"admin/delete_confirmation.html"
], context, current_app=self.admin_site.name)
def get_inline_formsets(self, request, formsets, inline_instances,
obj=None):
inline_admin_formsets = []
for inline, formset in zip(inline_instances, formsets):
fieldsets = list(inline.get_fieldsets(request, obj))
readonly = list(inline.get_readonly_fields(request, obj))
prepopulated = dict(inline.get_prepopulated_fields(request, obj))
inline_admin_formset = helpers.InlineAdminFormSet(inline, formset,
fieldsets, prepopulated, readonly, model_admin=self)
inline_admin_formsets.append(inline_admin_formset)
return inline_admin_formsets
def get_changeform_initial_data(self, request):
"""
Get the initial form data.
Unless overridden, this populates from the GET params.
"""
initial = dict(request.GET.items())
for k in initial:
try:
f = self.model._meta.get_field(k)
except models.FieldDoesNotExist:
continue
# We have to special-case M2Ms as a list of comma-separated PKs.
if isinstance(f, models.ManyToManyField):
initial[k] = initial[k].split(",")
return initial
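    # Illustrative sketch (fields hypothetical): for a model with a 'title'
    # field and a 'tags' ManyToManyField, a GET request to
    # .../add/?title=Hello&tags=1,2 yields initial data
    # {'title': 'Hello', 'tags': ['1', '2']} via the comma split above.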
@csrf_protect_m
@transaction.atomic
def changeform_view(self, request, object_id=None, form_url='', extra_context=None):
to_field = request.POST.get(TO_FIELD_VAR, request.GET.get(TO_FIELD_VAR))
if to_field and not self.to_field_allowed(request, to_field):
raise DisallowedModelAdminToField("The field %s cannot be referenced." % to_field)
model = self.model
opts = model._meta
add = object_id is None
if add:
if not self.has_add_permission(request):
raise PermissionDenied
obj = None
else:
obj = self.get_object(request, unquote(object_id))
if not self.has_change_permission(request, obj):
raise PermissionDenied
if obj is None:
raise Http404(_('%(name)s object with primary key %(key)r does not exist.') % {
'name': force_text(opts.verbose_name), 'key': escape(object_id)})
if request.method == 'POST' and "_saveasnew" in request.POST:
return self.add_view(request, form_url=reverse('admin:%s_%s_add' % (
opts.app_label, opts.model_name),
current_app=self.admin_site.name))
ModelForm = self.get_form(request, obj)
if request.method == 'POST':
form = ModelForm(request.POST, request.FILES, instance=obj)
if form.is_valid():
form_validated = True
new_object = self.save_form(request, form, change=not add)
else:
form_validated = False
new_object = form.instance
formsets, inline_instances = self._create_formsets(request, new_object)
if all_valid(formsets) and form_validated:
self.save_model(request, new_object, form, not add)
self.save_related(request, form, formsets, not add)
if add:
self.log_addition(request, new_object)
return self.response_add(request, new_object)
else:
change_message = self.construct_change_message(request, form, formsets)
self.log_change(request, new_object, change_message)
return self.response_change(request, new_object)
else:
if add:
initial = self.get_changeform_initial_data(request)
form = ModelForm(initial=initial)
formsets, inline_instances = self._create_formsets(request, self.model())
else:
form = ModelForm(instance=obj)
formsets, inline_instances = self._create_formsets(request, obj)
adminForm = helpers.AdminForm(
form,
list(self.get_fieldsets(request, obj)),
self.get_prepopulated_fields(request, obj),
self.get_readonly_fields(request, obj),
model_admin=self)
media = self.media + adminForm.media
inline_formsets = self.get_inline_formsets(request, formsets, inline_instances, obj)
for inline_formset in inline_formsets:
media = media + inline_formset.media
context = dict(self.admin_site.each_context(),
title=(_('Add %s') if add else _('Change %s')) % force_text(opts.verbose_name),
adminform=adminForm,
object_id=object_id,
original=obj,
is_popup=(IS_POPUP_VAR in request.POST or
IS_POPUP_VAR in request.GET),
to_field=to_field,
media=media,
inline_admin_formsets=inline_formsets,
errors=helpers.AdminErrorList(form, formsets),
preserved_filters=self.get_preserved_filters(request),
)
context.update(extra_context or {})
return self.render_change_form(request, context, add=add, change=not add, obj=obj, form_url=form_url)
def add_view(self, request, form_url='', extra_context=None):
return self.changeform_view(request, None, form_url, extra_context)
def change_view(self, request, object_id, form_url='', extra_context=None):
return self.changeform_view(request, object_id, form_url, extra_context)
@csrf_protect_m
def changelist_view(self, request, extra_context=None):
"""
The 'change list' admin view for this model.
"""
from django.contrib.admin.views.main import ERROR_FLAG
opts = self.model._meta
app_label = opts.app_label
if not self.has_change_permission(request, None):
raise PermissionDenied
list_display = self.get_list_display(request)
list_display_links = self.get_list_display_links(request, list_display)
list_filter = self.get_list_filter(request)
search_fields = self.get_search_fields(request)
# Check actions to see if any are available on this changelist
actions = self.get_actions(request)
if actions:
# Add the action checkboxes if there are any actions available.
list_display = ['action_checkbox'] + list(list_display)
ChangeList = self.get_changelist(request)
try:
cl = ChangeList(request, self.model, list_display,
list_display_links, list_filter, self.date_hierarchy,
search_fields, self.list_select_related, self.list_per_page,
self.list_max_show_all, self.list_editable, self)
except IncorrectLookupParameters:
# Wacky lookup parameters were given, so redirect to the main
# changelist page, without parameters, and pass an 'invalid=1'
# parameter via the query string. If wacky parameters were given
# and the 'invalid=1' parameter was already in the query string,
# something is screwed up with the database, so display an error
# page.
if ERROR_FLAG in request.GET.keys():
return SimpleTemplateResponse('admin/invalid_setup.html', {
'title': _('Database error'),
})
return HttpResponseRedirect(request.path + '?' + ERROR_FLAG + '=1')
# If the request was POSTed, this might be a bulk action or a bulk
# edit. Try to look up an action or confirmation first, but if this
# isn't an action the POST will fall through to the bulk edit check,
# below.
action_failed = False
selected = request.POST.getlist(helpers.ACTION_CHECKBOX_NAME)
# Actions with no confirmation
if (actions and request.method == 'POST' and
'index' in request.POST and '_save' not in request.POST):
if selected:
response = self.response_action(request, queryset=cl.get_queryset(request))
if response:
return response
else:
action_failed = True
else:
msg = _("Items must be selected in order to perform "
"actions on them. No items have been changed.")
self.message_user(request, msg, messages.WARNING)
action_failed = True
# Actions with confirmation
if (actions and request.method == 'POST' and
helpers.ACTION_CHECKBOX_NAME in request.POST and
'index' not in request.POST and '_save' not in request.POST):
if selected:
response = self.response_action(request, queryset=cl.get_queryset(request))
if response:
return response
else:
action_failed = True
# If we're allowing changelist editing, we need to construct a formset
# for the changelist given all the fields to be edited. Then we'll
# use the formset to validate/process POSTed data.
formset = cl.formset = None
# Handle POSTed bulk-edit data.
if (request.method == "POST" and cl.list_editable and
'_save' in request.POST and not action_failed):
FormSet = self.get_changelist_formset(request)
formset = cl.formset = FormSet(request.POST, request.FILES, queryset=cl.result_list)
if formset.is_valid():
changecount = 0
for form in formset.forms:
if form.has_changed():
obj = self.save_form(request, form, change=True)
self.save_model(request, obj, form, change=True)
self.save_related(request, form, formsets=[], change=True)
change_msg = self.construct_change_message(request, form, None)
self.log_change(request, obj, change_msg)
changecount += 1
if changecount:
if changecount == 1:
name = force_text(opts.verbose_name)
else:
name = force_text(opts.verbose_name_plural)
msg = ungettext("%(count)s %(name)s was changed successfully.",
"%(count)s %(name)s were changed successfully.",
changecount) % {'count': changecount,
'name': name,
'obj': force_text(obj)}
self.message_user(request, msg, messages.SUCCESS)
return HttpResponseRedirect(request.get_full_path())
# Handle GET -- construct a formset for display.
elif cl.list_editable:
FormSet = self.get_changelist_formset(request)
formset = cl.formset = FormSet(queryset=cl.result_list)
# Build the list of media to be used by the formset.
if formset:
media = self.media + formset.media
else:
media = self.media
# Build the action form and populate it with available actions.
if actions:
action_form = self.action_form(auto_id=None)
action_form.fields['action'].choices = self.get_action_choices(request)
else:
action_form = None
selection_note_all = ungettext('%(total_count)s selected',
'All %(total_count)s selected', cl.result_count)
context = dict(
self.admin_site.each_context(),
module_name=force_text(opts.verbose_name_plural),
selection_note=_('0 of %(cnt)s selected') % {'cnt': len(cl.result_list)},
selection_note_all=selection_note_all % {'total_count': cl.result_count},
title=cl.title,
is_popup=cl.is_popup,
to_field=cl.to_field,
cl=cl,
media=media,
has_add_permission=self.has_add_permission(request),
opts=cl.opts,
action_form=action_form,
actions_on_top=self.actions_on_top,
actions_on_bottom=self.actions_on_bottom,
actions_selection_counter=self.actions_selection_counter,
preserved_filters=self.get_preserved_filters(request),
)
context.update(extra_context or {})
return TemplateResponse(request, self.change_list_template or [
'admin/%s/%s/change_list.html' % (app_label, opts.model_name),
'admin/%s/change_list.html' % app_label,
'admin/change_list.html'
], context, current_app=self.admin_site.name)
@csrf_protect_m
@transaction.atomic
def delete_view(self, request, object_id, extra_context=None):
"The 'delete' admin view for this model."
opts = self.model._meta
app_label = opts.app_label
obj = self.get_object(request, unquote(object_id))
if not self.has_delete_permission(request, obj):
raise PermissionDenied
if obj is None:
raise Http404(
_('%(name)s object with primary key %(key)r does not exist.') %
{'name': force_text(opts.verbose_name), 'key': escape(object_id)}
)
using = router.db_for_write(self.model)
# Populate deleted_objects, a data structure of all related objects that
# will also be deleted.
(deleted_objects, perms_needed, protected) = get_deleted_objects(
[obj], opts, request.user, self.admin_site, using)
if request.POST: # The user has already confirmed the deletion.
if perms_needed:
raise PermissionDenied
obj_display = force_text(obj)
self.log_deletion(request, obj, obj_display)
self.delete_model(request, obj)
return self.response_delete(request, obj_display)
object_name = force_text(opts.verbose_name)
if perms_needed or protected:
title = _("Cannot delete %(name)s") % {"name": object_name}
else:
title = _("Are you sure?")
context = dict(
self.admin_site.each_context(),
title=title,
object_name=object_name,
object=obj,
deleted_objects=deleted_objects,
perms_lacking=perms_needed,
protected=protected,
opts=opts,
app_label=app_label,
preserved_filters=self.get_preserved_filters(request),
)
context.update(extra_context or {})
return self.render_delete_form(request, context)
def history_view(self, request, object_id, extra_context=None):
"The 'history' admin view for this model."
from django.contrib.admin.models import LogEntry
# First check if the user can see this history.
model = self.model
obj = get_object_or_404(self.get_queryset(request), pk=unquote(object_id))
if not self.has_change_permission(request, obj):
raise PermissionDenied
# Then get the history for this object.
opts = model._meta
app_label = opts.app_label
action_list = LogEntry.objects.filter(
object_id=unquote(object_id),
content_type=get_content_type_for_model(model)
).select_related().order_by('action_time')
context = dict(self.admin_site.each_context(),
title=_('Change history: %s') % force_text(obj),
action_list=action_list,
module_name=capfirst(force_text(opts.verbose_name_plural)),
object=obj,
opts=opts,
preserved_filters=self.get_preserved_filters(request),
)
context.update(extra_context or {})
return TemplateResponse(request, self.object_history_template or [
"admin/%s/%s/object_history.html" % (app_label, opts.model_name),
"admin/%s/object_history.html" % app_label,
"admin/object_history.html"
], context, current_app=self.admin_site.name)
def _create_formsets(self, request, obj):
"Helper function to generate formsets for add/change_view."
formsets = []
inline_instances = []
prefixes = {}
get_formsets_args = [request]
if obj.pk:
get_formsets_args.append(obj)
for FormSet, inline in self.get_formsets_with_inlines(*get_formsets_args):
prefix = FormSet.get_default_prefix()
prefixes[prefix] = prefixes.get(prefix, 0) + 1
if prefixes[prefix] != 1 or not prefix:
prefix = "%s-%s" % (prefix, prefixes[prefix])
formset_params = {
'instance': obj,
'prefix': prefix,
'queryset': inline.get_queryset(request),
}
if request.method == 'POST':
formset_params.update({
'data': request.POST,
'files': request.FILES,
'save_as_new': '_saveasnew' in request.POST
})
formsets.append(FormSet(**formset_params))
inline_instances.append(inline)
return formsets, inline_instances
class InlineModelAdmin(BaseModelAdmin):
"""
Options for inline editing of ``model`` instances.
Provide ``fk_name`` to specify the attribute name of the ``ForeignKey``
from ``model`` to its parent. This is required if ``model`` has more than
one ``ForeignKey`` to its parent.
"""
model = None
fk_name = None
formset = BaseInlineFormSet
extra = 3
min_num = None
max_num = None
template = None
verbose_name = None
verbose_name_plural = None
can_delete = True
checks_class = InlineModelAdminChecks
def __init__(self, parent_model, admin_site):
self.admin_site = admin_site
self.parent_model = parent_model
self.opts = self.model._meta
super(InlineModelAdmin, self).__init__()
if self.verbose_name is None:
self.verbose_name = self.model._meta.verbose_name
if self.verbose_name_plural is None:
self.verbose_name_plural = self.model._meta.verbose_name_plural
@property
def media(self):
extra = '' if settings.DEBUG else '.min'
js = ['jquery%s.js' % extra, 'jquery.init.js', 'inlines%s.js' % extra]
if self.prepopulated_fields:
js.extend(['urlify.js', 'prepopulate%s.js' % extra])
if self.filter_vertical or self.filter_horizontal:
js.extend(['SelectBox.js', 'SelectFilter2.js'])
return forms.Media(js=[static('admin/js/%s' % url) for url in js])
def get_extra(self, request, obj=None, **kwargs):
"""Hook for customizing the number of extra inline forms."""
return self.extra
def get_min_num(self, request, obj=None, **kwargs):
"""Hook for customizing the min number of inline forms."""
return self.min_num
def get_max_num(self, request, obj=None, **kwargs):
"""Hook for customizing the max number of extra inline forms."""
return self.max_num
def get_formset(self, request, obj=None, **kwargs):
"""Returns a BaseInlineFormSet class for use in admin add/change views."""
if 'fields' in kwargs:
fields = kwargs.pop('fields')
else:
fields = flatten_fieldsets(self.get_fieldsets(request, obj))
if self.exclude is None:
exclude = []
else:
exclude = list(self.exclude)
exclude.extend(self.get_readonly_fields(request, obj))
if self.exclude is None and hasattr(self.form, '_meta') and self.form._meta.exclude:
# Take the custom ModelForm's Meta.exclude into account only if the
# InlineModelAdmin doesn't define its own.
exclude.extend(self.form._meta.exclude)
# If exclude is an empty list we use None, since that's the actual
# default.
exclude = exclude or None
can_delete = self.can_delete and self.has_delete_permission(request, obj)
defaults = {
"form": self.form,
"formset": self.formset,
"fk_name": self.fk_name,
"fields": fields,
"exclude": exclude,
"formfield_callback": partial(self.formfield_for_dbfield, request=request),
"extra": self.get_extra(request, obj, **kwargs),
"min_num": self.get_min_num(request, obj, **kwargs),
"max_num": self.get_max_num(request, obj, **kwargs),
"can_delete": can_delete,
}
defaults.update(kwargs)
base_model_form = defaults['form']
class DeleteProtectedModelForm(base_model_form):
def hand_clean_DELETE(self):
"""
We don't validate the 'DELETE' field itself because on
templates it's not rendered using the field information, but
just using a generic "deletion_field" of the InlineModelAdmin.
"""
if self.cleaned_data.get(DELETION_FIELD_NAME, False):
using = router.db_for_write(self._meta.model)
collector = NestedObjects(using=using)
collector.collect([self.instance])
if collector.protected:
objs = []
for p in collector.protected:
objs.append(
# Translators: Model verbose name and instance representation, suitable to be an item in a list
_('%(class_name)s %(instance)s') % {
'class_name': p._meta.verbose_name,
'instance': p}
)
params = {'class_name': self._meta.model._meta.verbose_name,
'instance': self.instance,
'related_objects': get_text_list(objs, _('and'))}
msg = _("Deleting %(class_name)s %(instance)s would require "
"deleting the following protected related objects: "
"%(related_objects)s")
raise ValidationError(msg, code='deleting_protected', params=params)
def is_valid(self):
result = super(DeleteProtectedModelForm, self).is_valid()
self.hand_clean_DELETE()
return result
defaults['form'] = DeleteProtectedModelForm
if defaults['fields'] is None and not modelform_defines_fields(defaults['form']):
defaults['fields'] = forms.ALL_FIELDS
return inlineformset_factory(self.parent_model, self.model, **defaults)
def get_fields(self, request, obj=None):
if self.fields:
return self.fields
form = self.get_formset(request, obj, fields=None).form
return list(form.base_fields) + list(self.get_readonly_fields(request, obj))
def get_queryset(self, request):
queryset = super(InlineModelAdmin, self).get_queryset(request)
if not self.has_change_permission(request):
queryset = queryset.none()
return queryset
def has_add_permission(self, request):
if self.opts.auto_created:
# We're checking the rights to an auto-created intermediate model,
# which doesn't have its own individual permissions. The user needs
# to have the change permission for the related model in order to
# be able to do anything with the intermediate model.
return self.has_change_permission(request)
return super(InlineModelAdmin, self).has_add_permission(request)
def has_change_permission(self, request, obj=None):
opts = self.opts
if opts.auto_created:
            # The model was auto-created as an intermediary for a
            # ManyToMany relationship; find the target model.
for field in opts.fields:
if field.rel and field.rel.to != self.parent_model:
opts = field.rel.to._meta
break
codename = get_permission_codename('change', opts)
return request.user.has_perm("%s.%s" % (opts.app_label, codename))
def has_delete_permission(self, request, obj=None):
if self.opts.auto_created:
# We're checking the rights to an auto-created intermediate model,
# which doesn't have its own individual permissions. The user needs
# to have the change permission for the related model in order to
# be able to do anything with the intermediate model.
return self.has_change_permission(request, obj)
return super(InlineModelAdmin, self).has_delete_permission(request, obj)
class StackedInline(InlineModelAdmin):
template = 'admin/edit_inline/stacked.html'
class TabularInline(InlineModelAdmin):
template = 'admin/edit_inline/tabular.html'
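# Illustrative sketch (model names hypothetical): wiring an inline into a
# ModelAdmin.
#
#     class ChapterInline(TabularInline):
#         model = Chapter
#         extra = 1
#
#     class BookAdmin(ModelAdmin):
#         inlines = [ChapterInline]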
| boooka/GeoPowerOff | venv/lib/python2.7/site-packages/django/contrib/admin/options.py | Python | apache-2.0 | 80,797 |
#!/usr/bin/env python
# Copyright (c) 2017 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Valid certificate chain where the target certificate contains a public key
with a 512-bit modulus (weak)."""
import sys
sys.path += ['../..']
import gencerts
# Self-signed root certificate.
root = gencerts.create_self_signed_root_certificate('Root')
# Intermediate
intermediate = gencerts.create_intermediate_certificate('Intermediate', root)
# Target certificate.
target = gencerts.create_end_entity_certificate('Target', intermediate)
target.set_key(gencerts.get_or_generate_rsa_key(
512, gencerts.create_key_path(target.name)))
chain = [target, intermediate, root]
gencerts.write_chain(__doc__, chain, 'chain.pem')
| nwjs/chromium.src | net/data/verify_certificate_chain_unittest/target-has-512bit-rsa-key/generate-chains.py | Python | bsd-3-clause | 820 |
from pdflib_py import *
p = PDF_new()
PDF_open_file(p, "gradients.pdf")
PDF_set_parameter(p, "usercoordinates", "true")
PDF_set_value(p, "compress", 0)
PDF_set_info(p, "Author", "pdflib")
PDF_set_info(p, "Creator", "pdflib_py")
PDF_set_info(p, "Title", "gradients")
width = 1024
height = 800
PDF_begin_page(p, width, height)
type,x,params = "radial",0,"r0=0 r1=320"
y = 0
PDF_setcolor(p, "fill", "rgb", 0.0, 0.0, 0.0, 1.0)
shading = PDF_shading(p, type, 160+x,160+y, 160+x, 160+y, 1.0, 1.0, 1.0, 1.0, params) #axial|radial
pattern = PDF_shading_pattern(p,shading,"")
PDF_setcolor(p, "fill", "pattern", pattern,0,0,0)
PDF_moveto(p, x,y)
PDF_curveto(p, x+80, y+80, x+80, y+240, x, y+320)
PDF_curveto(p, x+80, y+240, x+240, y+240, x+320, y+320)
PDF_curveto(p, x+240, y+240, x+240, y+80, x+320, y)
PDF_curveto(p, x+240, y+80, x+80, y+80, x, y)
PDF_fill(p)
PDF_moveto(p, x,y)
PDF_curveto(p, x+80, y+80, x+80, y+240, x, y+320)
PDF_curveto(p, x+80, y+240, x+240, y+240, x+320, y+320)
PDF_curveto(p, x+240, y+240, x+240, y+80, x+320, y)
PDF_curveto(p, x+240, y+80, x+80, y+80, x, y)
PDF_stroke(p)
type,x,params = "axial",200,""
y = 0
PDF_setcolor(p, "fill", "rgb", 0.0, 0.0, 0.4, 1.0)
shading = PDF_shading(p, type, 0+x,0+y, 320+x,320+y, 1.0, 1.0, 1.0, 1.0, params) #axial|radial
pattern = PDF_shading_pattern(p,shading,"")
PDF_setcolor(p, "fill", "pattern", pattern,0,0,0)
PDF_moveto(p, x,y)
PDF_curveto(p, x+80, y+80, x+80, y+240, x, y+320)
PDF_curveto(p, x+80, y+240, x+240, y+240, x+320, y+320)
PDF_curveto(p, x+240, y+240, x+240, y+80, x+320, y)
PDF_curveto(p, x+240, y+80, x+80, y+80, x, y)
PDF_fill(p)
PDF_moveto(p, x,y)
PDF_curveto(p, x+80, y+80, x+80, y+240, x, y+320)
PDF_curveto(p, x+80, y+240, x+240, y+240, x+320, y+320)
PDF_curveto(p, x+240, y+240, x+240, y+80, x+320, y)
PDF_curveto(p, x+240, y+80, x+80, y+80, x, y)
PDF_stroke(p)
type,x,params = "radial",500,"r0=0 r1=220"
y = 0
PDF_setcolor(p, "fill", "rgb", 0.0, 0.0, 0.4, 1.0)
shading = PDF_shading(p, type, 120+x, 340+y, 120+x, 340+y, 1.0, 1.0, 1.0, 1.0, params) #axial|radial
pattern = PDF_shading_pattern(p,shading,"")
PDF_setcolor(p, "fill", "pattern", pattern,0,0,0)
PDF_moveto(p, x+80, y+80)
PDF_lineto(p, x+80, y+640)
PDF_lineto(p, x+160, y+640)
PDF_lineto(p, x+160, y+80)
PDF_lineto(p, x+80, y+80)
PDF_fill(p)
PDF_moveto(p, x+80, y+80)
PDF_lineto(p, x+80, y+640)
PDF_lineto(p, x+160, y+640)
PDF_lineto(p, x+160, y+80)
PDF_lineto(p, x+80, y+80)
PDF_stroke(p)
type,x,params = "axial",600,""
y = 0
PDF_setcolor(p, "fill", "rgb", 0.0, 0.0, 0.4, 1.0)
shading = PDF_shading(p, type, 80+x, 80+y, 80+x, 640+y, 1.0, 1.0, 1.0, 1.0, params) #axial|radial
pattern = PDF_shading_pattern(p,shading,"")
PDF_setcolor(p, "fill", "pattern", pattern,0,0,0)
PDF_moveto(p, x+80, y+80)
PDF_lineto(p, x+80, y+640)
PDF_lineto(p, x+160, y+640)
PDF_lineto(p, x+160, y+80)
PDF_lineto(p, x+80, y+80)
PDF_fill(p)
PDF_moveto(p, x+80, y+80)
PDF_lineto(p, x+80, y+640)
PDF_lineto(p, x+160, y+640)
PDF_lineto(p, x+160, y+80)
PDF_lineto(p, x+80, y+80)
PDF_stroke(p)
type,x,params = "axial",50,""
y = 300
PDF_setcolor(p, "fill", "rgb", 0.0, 0.0, 0.4, 1.0)
shading = PDF_shading(p, type, 80+x, 80+y, 400+x, 80+y, 1.0, 1.0, 1.0, 1.0, params) #axial|radial
pattern = PDF_shading_pattern(p,shading,"")
PDF_setcolor(p, "fill", "pattern", pattern,0,0,0)
PDF_moveto(p, x+80, y+80)
PDF_lineto(p, x+80, y+160)
PDF_lineto(p, x+400, y+160)
PDF_lineto(p, x+400, y+80)
PDF_lineto(p, x+80, y+80)
PDF_fill(p)
PDF_moveto(p, x+80, y+80)
PDF_lineto(p, x+80, y+160)
PDF_lineto(p, x+400, y+160)
PDF_lineto(p, x+400, y+80)
PDF_lineto(p, x+80, y+80)
PDF_stroke(p)
PDF_end_page(p)
PDF_close(p)
PDF_delete(p)
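# Illustrative refactor sketch, using only the pdflib_py calls above: the
# repeated outline could be factored into a helper instead of being pasted
# for every gradient.
#
#     def blob_path(p, x, y):
#         PDF_moveto(p, x, y)
#         PDF_curveto(p, x+80, y+80, x+80, y+240, x, y+320)
#         PDF_curveto(p, x+80, y+240, x+240, y+240, x+320, y+320)
#         PDF_curveto(p, x+240, y+240, x+240, y+80, x+320, y)
#         PDF_curveto(p, x+240, y+80, x+80, y+80, x, y)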
| brad/swftools | spec/gradients.py | Python | gpl-2.0 | 3,650 |
#!/usr/bin/python
# -*- coding: utf-8 -*-
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
#
# Copyright (C) 2017 Lenovo, Inc.
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
# Module to Reset to factory settings of Lenovo Switches
# Lenovo Networking
#
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: cnos_factory
author: "Anil Kumar Muraleedharan (@amuraleedhar)"
short_description: Reset the switch's startup configuration to default (factory) on devices running Lenovo CNOS
description:
- This module allows you to reset a switch's startup configuration. The method provides a way to reset the
startup configuration to its factory settings. This is helpful when you want to move the switch to another
topology as a new network device.
This module uses SSH to manage network device configuration.
     The results of the operation can be viewed in the results directory.
     For more information about this module from Lenovo and for customizing its usage for your
use cases, please visit U(http://systemx.lenovofiles.com/help/index.jsp?topic=%2Fcom.lenovo.switchmgt.ansible.doc%2Fcnos_factory.html)
version_added: "2.3"
extends_documentation_fragment: cnos
options: {}
'''
EXAMPLES = '''
Tasks : The following are examples of using the module cnos_factory. These are written in the main.yml file of the tasks directory.
---
- name: Test Reset to factory
cnos_factory:
host: "{{ inventory_hostname }}"
username: "{{ hostvars[inventory_hostname]['ansible_ssh_user'] }}"
password: "{{ hostvars[inventory_hostname]['ansible_ssh_pass'] }}"
deviceType: "{{ hostvars[inventory_hostname]['deviceType'] }}"
outputfile: "./results/test_factory_{{ inventory_hostname }}_output.txt"
'''
RETURN = '''
msg:
description: Success or failure message
returned: always
type: string
sample: "Switch Startup Config is Reset to factory settings"
'''
try:
import paramiko
HAS_PARAMIKO = True
except ImportError:
HAS_PARAMIKO = False
import time
import socket
import array
import json
import re
try:
from ansible.module_utils.network.cnos import cnos
HAS_LIB = True
except ImportError:
HAS_LIB = False
from ansible.module_utils.basic import AnsibleModule
from collections import defaultdict
def main():
module = AnsibleModule(
argument_spec=dict(
outputfile=dict(required=True),
host=dict(required=True),
username=dict(required=True),
password=dict(required=True, no_log=True),
enablePassword=dict(required=False, no_log=True),
deviceType=dict(required=True),),
supports_check_mode=False)
username = module.params['username']
password = module.params['password']
enablePassword = module.params['enablePassword']
cliCommand = "save erase \n"
outputfile = module.params['outputfile']
hostIP = module.params['host']
deviceType = module.params['deviceType']
output = ""
if not HAS_PARAMIKO:
module.fail_json(msg='paramiko is required for this module')
# Create instance of SSHClient object
remote_conn_pre = paramiko.SSHClient()
# Automatically add untrusted hosts (make sure okay for security policy in your environment)
remote_conn_pre.set_missing_host_key_policy(paramiko.AutoAddPolicy())
# initiate SSH connection with the switch
remote_conn_pre.connect(hostIP, username=username, password=password)
time.sleep(2)
# Use invoke_shell to establish an 'interactive session'
remote_conn = remote_conn_pre.invoke_shell()
time.sleep(2)
# Enable and enter configure terminal then send command
output = output + cnos.waitForDeviceResponse("\n", ">", 2, remote_conn)
output = output + cnos.enterEnableModeForDevice(enablePassword, 3, remote_conn)
# Make terminal length = 0
output = output + cnos.waitForDeviceResponse("terminal length 0\n", "#", 2, remote_conn)
# cnos.debugOutput(cliCommand)
# Send the CLi command
output = output + cnos.waitForDeviceResponse(cliCommand, "[n]", 2, remote_conn)
output = output + cnos.waitForDeviceResponse("y" + "\n", "#", 2, remote_conn)
# Save it into the file
    # Write the captured CLI output to the results file; a context manager
    # avoids shadowing the built-in ``file`` and guarantees the handle closes.
    with open(outputfile, "a") as output_file:
        output_file.write(output)
errorMsg = cnos.checkOutputForError(output)
if(errorMsg is None):
module.exit_json(changed=True, msg="Switch Startup Config is Reset to factory settings ")
else:
module.fail_json(msg=errorMsg)
if __name__ == '__main__':
main()
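# Illustrative inventory sketch (host address and variable values are
# hypothetical) matching the variables referenced in EXAMPLES above:
#
#     [cnos_switches]
#     10.241.107.39 ansible_ssh_user=admin ansible_ssh_pass=secret deviceType=g8272_cnos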
| hryamzik/ansible | lib/ansible/modules/network/cnos/cnos_factory.py | Python | gpl-3.0 | 5,299 |
"""Support for MySensors covers."""
from homeassistant.components import mysensors
from homeassistant.components.cover import ATTR_POSITION, DOMAIN, CoverDevice
from homeassistant.const import STATE_OFF, STATE_ON
async def async_setup_platform(
hass, config, async_add_entities, discovery_info=None):
"""Set up the mysensors platform for covers."""
mysensors.setup_mysensors_platform(
hass, DOMAIN, discovery_info, MySensorsCover,
async_add_entities=async_add_entities)
class MySensorsCover(mysensors.device.MySensorsEntity, CoverDevice):
"""Representation of the value of a MySensors Cover child node."""
@property
def assumed_state(self):
"""Return True if unable to access real state of entity."""
return self.gateway.optimistic
@property
def is_closed(self):
"""Return True if cover is closed."""
set_req = self.gateway.const.SetReq
if set_req.V_DIMMER in self._values:
return self._values.get(set_req.V_DIMMER) == 0
return self._values.get(set_req.V_LIGHT) == STATE_OFF
@property
def current_cover_position(self):
"""Return current position of cover.
None is unknown, 0 is closed, 100 is fully open.
"""
set_req = self.gateway.const.SetReq
return self._values.get(set_req.V_DIMMER)
async def async_open_cover(self, **kwargs):
"""Move the cover up."""
set_req = self.gateway.const.SetReq
self.gateway.set_child_value(
self.node_id, self.child_id, set_req.V_UP, 1)
if self.gateway.optimistic:
# Optimistically assume that cover has changed state.
if set_req.V_DIMMER in self._values:
self._values[set_req.V_DIMMER] = 100
else:
self._values[set_req.V_LIGHT] = STATE_ON
self.async_schedule_update_ha_state()
async def async_close_cover(self, **kwargs):
"""Move the cover down."""
set_req = self.gateway.const.SetReq
self.gateway.set_child_value(
self.node_id, self.child_id, set_req.V_DOWN, 1)
if self.gateway.optimistic:
# Optimistically assume that cover has changed state.
if set_req.V_DIMMER in self._values:
self._values[set_req.V_DIMMER] = 0
else:
self._values[set_req.V_LIGHT] = STATE_OFF
self.async_schedule_update_ha_state()
async def async_set_cover_position(self, **kwargs):
"""Move the cover to a specific position."""
position = kwargs.get(ATTR_POSITION)
set_req = self.gateway.const.SetReq
self.gateway.set_child_value(
self.node_id, self.child_id, set_req.V_DIMMER, position)
if self.gateway.optimistic:
# Optimistically assume that cover has changed state.
self._values[set_req.V_DIMMER] = position
self.async_schedule_update_ha_state()
async def async_stop_cover(self, **kwargs):
"""Stop the device."""
set_req = self.gateway.const.SetReq
self.gateway.set_child_value(
self.node_id, self.child_id, set_req.V_STOP, 1)
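# Illustrative sketch (entity id hypothetical): async_set_cover_position()
# above is what the standard cover service ends up invoking:
#
#     await hass.services.async_call(
#         'cover', 'set_cover_position',
#         {'entity_id': 'cover.window_1', 'position': 50})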
| MartinHjelmare/home-assistant | homeassistant/components/mysensors/cover.py | Python | apache-2.0 | 3,195 |
"""Tests for Airly."""
| nkgilley/home-assistant | tests/components/airly/__init__.py | Python | apache-2.0 | 23 |
#!/usr/bin/env python
import xml.etree.ElementTree as ET
class brocade_ip_access_list(object):
"""Auto generated class.
"""
def __init__(self, **kwargs):
self._callback = kwargs.pop('callback')
def ip_acl_ip_access_list_standard_name(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list")
ip = ET.SubElement(ip_acl, "ip")
access_list = ET.SubElement(ip, "access-list")
standard = ET.SubElement(access_list, "standard")
name = ET.SubElement(standard, "name")
name.text = kwargs.pop('name')
callback = kwargs.pop('callback', self._callback)
return callback(config)
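    # Illustrative usage sketch: the class only needs a callable that accepts
    # the generated ElementTree element; the NETCONF session below is
    # hypothetical.
    #
    #     def send(config):
    #         return session.edit_config(target='running',
    #                                    config=ET.tostring(config))
    #
    #     acl = brocade_ip_access_list(callback=send)
    #     acl.ip_acl_ip_access_list_standard_name(name='mgmt-acl')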
def ip_acl_ip_access_list_standard_hide_ip_acl_std_seq_seq_id(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list")
ip = ET.SubElement(ip_acl, "ip")
access_list = ET.SubElement(ip, "access-list")
standard = ET.SubElement(access_list, "standard")
name_key = ET.SubElement(standard, "name")
name_key.text = kwargs.pop('name')
hide_ip_acl_std = ET.SubElement(standard, "hide-ip-acl-std")
seq = ET.SubElement(hide_ip_acl_std, "seq")
seq_id = ET.SubElement(seq, "seq-id")
seq_id.text = kwargs.pop('seq_id')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def ip_acl_ip_access_list_standard_hide_ip_acl_std_seq_action(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list")
ip = ET.SubElement(ip_acl, "ip")
access_list = ET.SubElement(ip, "access-list")
standard = ET.SubElement(access_list, "standard")
name_key = ET.SubElement(standard, "name")
name_key.text = kwargs.pop('name')
hide_ip_acl_std = ET.SubElement(standard, "hide-ip-acl-std")
seq = ET.SubElement(hide_ip_acl_std, "seq")
seq_id_key = ET.SubElement(seq, "seq-id")
seq_id_key.text = kwargs.pop('seq_id')
action = ET.SubElement(seq, "action")
action.text = kwargs.pop('action')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def ip_acl_ip_access_list_standard_hide_ip_acl_std_seq_src_host_any_sip(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list")
ip = ET.SubElement(ip_acl, "ip")
access_list = ET.SubElement(ip, "access-list")
standard = ET.SubElement(access_list, "standard")
name_key = ET.SubElement(standard, "name")
name_key.text = kwargs.pop('name')
hide_ip_acl_std = ET.SubElement(standard, "hide-ip-acl-std")
seq = ET.SubElement(hide_ip_acl_std, "seq")
seq_id_key = ET.SubElement(seq, "seq-id")
seq_id_key.text = kwargs.pop('seq_id')
src_host_any_sip = ET.SubElement(seq, "src-host-any-sip")
src_host_any_sip.text = kwargs.pop('src_host_any_sip')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def ip_acl_ip_access_list_standard_hide_ip_acl_std_seq_src_host_ip(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list")
ip = ET.SubElement(ip_acl, "ip")
access_list = ET.SubElement(ip, "access-list")
standard = ET.SubElement(access_list, "standard")
name_key = ET.SubElement(standard, "name")
name_key.text = kwargs.pop('name')
hide_ip_acl_std = ET.SubElement(standard, "hide-ip-acl-std")
seq = ET.SubElement(hide_ip_acl_std, "seq")
seq_id_key = ET.SubElement(seq, "seq-id")
seq_id_key.text = kwargs.pop('seq_id')
src_host_ip = ET.SubElement(seq, "src-host-ip")
src_host_ip.text = kwargs.pop('src_host_ip')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def ip_acl_ip_access_list_standard_hide_ip_acl_std_seq_src_mask(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list")
ip = ET.SubElement(ip_acl, "ip")
access_list = ET.SubElement(ip, "access-list")
standard = ET.SubElement(access_list, "standard")
name_key = ET.SubElement(standard, "name")
name_key.text = kwargs.pop('name')
hide_ip_acl_std = ET.SubElement(standard, "hide-ip-acl-std")
seq = ET.SubElement(hide_ip_acl_std, "seq")
seq_id_key = ET.SubElement(seq, "seq-id")
seq_id_key.text = kwargs.pop('seq_id')
src_mask = ET.SubElement(seq, "src-mask")
src_mask.text = kwargs.pop('src_mask')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def ip_acl_ip_access_list_standard_hide_ip_acl_std_seq_count(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list")
ip = ET.SubElement(ip_acl, "ip")
access_list = ET.SubElement(ip, "access-list")
standard = ET.SubElement(access_list, "standard")
name_key = ET.SubElement(standard, "name")
name_key.text = kwargs.pop('name')
hide_ip_acl_std = ET.SubElement(standard, "hide-ip-acl-std")
seq = ET.SubElement(hide_ip_acl_std, "seq")
seq_id_key = ET.SubElement(seq, "seq-id")
seq_id_key.text = kwargs.pop('seq_id')
count = ET.SubElement(seq, "count")
callback = kwargs.pop('callback', self._callback)
return callback(config)
def ip_acl_ip_access_list_standard_hide_ip_acl_std_seq_log(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list")
ip = ET.SubElement(ip_acl, "ip")
access_list = ET.SubElement(ip, "access-list")
standard = ET.SubElement(access_list, "standard")
name_key = ET.SubElement(standard, "name")
name_key.text = kwargs.pop('name')
hide_ip_acl_std = ET.SubElement(standard, "hide-ip-acl-std")
seq = ET.SubElement(hide_ip_acl_std, "seq")
seq_id_key = ET.SubElement(seq, "seq-id")
seq_id_key.text = kwargs.pop('seq_id')
log = ET.SubElement(seq, "log")
callback = kwargs.pop('callback', self._callback)
return callback(config)
def ip_acl_ip_access_list_extended_name(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list")
ip = ET.SubElement(ip_acl, "ip")
access_list = ET.SubElement(ip, "access-list")
extended = ET.SubElement(access_list, "extended")
name = ET.SubElement(extended, "name")
name.text = kwargs.pop('name')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def ip_acl_ip_access_list_extended_hide_ip_acl_ext_seq_seq_id(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list")
ip = ET.SubElement(ip_acl, "ip")
access_list = ET.SubElement(ip, "access-list")
extended = ET.SubElement(access_list, "extended")
name_key = ET.SubElement(extended, "name")
name_key.text = kwargs.pop('name')
hide_ip_acl_ext = ET.SubElement(extended, "hide-ip-acl-ext")
seq = ET.SubElement(hide_ip_acl_ext, "seq")
seq_id = ET.SubElement(seq, "seq-id")
seq_id.text = kwargs.pop('seq_id')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def ip_acl_ip_access_list_extended_hide_ip_acl_ext_seq_action(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list")
ip = ET.SubElement(ip_acl, "ip")
access_list = ET.SubElement(ip, "access-list")
extended = ET.SubElement(access_list, "extended")
name_key = ET.SubElement(extended, "name")
name_key.text = kwargs.pop('name')
hide_ip_acl_ext = ET.SubElement(extended, "hide-ip-acl-ext")
seq = ET.SubElement(hide_ip_acl_ext, "seq")
seq_id_key = ET.SubElement(seq, "seq-id")
seq_id_key.text = kwargs.pop('seq_id')
action = ET.SubElement(seq, "action")
action.text = kwargs.pop('action')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def ip_acl_ip_access_list_extended_hide_ip_acl_ext_seq_protocol_type(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list")
ip = ET.SubElement(ip_acl, "ip")
access_list = ET.SubElement(ip, "access-list")
extended = ET.SubElement(access_list, "extended")
name_key = ET.SubElement(extended, "name")
name_key.text = kwargs.pop('name')
hide_ip_acl_ext = ET.SubElement(extended, "hide-ip-acl-ext")
seq = ET.SubElement(hide_ip_acl_ext, "seq")
seq_id_key = ET.SubElement(seq, "seq-id")
seq_id_key.text = kwargs.pop('seq_id')
protocol_type = ET.SubElement(seq, "protocol-type")
protocol_type.text = kwargs.pop('protocol_type')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def ip_acl_ip_access_list_extended_hide_ip_acl_ext_seq_src_host_any_sip(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list")
ip = ET.SubElement(ip_acl, "ip")
access_list = ET.SubElement(ip, "access-list")
extended = ET.SubElement(access_list, "extended")
name_key = ET.SubElement(extended, "name")
name_key.text = kwargs.pop('name')
hide_ip_acl_ext = ET.SubElement(extended, "hide-ip-acl-ext")
seq = ET.SubElement(hide_ip_acl_ext, "seq")
seq_id_key = ET.SubElement(seq, "seq-id")
seq_id_key.text = kwargs.pop('seq_id')
src_host_any_sip = ET.SubElement(seq, "src-host-any-sip")
src_host_any_sip.text = kwargs.pop('src_host_any_sip')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def ip_acl_ip_access_list_extended_hide_ip_acl_ext_seq_src_host_ip(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list")
ip = ET.SubElement(ip_acl, "ip")
access_list = ET.SubElement(ip, "access-list")
extended = ET.SubElement(access_list, "extended")
name_key = ET.SubElement(extended, "name")
name_key.text = kwargs.pop('name')
hide_ip_acl_ext = ET.SubElement(extended, "hide-ip-acl-ext")
seq = ET.SubElement(hide_ip_acl_ext, "seq")
seq_id_key = ET.SubElement(seq, "seq-id")
seq_id_key.text = kwargs.pop('seq_id')
src_host_ip = ET.SubElement(seq, "src-host-ip")
src_host_ip.text = kwargs.pop('src_host_ip')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def ip_acl_ip_access_list_extended_hide_ip_acl_ext_seq_src_mask(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list")
ip = ET.SubElement(ip_acl, "ip")
access_list = ET.SubElement(ip, "access-list")
extended = ET.SubElement(access_list, "extended")
name_key = ET.SubElement(extended, "name")
name_key.text = kwargs.pop('name')
hide_ip_acl_ext = ET.SubElement(extended, "hide-ip-acl-ext")
seq = ET.SubElement(hide_ip_acl_ext, "seq")
seq_id_key = ET.SubElement(seq, "seq-id")
seq_id_key.text = kwargs.pop('seq_id')
src_mask = ET.SubElement(seq, "src-mask")
src_mask.text = kwargs.pop('src_mask')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def ip_acl_ip_access_list_extended_hide_ip_acl_ext_seq_sport(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list")
ip = ET.SubElement(ip_acl, "ip")
access_list = ET.SubElement(ip, "access-list")
extended = ET.SubElement(access_list, "extended")
name_key = ET.SubElement(extended, "name")
name_key.text = kwargs.pop('name')
hide_ip_acl_ext = ET.SubElement(extended, "hide-ip-acl-ext")
seq = ET.SubElement(hide_ip_acl_ext, "seq")
seq_id_key = ET.SubElement(seq, "seq-id")
seq_id_key.text = kwargs.pop('seq_id')
sport = ET.SubElement(seq, "sport")
sport.text = kwargs.pop('sport')
callback = kwargs.pop('callback', self._callback)
return callback(config)
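# sport (and dport later in this block) appear to select the comparison
# operator (eq/neq/lt/gt/range), while the sport-number-*/dport-number-*
# leaves carry the actual port values; this is inferred from the leaf names,
# not verified against the YANG module.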
def ip_acl_ip_access_list_extended_hide_ip_acl_ext_seq_sport_number_eq_neq_tcp(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list")
ip = ET.SubElement(ip_acl, "ip")
access_list = ET.SubElement(ip, "access-list")
extended = ET.SubElement(access_list, "extended")
name_key = ET.SubElement(extended, "name")
name_key.text = kwargs.pop('name')
hide_ip_acl_ext = ET.SubElement(extended, "hide-ip-acl-ext")
seq = ET.SubElement(hide_ip_acl_ext, "seq")
seq_id_key = ET.SubElement(seq, "seq-id")
seq_id_key.text = kwargs.pop('seq_id')
sport_number_eq_neq_tcp = ET.SubElement(seq, "sport-number-eq-neq-tcp")
sport_number_eq_neq_tcp.text = kwargs.pop('sport_number_eq_neq_tcp')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def ip_acl_ip_access_list_extended_hide_ip_acl_ext_seq_sport_number_lt_tcp(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list")
ip = ET.SubElement(ip_acl, "ip")
access_list = ET.SubElement(ip, "access-list")
extended = ET.SubElement(access_list, "extended")
name_key = ET.SubElement(extended, "name")
name_key.text = kwargs.pop('name')
hide_ip_acl_ext = ET.SubElement(extended, "hide-ip-acl-ext")
seq = ET.SubElement(hide_ip_acl_ext, "seq")
seq_id_key = ET.SubElement(seq, "seq-id")
seq_id_key.text = kwargs.pop('seq_id')
sport_number_lt_tcp = ET.SubElement(seq, "sport-number-lt-tcp")
sport_number_lt_tcp.text = kwargs.pop('sport_number_lt_tcp')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def ip_acl_ip_access_list_extended_hide_ip_acl_ext_seq_sport_number_gt_tcp(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list")
ip = ET.SubElement(ip_acl, "ip")
access_list = ET.SubElement(ip, "access-list")
extended = ET.SubElement(access_list, "extended")
name_key = ET.SubElement(extended, "name")
name_key.text = kwargs.pop('name')
hide_ip_acl_ext = ET.SubElement(extended, "hide-ip-acl-ext")
seq = ET.SubElement(hide_ip_acl_ext, "seq")
seq_id_key = ET.SubElement(seq, "seq-id")
seq_id_key.text = kwargs.pop('seq_id')
sport_number_gt_tcp = ET.SubElement(seq, "sport-number-gt-tcp")
sport_number_gt_tcp.text = kwargs.pop('sport_number_gt_tcp')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def ip_acl_ip_access_list_extended_hide_ip_acl_ext_seq_sport_number_eq_neq_udp(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list")
ip = ET.SubElement(ip_acl, "ip")
access_list = ET.SubElement(ip, "access-list")
extended = ET.SubElement(access_list, "extended")
name_key = ET.SubElement(extended, "name")
name_key.text = kwargs.pop('name')
hide_ip_acl_ext = ET.SubElement(extended, "hide-ip-acl-ext")
seq = ET.SubElement(hide_ip_acl_ext, "seq")
seq_id_key = ET.SubElement(seq, "seq-id")
seq_id_key.text = kwargs.pop('seq_id')
sport_number_eq_neq_udp = ET.SubElement(seq, "sport-number-eq-neq-udp")
sport_number_eq_neq_udp.text = kwargs.pop('sport_number_eq_neq_udp')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def ip_acl_ip_access_list_extended_hide_ip_acl_ext_seq_sport_number_lt_udp(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list")
ip = ET.SubElement(ip_acl, "ip")
access_list = ET.SubElement(ip, "access-list")
extended = ET.SubElement(access_list, "extended")
name_key = ET.SubElement(extended, "name")
name_key.text = kwargs.pop('name')
hide_ip_acl_ext = ET.SubElement(extended, "hide-ip-acl-ext")
seq = ET.SubElement(hide_ip_acl_ext, "seq")
seq_id_key = ET.SubElement(seq, "seq-id")
seq_id_key.text = kwargs.pop('seq_id')
sport_number_lt_udp = ET.SubElement(seq, "sport-number-lt-udp")
sport_number_lt_udp.text = kwargs.pop('sport_number_lt_udp')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def ip_acl_ip_access_list_extended_hide_ip_acl_ext_seq_sport_number_gt_udp(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list")
ip = ET.SubElement(ip_acl, "ip")
access_list = ET.SubElement(ip, "access-list")
extended = ET.SubElement(access_list, "extended")
name_key = ET.SubElement(extended, "name")
name_key.text = kwargs.pop('name')
hide_ip_acl_ext = ET.SubElement(extended, "hide-ip-acl-ext")
seq = ET.SubElement(hide_ip_acl_ext, "seq")
seq_id_key = ET.SubElement(seq, "seq-id")
seq_id_key.text = kwargs.pop('seq_id')
sport_number_gt_udp = ET.SubElement(seq, "sport-number-gt-udp")
sport_number_gt_udp.text = kwargs.pop('sport_number_gt_udp')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def ip_acl_ip_access_list_extended_hide_ip_acl_ext_seq_sport_number_range_lower_tcp(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list")
ip = ET.SubElement(ip_acl, "ip")
access_list = ET.SubElement(ip, "access-list")
extended = ET.SubElement(access_list, "extended")
name_key = ET.SubElement(extended, "name")
name_key.text = kwargs.pop('name')
hide_ip_acl_ext = ET.SubElement(extended, "hide-ip-acl-ext")
seq = ET.SubElement(hide_ip_acl_ext, "seq")
seq_id_key = ET.SubElement(seq, "seq-id")
seq_id_key.text = kwargs.pop('seq_id')
sport_number_range_lower_tcp = ET.SubElement(seq, "sport-number-range-lower-tcp")
sport_number_range_lower_tcp.text = kwargs.pop('sport_number_range_lower_tcp')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def ip_acl_ip_access_list_extended_hide_ip_acl_ext_seq_sport_number_range_lower_udp(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list")
ip = ET.SubElement(ip_acl, "ip")
access_list = ET.SubElement(ip, "access-list")
extended = ET.SubElement(access_list, "extended")
name_key = ET.SubElement(extended, "name")
name_key.text = kwargs.pop('name')
hide_ip_acl_ext = ET.SubElement(extended, "hide-ip-acl-ext")
seq = ET.SubElement(hide_ip_acl_ext, "seq")
seq_id_key = ET.SubElement(seq, "seq-id")
seq_id_key.text = kwargs.pop('seq_id')
sport_number_range_lower_udp = ET.SubElement(seq, "sport-number-range-lower-udp")
sport_number_range_lower_udp.text = kwargs.pop('sport_number_range_lower_udp')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def ip_acl_ip_access_list_extended_hide_ip_acl_ext_seq_sport_number_range_higher_tcp(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list")
ip = ET.SubElement(ip_acl, "ip")
access_list = ET.SubElement(ip, "access-list")
extended = ET.SubElement(access_list, "extended")
name_key = ET.SubElement(extended, "name")
name_key.text = kwargs.pop('name')
hide_ip_acl_ext = ET.SubElement(extended, "hide-ip-acl-ext")
seq = ET.SubElement(hide_ip_acl_ext, "seq")
seq_id_key = ET.SubElement(seq, "seq-id")
seq_id_key.text = kwargs.pop('seq_id')
sport_number_range_higher_tcp = ET.SubElement(seq, "sport-number-range-higher-tcp")
sport_number_range_higher_tcp.text = kwargs.pop('sport_number_range_higher_tcp')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def ip_acl_ip_access_list_extended_hide_ip_acl_ext_seq_sport_number_range_higher_udp(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list")
ip = ET.SubElement(ip_acl, "ip")
access_list = ET.SubElement(ip, "access-list")
extended = ET.SubElement(access_list, "extended")
name_key = ET.SubElement(extended, "name")
name_key.text = kwargs.pop('name')
hide_ip_acl_ext = ET.SubElement(extended, "hide-ip-acl-ext")
seq = ET.SubElement(hide_ip_acl_ext, "seq")
seq_id_key = ET.SubElement(seq, "seq-id")
seq_id_key.text = kwargs.pop('seq_id')
sport_number_range_higher_udp = ET.SubElement(seq, "sport-number-range-higher-udp")
sport_number_range_higher_udp.text = kwargs.pop('sport_number_range_higher_udp')
callback = kwargs.pop('callback', self._callback)
return callback(config)
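# The range-lower/range-higher leaves pair up, and each generated call sends
# exactly one leaf, so configuring a source-port range takes two calls
# against the same name/seq_id (illustrative values, default callback):
#
#     acl.ip_acl_ip_access_list_extended_hide_ip_acl_ext_seq_sport_number_range_lower_tcp(
#         name='acl-101', seq_id='10', sport_number_range_lower_tcp='1024')
#     acl.ip_acl_ip_access_list_extended_hide_ip_acl_ext_seq_sport_number_range_higher_tcp(
#         name='acl-101', seq_id='10', sport_number_range_higher_tcp='2048')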
def ip_acl_ip_access_list_extended_hide_ip_acl_ext_seq_dst_host_any_dip(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list")
ip = ET.SubElement(ip_acl, "ip")
access_list = ET.SubElement(ip, "access-list")
extended = ET.SubElement(access_list, "extended")
name_key = ET.SubElement(extended, "name")
name_key.text = kwargs.pop('name')
hide_ip_acl_ext = ET.SubElement(extended, "hide-ip-acl-ext")
seq = ET.SubElement(hide_ip_acl_ext, "seq")
seq_id_key = ET.SubElement(seq, "seq-id")
seq_id_key.text = kwargs.pop('seq_id')
dst_host_any_dip = ET.SubElement(seq, "dst-host-any-dip")
dst_host_any_dip.text = kwargs.pop('dst_host_any_dip')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def ip_acl_ip_access_list_extended_hide_ip_acl_ext_seq_dst_host_ip(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list")
ip = ET.SubElement(ip_acl, "ip")
access_list = ET.SubElement(ip, "access-list")
extended = ET.SubElement(access_list, "extended")
name_key = ET.SubElement(extended, "name")
name_key.text = kwargs.pop('name')
hide_ip_acl_ext = ET.SubElement(extended, "hide-ip-acl-ext")
seq = ET.SubElement(hide_ip_acl_ext, "seq")
seq_id_key = ET.SubElement(seq, "seq-id")
seq_id_key.text = kwargs.pop('seq_id')
dst_host_ip = ET.SubElement(seq, "dst-host-ip")
dst_host_ip.text = kwargs.pop('dst_host_ip')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def ip_acl_ip_access_list_extended_hide_ip_acl_ext_seq_dst_mask(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list")
ip = ET.SubElement(ip_acl, "ip")
access_list = ET.SubElement(ip, "access-list")
extended = ET.SubElement(access_list, "extended")
name_key = ET.SubElement(extended, "name")
name_key.text = kwargs.pop('name')
hide_ip_acl_ext = ET.SubElement(extended, "hide-ip-acl-ext")
seq = ET.SubElement(hide_ip_acl_ext, "seq")
seq_id_key = ET.SubElement(seq, "seq-id")
seq_id_key.text = kwargs.pop('seq_id')
dst_mask = ET.SubElement(seq, "dst-mask")
dst_mask.text = kwargs.pop('dst_mask')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def ip_acl_ip_access_list_extended_hide_ip_acl_ext_seq_dport(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list")
ip = ET.SubElement(ip_acl, "ip")
access_list = ET.SubElement(ip, "access-list")
extended = ET.SubElement(access_list, "extended")
name_key = ET.SubElement(extended, "name")
name_key.text = kwargs.pop('name')
hide_ip_acl_ext = ET.SubElement(extended, "hide-ip-acl-ext")
seq = ET.SubElement(hide_ip_acl_ext, "seq")
seq_id_key = ET.SubElement(seq, "seq-id")
seq_id_key.text = kwargs.pop('seq_id')
dport = ET.SubElement(seq, "dport")
dport.text = kwargs.pop('dport')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def ip_acl_ip_access_list_extended_hide_ip_acl_ext_seq_dport_number_eq_neq_tcp(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list")
ip = ET.SubElement(ip_acl, "ip")
access_list = ET.SubElement(ip, "access-list")
extended = ET.SubElement(access_list, "extended")
name_key = ET.SubElement(extended, "name")
name_key.text = kwargs.pop('name')
hide_ip_acl_ext = ET.SubElement(extended, "hide-ip-acl-ext")
seq = ET.SubElement(hide_ip_acl_ext, "seq")
seq_id_key = ET.SubElement(seq, "seq-id")
seq_id_key.text = kwargs.pop('seq_id')
dport_number_eq_neq_tcp = ET.SubElement(seq, "dport-number-eq-neq-tcp")
dport_number_eq_neq_tcp.text = kwargs.pop('dport_number_eq_neq_tcp')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def ip_acl_ip_access_list_extended_hide_ip_acl_ext_seq_dport_number_lt_tcp(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list")
ip = ET.SubElement(ip_acl, "ip")
access_list = ET.SubElement(ip, "access-list")
extended = ET.SubElement(access_list, "extended")
name_key = ET.SubElement(extended, "name")
name_key.text = kwargs.pop('name')
hide_ip_acl_ext = ET.SubElement(extended, "hide-ip-acl-ext")
seq = ET.SubElement(hide_ip_acl_ext, "seq")
seq_id_key = ET.SubElement(seq, "seq-id")
seq_id_key.text = kwargs.pop('seq_id')
dport_number_lt_tcp = ET.SubElement(seq, "dport-number-lt-tcp")
dport_number_lt_tcp.text = kwargs.pop('dport_number_lt_tcp')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def ip_acl_ip_access_list_extended_hide_ip_acl_ext_seq_dport_number_gt_tcp(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list")
ip = ET.SubElement(ip_acl, "ip")
access_list = ET.SubElement(ip, "access-list")
extended = ET.SubElement(access_list, "extended")
name_key = ET.SubElement(extended, "name")
name_key.text = kwargs.pop('name')
hide_ip_acl_ext = ET.SubElement(extended, "hide-ip-acl-ext")
seq = ET.SubElement(hide_ip_acl_ext, "seq")
seq_id_key = ET.SubElement(seq, "seq-id")
seq_id_key.text = kwargs.pop('seq_id')
dport_number_gt_tcp = ET.SubElement(seq, "dport-number-gt-tcp")
dport_number_gt_tcp.text = kwargs.pop('dport_number_gt_tcp')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def ip_acl_ip_access_list_extended_hide_ip_acl_ext_seq_dport_number_eq_neq_udp(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list")
ip = ET.SubElement(ip_acl, "ip")
access_list = ET.SubElement(ip, "access-list")
extended = ET.SubElement(access_list, "extended")
name_key = ET.SubElement(extended, "name")
name_key.text = kwargs.pop('name')
hide_ip_acl_ext = ET.SubElement(extended, "hide-ip-acl-ext")
seq = ET.SubElement(hide_ip_acl_ext, "seq")
seq_id_key = ET.SubElement(seq, "seq-id")
seq_id_key.text = kwargs.pop('seq_id')
dport_number_eq_neq_udp = ET.SubElement(seq, "dport-number-eq-neq-udp")
dport_number_eq_neq_udp.text = kwargs.pop('dport_number_eq_neq_udp')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def ip_acl_ip_access_list_extended_hide_ip_acl_ext_seq_dport_number_lt_udp(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list")
ip = ET.SubElement(ip_acl, "ip")
access_list = ET.SubElement(ip, "access-list")
extended = ET.SubElement(access_list, "extended")
name_key = ET.SubElement(extended, "name")
name_key.text = kwargs.pop('name')
hide_ip_acl_ext = ET.SubElement(extended, "hide-ip-acl-ext")
seq = ET.SubElement(hide_ip_acl_ext, "seq")
seq_id_key = ET.SubElement(seq, "seq-id")
seq_id_key.text = kwargs.pop('seq_id')
dport_number_lt_udp = ET.SubElement(seq, "dport-number-lt-udp")
dport_number_lt_udp.text = kwargs.pop('dport_number_lt_udp')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def ip_acl_ip_access_list_extended_hide_ip_acl_ext_seq_dport_number_gt_udp(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list")
ip = ET.SubElement(ip_acl, "ip")
access_list = ET.SubElement(ip, "access-list")
extended = ET.SubElement(access_list, "extended")
name_key = ET.SubElement(extended, "name")
name_key.text = kwargs.pop('name')
hide_ip_acl_ext = ET.SubElement(extended, "hide-ip-acl-ext")
seq = ET.SubElement(hide_ip_acl_ext, "seq")
seq_id_key = ET.SubElement(seq, "seq-id")
seq_id_key.text = kwargs.pop('seq_id')
dport_number_gt_udp = ET.SubElement(seq, "dport-number-gt-udp")
dport_number_gt_udp.text = kwargs.pop('dport_number_gt_udp')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def ip_acl_ip_access_list_extended_hide_ip_acl_ext_seq_dport_number_range_lower_tcp(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list")
ip = ET.SubElement(ip_acl, "ip")
access_list = ET.SubElement(ip, "access-list")
extended = ET.SubElement(access_list, "extended")
name_key = ET.SubElement(extended, "name")
name_key.text = kwargs.pop('name')
hide_ip_acl_ext = ET.SubElement(extended, "hide-ip-acl-ext")
seq = ET.SubElement(hide_ip_acl_ext, "seq")
seq_id_key = ET.SubElement(seq, "seq-id")
seq_id_key.text = kwargs.pop('seq_id')
dport_number_range_lower_tcp = ET.SubElement(seq, "dport-number-range-lower-tcp")
dport_number_range_lower_tcp.text = kwargs.pop('dport_number_range_lower_tcp')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def ip_acl_ip_access_list_extended_hide_ip_acl_ext_seq_dport_number_range_lower_udp(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list")
ip = ET.SubElement(ip_acl, "ip")
access_list = ET.SubElement(ip, "access-list")
extended = ET.SubElement(access_list, "extended")
name_key = ET.SubElement(extended, "name")
name_key.text = kwargs.pop('name')
hide_ip_acl_ext = ET.SubElement(extended, "hide-ip-acl-ext")
seq = ET.SubElement(hide_ip_acl_ext, "seq")
seq_id_key = ET.SubElement(seq, "seq-id")
seq_id_key.text = kwargs.pop('seq_id')
dport_number_range_lower_udp = ET.SubElement(seq, "dport-number-range-lower-udp")
dport_number_range_lower_udp.text = kwargs.pop('dport_number_range_lower_udp')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def ip_acl_ip_access_list_extended_hide_ip_acl_ext_seq_dport_number_range_higher_tcp(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list")
ip = ET.SubElement(ip_acl, "ip")
access_list = ET.SubElement(ip, "access-list")
extended = ET.SubElement(access_list, "extended")
name_key = ET.SubElement(extended, "name")
name_key.text = kwargs.pop('name')
hide_ip_acl_ext = ET.SubElement(extended, "hide-ip-acl-ext")
seq = ET.SubElement(hide_ip_acl_ext, "seq")
seq_id_key = ET.SubElement(seq, "seq-id")
seq_id_key.text = kwargs.pop('seq_id')
dport_number_range_higher_tcp = ET.SubElement(seq, "dport-number-range-higher-tcp")
dport_number_range_higher_tcp.text = kwargs.pop('dport_number_range_higher_tcp')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def ip_acl_ip_access_list_extended_hide_ip_acl_ext_seq_dport_number_range_higher_udp(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list")
ip = ET.SubElement(ip_acl, "ip")
access_list = ET.SubElement(ip, "access-list")
extended = ET.SubElement(access_list, "extended")
name_key = ET.SubElement(extended, "name")
name_key.text = kwargs.pop('name')
hide_ip_acl_ext = ET.SubElement(extended, "hide-ip-acl-ext")
seq = ET.SubElement(hide_ip_acl_ext, "seq")
seq_id_key = ET.SubElement(seq, "seq-id")
seq_id_key.text = kwargs.pop('seq_id')
dport_number_range_higher_udp = ET.SubElement(seq, "dport-number-range-higher-udp")
dport_number_range_higher_udp.text = kwargs.pop('dport_number_range_higher_udp')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def ip_acl_ip_access_list_extended_hide_ip_acl_ext_seq_dscp(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list")
ip = ET.SubElement(ip_acl, "ip")
access_list = ET.SubElement(ip, "access-list")
extended = ET.SubElement(access_list, "extended")
name_key = ET.SubElement(extended, "name")
name_key.text = kwargs.pop('name')
hide_ip_acl_ext = ET.SubElement(extended, "hide-ip-acl-ext")
seq = ET.SubElement(hide_ip_acl_ext, "seq")
seq_id_key = ET.SubElement(seq, "seq-id")
seq_id_key.text = kwargs.pop('seq_id')
dscp = ET.SubElement(seq, "dscp")
dscp.text = kwargs.pop('dscp')
callback = kwargs.pop('callback', self._callback)
return callback(config)
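# The TCP-flag builders below (urg, ack, push, fin, rst, sync) and the
# count/log actions create presence leaves: the element's existence is the
# whole setting, so these methods add the SubElement without assigning .text.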
def ip_acl_ip_access_list_extended_hide_ip_acl_ext_seq_urg(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list")
ip = ET.SubElement(ip_acl, "ip")
access_list = ET.SubElement(ip, "access-list")
extended = ET.SubElement(access_list, "extended")
name_key = ET.SubElement(extended, "name")
name_key.text = kwargs.pop('name')
hide_ip_acl_ext = ET.SubElement(extended, "hide-ip-acl-ext")
seq = ET.SubElement(hide_ip_acl_ext, "seq")
seq_id_key = ET.SubElement(seq, "seq-id")
seq_id_key.text = kwargs.pop('seq_id')
urg = ET.SubElement(seq, "urg")
callback = kwargs.pop('callback', self._callback)
return callback(config)
def ip_acl_ip_access_list_extended_hide_ip_acl_ext_seq_ack(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list")
ip = ET.SubElement(ip_acl, "ip")
access_list = ET.SubElement(ip, "access-list")
extended = ET.SubElement(access_list, "extended")
name_key = ET.SubElement(extended, "name")
name_key.text = kwargs.pop('name')
hide_ip_acl_ext = ET.SubElement(extended, "hide-ip-acl-ext")
seq = ET.SubElement(hide_ip_acl_ext, "seq")
seq_id_key = ET.SubElement(seq, "seq-id")
seq_id_key.text = kwargs.pop('seq_id')
ack = ET.SubElement(seq, "ack")
callback = kwargs.pop('callback', self._callback)
return callback(config)
def ip_acl_ip_access_list_extended_hide_ip_acl_ext_seq_push(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list")
ip = ET.SubElement(ip_acl, "ip")
access_list = ET.SubElement(ip, "access-list")
extended = ET.SubElement(access_list, "extended")
name_key = ET.SubElement(extended, "name")
name_key.text = kwargs.pop('name')
hide_ip_acl_ext = ET.SubElement(extended, "hide-ip-acl-ext")
seq = ET.SubElement(hide_ip_acl_ext, "seq")
seq_id_key = ET.SubElement(seq, "seq-id")
seq_id_key.text = kwargs.pop('seq_id')
push = ET.SubElement(seq, "push")
callback = kwargs.pop('callback', self._callback)
return callback(config)
def ip_acl_ip_access_list_extended_hide_ip_acl_ext_seq_fin(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list")
ip = ET.SubElement(ip_acl, "ip")
access_list = ET.SubElement(ip, "access-list")
extended = ET.SubElement(access_list, "extended")
name_key = ET.SubElement(extended, "name")
name_key.text = kwargs.pop('name')
hide_ip_acl_ext = ET.SubElement(extended, "hide-ip-acl-ext")
seq = ET.SubElement(hide_ip_acl_ext, "seq")
seq_id_key = ET.SubElement(seq, "seq-id")
seq_id_key.text = kwargs.pop('seq_id')
fin = ET.SubElement(seq, "fin")
callback = kwargs.pop('callback', self._callback)
return callback(config)
def ip_acl_ip_access_list_extended_hide_ip_acl_ext_seq_rst(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list")
ip = ET.SubElement(ip_acl, "ip")
access_list = ET.SubElement(ip, "access-list")
extended = ET.SubElement(access_list, "extended")
name_key = ET.SubElement(extended, "name")
name_key.text = kwargs.pop('name')
hide_ip_acl_ext = ET.SubElement(extended, "hide-ip-acl-ext")
seq = ET.SubElement(hide_ip_acl_ext, "seq")
seq_id_key = ET.SubElement(seq, "seq-id")
seq_id_key.text = kwargs.pop('seq_id')
rst = ET.SubElement(seq, "rst")
callback = kwargs.pop('callback', self._callback)
return callback(config)
def ip_acl_ip_access_list_extended_hide_ip_acl_ext_seq_sync(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list")
ip = ET.SubElement(ip_acl, "ip")
access_list = ET.SubElement(ip, "access-list")
extended = ET.SubElement(access_list, "extended")
name_key = ET.SubElement(extended, "name")
name_key.text = kwargs.pop('name')
hide_ip_acl_ext = ET.SubElement(extended, "hide-ip-acl-ext")
seq = ET.SubElement(hide_ip_acl_ext, "seq")
seq_id_key = ET.SubElement(seq, "seq-id")
seq_id_key.text = kwargs.pop('seq_id')
sync = ET.SubElement(seq, "sync")
callback = kwargs.pop('callback', self._callback)
return callback(config)
def ip_acl_ip_access_list_extended_hide_ip_acl_ext_seq_vlan(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list")
ip = ET.SubElement(ip_acl, "ip")
access_list = ET.SubElement(ip, "access-list")
extended = ET.SubElement(access_list, "extended")
name_key = ET.SubElement(extended, "name")
name_key.text = kwargs.pop('name')
hide_ip_acl_ext = ET.SubElement(extended, "hide-ip-acl-ext")
seq = ET.SubElement(hide_ip_acl_ext, "seq")
seq_id_key = ET.SubElement(seq, "seq-id")
seq_id_key.text = kwargs.pop('seq_id')
vlan = ET.SubElement(seq, "vlan")
vlan.text = kwargs.pop('vlan')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def ip_acl_ip_access_list_extended_hide_ip_acl_ext_seq_count(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list")
ip = ET.SubElement(ip_acl, "ip")
access_list = ET.SubElement(ip, "access-list")
extended = ET.SubElement(access_list, "extended")
name_key = ET.SubElement(extended, "name")
name_key.text = kwargs.pop('name')
hide_ip_acl_ext = ET.SubElement(extended, "hide-ip-acl-ext")
seq = ET.SubElement(hide_ip_acl_ext, "seq")
seq_id_key = ET.SubElement(seq, "seq-id")
seq_id_key.text = kwargs.pop('seq_id')
count = ET.SubElement(seq, "count")
callback = kwargs.pop('callback', self._callback)
return callback(config)
def ip_acl_ip_access_list_extended_hide_ip_acl_ext_seq_log(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list")
ip = ET.SubElement(ip_acl, "ip")
access_list = ET.SubElement(ip, "access-list")
extended = ET.SubElement(access_list, "extended")
name_key = ET.SubElement(extended, "name")
name_key.text = kwargs.pop('name')
hide_ip_acl_ext = ET.SubElement(extended, "hide-ip-acl-ext")
seq = ET.SubElement(hide_ip_acl_ext, "seq")
seq_id_key = ET.SubElement(seq, "seq-id")
seq_id_key.text = kwargs.pop('seq_id')
log = ET.SubElement(seq, "log")
callback = kwargs.pop('callback', self._callback)
return callback(config)
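# From here on the builders target standard IP ACLs, which match on source
# address only; they mirror the extended pattern but nest under
# <standard>/<hide-ip-acl-std>. For example (illustrative values):
#
#     acl.ip_acl_ip_access_list_standard_hide_ip_acl_std_seq_action(
#         name='acl-1', seq_id='10', action='permit')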
def ip_acl_ip_access_list_standard_name(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list")
ip = ET.SubElement(ip_acl, "ip")
access_list = ET.SubElement(ip, "access-list")
standard = ET.SubElement(access_list, "standard")
name = ET.SubElement(standard, "name")
name.text = kwargs.pop('name')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def ip_acl_ip_access_list_standard_hide_ip_acl_std_seq_seq_id(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list")
ip = ET.SubElement(ip_acl, "ip")
access_list = ET.SubElement(ip, "access-list")
standard = ET.SubElement(access_list, "standard")
name_key = ET.SubElement(standard, "name")
name_key.text = kwargs.pop('name')
hide_ip_acl_std = ET.SubElement(standard, "hide-ip-acl-std")
seq = ET.SubElement(hide_ip_acl_std, "seq")
seq_id = ET.SubElement(seq, "seq-id")
seq_id.text = kwargs.pop('seq_id')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def ip_acl_ip_access_list_standard_hide_ip_acl_std_seq_action(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list")
ip = ET.SubElement(ip_acl, "ip")
access_list = ET.SubElement(ip, "access-list")
standard = ET.SubElement(access_list, "standard")
name_key = ET.SubElement(standard, "name")
name_key.text = kwargs.pop('name')
hide_ip_acl_std = ET.SubElement(standard, "hide-ip-acl-std")
seq = ET.SubElement(hide_ip_acl_std, "seq")
seq_id_key = ET.SubElement(seq, "seq-id")
seq_id_key.text = kwargs.pop('seq_id')
action = ET.SubElement(seq, "action")
action.text = kwargs.pop('action')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def ip_acl_ip_access_list_standard_hide_ip_acl_std_seq_src_host_any_sip(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list")
ip = ET.SubElement(ip_acl, "ip")
access_list = ET.SubElement(ip, "access-list")
standard = ET.SubElement(access_list, "standard")
name_key = ET.SubElement(standard, "name")
name_key.text = kwargs.pop('name')
hide_ip_acl_std = ET.SubElement(standard, "hide-ip-acl-std")
seq = ET.SubElement(hide_ip_acl_std, "seq")
seq_id_key = ET.SubElement(seq, "seq-id")
seq_id_key.text = kwargs.pop('seq_id')
src_host_any_sip = ET.SubElement(seq, "src-host-any-sip")
src_host_any_sip.text = kwargs.pop('src_host_any_sip')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def ip_acl_ip_access_list_standard_hide_ip_acl_std_seq_src_host_ip(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list")
ip = ET.SubElement(ip_acl, "ip")
access_list = ET.SubElement(ip, "access-list")
standard = ET.SubElement(access_list, "standard")
name_key = ET.SubElement(standard, "name")
name_key.text = kwargs.pop('name')
hide_ip_acl_std = ET.SubElement(standard, "hide-ip-acl-std")
seq = ET.SubElement(hide_ip_acl_std, "seq")
seq_id_key = ET.SubElement(seq, "seq-id")
seq_id_key.text = kwargs.pop('seq_id')
src_host_ip = ET.SubElement(seq, "src-host-ip")
src_host_ip.text = kwargs.pop('src_host_ip')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def ip_acl_ip_access_list_standard_hide_ip_acl_std_seq_src_mask(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list")
ip = ET.SubElement(ip_acl, "ip")
access_list = ET.SubElement(ip, "access-list")
standard = ET.SubElement(access_list, "standard")
name_key = ET.SubElement(standard, "name")
name_key.text = kwargs.pop('name')
hide_ip_acl_std = ET.SubElement(standard, "hide-ip-acl-std")
seq = ET.SubElement(hide_ip_acl_std, "seq")
seq_id_key = ET.SubElement(seq, "seq-id")
seq_id_key.text = kwargs.pop('seq_id')
src_mask = ET.SubElement(seq, "src-mask")
src_mask.text = kwargs.pop('src_mask')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def ip_acl_ip_access_list_standard_hide_ip_acl_std_seq_count(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list")
ip = ET.SubElement(ip_acl, "ip")
access_list = ET.SubElement(ip, "access-list")
standard = ET.SubElement(access_list, "standard")
name_key = ET.SubElement(standard, "name")
name_key.text = kwargs.pop('name')
hide_ip_acl_std = ET.SubElement(standard, "hide-ip-acl-std")
seq = ET.SubElement(hide_ip_acl_std, "seq")
seq_id_key = ET.SubElement(seq, "seq-id")
seq_id_key.text = kwargs.pop('seq_id')
count = ET.SubElement(seq, "count")
callback = kwargs.pop('callback', self._callback)
return callback(config)
def ip_acl_ip_access_list_standard_hide_ip_acl_std_seq_log(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list")
ip = ET.SubElement(ip_acl, "ip")
access_list = ET.SubElement(ip, "access-list")
standard = ET.SubElement(access_list, "standard")
name_key = ET.SubElement(standard, "name")
name_key.text = kwargs.pop('name')
hide_ip_acl_std = ET.SubElement(standard, "hide-ip-acl-std")
seq = ET.SubElement(hide_ip_acl_std, "seq")
seq_id_key = ET.SubElement(seq, "seq-id")
seq_id_key.text = kwargs.pop('seq_id')
log = ET.SubElement(seq, "log")
callback = kwargs.pop('callback', self._callback)
return callback(config)
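# count and log above are presence actions, so they need only the list keys;
# a hedged sketch with the same hypothetical 'acl' instance as earlier:
#
#     acl.ip_acl_ip_access_list_standard_hide_ip_acl_std_seq_log(
#         name='acl-1', seq_id='10')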
def ip_acl_ip_access_list_extended_hide_ip_acl_ext_seq_dport_number_range_higher_udp(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list")
ip = ET.SubElement(ip_acl, "ip")
access_list = ET.SubElement(ip, "access-list")
extended = ET.SubElement(access_list, "extended")
name_key = ET.SubElement(extended, "name")
name_key.text = kwargs.pop('name')
hide_ip_acl_ext = ET.SubElement(extended, "hide-ip-acl-ext")
seq = ET.SubElement(hide_ip_acl_ext, "seq")
seq_id_key = ET.SubElement(seq, "seq-id")
seq_id_key.text = kwargs.pop('seq_id')
dport_number_range_higher_udp = ET.SubElement(seq, "dport-number-range-higher-udp")
dport_number_range_higher_udp.text = kwargs.pop('dport_number_range_higher_udp')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def ip_acl_ip_access_list_extended_hide_ip_acl_ext_seq_dscp(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list")
ip = ET.SubElement(ip_acl, "ip")
access_list = ET.SubElement(ip, "access-list")
extended = ET.SubElement(access_list, "extended")
name_key = ET.SubElement(extended, "name")
name_key.text = kwargs.pop('name')
hide_ip_acl_ext = ET.SubElement(extended, "hide-ip-acl-ext")
seq = ET.SubElement(hide_ip_acl_ext, "seq")
seq_id_key = ET.SubElement(seq, "seq-id")
seq_id_key.text = kwargs.pop('seq_id')
dscp = ET.SubElement(seq, "dscp")
dscp.text = kwargs.pop('dscp')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def ip_acl_ip_access_list_extended_hide_ip_acl_ext_seq_urg(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list")
ip = ET.SubElement(ip_acl, "ip")
access_list = ET.SubElement(ip, "access-list")
extended = ET.SubElement(access_list, "extended")
name_key = ET.SubElement(extended, "name")
name_key.text = kwargs.pop('name')
hide_ip_acl_ext = ET.SubElement(extended, "hide-ip-acl-ext")
seq = ET.SubElement(hide_ip_acl_ext, "seq")
seq_id_key = ET.SubElement(seq, "seq-id")
seq_id_key.text = kwargs.pop('seq_id')
urg = ET.SubElement(seq, "urg")
callback = kwargs.pop('callback', self._callback)
return callback(config)
def ip_acl_ip_access_list_extended_hide_ip_acl_ext_seq_ack(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list")
ip = ET.SubElement(ip_acl, "ip")
access_list = ET.SubElement(ip, "access-list")
extended = ET.SubElement(access_list, "extended")
name_key = ET.SubElement(extended, "name")
name_key.text = kwargs.pop('name')
hide_ip_acl_ext = ET.SubElement(extended, "hide-ip-acl-ext")
seq = ET.SubElement(hide_ip_acl_ext, "seq")
seq_id_key = ET.SubElement(seq, "seq-id")
seq_id_key.text = kwargs.pop('seq_id')
ack = ET.SubElement(seq, "ack")
callback = kwargs.pop('callback', self._callback)
return callback(config)
def ip_acl_ip_access_list_extended_hide_ip_acl_ext_seq_push(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list")
ip = ET.SubElement(ip_acl, "ip")
access_list = ET.SubElement(ip, "access-list")
extended = ET.SubElement(access_list, "extended")
name_key = ET.SubElement(extended, "name")
name_key.text = kwargs.pop('name')
hide_ip_acl_ext = ET.SubElement(extended, "hide-ip-acl-ext")
seq = ET.SubElement(hide_ip_acl_ext, "seq")
seq_id_key = ET.SubElement(seq, "seq-id")
seq_id_key.text = kwargs.pop('seq_id')
push = ET.SubElement(seq, "push")
callback = kwargs.pop('callback', self._callback)
return callback(config)
def ip_acl_ip_access_list_extended_hide_ip_acl_ext_seq_fin(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list")
ip = ET.SubElement(ip_acl, "ip")
access_list = ET.SubElement(ip, "access-list")
extended = ET.SubElement(access_list, "extended")
name_key = ET.SubElement(extended, "name")
name_key.text = kwargs.pop('name')
hide_ip_acl_ext = ET.SubElement(extended, "hide-ip-acl-ext")
seq = ET.SubElement(hide_ip_acl_ext, "seq")
seq_id_key = ET.SubElement(seq, "seq-id")
seq_id_key.text = kwargs.pop('seq_id')
fin = ET.SubElement(seq, "fin")
callback = kwargs.pop('callback', self._callback)
return callback(config)
def ip_acl_ip_access_list_extended_hide_ip_acl_ext_seq_rst(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list")
ip = ET.SubElement(ip_acl, "ip")
access_list = ET.SubElement(ip, "access-list")
extended = ET.SubElement(access_list, "extended")
name_key = ET.SubElement(extended, "name")
name_key.text = kwargs.pop('name')
hide_ip_acl_ext = ET.SubElement(extended, "hide-ip-acl-ext")
seq = ET.SubElement(hide_ip_acl_ext, "seq")
seq_id_key = ET.SubElement(seq, "seq-id")
seq_id_key.text = kwargs.pop('seq_id')
rst = ET.SubElement(seq, "rst")
callback = kwargs.pop('callback', self._callback)
return callback(config)
def ip_acl_ip_access_list_extended_hide_ip_acl_ext_seq_sync(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list")
ip = ET.SubElement(ip_acl, "ip")
access_list = ET.SubElement(ip, "access-list")
extended = ET.SubElement(access_list, "extended")
name_key = ET.SubElement(extended, "name")
name_key.text = kwargs.pop('name')
hide_ip_acl_ext = ET.SubElement(extended, "hide-ip-acl-ext")
seq = ET.SubElement(hide_ip_acl_ext, "seq")
seq_id_key = ET.SubElement(seq, "seq-id")
seq_id_key.text = kwargs.pop('seq_id')
sync = ET.SubElement(seq, "sync")
callback = kwargs.pop('callback', self._callback)
return callback(config)
def ip_acl_ip_access_list_extended_hide_ip_acl_ext_seq_vlan(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list")
ip = ET.SubElement(ip_acl, "ip")
access_list = ET.SubElement(ip, "access-list")
extended = ET.SubElement(access_list, "extended")
name_key = ET.SubElement(extended, "name")
name_key.text = kwargs.pop('name')
hide_ip_acl_ext = ET.SubElement(extended, "hide-ip-acl-ext")
seq = ET.SubElement(hide_ip_acl_ext, "seq")
seq_id_key = ET.SubElement(seq, "seq-id")
seq_id_key.text = kwargs.pop('seq_id')
vlan = ET.SubElement(seq, "vlan")
vlan.text = kwargs.pop('vlan')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def ip_acl_ip_access_list_extended_hide_ip_acl_ext_seq_count(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list")
ip = ET.SubElement(ip_acl, "ip")
access_list = ET.SubElement(ip, "access-list")
extended = ET.SubElement(access_list, "extended")
name_key = ET.SubElement(extended, "name")
name_key.text = kwargs.pop('name')
hide_ip_acl_ext = ET.SubElement(extended, "hide-ip-acl-ext")
seq = ET.SubElement(hide_ip_acl_ext, "seq")
seq_id_key = ET.SubElement(seq, "seq-id")
seq_id_key.text = kwargs.pop('seq_id')
count = ET.SubElement(seq, "count")
callback = kwargs.pop('callback', self._callback)
return callback(config)
def ip_acl_ip_access_list_extended_hide_ip_acl_ext_seq_log(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list")
ip = ET.SubElement(ip_acl, "ip")
access_list = ET.SubElement(ip, "access-list")
extended = ET.SubElement(access_list, "extended")
name_key = ET.SubElement(extended, "name")
name_key.text = kwargs.pop('name')
hide_ip_acl_ext = ET.SubElement(extended, "hide-ip-acl-ext")
seq = ET.SubElement(hide_ip_acl_ext, "seq")
seq_id_key = ET.SubElement(seq, "seq-id")
seq_id_key.text = kwargs.pop('seq_id')
log = ET.SubElement(seq, "log")
callback = kwargs.pop('callback', self._callback)
return callback(config)
| BRCDcomm/pynos | pynos/versions/ver_7/ver_7_1_0/yang/brocade_ip_access_list.py | Python | apache-2.0 | 93,629 |
def test_is_generator_alias():
    from nose.util import is_generator, isgenerator
    assert is_generator is isgenerator
| DESHRAJ/fjord | vendor/packages/nose/unit_tests/test_issue_064.py | Python | bsd-3-clause | 83 |
# The absolute import feature is required so that we get the root celery
# module rather than `amo.celery`.
from __future__ import absolute_import
from inspect import isclass
from celery.datastructures import AttributeDict
from tower import ugettext_lazy as _
__all__ = ('LOG', 'LOG_BY_ID', 'LOG_KEEP',)
class _LOG(object):
action_class = None
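# Each _LOG subclass below defines one activity-log entry type: `id` is the
# stable integer stored in the database, `format` is the localizable message
# template, and the optional class attributes (keep, admin_event,
# editor_event, review_queue, review_email_user, hide_developer) drive the
# LOG_* lists computed at the bottom of this module.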
class CREATE_ADDON(_LOG):
id = 1
action_class = 'add'
format = _(u'{addon} was created.')
keep = True
class EDIT_PROPERTIES(_LOG):
""" Expects: addon """
id = 2
action_class = 'edit'
format = _(u'{addon} properties edited.')
class EDIT_DESCRIPTIONS(_LOG):
id = 3
action_class = 'edit'
format = _(u'{addon} description edited.')
class EDIT_CATEGORIES(_LOG):
id = 4
action_class = 'edit'
format = _(u'Categories edited for {addon}.')
class ADD_USER_WITH_ROLE(_LOG):
id = 5
action_class = 'add'
format = _(u'{0.name} ({1}) added to {addon}.')
keep = True
class REMOVE_USER_WITH_ROLE(_LOG):
id = 6
action_class = 'delete'
# L10n: {0} is the user being removed, {1} is their role.
format = _(u'{0.name} ({1}) removed from {addon}.')
keep = True
class EDIT_CONTRIBUTIONS(_LOG):
id = 7
action_class = 'edit'
format = _(u'Contributions for {addon}.')
class USER_DISABLE(_LOG):
id = 8
format = _(u'{addon} disabled.')
keep = True
class USER_ENABLE(_LOG):
id = 9
format = _(u'{addon} enabled.')
keep = True
# TODO(davedash): Log these types when pages are present
class SET_PUBLIC_STATS(_LOG):
id = 10
format = _(u'Stats set public for {addon}.')
keep = True
# TODO(davedash): Log these types when pages are present
class UNSET_PUBLIC_STATS(_LOG):
id = 11
format = _(u'{addon} stats set to private.')
keep = True
class CHANGE_STATUS(_LOG):
id = 12
# L10n: {0} is the status
format = _(u'{addon} status changed to {0}.')
keep = True
class ADD_PREVIEW(_LOG):
id = 13
action_class = 'add'
format = _(u'Preview added to {addon}.')
class EDIT_PREVIEW(_LOG):
id = 14
action_class = 'edit'
format = _(u'Preview edited for {addon}.')
class DELETE_PREVIEW(_LOG):
id = 15
action_class = 'delete'
format = _(u'Preview deleted from {addon}.')
class ADD_VERSION(_LOG):
id = 16
action_class = 'add'
format = _(u'{version} added to {addon}.')
keep = True
class EDIT_VERSION(_LOG):
id = 17
action_class = 'edit'
format = _(u'{version} edited for {addon}.')
class DELETE_VERSION(_LOG):
id = 18
action_class = 'delete'
# Note, {0} is a string not a version since the version is deleted.
# L10n: {0} is the version number
format = _(u'Version {0} deleted from {addon}.')
keep = True
class ADD_FILE_TO_VERSION(_LOG):
id = 19
action_class = 'add'
format = _(u'File {0.name} added to {version} of {addon}.')
class DELETE_FILE_FROM_VERSION(_LOG):
"""
Expecting: addon, filename, version
Because the file is being deleted, filename and version
should be strings and not the object.
"""
id = 20
action_class = 'delete'
format = _(u'File {0} deleted from {version} of {addon}.')
class APPROVE_VERSION(_LOG):
id = 21
action_class = 'approve'
format = _(u'{addon} {version} approved.')
short = _(u'Approved')
keep = True
review_email_user = True
review_queue = True
class PRELIMINARY_VERSION(_LOG):
id = 42
action_class = 'approve'
format = _(u'{addon} {version} given preliminary review.')
short = _(u'Preliminarily approved')
keep = True
review_email_user = True
review_queue = True
class REJECT_VERSION(_LOG):
# takes add-on, version, reviewtype
id = 43
action_class = 'reject'
format = _(u'{addon} {version} rejected.')
short = _(u'Rejected')
keep = True
review_email_user = True
review_queue = True
class RETAIN_VERSION(_LOG):
# takes add-on, version, reviewtype
id = 22
format = _(u'{addon} {version} retained.')
short = _(u'Retained')
keep = True
review_email_user = True
review_queue = True
class ESCALATE_VERSION(_LOG):
# takes add-on, version, reviewtype
id = 23
format = _(u'{addon} {version} escalated.')
short = _(u'Escalated')
keep = True
review_email_user = True
review_queue = True
class REQUEST_VERSION(_LOG):
# takes add-on, version, reviewtype
id = 24
format = _(u'{addon} {version} review requested.')
short = _(u'Review requested')
keep = True
review_email_user = True
review_queue = True
class REQUEST_INFORMATION(_LOG):
id = 44
format = _(u'{addon} {version} more information requested.')
short = _(u'More information requested')
keep = True
review_email_user = True
review_queue = True
class REQUEST_SUPER_REVIEW(_LOG):
id = 45
format = _(u'{addon} {version} super review requested.')
short = _(u'Super review requested')
keep = True
review_queue = True
class COMMENT_VERSION(_LOG):
id = 49
format = _(u'Comment on {addon} {version}.')
short = _(u'Comment')
keep = True
review_queue = True
hide_developer = True
class ADD_TAG(_LOG):
id = 25
action_class = 'tag'
format = _(u'{tag} added to {addon}.')
class REMOVE_TAG(_LOG):
id = 26
action_class = 'tag'
format = _(u'{tag} removed from {addon}.')
class ADD_TO_COLLECTION(_LOG):
id = 27
action_class = 'collection'
format = _(u'{addon} added to {collection}.')
class REMOVE_FROM_COLLECTION(_LOG):
id = 28
action_class = 'collection'
format = _(u'{addon} removed from {collection}.')
class ADD_REVIEW(_LOG):
id = 29
action_class = 'review'
format = _(u'{review} for {addon} written.')
# TODO(davedash): Add these when we do the admin site
class ADD_RECOMMENDED_CATEGORY(_LOG):
id = 31
action_class = 'edit'
# L10n: {0} is a category name.
format = _(u'{addon} featured in {0}.')
class REMOVE_RECOMMENDED_CATEGORY(_LOG):
id = 32
action_class = 'edit'
# L10n: {0} is a category name.
format = _(u'{addon} no longer featured in {0}.')
class ADD_RECOMMENDED(_LOG):
id = 33
format = _(u'{addon} is now featured.')
keep = True
class REMOVE_RECOMMENDED(_LOG):
id = 34
format = _(u'{addon} is no longer featured.')
keep = True
class ADD_APPVERSION(_LOG):
id = 35
action_class = 'add'
# L10n: {0} is the application, {1} is the version of the app
format = _(u'{0} {1} added.')
class CHANGE_USER_WITH_ROLE(_LOG):
""" Expects: author.user, role, addon """
id = 36
# L10n: {0} is a user, {1} is their role
format = _(u'{0.name} role changed to {1} for {addon}.')
keep = True
class CHANGE_LICENSE(_LOG):
""" Expects: license, addon """
id = 37
action_class = 'edit'
format = _(u'{addon} is now licensed under {0.name}.')
class CHANGE_POLICY(_LOG):
id = 38
action_class = 'edit'
format = _(u'{addon} policy changed.')
class CHANGE_ICON(_LOG):
id = 39
action_class = 'edit'
format = _(u'{addon} icon changed.')
class APPROVE_REVIEW(_LOG):
id = 40
action_class = 'approve'
format = _(u'{review} for {addon} approved.')
editor_format = _(u'{user} approved {review} for {addon}.')
keep = True
editor_event = True
class DELETE_REVIEW(_LOG):
"""Requires review.id and add-on objects."""
id = 41
action_class = 'review'
format = _(u'Review {review} for {addon} deleted.')
editor_format = _(u'{user} deleted {review} for {addon}.')
keep = True
editor_event = True
class MAX_APPVERSION_UPDATED(_LOG):
id = 46
format = _(u'Application max version for {version} updated.')
class BULK_VALIDATION_EMAILED(_LOG):
id = 47
format = _(u'Authors emailed about compatibility of {version}.')
class BULK_VALIDATION_USER_EMAILED(_LOG):
id = 130
format = _(u'Email sent to Author about add-on compatibility.')
class CHANGE_PASSWORD(_LOG):
id = 48
format = _(u'Password changed.')
class PAYPAL_FAILED(_LOG):
id = 51
format = _(u'{addon} failed checks with PayPal.')
class MANIFEST_UPDATED(_LOG):
id = 52
format = _(u'{addon} manifest updated.')
class APPROVE_VERSION_WAITING(_LOG):
id = 53
action_class = 'approve'
format = _(u'{addon} {version} approved but waiting to be made public.')
short = _(u'Approved but waiting')
keep = True
review_email_user = True
review_queue = True
class PURCHASE_ADDON(_LOG):
id = 54
format = _(u'{addon} purchased.')
class INSTALL_ADDON(_LOG):
id = 55
format = _(u'{addon} installed.')
class USER_EDITED(_LOG):
id = 60
format = _(u'Account updated.')
class ESCALATION_CLEARED(_LOG):
id = 66
format = _(u'Escalation cleared for {addon}.')
short = _(u'Escalation cleared')
keep = True
review_queue = True
class APP_DISABLED(_LOG):
id = 67
format = _(u'{addon} disabled.')
short = _(u'App disabled')
keep = True
review_queue = True
class ESCALATED_HIGH_ABUSE(_LOG):
id = 68
format = _(u'{addon} escalated because of high number of abuse reports.')
short = _(u'High Abuse Reports')
keep = True
review_queue = True
class ESCALATE_MANUAL(_LOG):
id = 73
format = _(u'{addon} escalated by reviewer.')
short = _(u'Reviewer escalation')
keep = True
review_queue = True
# TODO(robhudson): Escalation log for editor escalation..
class VIDEO_ERROR(_LOG):
id = 74
format = _(u'Video removed from {addon} because of a problem with '
u'the video. ')
short = _(u'Video removed')
class REREVIEW_DEVICES_ADDED(_LOG):
id = 75
format = _(u'{addon} re-review because of new device(s) added.')
short = _(u'Device(s) Added')
keep = True
review_queue = True
class REVIEW_DEVICE_OVERRIDE(_LOG):
id = 76
format = _(u'{addon} device support manually changed by reviewer.')
short = _(u'Device(s) Changed by Reviewer')
keep = True
review_queue = True
class CUSTOM_TEXT(_LOG):
id = 98
format = '{0}'
class CUSTOM_HTML(_LOG):
id = 99
format = '{0}'
class OBJECT_ADDED(_LOG):
id = 100
format = _(u'Created: {0}.')
admin_event = True
class OBJECT_EDITED(_LOG):
id = 101
format = _(u'Edited field: {2} set to: {0}.')
admin_event = True
class OBJECT_DELETED(_LOG):
id = 102
format = _(u'Deleted: {1}.')
admin_event = True
class ADMIN_USER_EDITED(_LOG):
id = 103
format = _(u'User {user} edited, reason: {1}')
admin_event = True
class ADMIN_USER_ANONYMIZED(_LOG):
id = 104
format = _(u'User {user} anonymized.')
admin_event = True
class ADMIN_USER_RESTRICTED(_LOG):
id = 105
format = _(u'User {user} restricted.')
admin_event = True
class ADMIN_VIEWED_LOG(_LOG):
id = 106
format = _(u'Admin {0} viewed activity log for {user}.')
admin_event = True
class EDIT_REVIEW(_LOG):
id = 107
action_class = 'review'
format = _(u'{review} for {addon} updated.')
class THEME_REVIEW(_LOG):
id = 108
action_class = 'review'
format = _(u'{addon} reviewed.')
class GROUP_USER_ADDED(_LOG):
id = 120
action_class = 'access'
format = _(u'User {0.name} added to {group}.')
keep = True
admin_event = True
class GROUP_USER_REMOVED(_LOG):
id = 121
action_class = 'access'
format = _(u'User {0.name} removed from {group}.')
keep = True
admin_event = True
class REVIEW_FEATURES_OVERRIDE(_LOG):
id = 122
format = _(u'{addon} minimum requirements manually changed by reviewer.')
short = _(u'Requirements Changed by Reviewer')
keep = True
review_queue = True
class REREVIEW_FEATURES_CHANGED(_LOG):
id = 123
format = _(u'{addon} minimum requirements manually changed.')
short = _(u'Requirements Changed')
keep = True
review_queue = True
class CHANGE_VERSION_STATUS(_LOG):
id = 124
# L10n: {0} is the status
format = _(u'{version} status changed to {0}.')
keep = True
class DELETE_USER_LOOKUP(_LOG):
id = 125
# L10n: {0} is the status
format = _(u'User {0.name} {0.id} deleted via lookup tool.')
keep = True
class CONTENT_RATING_TO_ADULT(_LOG):
id = 126
format = _('{addon} content rating changed to Adult.')
review_queue = True
class CONTENT_RATING_CHANGED(_LOG):
id = 127
format = _('{addon} content rating changed.')
class ADDON_UNLISTED(_LOG):
id = 128
format = _(u'{addon} unlisted.')
keep = True
class BETA_SIGNED_VALIDATION_PASSED(_LOG):
id = 131
format = _(u'{file} was signed.')
keep = True
class BETA_SIGNED_VALIDATION_FAILED(_LOG):
id = 132
format = _(u'{file} was signed.')
keep = True
class DELETE_ADDON(_LOG):
id = 133
action_class = 'delete'
    # L10n: {0} is the add-on ID, {1} is the add-on GUID.
format = _(u'Addon id {0} with GUID {1} has been deleted')
keep = True
LOGS = [x for x in vars().values()
if isclass(x) and issubclass(x, _LOG) and x != _LOG]
# Make sure there's no duplicate IDs.
assert len(LOGS) == len(set(log.id for log in LOGS))
LOG_BY_ID = dict((l.id, l) for l in LOGS)
LOG = AttributeDict((l.__name__, l) for l in LOGS)
LOG_ADMINS = [l.id for l in LOGS if hasattr(l, 'admin_event')]
LOG_KEEP = [l.id for l in LOGS if hasattr(l, 'keep')]
LOG_EDITORS = [l.id for l in LOGS if hasattr(l, 'editor_event')]
LOG_REVIEW_QUEUE = [l.id for l in LOGS if hasattr(l, 'review_queue')]
# Is the user emailed the message?
LOG_REVIEW_EMAIL_USER = [l.id for l in LOGS if hasattr(l, 'review_email_user')]
# Logs *not* to show to the developer.
LOG_HIDE_DEVELOPER = [l.id for l in LOGS
if (getattr(l, 'hide_developer', False)
or l.id in LOG_ADMINS)]
def log(action, *args, **kw):
"""
e.g. amo.log(amo.LOG.CREATE_ADDON, []),
amo.log(amo.LOG.ADD_FILE_TO_VERSION, file, version)
"""
from access.models import Group
from addons.models import Addon
from amo import get_user, logger_log
from devhub.models import (ActivityLog, AddonLog, CommentLog, GroupLog,
UserLog, VersionLog)
from users.models import UserProfile
from versions.models import Version
user = kw.get('user', get_user())
if not user:
logger_log.warning('Activity log called with no user: %s' % action.id)
return
al = ActivityLog(user=user, action=action.id)
al.arguments = args
if 'details' in kw:
al.details = kw['details']
al.save()
if 'details' in kw and 'comments' in al.details:
CommentLog(comments=al.details['comments'], activity_log=al).save()
# TODO(davedash): post-remora this may not be necessary.
if 'created' in kw:
al.created = kw['created']
# Double save necessary since django resets the created date on save.
al.save()
for arg in args:
if isinstance(arg, tuple):
if arg[0] == Addon:
AddonLog(addon_id=arg[1], activity_log=al).save()
elif arg[0] == Version:
VersionLog(version_id=arg[1], activity_log=al).save()
elif arg[0] == UserProfile:
UserLog(user_id=arg[1], activity_log=al).save()
elif arg[0] == Group:
GroupLog(group_id=arg[1], activity_log=al).save()
elif isinstance(arg, Addon):
AddonLog(addon=arg, activity_log=al).save()
elif isinstance(arg, Version):
VersionLog(version=arg, activity_log=al).save()
elif isinstance(arg, UserProfile):
# Index by any user who is mentioned as an argument.
UserLog(activity_log=al, user=arg).save()
elif isinstance(arg, Group):
GroupLog(group=arg, activity_log=al).save()
# Index by every user
UserLog(activity_log=al, user=user).save()
return al
| muffinresearch/addons-server | apps/amo/log.py | Python | bsd-3-clause | 16,053 |
"""test a warning is triggered when using for a lists comprehension variable"""
__revision__ = 'yo'
TEST_LC = [C for C in __revision__ if C.isalpha()]
print C # WARN
C = 4
print C # this one shouldn't trigger any warning
B = [B for B in __revision__ if B.isalpha()]
print B # nor this one
for var1, var2 in TEST_LC:
var1 = var2 + 4
print var1 # WARN
for note in __revision__:
note.something()
for line in __revision__:
for note in line:
A = note.anotherthing()
for x in []:
pass
for x in range(3):
print (lambda : x)() # OK
| dbbhattacharya/kitsune | vendor/packages/pylint/test/input/func_use_for_or_listcomp_var.py | Python | bsd-3-clause | 560 |
# Copyright 2017 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import py_utils
from telemetry import story as story_module
from telemetry.page import page as page_module
from telemetry.page import shared_page_state
class LeakDetectionSharedState(shared_page_state.SharedDesktopPageState):
def ShouldReuseBrowserForAllStoryRuns(self):
return True
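# Reusing one browser across all story runs keeps browser startup out of the
# measurements; each story below still opens and closes its own tab, so page
# state should not carry over between runs.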
class LeakDetectionPage(page_module.Page):
def __init__(self, url, page_set, name=''):
super(LeakDetectionPage, self).__init__(
url=url, page_set=page_set, name=name,
shared_page_state_class=LeakDetectionSharedState)
def RunNavigateSteps(self, action_runner):
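    # Memory is sampled on about:blank both before and after visiting the
    # story URL, in a tab created just for this story, so the two dumps
    # bracket exactly one navigation.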
tabs = action_runner.tab.browser.tabs
new_tab = tabs.New()
new_tab.action_runner.Navigate('about:blank')
new_tab.action_runner.PrepareForLeakDetection()
new_tab.action_runner.MeasureMemory()
new_tab.action_runner.Navigate(self.url)
self._WaitForPageLoadToComplete(new_tab.action_runner)
new_tab.action_runner.Navigate('about:blank')
new_tab.action_runner.PrepareForLeakDetection()
new_tab.action_runner.MeasureMemory()
new_tab.Close()
def _WaitForPageLoadToComplete(self, action_runner):
py_utils.WaitFor(action_runner.tab.HasReachedQuiescence, timeout=30)
# Some websites have a script that loads resources continuously, in which case
# HasReachedQuiescence is never reached. This class instead waits for the
# document ready state to become complete, so those pages do not time out.
class ResourceLoadingLeakDetectionPage(LeakDetectionPage):
def _WaitForPageLoadToComplete(self, action_runner):
action_runner.tab.WaitForDocumentReadyStateToBeComplete()
class LeakDetectionStorySet(story_module.StorySet):
def __init__(self):
super(LeakDetectionStorySet, self).__init__(
archive_data_file='data/leak_detection.json',
cloud_storage_bucket=story_module.PARTNER_BUCKET)
urls_list = [
# Alexa top websites
'https://www.google.com',
'https://www.youtube.com',
'https://www.facebook.com',
'https://www.baidu.com',
'https://www.wikipedia.org',
'https://world.taobao.com/',
'https://www.tmall.com/',
'http://www.amazon.com',
'http://www.twitter.com',
'https://www.instagram.com/',
'http://www.jd.com/',
'https://vk.com/',
'https://outlook.live.com',
'https://www.reddit.com/',
'https://weibo.com/',
'https://www.sina.com.cn/',
'https://www.360.cn/',
'https://yandex.ru/',
'https://www.blogger.com/',
'https://www.netflix.com/',
'https://www.pornhub.com/',
'https://www.linkedin.com/',
'https://www.yahoo.co.jp/',
'https://www.csdn.net/',
'https://www.alipay.com/',
'https://www.twitch.tv/',
# TODO(keishi): Memory dump fails flakily crbug.com/963273
#'https://www.ebay.com/',
# TODO(keishi): Memory dump fails flakily crbug.com/963273
#'https://www.microsoft.com/',
# TODO(keishi): Memory dump fails flakily crbug.com/963273
#'https://www.xvideos.com/',
'https://mail.ru/',
'https://www.bing.com/',
'http://www.wikia.com/',
'https://www.office.com/',
'https://www.imdb.com/',
'https://www.aliexpress.com/',
'https://www.msn.com/',
'https://news.google.com/',
'https://www.theguardian.com/',
'https://www.indiatimes.com/',
# TODO(keishi): Memory dump fails flakily crbug.com/963273
#'http://www.foxnews.com/',
'https://weather.com/',
'https://www.shutterstock.com/',
'https://docs.google.com/',
'https://wordpress.com/',
# TODO(yuzus): This test crashes.
# 'https://www.apple.com/',
'https://play.google.com/store',
'https://www.dropbox.com/',
'https://soundcloud.com/',
'https://vimeo.com/',
'https://www.slideshare.net/',
'https://www.mediafire.com/',
'https://www.etsy.com/',
'https://www.ikea.com/',
'https://www.bestbuy.com/',
'https://www.homedepot.com/',
# TODO(keishi): Memory dump fails flakily crbug.com/963273
#'https://www.target.com/',
'https://www.booking.com/',
'https://www.tripadvisor.com/',
'https://9gag.com/',
'https://www.expedia.com/',
'https://www.roblox.com/',
'https://www.gamespot.com/',
'https://www.blizzard.com',
# TODO(keishi): Memory dump fails flakily crbug.com/963273
#'https://ign.com/',
'https://www.yelp.com/',
# Times out waiting for HasReachedQuiescence - crbug.com/927427
# 'https://gizmodo.com/',
'https://www.gsmarena.com/',
'https://www.theverge.com/',
'https://www.nlm.nih.gov/',
'https://archive.org/',
'https://www.udemy.com/',
'https://answers.yahoo.com/',
# TODO(crbug.com/985552): Memory dump fails flakily.
# 'https://www.goodreads.com/',
'https://www.cricbuzz.com/',
'http://www.goal.com/',
'http://siteadvisor.com/',
'https://www.patreon.com/',
'https://www.jw.org/',
'http://europa.eu/',
'https://translate.google.com/',
'https://www.epicgames.com/',
'http://www.reverso.net/',
'https://play.na.leagueoflegends.com/',
'https://www.thesaurus.com/',
'https://www.weebly.com/',
'https://www.deviantart.com/',
'https://www.scribd.com/',
'https://www.hulu.com/',
'https://www.xfinity.com/',
# India Alexa top websites
'https://porn555.com/',
'https://www.onlinesbi.com/',
'https://www.flipkart.com/',
'https://www.hotstar.com/',
'https://www.incometaxindiaefiling.gov.in/',
'https://stackoverflow.com/',
# TODO(crbug.com/1005035) Memory dump fails flakily.
# 'https://www.irctc.co.in/nget/',
'https://www.hdfcbank.com/',
'https://www.whatsapp.com/',
'https://uidai.gov.in/',
'https://billdesk.com/',
'https://www.icicibank.com/',
# US Alexa top websites
'https://imgur.com/',
'https://www.craigslist.org/',
'https://www.chase.com/',
# TODO(892352): tumblr started timing out due to a catapult roll. See
# https://crbug.com/892352
# 'https://www.tumblr.com/',
'https://www.paypal.com/',
# TODO(yuzus): espn.com is flaky. https://crbug.com/959796
#'http://www.espn.com/',
'https://edition.cnn.com/',
'https://www.pinterest.com/',
# TODO(keishi): Memory dump fails flakily crbug.com/963273
#'https://www.nytimes.com/',
'https://github.com/',
'https://www.salesforce.com/',
# Japan Alexa top websites
'https://www.rakuten.co.jp/',
'http://www.nicovideo.jp/',
'https://fc2.com/',
'https://ameblo.jp/',
'http://kakaku.com/',
'https://www.goo.ne.jp/',
'https://www.pixiv.net/',
# websites which were found to be leaking in the past
'https://www.prezi.com',
# TODO(keishi): Memory dump fails flakily crbug.com/963273
#'http://www.time.com',
'http://www.cheapoair.com',
'http://www.onlinedown.net',
'http://www.dailypost.ng',
'http://www.aljazeera.net',
'http://www.googleapps.com',
'http://www.airbnb.ch',
'http://www.livedoor.jp',
'http://www.blu-ray.com',
# TODO(953195): Test times out.
# 'http://www.block.io',
'http://www.hockeybuzz.com',
'http://www.silverpop.com',
'http://www.ansa.it',
'http://www.gulfair.com',
'http://www.nusatrip.com',
'http://www.samsung-fun.ru',
'http://www.opentable.com',
'http://www.magnetmail.net',
'http://zzz.com.ua',
'http://a-rakumo.appspot.com',
'http://www.sakurafile.com',
'http://www.psiexams.com',
'http://www.contentful.com',
'http://www.estibot.com',
'http://www.mbs.de',
'http://www.zhengjie.com',
'http://www.sjp.pl',
'http://www.mastodon.social',
'http://www.horairetrain.net',
'http://www.torrentzeu.to',
'http://www.inbank.it',
'http://www.gradpoint.com',
'http://www.mail.bg',
'http://www.aaannunci.it',
'http://www.leandomainsearch.com',
'http://www.wpjam.com',
'http://www.nigma.ru',
'http://www.do-search.com',
'http://www.omniboxes.com',
'http://whu.edu.cn',
'http://support.wordpress.com',
'http://www.webwebweb.com',
'http://www.sick.com',
'http://www.iowacconline.com',
'http://hdu.edu.cn',
'http://www.register.com',
'http://www.careesma.in',
'http://www.bestdic.ir',
'http://www.privacyassistant.net',
'http://www.sklavenzentrale.com',
'http://www.podbay.fm',
'http://www.coco.fr',
'http://www.skipaas.com',
'http://www.chatword.org',
'http://www.ezcardinfo.com',
'http://www.daydao.com',
'http://www.expediapartnercentral.com',
'http://www.22find.com',
'http://www.e-shop.gr',
'http://www.indeed.com',
'http://www.highwaybus.com',
'http://www.pingpang.info',
'http://www.besgold.com',
'http://www.arabam.com',
'http://makfax.com.mk',
'http://game.co.za',
'http://www.savaari.com',
'http://www.railsguides.jp',
]
resource_loading_urls_list = [
'https://www.hotels.com/',
'https://www.livejournal.com/',
# TODO(keishi): Memory dump fails flakily crbug.com/963273
#'https://www.yahoo.com',
'http://www.quora.com',
'https://www.macys.com',
'http://infomoney.com.br',
'http://www.listindiario.com',
'https://www.engadget.com/',
'https://www.sohu.com/',
'http://www.qq.com',
'http://www.benzworld.org',
'http://www.520mojing.com',
]
for url in urls_list:
self.AddStory(LeakDetectionPage(url, self, url))
for url in resource_loading_urls_list:
self.AddStory(ResourceLoadingLeakDetectionPage(url, self, url))
| chromium/chromium | tools/perf/contrib/leak_detection/page_sets.py | Python | bsd-3-clause | 10,066 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Methods for working with cecog
Copyright 2010 University of Dundee, Inc. All rights reserved.
Use is subject to license terms supplied in LICENSE.txt
"""
import os
import re
import sys
from omero.cli import BaseControl, CLI
import omero
import omero.constants
from omero.rtypes import rstring
class CecogControl(BaseControl):
"""CeCog integration plugin.
Provides actions for prepairing data and otherwise integrating with Cecog. See
the Run_Cecog_4.1.py script.
"""
# [MetaMorph_PlateScanPackage]
# regex_subdirectories = re.compile('(?=[^_]).*?(?P<D>\d+).*?')
# regex_position = re.compile('P(?P<P>.+?)_')
# continuous_frames = 1
regex_token = re.compile(r'(?P<Token>.+)\.')
regex_time = re.compile(r'T(?P<T>\d+)')
regex_channel = re.compile(r'_C(?P<C>.+?)(_|$)')
regex_zslice = re.compile(r'_Z(?P<Z>\d+)')
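    # These pull T/C/Z indices out of MetaMorph-style file names; e.g. the
    # docstring example 'tubulin_P0037_T00005_Cgfp_Z1_S1.tiff' yields
    # T=00005, C='gfp', Z=1.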
def _configure(self, parser):
sub = parser.sub()
merge = parser.add(sub, self.merge, self.merge.__doc__)
merge.add_argument("path", help="Path to image files")
rois = parser.add(sub, self.rois, self.rois.__doc__)
rois.add_argument(
"-f", "--file", required=True, help="Details file to be parsed")
rois.add_argument(
"-i", "--image", required=True,
help="Image id which should have ids attached")
for x in (merge, rois):
x.add_login_arguments()
#
# Public methods
#
def merge(self, args):
"""Uses PIL to read multiple planes from a local folder.
Planes are combined and uploaded to OMERO as new images with additional T, C,
Z dimensions.
It should be run as a local script (not via scripting service) in order that
it has access to the local user's file system. It therefore needs EMAN2 or
PIL installed locally.
Example usage:
$ bin/omero cecog merge /Applications/CecogPackage/Data/Demo_data/0037/
Since this dir does not contain folders, this will upload images in '0037'
into a Dataset called Demo_data in a Project called 'Data'.
$ bin/omero cecog merge /Applications/CecogPackage/Data/Demo_data/
Since this dir does contain folders, this will look for images in all
subdirectories of 'Demo_data' and upload images into a Dataset called
Demo_data in a Project called 'Data'.
Images will be combined in Z, C and T according to the \
MetaMorph_PlateScanPackage naming convention.
E.g. tubulin_P0037_T00005_Cgfp_Z1_S1.tiff is Point 37, Timepoint 5, Channel \
gfp, Z 1. S?
see \
/Applications/CecogPackage/CecogAnalyzer.app/Contents/Resources/resources/\
naming_schemes.conf
"""
"""
Processes the command args, makes project and dataset then calls
uploadDirAsImages() to process and
upload the images to OMERO.
"""
from omero.rtypes import unwrap
from omero.util.script_utils import uploadDirAsImages
path = args.path
client = self.ctx.conn(args)
queryService = client.sf.getQueryService()
updateService = client.sf.getUpdateService()
pixelsService = client.sf.getPixelsService()
# if we don't have any folders in the 'dir' E.g.
# CecogPackage/Data/Demo_data/0037/
# then 'Demo_data' becomes a dataset
subDirs = []
for f in os.listdir(path):
fullpath = path + f
# process folders in root dir:
if os.path.isdir(fullpath):
subDirs.append(fullpath)
# get the dataset name and project name from path
if len(subDirs) == 0:
p = path[:-1] # will remove the last folder
p = os.path.dirname(p)
else:
if os.path.basename(path) == "":
p = path[:-1] # remove slash
datasetName = os.path.basename(p) # e.g. Demo_data
p = p[:-1]
p = os.path.dirname(p)
projectName = os.path.basename(p) # e.g. Data
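        # e.g. .../Data/Demo_data/0037/ gives dataset 'Demo_data' and
        # project 'Data', matching the examples in the docstring above.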
self.ctx.err("Putting images in Project: %s Dataset: %s"
% (projectName, datasetName))
# create dataset
dataset = omero.model.DatasetI()
dataset.name = rstring(datasetName)
dataset = updateService.saveAndReturnObject(dataset)
# create project
project = omero.model.ProjectI()
project.name = rstring(projectName)
project = updateService.saveAndReturnObject(project)
# put dataset in project
link = omero.model.ProjectDatasetLinkI()
link.parent = omero.model.ProjectI(project.id.val, False)
link.child = omero.model.DatasetI(dataset.id.val, False)
updateService.saveAndReturnObject(link)
if len(subDirs) > 0:
for subDir in subDirs:
self.ctx.err("Processing images in %s" % subDir)
rv = uploadDirAsImages(client.sf, queryService, updateService,
pixelsService, subDir, dataset)
self.ctx.out("%s" % unwrap(rv))
# if there are no sub-directories, just put all the images in the dir
else:
self.ctx.err("Processing images in %s" % path)
rv = uploadDirAsImages(client.sf, queryService, updateService,
pixelsService, path, dataset)
self.ctx.out("%s" % unwrap(rv))
def rois(self, args):
"""Parses an object_details text file, as generated by CeCog Analyzer
and saves the data as ROIs on an Image in OMERO.
Text file is of the form:
frame objID classLabel className centerX centerY mean sd
1 10 6 lateana 1119 41 76.8253796095 \
54.9305640673
Example usage:
bin/omero cecog rois -f \
Data/Demo_output/analyzed/0037/statistics/P0037__object_details.txt -i 502
"""
"""
Processes the command args, parses the object_details.txt file and
creates ROIs on the image specified in OMERO
"""
from omero.util.script_utils import uploadCecogObjectDetails
filePath = args.file
imageId = args.image
if not os.path.exists(filePath):
            self.ctx.die(654, "Could not find the object_details file at %s"
% filePath)
client = self.ctx.conn(args)
updateService = client.sf.getUpdateService()
ids = uploadCecogObjectDetails(updateService, imageId, filePath)
self.ctx.out("Rois created: %s" % len(ids))
try:
register("cecog", CecogControl, CecogControl.__doc__)
except NameError:
if __name__ == "__main__":
cli = CLI()
cli.register("cecog", CecogControl, CecogControl.__doc__)
cli.invoke(sys.argv[1:])
| dominikl/openmicroscopy | components/tools/OmeroPy/src/omero/plugins/cecog.py | Python | gpl-2.0 | 6,684 |
#! /usr/bin/env python
# encoding: utf-8
# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
import re
from waflib import Utils,Task,TaskGen,Logs
from waflib.TaskGen import feature,before_method,after_method,extension
from waflib.Configure import conf
INC_REGEX="""(?:^|['">]\s*;)\s*(?:|#\s*)INCLUDE\s+(?:\w+_)?[<"'](.+?)(?=["'>])"""
USE_REGEX="""(?:^|;)\s*USE(?:\s+|(?:(?:\s*,\s*(?:NON_)?INTRINSIC)?\s*::))\s*(\w+)"""
MOD_REGEX="""(?:^|;)\s*MODULE(?!\s*PROCEDURE)(?:\s+|(?:(?:\s*,\s*(?:NON_)?INTRINSIC)?\s*::))\s*(\w+)"""
re_inc=re.compile(INC_REGEX,re.I)
re_use=re.compile(USE_REGEX,re.I)
re_mod=re.compile(MOD_REGEX,re.I)
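# Illustrative Fortran lines each pattern is meant to catch:
#   INCLUDE 'mpif.h' -> re_inc captures "mpif.h"
#   USE mod_kinds -> re_use captures "mod_kinds"
#   MODULE mod_kinds -> re_mod captures "mod_kinds"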
class fortran_parser(object):
def __init__(self,incpaths):
self.seen=[]
self.nodes=[]
self.names=[]
self.incpaths=incpaths
def find_deps(self,node):
txt=node.read()
incs=[]
uses=[]
mods=[]
for line in txt.splitlines():
m=re_inc.search(line)
if m:
incs.append(m.group(1))
m=re_use.search(line)
if m:
uses.append(m.group(1))
m=re_mod.search(line)
if m:
mods.append(m.group(1))
return(incs,uses,mods)
def start(self,node):
self.waiting=[node]
while self.waiting:
nd=self.waiting.pop(0)
self.iter(nd)
def iter(self,node):
path=node.abspath()
incs,uses,mods=self.find_deps(node)
for x in incs:
if x in self.seen:
continue
self.seen.append(x)
self.tryfind_header(x)
for x in uses:
name="USE@%s"%x
if not name in self.names:
self.names.append(name)
for x in mods:
name="MOD@%s"%x
if not name in self.names:
self.names.append(name)
def tryfind_header(self,filename):
found=None
for n in self.incpaths:
found=n.find_resource(filename)
if found:
self.nodes.append(found)
self.waiting.append(found)
break
if not found:
if not filename in self.names:
self.names.append(filename)
| asljivo1/802.11ah-ns3 | ns-3/.waf-1.8.12-f00e5b53f6bbeab1384a38c9cc5d51f7/waflib/Tools/fc_scan.py | Python | gpl-2.0 | 1,859 |
#
# Copyright (C) 2000-2005 by Yasushi Saito (yasushi.saito@gmail.com)
#
# Jockey is free software; you can redistribute it and/or modify it
# under the terms of the GNU General Public License as published by the
# Free Software Foundation; either version 2, or (at your option) any
# later version.
#
# Jockey is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
# for more details.
#
import tick_mark
import line_style
import pychart_util
import error_bar
import chart_object
import legend
import object_set
import line_plot_doc
import theme
from pychart_types import *
from types import *
default_width = 1.2
line_style_itr = None
_keys = {
'data' : (AnyType, None, pychart_util.data_desc),
'label': (StringType, '???', pychart_util.label_desc),
'data_label_offset': (CoordType, (0, 5),
"""The location of data labels relative to the sample point. Meaningful only when data_label_format != None."""),
'data_label_format': (FormatType, None,
"""The format string for the label printed
beside a sample point.
It can be a `printf' style format string, or
a two-parameter function that takes the (x, y)
values and returns a string. """
+ pychart_util.string_desc),
'xcol' : (IntType, 0, pychart_util.xcol_desc),
'ycol': (IntType, 1, pychart_util.ycol_desc),
'y_error_minus_col': (IntType, 2,
"""The column (within "data") from which the depth of the errorbar is extracted. Meaningful only when error_bar != None. <<error_bar>>"""),
'y_error_plus_col': (IntType, -1,
"""The column (within "data") from which the height of the errorbar is extracted. Meaningful only when error_bar != None. <<error_bar>>"""),
'y_qerror_minus_col': (IntType, -1, '<<error_bar>>'),
'y_qerror_plus_col': (IntType, -1, '<<error_bar>>'),
'line_style': (line_style.T, lambda: line_style_itr.next(), pychart_util.line_desc,
"By default, a style is picked from standard styles round-robin. <<line_style>>"),
'tick_mark': (tick_mark.T, None, pychart_util.tick_mark_desc),
'error_bar': (error_bar.T, None,
'The style of the error bar. <<error_bar>>'),
}
class T(chart_object.T):
__doc__ = line_plot_doc.doc
keys = _keys
def check_integrity(self):
assert chart_object.T.check_integrity(self)
##AUTOMATICALLY GENERATED
##END AUTOMATICALLY GENERATED
def get_data_range(self, which):
if which == 'X':
return pychart_util.get_data_range(self.data, self.xcol)
else:
return pychart_util.get_data_range(self.data, self.ycol)
def get_legend_entry(self):
if self.label:
line_style = self.line_style
if not line_style and self.error_bar:
line_style = getattr(self.error_bar, 'line_style', None) or \
getattr(self.error_bar, 'hline_style', None) or \
getattr(self.error_bar, 'vline_style', None)
if not line_style:
raise Exception, 'Line plot has label, but an empty line style and error bar.'
return legend.Entry(line_style=line_style,
tick_mark=self.tick_mark,
fill_style=None,
label=self.label)
return None
def draw(self, ar, can):
# Draw the line
clipbox = theme.adjust_bounding_box([ar.loc[0], ar.loc[1],
ar.loc[0] + ar.size[0],
ar.loc[1] + ar.size[1]]);
can.clip(clipbox[0],clipbox[1],clipbox[2],clipbox[3])
if self.line_style:
points = []
for pair in self.data:
yval = pychart_util.get_sample_val(pair, self.ycol)
xval = pair[self.xcol]
if None not in (xval, yval):
points.append((ar.x_pos(xval), ar.y_pos(yval)))
can.lines(self.line_style, points)
can.endclip()
# Draw tick marks and error bars
can.clip(ar.loc[0] - 10, ar.loc[1] - 10,
ar.loc[0] + ar.size[0] + 10,
ar.loc[1] + ar.size[1] + 10)
for pair in self.data:
x = pair[self.xcol]
y = pychart_util.get_sample_val(pair, self.ycol)
if None in (x, y): continue
x_pos = ar.x_pos(x)
y_pos = ar.y_pos(y)
if self.error_bar:
plus = pair[self.y_error_plus_col or self.y_error_minus_col]
minus = pair[self.y_error_minus_col or self.y_error_plus_col]
if self.y_qerror_minus_col or self.y_qerror_plus_col:
q_plus = pair[self.y_qerror_plus_col or self.y_qerror_minus_col]
q_minus = pair[self.y_qerror_minus_col or self.y_qerror_plus_col]
if None not in (minus,plus,q_minus,q_plus):
self.error_bar.draw(can, (x_pos, y_pos),
ar.y_pos(y - minus),
ar.y_pos(y + plus),
ar.y_pos(y - q_minus),
ar.y_pos(y + q_plus))
else:
if None not in (minus,plus): #PDS
self.error_bar.draw(can, (x_pos, y_pos),
ar.y_pos(y - minus),
ar.y_pos(y + plus))
if self.tick_mark:
self.tick_mark.draw(can, x_pos, y_pos)
if self.data_label_format:
can.show(x_pos + self.data_label_offset[0],
y_pos + self.data_label_offset[1],
'/hC' + pychart_util.apply_format(self.data_label_format, (x, y), 1))
can.endclip()
def init():
global line_style_itr
line_styles = object_set.T()
for org_style in line_style.standards.list():
style = line_style.T(width = default_width, color = org_style.color,
dash = org_style.dash)
line_styles.add(style)
line_style_itr = line_styles.iterate()
theme.add_reinitialization_hook(init)
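# Minimal usage sketch (illustrative; area.T and axis.X come from sibling
# pychart modules, not this file):
#   plot = T(data=[(0, 10), (1, 15), (2, 12)], label='throughput')
#   ar = area.T(x_axis=axis.X(label='x'), y_axis=axis.Y(label='y'))
#   ar.add_plot(plot)
#   ar.draw()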
| ShaolongHu/lpts | site-packages/pychart/line_plot.py | Python | gpl-2.0 | 6,684 |
# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Utility functions."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
# pylint: disable=unused-import,line-too-long,wildcard-import
from tensorflow.contrib.kfac.python.ops.utils import *
from tensorflow.python.util.all_util import remove_undocumented
# pylint: enable=unused-import,line-too-long,wildcard-import
_allowed_symbols = [
"SequenceDict",
"setdefault",
"tensors_to_column",
"column_to_tensors",
"kronecker_product",
"layer_params_to_mat2d",
"mat2d_to_layer_params",
"compute_pi",
"posdef_inv",
"posdef_inv_matrix_inverse",
"posdef_inv_cholesky",
"posdef_inv_funcs",
"SubGraph",
"generate_random_signs",
"fwd_gradients",
]
remove_undocumented(__name__, allowed_exception_list=_allowed_symbols)
| benoitsteiner/tensorflow-opencl | tensorflow/contrib/kfac/python/ops/utils_lib.py | Python | apache-2.0 | 1,520 |
from Child import Child
from Node import Node # noqa: I201
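# These nodes model the clause list of a Swift `@available` attribute, e.g.
# (illustrative):
#   @available(iOS 10.1, *)
#   @available(swift, deprecated: 4.2, message: "This has been deprecated")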
AVAILABILITY_NODES = [
# availability-spec-list -> availability-entry availability-spec-list?
Node('AvailabilitySpecList', kind='SyntaxCollection',
element='AvailabilityArgument'),
# Wrapper for all the different entries that may occur inside @available
# availability-entry -> '*' ','?
# | identifier ','?
# | availability-version-restriction ','?
# | availability-versioned-argument ','?
Node('AvailabilityArgument', kind='Syntax',
description='''
A single argument to an `@available` argument like `*`, `iOS 10.1`, \
or `message: "This has been deprecated"`.
''',
children=[
Child('Entry', kind='Syntax',
description='The actual argument',
node_choices=[
Child('Star', kind='SpacedBinaryOperatorToken',
text_choices=['*']),
Child('IdentifierRestriction',
kind='IdentifierToken'),
Child('AvailabilityVersionRestriction',
kind='AvailabilityVersionRestriction'),
Child('AvailabilityLabeledArgument',
kind='AvailabilityLabeledArgument'),
]),
Child('TrailingComma', kind='CommaToken', is_optional=True,
description='''
A trailing comma if the argument is followed by another \
argument
'''),
]),
# Representation of 'deprecated: 2.3', 'message: "Hello world"' etc.
# availability-versioned-argument -> identifier ':' version-tuple
Node('AvailabilityLabeledArgument', kind='Syntax',
description='''
         An argument to an `@available` attribute that consists of a label and \
a value, e.g. `message: "This has been deprecated"`.
''',
children=[
Child('Label', kind='IdentifierToken',
description='The label of the argument'),
Child('Colon', kind='ColonToken',
description='The colon separating label and value'),
Child('Value', kind='Syntax',
node_choices=[
Child('String', 'StringLiteralToken'),
Child('Version', 'VersionTuple'),
], description='The value of this labeled argument',),
]),
# Representation for 'iOS 10', 'swift 3.4' etc.
# availability-version-restriction -> identifier version-tuple
Node('AvailabilityVersionRestriction', kind='Syntax',
description='''
An argument to `@available` that restricts the availability on a \
certain platform to a version, e.g. `iOS 10` or `swift 3.4`.
''',
children=[
Child('Platform', kind='IdentifierToken',
classification='Keyword',
description='''
The name of the OS on which the availability should be \
restricted or 'swift' if the availability should be \
restricted based on a Swift version.
'''),
Child('Version', kind='VersionTuple'),
]),
# version-tuple -> integer-literal
# | float-literal
# | float-literal '.' integer-literal
Node('VersionTuple', kind='Syntax',
description='''
A version number of the form major.minor.patch in which the minor \
         and patch parts may be omitted.
''',
children=[
Child('MajorMinor', kind='Syntax',
node_choices=[
Child('Major', kind='IntegerLiteralToken'),
Child('MajorMinor', kind='FloatingLiteralToken')
], description='''
In case the version consists only of the major version, an \
integer literal that specifies the major version. In case \
the version consists of major and minor version number, a \
floating literal in which the decimal part is interpreted \
as the minor version.
'''),
Child('PatchPeriod', kind='PeriodToken', is_optional=True,
description='''
If the version contains a patch number, the period \
separating the minor from the patch number.
'''),
Child('PatchVersion', kind='IntegerLiteralToken',
is_optional=True, description='''
The patch version if specified.
'''),
]),
]
| austinzheng/swift | utils/gyb_syntax_support/AvailabilityNodes.py | Python | apache-2.0 | 4,872 |
#!/usr/bin/env python
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Generator for C++ structs from api json files.
The purpose of this tool is to remove the need for hand-written code that
converts to and from base::Value types when receiving javascript api calls.
Originally written for generating code for extension apis. Reference schemas
are in chrome/common/extensions/api.
Usage example:
compiler.py --root /home/Work/src --namespace extensions windows.json
tabs.json
compiler.py --destdir gen --root /home/Work/src
--namespace extensions windows.json tabs.json
"""
import optparse
import os
import shlex
import sys
from cpp_bundle_generator import CppBundleGenerator
from cpp_generator import CppGenerator
from cpp_type_generator import CppTypeGenerator
from js_externs_generator import JsExternsGenerator
from js_interface_generator import JsInterfaceGenerator
import json_schema
from cpp_namespace_environment import CppNamespaceEnvironment
from model import Model
from schema_loader import SchemaLoader
# Names of supported code generators, as specified on the command-line.
# First is default.
GENERATORS = [
'cpp', 'cpp-bundle-registration', 'cpp-bundle-schema', 'externs', 'interface'
]
def GenerateSchema(generator_name,
file_paths,
root,
destdir,
cpp_namespace_pattern,
bundle_name,
impl_dir,
include_rules):
# Merge the source files into a single list of schemas.
api_defs = []
for file_path in file_paths:
schema = os.path.relpath(file_path, root)
schema_loader = SchemaLoader(
root,
os.path.dirname(schema),
include_rules,
cpp_namespace_pattern)
api_def = schema_loader.LoadSchema(schema)
# If compiling the C++ model code, delete 'nocompile' nodes.
if generator_name == 'cpp':
api_def = json_schema.DeleteNodes(api_def, 'nocompile')
# Delete all 'nodefine' nodes. They are only for documentation.
api_def = json_schema.DeleteNodes(api_def, 'nodefine')
api_defs.extend(api_def)
api_model = Model(allow_inline_enums=False)
# For single-schema compilation make sure that the first (i.e. only) schema
# is the default one.
default_namespace = None
# If we have files from multiple source paths, we'll use the common parent
# path as the source directory.
src_path = None
# Load the actual namespaces into the model.
for target_namespace, file_path in zip(api_defs, file_paths):
relpath = os.path.relpath(os.path.normpath(file_path), root)
namespace = api_model.AddNamespace(target_namespace,
relpath,
include_compiler_options=True,
environment=CppNamespaceEnvironment(
cpp_namespace_pattern))
if default_namespace is None:
default_namespace = namespace
if src_path is None:
src_path = namespace.source_file_dir
else:
src_path = os.path.commonprefix((src_path, namespace.source_file_dir))
_, filename = os.path.split(file_path)
filename_base, _ = os.path.splitext(filename)
# Construct the type generator with all the namespaces in this model.
type_generator = CppTypeGenerator(api_model,
schema_loader,
default_namespace)
if generator_name in ('cpp-bundle-registration', 'cpp-bundle-schema'):
cpp_bundle_generator = CppBundleGenerator(root,
api_model,
api_defs,
type_generator,
cpp_namespace_pattern,
bundle_name,
src_path,
impl_dir)
if generator_name == 'cpp-bundle-registration':
generators = [
('generated_api_registration.cc',
cpp_bundle_generator.api_cc_generator),
('generated_api_registration.h', cpp_bundle_generator.api_h_generator),
]
elif generator_name == 'cpp-bundle-schema':
generators = [
('generated_schemas.cc', cpp_bundle_generator.schemas_cc_generator),
('generated_schemas.h', cpp_bundle_generator.schemas_h_generator)
]
elif generator_name == 'cpp':
cpp_generator = CppGenerator(type_generator)
generators = [
('%s.h' % filename_base, cpp_generator.h_generator),
('%s.cc' % filename_base, cpp_generator.cc_generator)
]
elif generator_name == 'externs':
generators = [
('%s_externs.js' % namespace.unix_name, JsExternsGenerator())
]
elif generator_name == 'interface':
generators = [
('%s_interface.js' % namespace.unix_name, JsInterfaceGenerator())
]
else:
raise Exception('Unrecognised generator %s' % generator_name)
output_code = []
for filename, generator in generators:
code = generator.Generate(namespace).Render()
if destdir:
if generator_name == 'cpp-bundle-registration':
# Function registrations must be output to impl_dir, since they link in
# API implementations.
output_dir = os.path.join(destdir, impl_dir)
else:
output_dir = os.path.join(destdir, src_path)
if not os.path.exists(output_dir):
os.makedirs(output_dir)
with open(os.path.join(output_dir, filename), 'w') as f:
f.write(code)
# If multiple files are being output, add the filename for each file.
if len(generators) > 1:
output_code += [filename, '', code, '']
else:
output_code += [code]
return '\n'.join(output_code)
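# Illustrative programmatic call, mirroring the CLI usage in the module
# docstring:
#   GenerateSchema('cpp', ['tabs.json'], '/home/Work/src', 'gen',
#                  'extensions', '', None, [])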
if __name__ == '__main__':
parser = optparse.OptionParser(
description='Generates a C++ model of an API from JSON schema',
usage='usage: %prog [option]... schema')
parser.add_option('-r', '--root', default='.',
help='logical include root directory. Path to schema files from specified'
' dir will be the include path.')
parser.add_option('-d', '--destdir',
help='root directory to output generated files.')
parser.add_option('-n', '--namespace', default='generated_api_schemas',
help='C++ namespace for generated files. e.g extensions::api.')
parser.add_option('-b', '--bundle-name', default='',
help='A string to prepend to generated bundle class names, so that '
'multiple bundle rules can be used without conflicting. '
'Only used with one of the cpp-bundle generators.')
parser.add_option('-g', '--generator', default=GENERATORS[0],
choices=GENERATORS,
help='The generator to use to build the output code. Supported values are'
' %s' % GENERATORS)
parser.add_option('-i', '--impl-dir', dest='impl_dir',
help='The root path of all API implementations')
parser.add_option('-I', '--include-rules',
help='A list of paths to include when searching for referenced objects,'
' with the namespace separated by a \':\'. Example: '
'/foo/bar:Foo::Bar::%(namespace)s')
(opts, file_paths) = parser.parse_args()
if not file_paths:
sys.exit(0) # This is OK as a no-op
# Unless in bundle mode, only one file should be specified.
if (opts.generator not in ('cpp-bundle-registration', 'cpp-bundle-schema') and
len(file_paths) > 1):
# TODO(sashab): Could also just use file_paths[0] here and not complain.
raise Exception(
"Unless in bundle mode, only one file can be specified at a time.")
def split_path_and_namespace(path_and_namespace):
if ':' not in path_and_namespace:
raise ValueError('Invalid include rule "%s". Rules must be of '
'the form path:namespace' % path_and_namespace)
return path_and_namespace.split(':', 1)
include_rules = []
if opts.include_rules:
include_rules = map(split_path_and_namespace,
shlex.split(opts.include_rules))
result = GenerateSchema(opts.generator, file_paths, opts.root, opts.destdir,
opts.namespace, opts.bundle_name, opts.impl_dir,
include_rules)
if not opts.destdir:
print result
| heke123/chromium-crosswalk | tools/json_schema_compiler/compiler.py | Python | bsd-3-clause | 8,474 |