repo_name (string, length 5–100) | ref (string, length 12–67) | path (string, length 4–244) | copies (string, length 1–8) | content (string, length 0–1.05M, ⌀ = null allowed)
---|---|---|---|---|
SerCeMan/intellij-community
|
refs/heads/master
|
python/testData/paramInfo/BoundMethodReassigned.py
|
83
|
class A(object):
    def foo(self, a, b):
        pass
    moo = foo

ff = A().moo
f = ff
f(<arg1>1, <arg2>2)
|
GauravSahu/odoo
|
refs/heads/8.0
|
openerp/report/render/odt2odt/odt2odt.py
|
443
|
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2009 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp.report.render.rml2pdf import utils
import copy
class odt2odt(object):
    def __init__(self, odt, localcontext):
        self.localcontext = localcontext
        self.etree = odt
        self._node = None

    def render(self):
        def process_text(node, new_node):
            for child in utils._child_get(node, self):
                new_child = copy.deepcopy(child)
                new_node.append(new_child)
                if len(child):
                    for n in new_child:
                        new_child.text = utils._process_text(self, child.text)
                        new_child.tail = utils._process_text(self, child.tail)
                        new_child.remove(n)
                    process_text(child, new_child)
                else:
                    new_child.text = utils._process_text(self, child.text)
                    new_child.tail = utils._process_text(self, child.tail)
        self._node = copy.deepcopy(self.etree)
        for n in self._node:
            self._node.remove(n)
        process_text(self.etree, self._node)
        return self._node

def parseNode(node, localcontext={}):
    r = odt2odt(node, localcontext)
    return r.render()

# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
owtf/owtf
|
refs/heads/develop
|
owtf/managers/url.py
|
1
|
"""
owtf.managers.url
~~~~~~~~~~~~~~~~~
The DB stores HTTP transactions, unique URLs and more.
"""
from owtf.db.session import get_count, get_scoped_session
from owtf.lib.exceptions import InvalidParameterType
from owtf.managers.target import is_url_in_scope, target_required
from owtf.models.url import Url
from owtf.utils.strings import str2bool
from owtf.settings import (
is_file_regex,
is_image_regex,
is_small_file_regex,
is_ssi_regex,
is_url_regex,
)
num_urls_before = 0
def is_regex_url(url, regexp):
""" Wrapper method to search URL for different properties based on regex
:param url: URL
:type url: `str`
:param regexp: Regular expression for the property
:type regexp: `str`
:return: True/False
:rtype: `bool`
"""
return len(regexp.findall(url)) > 0
def small_file_url(url):
""" Checks if small file url
:param url: URL
:type url: `str`
:return: True/False
:rtype: `bool`
"""
return is_regex_url(url, is_small_file_regex)
def file_url(url):
""" Checks if it is a file url
:param url: URL
:type url: `str`
:return: True/False
:rtype: `bool`
"""
return is_regex_url(url, is_file_regex)
def image_url(url):
""" Checks if it is an image url
:param url: URL
:type url: `str`
:return: True/False
:rtype: `bool`
"""
return is_regex_url(url, is_image_regex)
def ssi_url(url):
""" Checks if SSI url
:param url: URL
:type url: `str`
:return: True/False
:rtype: `bool`
"""
return is_regex_url(url, is_ssi_regex)
@target_required
def add_urls_to_db(session, url, visited, found=None, target_id=None):
"""Adds a URL to the DB
:param url: URL to be added
:type url: `str`
:param visited: Visited or not
:type visited: `bool`
:param found: True/False
:type found: `bool`
:param target_id: Target ID
:type target_id: `int`
:return: None
:rtype: None
"""
if is_url(url): # New URL
# Make sure URL is clean prior to saving in DB, nasty bugs
# can happen without this
url = url.strip()
scope = is_url_in_scope(url)
session.merge(Url(target_id=target_id, url=url, visited=visited, scope=scope))
session.commit()
def get_urls_to_visit():
"""Gets urls to visit for a target
:param target: Target
:type target: `str`
:return: List of not visited URLs
:rtype: `list`
"""
session = get_scoped_session()
urls = session.query(Url.url).filter_by(visited=False).all()
urls = [i[0] for i in urls]
return urls
def is_url(url):
"""Check if valid URL
:param url: URL
:type url: `str`
:return: True/False
:rtype: `bool`
"""
return is_regex_url(url, is_url_regex)
@target_required
def add_url(session, url, found=None, target_id=None):
"""Adds a URL to the relevant DBs if not already added
:param url: URL to be added
:type url: `str`
:param found: Visited or not
:type found: `bool`
:param target_id: target ID
:type target_id: `int`
:return: None
:rtype: None
"""
visited = False
if found is not None: # Visited URL -> Found in [ True, False ]
visited = True
return add_urls_to_db(session, url, visited, found=found, target_id=target_id)
@target_required
def import_processed_url(session, urls_list, target_id=None):
"""Imports a processed URL from the DB
:param urls_list: List of URLs
:type urls_list: `list`
:param target_id: Target ID
:type target_id: `int`
:return: None
:rtype: None
"""
for url, visited, scope in urls_list:
session.merge(Url(target_id=target_id, url=url, visited=visited, scope=scope))
session.commit()
@target_required
def import_urls(session, url_list, target_id=None):
"""Extracts and classifies all URLs passed. Expects a newline separated
URL list
:param url_list: List of urls
:type url_list: `list`
:param target_id: target ID
:type target_id: `int`
:return: List of imported URLS
:rtype: `list`
"""
imported_urls = []
for url in url_list:
if is_url(url):
imported_urls.append(url)
session.merge(Url(url=url, target_id=target_id))
session.commit()
return imported_urls # Return imported urls
def url_gen_query(session, criteria, target_id, for_stats=False):
"""Generate query based on criteria and target ID
:param criteria: Filter criteria
:type criteria: `dict`
:param target_id: Target ID
:type target_id: `int`
:param for_stats: True/False
:type for_stats: `bool`
:return: Generated SQLAlchemy query
:rtype: `Query`
"""
query = session.query(Url).filter_by(target_id=target_id)
# Check if criteria is url search
if criteria.get("search", None):
if criteria.get("url", None):
if isinstance(criteria.get("url"), list):
criteria["url"] = criteria["url"][0]
query = query.filter(Url.url.like("%%{!s}%%".format(criteria["url"])))
else: # If not search
if criteria.get("url", None):
if isinstance(criteria.get("url"), str):
query = query.filter_by(url=criteria["url"])
if isinstance(criteria.get("url"), list):
query = query.filter(Url.url.in_(criteria["url"]))
# For the following section it doesn't matter whether this is a filter or a
# search, because it doesn't make sense to search in a boolean column :P
if criteria.get("visited", None):
if isinstance(criteria.get("visited"), list):
criteria["visited"] = criteria["visited"][0]
query = query.filter_by(visited=str2bool(criteria["visited"]))
if criteria.get("scope", None):
if isinstance(criteria.get("scope"), list):
criteria["scope"] = criteria["scope"][0]
query = query.filter_by(scope=str2bool(criteria["scope"]))
if not for_stats: # Query for stats can't have limit and offset
try:
if criteria.get("offset", None):
if isinstance(criteria.get("offset"), list):
criteria["offset"] = criteria["offset"][0]
query = query.offset(int(criteria["offset"]))
if criteria.get("limit", None):
if isinstance(criteria.get("limit"), list):
criteria["limit"] = criteria["limit"][0]
query = query.limit(int(criteria["limit"]))
except ValueError:
raise InvalidParameterType("Invalid parameter type for transaction db")
return query
@target_required
def get_all_urls(session, criteria, target_id=None):
"""Get all URLs based on criteria and target ID
:param criteria: Filter criteria
:type criteria: `dict`
:param target_id: Target ID
:type target_id: `int`
:return: List of URL dicts
:rtype: `list`
"""
query = url_gen_query(session, criteria, target_id)
results = query.all()
return [url_obj.to_dict() for url_obj in results]
@target_required
def search_all_urls(session, criteria, target_id=None):
"""Search all URLs based on criteria and target ID
.. note::
Three things needed
+ Total number of urls
+ Filtered url
+ Filtered number of url
:param criteria: Filter criteria
:type criteria: `dict`
:param target_id: Target ID
:type target_id: `int`
:return: Search result dict
:rtype: `dict`
"""
total = get_count(session.query(Url).filter_by(target_id=target_id))
filtered_url_objs = url_gen_query(session, criteria, target_id).all()
filtered_number = get_count(
url_gen_query(session, criteria, target_id, for_stats=True)
)
results = {
"records_total": total,
"records_filtered": filtered_number,
"data": [url_obj.to_dict() for url_obj in filtered_url_objs],
}
return results
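# --- Usage sketch (not part of the original module) ---
# A minimal illustration of how the URL predicates above compose; it assumes a
# configured OWTF installation (so the patterns imported from owtf.settings are
# compiled regexes), and the example URL below is purely hypothetical.
def classify_url_example(url="http://example.com/static/logo.png"):
    """Return a rough classification string for a single URL."""
    if not is_url(url):
        return "not a URL"
    kinds = []
    if image_url(url):
        kinds.append("image")
    if small_file_url(url):
        kinds.append("small file")
    elif file_url(url):
        kinds.append("file")
    if ssi_url(url):
        kinds.append("ssi")
    return ", ".join(kinds) or "plain URL"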
|
rwl/PyCIM
|
refs/heads/master
|
CIM14/ENTSOE/Dynamics/IEC61970/Dynamics/DynamicsMetaBlockInputReference.py
|
1
|
# Copyright (C) 2010-2011 Richard Lincoln
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to
# deal in the Software without restriction, including without limitation the
# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
# sell copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
from CIM14.ENTSOE.Dynamics.IEC61970.Core.CoreIdentifiedObject import CoreIdentifiedObject
class DynamicsMetaBlockInputReference(CoreIdentifiedObject):
def __init__(self, MemberOf_MetaBlockReference=None, MetaBlockSignal=None, MetaBlockConnectable=None, StandardControlBlock_MetaBlockConnectable=None, *args, **kw_args):
"""Initialises a new 'DynamicsMetaBlockInputReference' instance.
@param MemberOf_MetaBlockReference:
@param MetaBlockSignal:
@param MetaBlockConnectable:
@param StandardControlBlock_MetaBlockConnectable:
"""
self._MemberOf_MetaBlockReference = None
self.MemberOf_MetaBlockReference = MemberOf_MetaBlockReference
self._MetaBlockSignal = None
self.MetaBlockSignal = MetaBlockSignal
self._MetaBlockConnectable = None
self.MetaBlockConnectable = MetaBlockConnectable
self._StandardControlBlock_MetaBlockConnectable = None
self.StandardControlBlock_MetaBlockConnectable = StandardControlBlock_MetaBlockConnectable
super(DynamicsMetaBlockInputReference, self).__init__(*args, **kw_args)
_attrs = []
_attr_types = {}
_defaults = {}
_enums = {}
_refs = ["MemberOf_MetaBlockReference", "MetaBlockSignal", "MetaBlockConnectable", "StandardControlBlock_MetaBlockConnectable"]
_many_refs = []
def getMemberOf_MetaBlockReference(self):
return self._MemberOf_MetaBlockReference
def setMemberOf_MetaBlockReference(self, value):
if self._MemberOf_MetaBlockReference is not None:
filtered = [x for x in self.MemberOf_MetaBlockReference.MetaBlockInputReference if x != self]
self._MemberOf_MetaBlockReference._MetaBlockInputReference = filtered
self._MemberOf_MetaBlockReference = value
if self._MemberOf_MetaBlockReference is not None:
if self not in self._MemberOf_MetaBlockReference._MetaBlockInputReference:
self._MemberOf_MetaBlockReference._MetaBlockInputReference.append(self)
MemberOf_MetaBlockReference = property(getMemberOf_MetaBlockReference, setMemberOf_MetaBlockReference)
def getMetaBlockSignal(self):
"""
"""
return self._MetaBlockSignal
def setMetaBlockSignal(self, value):
if self._MetaBlockSignal is not None:
self._MetaBlockSignal._From = None
self._MetaBlockSignal = value
if self._MetaBlockSignal is not None:
self._MetaBlockSignal.From = None
self._MetaBlockSignal._From = self
MetaBlockSignal = property(getMetaBlockSignal, setMetaBlockSignal)
def getMetaBlockConnectable(self):
return self._MetaBlockConnectable
def setMetaBlockConnectable(self, value):
if self._MetaBlockConnectable is not None:
filtered = [x for x in self.MetaBlockConnectable.MetaBlockInputReference if x != self]
self._MetaBlockConnectable._MetaBlockInputReference = filtered
self._MetaBlockConnectable = value
if self._MetaBlockConnectable is not None:
if self not in self._MetaBlockConnectable._MetaBlockInputReference:
self._MetaBlockConnectable._MetaBlockInputReference.append(self)
MetaBlockConnectable = property(getMetaBlockConnectable, setMetaBlockConnectable)
def getStandardControlBlock_MetaBlockConnectable(self):
return self._StandardControlBlock_MetaBlockConnectable
def setStandardControlBlock_MetaBlockConnectable(self, value):
if self._StandardControlBlock_MetaBlockConnectable is not None:
filtered = [x for x in self.StandardControlBlock_MetaBlockConnectable.StandardControlBlock_MetaBlockInputReference if x != self]
self._StandardControlBlock_MetaBlockConnectable._StandardControlBlock_MetaBlockInputReference = filtered
self._StandardControlBlock_MetaBlockConnectable = value
if self._StandardControlBlock_MetaBlockConnectable is not None:
if self not in self._StandardControlBlock_MetaBlockConnectable._StandardControlBlock_MetaBlockInputReference:
self._StandardControlBlock_MetaBlockConnectable._StandardControlBlock_MetaBlockInputReference.append(self)
StandardControlBlock_MetaBlockConnectable = property(getStandardControlBlock_MetaBlockConnectable, setStandardControlBlock_MetaBlockConnectable)
|
pk-sam/crosswalk-test-suite
|
refs/heads/master
|
webapi/webapi-rawsockets-w3c-tests/inst.xpk.py
|
357
|
#!/usr/bin/env python
import os
import shutil
import glob
import time
import sys
import subprocess
import string
from optparse import OptionParser, make_option
SCRIPT_DIR = os.path.dirname(os.path.abspath(__file__))
PKG_NAME = os.path.basename(SCRIPT_DIR)
PARAMETERS = None
#XW_ENV = "export DBUS_SESSION_BUS_ADDRESS=unix:path=/run/user/5000/dbus/user_bus_socket"
SRC_DIR = ""
PKG_SRC_DIR = ""
def doCMD(cmd):
# No need to handle timeouts in this short script; let the tool handle it
print "-->> \"%s\"" % cmd
output = []
cmd_return_code = 1
cmd_proc = subprocess.Popen(
cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, shell=True)
while True:
output_line = cmd_proc.stdout.readline().strip("\r\n")
cmd_return_code = cmd_proc.poll()
if output_line == '' and cmd_return_code != None:
break
sys.stdout.write("%s\n" % output_line)
sys.stdout.flush()
output.append(output_line)
return (cmd_return_code, output)
def updateCMD(cmd=None):
if "pkgcmd" in cmd:
cmd = "su - %s -c '%s;%s'" % (PARAMETERS.user, XW_ENV, cmd)
return cmd
def getUSERID():
if PARAMETERS.mode == "SDB":
cmd = "sdb -s %s shell id -u %s" % (
PARAMETERS.device, PARAMETERS.user)
else:
cmd = "ssh %s \"id -u %s\"" % (
PARAMETERS.device, PARAMETERS.user )
return doCMD(cmd)
def getPKGID(pkg_name=None):
if PARAMETERS.mode == "SDB":
cmd = "sdb -s %s shell %s" % (
PARAMETERS.device, updateCMD('pkgcmd -l'))
else:
cmd = "ssh %s \"%s\"" % (
PARAMETERS.device, updateCMD('pkgcmd -l'))
(return_code, output) = doCMD(cmd)
if return_code != 0:
return None
test_pkg_id = None
for line in output:
if line.find("[" + pkg_name + "]") != -1:
pkgidIndex = line.split().index("pkgid")
test_pkg_id = line.split()[pkgidIndex+1].strip("[]")
break
return test_pkg_id
def doRemoteCMD(cmd=None):
if PARAMETERS.mode == "SDB":
cmd = "sdb -s %s shell %s" % (PARAMETERS.device, updateCMD(cmd))
else:
cmd = "ssh %s \"%s\"" % (PARAMETERS.device, updateCMD(cmd))
return doCMD(cmd)
def doRemoteCopy(src=None, dest=None):
if PARAMETERS.mode == "SDB":
cmd_prefix = "sdb -s %s push" % PARAMETERS.device
cmd = "%s %s %s" % (cmd_prefix, src, dest)
else:
cmd = "scp -r %s %s:/%s" % (src, PARAMETERS.device, dest)
(return_code, output) = doCMD(cmd)
doRemoteCMD("sync")
if return_code != 0:
return True
else:
return False
def uninstPKGs():
action_status = True
for root, dirs, files in os.walk(SCRIPT_DIR):
if root.endswith("mediasrc"):
continue
for file in files:
if file.endswith(".xpk"):
pkg_id = getPKGID(os.path.basename(os.path.splitext(file)[0]))
if not pkg_id:
action_status = False
continue
(return_code, output) = doRemoteCMD(
"pkgcmd -u -t xpk -q -n %s" % pkg_id)
for line in output:
if "Failure" in line:
action_status = False
break
(return_code, output) = doRemoteCMD(
"rm -rf %s" % PKG_SRC_DIR)
if return_code != 0:
action_status = False
return action_status
def instPKGs():
action_status = True
(return_code, output) = doRemoteCMD(
"mkdir -p %s" % PKG_SRC_DIR)
if return_code != 0:
action_status = False
for root, dirs, files in os.walk(SCRIPT_DIR):
if root.endswith("mediasrc"):
continue
for file in files:
if file.endswith(".xpk"):
if not doRemoteCopy(os.path.join(root, file), "%s/%s" % (SRC_DIR, file)):
action_status = False
(return_code, output) = doRemoteCMD(
"pkgcmd -i -t xpk -q -p %s/%s" % (SRC_DIR, file))
doRemoteCMD("rm -rf %s/%s" % (SRC_DIR, file))
for line in output:
if "Failure" in line:
action_status = False
break
# Do some special copy/delete... steps
'''
(return_code, output) = doRemoteCMD(
"mkdir -p %s/tests" % PKG_SRC_DIR)
if return_code != 0:
action_status = False
if not doRemoteCopy("specname/tests", "%s/tests" % PKG_SRC_DIR):
action_status = False
'''
return action_status
def main():
try:
usage = "usage: inst.py -i"
opts_parser = OptionParser(usage=usage)
opts_parser.add_option(
"-m", dest="mode", action="store", help="Specify mode")
opts_parser.add_option(
"-s", dest="device", action="store", help="Specify device")
opts_parser.add_option(
"-i", dest="binstpkg", action="store_true", help="Install package")
opts_parser.add_option(
"-u", dest="buninstpkg", action="store_true", help="Uninstall package")
opts_parser.add_option(
"-a", dest="user", action="store", help="User name")
global PARAMETERS
(PARAMETERS, args) = opts_parser.parse_args()
except Exception, e:
print "Got wrong option: %s, exit ..." % e
sys.exit(1)
if not PARAMETERS.user:
PARAMETERS.user = "app"
global SRC_DIR, PKG_SRC_DIR
SRC_DIR = "/home/%s/content" % PARAMETERS.user
PKG_SRC_DIR = "%s/tct/opt/%s" % (SRC_DIR, PKG_NAME)
if not PARAMETERS.mode:
PARAMETERS.mode = "SDB"
if PARAMETERS.mode == "SDB":
if not PARAMETERS.device:
(return_code, output) = doCMD("sdb devices")
for line in output:
if str.find(line, "\tdevice") != -1:
PARAMETERS.device = line.split("\t")[0]
break
else:
PARAMETERS.mode = "SSH"
if not PARAMETERS.device:
print "No device provided"
sys.exit(1)
user_info = getUSERID()
re_code = user_info[0]
if re_code == 0 :
global XW_ENV
userid = user_info[1][0]
XW_ENV = "export DBUS_SESSION_BUS_ADDRESS=unix:path=/run/user/%s/dbus/user_bus_socket"%str(userid)
else:
print "[Error] cmd commands error : %s"%str(user_info[1])
sys.exit(1)
if PARAMETERS.binstpkg and PARAMETERS.buninstpkg:
print "-i and -u are conflict"
sys.exit(1)
if PARAMETERS.buninstpkg:
if not uninstPKGs():
sys.exit(1)
else:
if not instPKGs():
sys.exit(1)
if __name__ == "__main__":
main()
sys.exit(0)
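# --- Usage note (not part of the original script) ---
# Typical invocations based on the options registered above; the device id and
# host are placeholders, not real targets:
#   python inst.xpk.py -m SDB -s <device-id> -a app -i    # install the .xpk packages
#   python inst.xpk.py -m SSH -s <host> -a app -u         # uninstall them again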
|
funkypawz/MakerRobot
|
refs/heads/master
|
telegram/inlinequeryresultcachedphoto.py
|
2
|
#!/usr/bin/env python
#
# A library that provides a Python interface to the Telegram Bot API
# Copyright (C) 2015-2016
# Leandro Toledo de Souza <[email protected]>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser Public License for more details.
#
# You should have received a copy of the GNU Lesser Public License
# along with this program. If not, see [http://www.gnu.org/licenses/].
"""This module contains the classes that represent Telegram
InlineQueryResultPhoto"""
from telegram import InlineQueryResult, InlineKeyboardMarkup, InputMessageContent
class InlineQueryResultCachedPhoto(InlineQueryResult):
    def __init__(self,
                 id,
                 photo_file_id,
                 title=None,
                 description=None,
                 caption=None,
                 reply_markup=None,
                 input_message_content=None,
                 **kwargs):
        # Required
        super(InlineQueryResultCachedPhoto, self).__init__('photo', id)
        self.photo_file_id = photo_file_id

        # Optionals
        if title:
            self.title = title
        if description:
            self.description = description
        if caption:
            self.caption = caption
        if reply_markup:
            self.reply_markup = reply_markup
        if input_message_content:
            self.input_message_content = input_message_content

    @staticmethod
    def de_json(data):
        data = super(InlineQueryResultCachedPhoto,
                     InlineQueryResultCachedPhoto).de_json(data)

        data['reply_markup'] = InlineKeyboardMarkup.de_json(data.get('reply_markup'))
        data['input_message_content'] = InputMessageContent.de_json(data.get(
            'input_message_content'))

        return InlineQueryResultCachedPhoto(**data)
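# --- Usage sketch (not part of the original module) ---
# Builds a result object for answering an inline query; the id and file id
# below are placeholder values, not real Telegram identifiers, and this assumes
# the telegram package imported above is importable.
example_result = InlineQueryResultCachedPhoto(
    id='1',
    photo_file_id='AgAD_placeholder_file_id',
    title='Cached photo example',
    caption='Re-sends a photo that is already stored on Telegram servers')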
|
blacklin/kbengine
|
refs/heads/master
|
kbe/src/lib/python/Lib/antigravity.py
|
917
|
import webbrowser
import hashlib
webbrowser.open("http://xkcd.com/353/")
def geohash(latitude, longitude, datedow):
    '''Compute geohash() using the Munroe algorithm.

    >>> geohash(37.421542, -122.085589, b'2005-05-26-10458.68')
    37.857713 -122.544543

    '''
    # http://xkcd.com/426/
    h = hashlib.md5(datedow).hexdigest()
    p, q = [('%f' % float.fromhex('0.' + x)) for x in (h[:16], h[16:32])]
    print('%d%s %d%s' % (latitude, p[1:], longitude, q[1:]))
|
smiller171/ansible
|
refs/heads/devel
|
test/units/plugins/connections/__init__.py
|
7690
|
# (c) 2012-2014, Michael DeHaan <[email protected]>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
|
pdufour/sqlalchemy
|
refs/heads/master
|
test/sql/test_labels.py
|
27
|
from sqlalchemy import exc as exceptions, select, MetaData, Integer, or_, \
bindparam
from sqlalchemy.engine import default
from sqlalchemy.sql import table, column
from sqlalchemy.sql.elements import _truncated_label
from sqlalchemy.testing import AssertsCompiledSQL, assert_raises, engines,\
fixtures, eq_
from sqlalchemy.testing.schema import Table, Column
IDENT_LENGTH = 29
class MaxIdentTest(fixtures.TestBase, AssertsCompiledSQL):
__dialect__ = 'DefaultDialect'
table1 = table('some_large_named_table',
column('this_is_the_primarykey_column'),
column('this_is_the_data_column')
)
table2 = table('table_with_exactly_29_characs',
column('this_is_the_primarykey_column'),
column('this_is_the_data_column')
)
def _length_fixture(self, length=IDENT_LENGTH, positional=False):
dialect = default.DefaultDialect()
dialect.max_identifier_length = length
if positional:
dialect.paramstyle = 'format'
dialect.positional = True
return dialect
def _engine_fixture(self, length=IDENT_LENGTH):
eng = engines.testing_engine()
eng.dialect.max_identifier_length = length
return eng
def test_table_alias_1(self):
self.assert_compile(
self.table2.alias().select(),
'SELECT '
'table_with_exactly_29_c_1.'
'this_is_the_primarykey_column, '
'table_with_exactly_29_c_1.this_is_the_data_column '
'FROM '
'table_with_exactly_29_characs '
'AS table_with_exactly_29_c_1',
dialect=self._length_fixture()
)
def test_table_alias_2(self):
table1 = self.table1
table2 = self.table2
ta = table2.alias()
on = table1.c.this_is_the_data_column == ta.c.this_is_the_data_column
self.assert_compile(
select([table1, ta]).select_from(table1.join(ta, on)).
where(ta.c.this_is_the_data_column == 'data3'),
'SELECT '
'some_large_named_table.this_is_the_primarykey_column, '
'some_large_named_table.this_is_the_data_column, '
'table_with_exactly_29_c_1.this_is_the_primarykey_column, '
'table_with_exactly_29_c_1.this_is_the_data_column '
'FROM '
'some_large_named_table '
'JOIN '
'table_with_exactly_29_characs '
'AS '
'table_with_exactly_29_c_1 '
'ON '
'some_large_named_table.this_is_the_data_column = '
'table_with_exactly_29_c_1.this_is_the_data_column '
'WHERE '
'table_with_exactly_29_c_1.this_is_the_data_column = '
':this_is_the_data_column_1',
dialect=self._length_fixture()
)
def test_too_long_name_disallowed(self):
m = MetaData()
t = Table('this_name_is_too_long_for_what_were_doing_in_this_test',
m, Column('foo', Integer))
eng = self._engine_fixture()
methods = (t.create, t.drop, m.create_all, m.drop_all)
for meth in methods:
assert_raises(exceptions.IdentifierError, meth, eng)
def _assert_labeled_table1_select(self, s):
table1 = self.table1
compiled = s.compile(dialect=self._length_fixture())
assert set(compiled._create_result_map()['some_large_named_table__2'][1]).\
issuperset(
[
'some_large_named_table_this_is_the_data_column',
'some_large_named_table__2',
table1.c.this_is_the_data_column
]
)
assert set(compiled._create_result_map()['some_large_named_table__1'][1]).\
issuperset(
[
'some_large_named_table_this_is_the_primarykey_column',
'some_large_named_table__1',
table1.c.this_is_the_primarykey_column
]
)
def test_result_map_use_labels(self):
table1 = self.table1
s = table1.select().apply_labels().\
order_by(table1.c.this_is_the_primarykey_column)
self._assert_labeled_table1_select(s)
def test_result_map_limit(self):
table1 = self.table1
# some dialects such as oracle (and possibly ms-sql in a future
# version) generate a subquery for limits/offsets. ensure that the
# generated result map corresponds to the selected table, not the
# select query
s = table1.select(use_labels=True,
order_by=[table1.c.this_is_the_primarykey_column]).\
limit(2)
self._assert_labeled_table1_select(s)
def test_result_map_subquery(self):
table1 = self.table1
s = table1.select(
table1.c.this_is_the_primarykey_column == 4).\
alias('foo')
s2 = select([s])
compiled = s2.compile(dialect=self._length_fixture())
assert \
set(compiled._create_result_map()['this_is_the_data_column'][1]).\
issuperset(['this_is_the_data_column',
s.c.this_is_the_data_column])
assert \
set(compiled._create_result_map()['this_is_the_primarykey__1'][1]).\
issuperset(['this_is_the_primarykey_column',
'this_is_the_primarykey__1',
s.c.this_is_the_primarykey_column])
def test_result_map_anon_alias(self):
table1 = self.table1
dialect = self._length_fixture()
q = table1.select(table1.c.this_is_the_primarykey_column == 4).alias()
s = select([q]).apply_labels()
self.assert_compile(
s,
"SELECT "
"anon_1.this_is_the_primarykey__2 AS anon_1_this_is_the_prim_1, "
"anon_1.this_is_the_data_column AS anon_1_this_is_the_data_3 "
"FROM ("
"SELECT "
"some_large_named_table."
"this_is_the_primarykey_column AS this_is_the_primarykey__2, "
"some_large_named_table."
"this_is_the_data_column AS this_is_the_data_column "
"FROM "
"some_large_named_table "
"WHERE "
"some_large_named_table.this_is_the_primarykey_column "
"= :this_is_the_primarykey__1"
") "
"AS anon_1", dialect=dialect)
compiled = s.compile(dialect=dialect)
assert set(compiled._create_result_map()['anon_1_this_is_the_data_3'][1]).\
issuperset([
'anon_1_this_is_the_data_3',
q.corresponding_column(
table1.c.this_is_the_data_column)
])
assert set(compiled._create_result_map()['anon_1_this_is_the_prim_1'][1]).\
issuperset([
'anon_1_this_is_the_prim_1',
q.corresponding_column(
table1.c.this_is_the_primarykey_column)
])
def test_column_bind_labels_1(self):
table1 = self.table1
s = table1.select(table1.c.this_is_the_primarykey_column == 4)
self.assert_compile(
s,
"SELECT some_large_named_table.this_is_the_primarykey_column, "
"some_large_named_table.this_is_the_data_column "
"FROM some_large_named_table WHERE "
"some_large_named_table.this_is_the_primarykey_column = "
":this_is_the_primarykey__1",
checkparams={'this_is_the_primarykey__1': 4},
dialect=self._length_fixture()
)
self.assert_compile(
s,
"SELECT some_large_named_table.this_is_the_primarykey_column, "
"some_large_named_table.this_is_the_data_column "
"FROM some_large_named_table WHERE "
"some_large_named_table.this_is_the_primarykey_column = "
"%s",
checkpositional=(4, ),
checkparams={'this_is_the_primarykey__1': 4},
dialect=self._length_fixture(positional=True)
)
def test_column_bind_labels_2(self):
table1 = self.table1
s = table1.select(or_(
table1.c.this_is_the_primarykey_column == 4,
table1.c.this_is_the_primarykey_column == 2
))
self.assert_compile(
s,
"SELECT some_large_named_table.this_is_the_primarykey_column, "
"some_large_named_table.this_is_the_data_column "
"FROM some_large_named_table WHERE "
"some_large_named_table.this_is_the_primarykey_column = "
":this_is_the_primarykey__1 OR "
"some_large_named_table.this_is_the_primarykey_column = "
":this_is_the_primarykey__2",
checkparams={
'this_is_the_primarykey__1': 4,
'this_is_the_primarykey__2': 2
},
dialect=self._length_fixture()
)
self.assert_compile(
s,
"SELECT some_large_named_table.this_is_the_primarykey_column, "
"some_large_named_table.this_is_the_data_column "
"FROM some_large_named_table WHERE "
"some_large_named_table.this_is_the_primarykey_column = "
"%s OR "
"some_large_named_table.this_is_the_primarykey_column = "
"%s",
checkparams={
'this_is_the_primarykey__1': 4,
'this_is_the_primarykey__2': 2
},
checkpositional=(4, 2),
dialect=self._length_fixture(positional=True)
)
def test_bind_param_non_truncated(self):
table1 = self.table1
stmt = table1.insert().values(
this_is_the_data_column=
bindparam("this_is_the_long_bindparam_name")
)
compiled = stmt.compile(dialect=self._length_fixture(length=10))
eq_(
compiled.construct_params(
params={"this_is_the_long_bindparam_name": 5}),
{'this_is_the_long_bindparam_name': 5}
)
def test_bind_param_truncated_named(self):
table1 = self.table1
bp = bindparam(_truncated_label("this_is_the_long_bindparam_name"))
stmt = table1.insert().values(
this_is_the_data_column=bp
)
compiled = stmt.compile(dialect=self._length_fixture(length=10))
eq_(
compiled.construct_params(params={
"this_is_the_long_bindparam_name": 5}),
{"this_1": 5}
)
def test_bind_param_truncated_positional(self):
table1 = self.table1
bp = bindparam(_truncated_label("this_is_the_long_bindparam_name"))
stmt = table1.insert().values(
this_is_the_data_column=bp
)
compiled = stmt.compile(
dialect=self._length_fixture(length=10, positional=True))
eq_(
compiled.construct_params(params={
"this_is_the_long_bindparam_name": 5}),
{"this_1": 5}
)
class LabelLengthTest(fixtures.TestBase, AssertsCompiledSQL):
__dialect__ = 'DefaultDialect'
table1 = table('some_large_named_table',
column('this_is_the_primarykey_column'),
column('this_is_the_data_column')
)
table2 = table('table_with_exactly_29_characs',
column('this_is_the_primarykey_column'),
column('this_is_the_data_column')
)
def test_adjustable_1(self):
table1 = self.table1
q = table1.select(
table1.c.this_is_the_primarykey_column == 4).alias('foo')
x = select([q])
compile_dialect = default.DefaultDialect(label_length=10)
self.assert_compile(
x, 'SELECT '
'foo.this_1, foo.this_2 '
'FROM ('
'SELECT '
'some_large_named_table.this_is_the_primarykey_column '
'AS this_1, '
'some_large_named_table.this_is_the_data_column '
'AS this_2 '
'FROM '
'some_large_named_table '
'WHERE '
'some_large_named_table.this_is_the_primarykey_column '
'= :this_1'
') '
'AS foo', dialect=compile_dialect)
def test_adjustable_2(self):
table1 = self.table1
q = table1.select(
table1.c.this_is_the_primarykey_column == 4).alias('foo')
x = select([q])
compile_dialect = default.DefaultDialect(label_length=10)
self.assert_compile(
x, 'SELECT '
'foo.this_1, foo.this_2 '
'FROM ('
'SELECT '
'some_large_named_table.this_is_the_primarykey_column '
'AS this_1, '
'some_large_named_table.this_is_the_data_column '
'AS this_2 '
'FROM '
'some_large_named_table '
'WHERE '
'some_large_named_table.this_is_the_primarykey_column '
'= :this_1'
') '
'AS foo', dialect=compile_dialect)
def test_adjustable_3(self):
table1 = self.table1
compile_dialect = default.DefaultDialect(label_length=4)
q = table1.select(
table1.c.this_is_the_primarykey_column == 4).alias('foo')
x = select([q])
self.assert_compile(
x, 'SELECT '
'foo._1, foo._2 '
'FROM ('
'SELECT '
'some_large_named_table.this_is_the_primarykey_column '
'AS _1, '
'some_large_named_table.this_is_the_data_column '
'AS _2 '
'FROM '
'some_large_named_table '
'WHERE '
'some_large_named_table.this_is_the_primarykey_column '
'= :_1'
') '
'AS foo', dialect=compile_dialect)
def test_adjustable_4(self):
table1 = self.table1
q = table1.select(table1.c.this_is_the_primarykey_column == 4).alias()
x = select([q], use_labels=True)
compile_dialect = default.DefaultDialect(label_length=10)
self.assert_compile(
x, 'SELECT '
'anon_1.this_2 AS anon_1, '
'anon_1.this_4 AS anon_3 '
'FROM ('
'SELECT '
'some_large_named_table.this_is_the_primarykey_column '
'AS this_2, '
'some_large_named_table.this_is_the_data_column '
'AS this_4 '
'FROM '
'some_large_named_table '
'WHERE '
'some_large_named_table.this_is_the_primarykey_column '
'= :this_1'
') '
'AS anon_1', dialect=compile_dialect)
def test_adjustable_5(self):
table1 = self.table1
q = table1.select(table1.c.this_is_the_primarykey_column == 4).alias()
x = select([q], use_labels=True)
compile_dialect = default.DefaultDialect(label_length=4)
self.assert_compile(
x, 'SELECT '
'_1._2 AS _1, '
'_1._4 AS _3 '
'FROM ('
'SELECT '
'some_large_named_table.this_is_the_primarykey_column '
'AS _2, '
'some_large_named_table.this_is_the_data_column '
'AS _4 '
'FROM '
'some_large_named_table '
'WHERE '
'some_large_named_table.this_is_the_primarykey_column '
'= :_1'
') '
'AS _1', dialect=compile_dialect)
def test_adjustable_result_schema_column_1(self):
table1 = self.table1
q = table1.select(
table1.c.this_is_the_primarykey_column == 4).apply_labels().\
alias('foo')
dialect = default.DefaultDialect(label_length=10)
compiled = q.compile(dialect=dialect)
assert set(compiled._create_result_map()['some_2'][1]).issuperset([
table1.c.this_is_the_data_column,
'some_large_named_table_this_is_the_data_column',
'some_2'
])
assert set(compiled._create_result_map()['some_1'][1]).issuperset([
table1.c.this_is_the_primarykey_column,
'some_large_named_table_this_is_the_primarykey_column',
'some_1'
])
def test_adjustable_result_schema_column_2(self):
table1 = self.table1
q = table1.select(
table1.c.this_is_the_primarykey_column == 4).alias('foo')
x = select([q])
dialect = default.DefaultDialect(label_length=10)
compiled = x.compile(dialect=dialect)
assert set(compiled._create_result_map()['this_2'][1]).issuperset([
q.corresponding_column(table1.c.this_is_the_data_column),
'this_is_the_data_column',
'this_2'])
assert set(compiled._create_result_map()['this_1'][1]).issuperset([
q.corresponding_column(table1.c.this_is_the_primarykey_column),
'this_is_the_primarykey_column',
'this_1'])
def test_table_plus_column_exceeds_length(self):
"""test that the truncation only occurs when tablename + colname are
concatenated, if they are individually under the label length.
"""
compile_dialect = default.DefaultDialect(label_length=30)
a_table = table(
'thirty_characters_table_xxxxxx',
column('id')
)
other_table = table(
'other_thirty_characters_table_',
column('id'),
column('thirty_characters_table_id')
)
anon = a_table.alias()
j1 = other_table.outerjoin(
anon,
anon.c.id == other_table.c.thirty_characters_table_id)
self.assert_compile(
select([other_table, anon]).
select_from(j1).apply_labels(),
'SELECT '
'other_thirty_characters_table_.id '
'AS other_thirty_characters__1, '
'other_thirty_characters_table_.thirty_characters_table_id '
'AS other_thirty_characters__2, '
'thirty_characters_table__1.id '
'AS thirty_characters_table__3 '
'FROM '
'other_thirty_characters_table_ '
'LEFT OUTER JOIN '
'thirty_characters_table_xxxxxx AS thirty_characters_table__1 '
'ON thirty_characters_table__1.id = '
'other_thirty_characters_table_.thirty_characters_table_id',
dialect=compile_dialect)
def test_colnames_longer_than_labels_lowercase(self):
t1 = table('a', column('abcde'))
self._test_colnames_longer_than_labels(t1)
def test_colnames_longer_than_labels_uppercase(self):
m = MetaData()
t1 = Table('a', m, Column('abcde', Integer))
self._test_colnames_longer_than_labels(t1)
def _test_colnames_longer_than_labels(self, t1):
dialect = default.DefaultDialect(label_length=4)
a1 = t1.alias(name='asdf')
# 'abcde' is longer than 4, but rendered as itself
# needs to have all characters
s = select([a1])
self.assert_compile(select([a1]),
'SELECT asdf.abcde FROM a AS asdf',
dialect=dialect)
compiled = s.compile(dialect=dialect)
assert set(compiled._create_result_map()['abcde'][1]).issuperset([
'abcde', a1.c.abcde, 'abcde'])
# column still there, but short label
s = select([a1]).apply_labels()
self.assert_compile(s,
'SELECT asdf.abcde AS _1 FROM a AS asdf',
dialect=dialect)
compiled = s.compile(dialect=dialect)
assert set(compiled._create_result_map()['_1'][1]).issuperset([
'asdf_abcde', a1.c.abcde, '_1'])
def test_label_overlap_unlabeled(self):
"""test that an anon col can't overlap with a fixed name, #3396"""
table1 = table(
"tablename", column('columnname_one'), column('columnn_1'))
stmt = select([table1]).apply_labels()
dialect = default.DefaultDialect(label_length=23)
self.assert_compile(
stmt,
"SELECT tablename.columnname_one AS tablename_columnn_1, "
"tablename.columnn_1 AS tablename_columnn_2 FROM tablename",
dialect=dialect
)
compiled = stmt.compile(dialect=dialect)
eq_(
set(compiled._create_result_map()),
set(['tablename_columnn_1', 'tablename_columnn_2'])
)
|
peregrinius/web2py-liscio
|
refs/heads/master
|
languages/ro.py
|
162
|
# coding: utf8
{
'!=': '!=',
'!langcode!': 'ro',
'!langname!': 'Română',
'"update" is an optional expression like "field1=\'newvalue\'". You cannot update or delete the results of a JOIN': '"update" (actualizează) este o expresie opțională precum "câmp1=\'valoare_nouă\'". Nu puteți actualiza sau șterge rezultatele unui JOIN',
'%(nrows)s records found': '%(nrows)s înregistrări găsite',
'%d days ago': '%d days ago',
'%d weeks ago': '%d weeks ago',
'%s %%{row} deleted': '%s linii șterse',
'%s %%{row} updated': '%s linii actualizate',
'%s selected': '%s selectat(e)',
'%Y-%m-%d': '%Y-%m-%d',
'%Y-%m-%d %H:%M:%S': '%Y-%m-%d %H:%M:%S',
'(something like "it-it")': '(ceva ce seamănă cu "it-it")',
'1 day ago': '1 day ago',
'1 week ago': '1 week ago',
'<': '<',
'<=': '<=',
'=': '=',
'>': '>',
'>=': '>=',
'A new version of web2py is available': 'O nouă versiune de web2py este disponibilă',
'A new version of web2py is available: %s': 'O nouă versiune de web2py este disponibilă: %s',
'About': 'Despre',
'about': 'despre',
'About application': 'Despre aplicație',
'Access Control': 'Control acces',
'Add': 'Adaugă',
'additional code for your application': 'cod suplimentar pentru aplicația dvs.',
'admin disabled because no admin password': 'administrare dezactivată deoarece parola de administrator nu a fost furnizată',
'admin disabled because not supported on google app engine': 'administrare dezactivată deoarece funcționalitatea nu e suportat pe Google App Engine',
'admin disabled because unable to access password file': 'administrare dezactivată deoarece nu există acces la fișierul cu parole',
'Admin is disabled because insecure channel': 'Adminstrarea este dezactivată deoarece conexiunea nu este sigură',
'Admin is disabled because unsecure channel': 'Administrarea este dezactivată deoarece conexiunea nu este securizată',
'Administration': 'Administrare',
'Administrative Interface': 'Interfață administrare',
'Administrator Password:': 'Parolă administrator:',
'Ajax Recipes': 'Rețete Ajax',
'And': 'Și',
'and rename it (required):': 'și renumiți (obligatoriu):',
'and rename it:': ' și renumiți:',
'appadmin': 'appadmin',
'appadmin is disabled because insecure channel': 'appadmin dezactivat deoarece conexiunea nu e sigură',
'application "%s" uninstalled': 'aplicația "%s" a fost dezinstalată',
'application compiled': 'aplicația a fost compilată',
'application is compiled and cannot be designed': 'aplicația este compilată și nu poate fi editată',
'Are you sure you want to delete file "%s"?': 'Sigur ștergeți fișierul "%s"?',
'Are you sure you want to delete this object?': 'Sigur ștergeți acest obiect?',
'Are you sure you want to uninstall application "%s"': 'Sigur dezinstalați aplicația "%s"',
'Are you sure you want to uninstall application "%s"?': 'Sigur dezinstalați aplicația "%s"?',
'ATTENTION: Login requires a secure (HTTPS) connection or running on localhost.': 'ATENȚIE: Nu vă puteți conecta decât utilizând o conexiune securizată (HTTPS) sau rulând aplicația pe computerul local.',
'ATTENTION: TESTING IS NOT THREAD SAFE SO DO NOT PERFORM MULTIPLE TESTS CONCURRENTLY.': 'ATENȚIE: Nu puteți efectua mai multe teste o dată deoarece lansarea în execuție a mai multor subpocese nu este sigură.',
'ATTENTION: you cannot edit the running application!': 'ATENȚIE: nu puteți edita o aplicație în curs de execuție!',
'Authentication': 'Autentificare',
'Available Databases and Tables': 'Baze de date și tabele disponibile',
'Back': 'Înapoi',
'Buy this book': 'Cumpără această carte',
'Cache': 'Cache',
'cache': 'cache',
'Cache Keys': 'Chei cache',
'cache, errors and sessions cleaned': 'cache, erori și sesiuni golite',
'Cannot be empty': 'Nu poate fi vid',
'Cannot compile: there are errors in your app. Debug it, correct errors and try again.': 'Compilare imposibilă: aplicația conține erori. Debogați aplicația și încercați din nou.',
'cannot create file': 'fișier imposibil de creat',
'cannot upload file "%(filename)s"': 'imposibil de încărcat fișierul "%(filename)s"',
'Change Password': 'Schimbare parolă',
'Change password': 'Schimbare parolă',
'change password': 'schimbare parolă',
'check all': 'coșați tot',
'Check to delete': 'Coșați pentru a șterge',
'clean': 'golire',
'Clear': 'Golește',
'Clear CACHE?': 'Clear CACHE?',
'Clear DISK': 'Clear DISK',
'Clear RAM': 'Clear RAM',
'click to check for upgrades': 'Clic pentru a verifica dacă există upgrade-uri',
'Client IP': 'IP client',
'Community': 'Comunitate',
'compile': 'compilare',
'compiled application removed': 'aplicația compilată a fost ștearsă',
'Components and Plugins': 'Componente și plugin-uri',
'contains': 'conține',
'Controller': 'Controlor',
'Controllers': 'Controlori',
'controllers': 'controlori',
'Copyright': 'Drepturi de autor',
'create file with filename:': 'crează fișier cu numele:',
'Create new application': 'Creați aplicație nouă',
'create new application:': 'crează aplicație nouă:',
'crontab': 'crontab',
'Current request': 'Cerere curentă',
'Current response': 'Răspuns curent',
'Current session': 'Sesiune curentă',
'currently saved or': 'în prezent salvat sau',
'customize me!': 'Personalizează-mă!',
'data uploaded': 'date încărcate',
'Database': 'bază de date',
'Database %s select': 'selectare bază de date %s',
'database administration': 'administrare bază de date',
'Date and Time': 'Data și ora',
'db': 'db',
'DB Model': 'Model bază de date',
'defines tables': 'definire tabele',
'Delete': 'Șterge',
'delete': 'șterge',
'delete all checked': 'șterge tot ce e coșat',
'Delete:': 'Șterge:',
'Demo': 'Demo',
'Deploy on Google App Engine': 'Instalare pe Google App Engine',
'Deployment Recipes': 'Rețete de instalare',
'Description': 'Descriere',
'design': 'design',
'DESIGN': 'DESIGN',
'Design for': 'Design pentru',
'DISK': 'DISK',
'Disk Cache Keys': 'Chei cache de disc',
'Disk Cleared': 'Disk Cleared',
'Documentation': 'Documentație',
"Don't know what to do?": 'Nu știți ce să faceți?',
'done!': 'gata!',
'Download': 'Descărcare',
'E-mail': 'E-mail',
'E-mail invalid': 'E-mail invalid',
'edit': 'editare',
'EDIT': 'EDITARE',
'Edit': 'Editare',
'Edit application': 'Editare aplicație',
'edit controller': 'editare controlor',
'Edit current record': 'Editare înregistrare curentă',
'Edit Profile': 'Editare profil',
'edit profile': 'editare profil',
'Edit This App': 'Editați această aplicație',
'Editing file': 'Editare fișier',
'Editing file "%s"': 'Editare fișier "%s"',
'Email and SMS': 'E-mail și SMS',
'enter a number between %(min)g and %(max)g': 'introduceți un număr între %(min)g și %(max)g',
'enter an integer between %(min)g and %(max)g': 'introduceți un întreg între %(min)g și %(max)g',
'Error logs for "%(app)s"': 'Log erori pentru "%(app)s"',
'errors': 'erori',
'Errors': 'Erori',
'Export': 'Export',
'export as csv file': 'exportă ca fișier csv',
'exposes': 'expune',
'extends': 'extinde',
'failed to reload module': 'reîncarcare modul nereușită',
'False': 'Neadevărat',
'FAQ': 'Întrebări frecvente',
'file "%(filename)s" created': 'fișier "%(filename)s" creat',
'file "%(filename)s" deleted': 'fișier "%(filename)s" șters',
'file "%(filename)s" uploaded': 'fișier "%(filename)s" încărcat',
'file "%(filename)s" was not deleted': 'fișierul "%(filename)s" n-a fost șters',
'file "%s" of %s restored': 'fișier "%s" de %s restaurat',
'file changed on disk': 'fișier modificat pe disc',
'file does not exist': 'fișier inexistent',
'file saved on %(time)s': 'fișier salvat %(time)s',
'file saved on %s': 'fișier salvat pe %s',
'First name': 'Prenume',
'Forbidden': 'Interzis',
'Forms and Validators': 'Formulare și validatori',
'Free Applications': 'Aplicații gratuite',
'Functions with no doctests will result in [passed] tests.': 'Funcțiile fără doctests vor genera teste [trecute].',
'Group %(group_id)s created': 'Grup %(group_id)s creat',
'Group ID': 'ID grup',
'Group uniquely assigned to user %(id)s': 'Grup asociat în mod unic utilizatorului %(id)s',
'Groups': 'Grupuri',
'Hello World': 'Salutare lume',
'help': 'ajutor',
'Home': 'Acasă',
'How did you get here?': 'Cum ați ajuns aici?',
'htmledit': 'editare html',
'import': 'import',
'Import/Export': 'Import/Export',
'includes': 'include',
'Index': 'Index',
'insert new': 'adaugă nou',
'insert new %s': 'adaugă nou %s',
'Installed applications': 'Aplicații instalate',
'internal error': 'eroare internă',
'Internal State': 'Stare internă',
'Introduction': 'Introducere',
'Invalid action': 'Acțiune invalidă',
'Invalid email': 'E-mail invalid',
'invalid password': 'parolă invalidă',
'Invalid password': 'Parolă invalidă',
'Invalid Query': 'Interogare invalidă',
'invalid request': 'cerere invalidă',
'invalid ticket': 'tichet invalid',
'Key': 'Key',
'language file "%(filename)s" created/updated': 'fișier de limbă "%(filename)s" creat/actualizat',
'Language files (static strings) updated': 'Fișierele de limbă (șirurile statice de caractere) actualizate',
'languages': 'limbi',
'Languages': 'Limbi',
'languages updated': 'limbi actualizate',
'Last name': 'Nume',
'Last saved on:': 'Ultima salvare:',
'Layout': 'Șablon',
'Layout Plugins': 'Șablon plugin-uri',
'Layouts': 'Șabloane',
'License for': 'Licență pentru',
'Live Chat': 'Chat live',
'loading...': 'încarc...',
'Logged in': 'Logat',
'Logged out': 'Delogat',
'Login': 'Autentificare',
'login': 'autentificare',
'Login to the Administrative Interface': 'Logare interfață de administrare',
'logout': 'ieșire',
'Logout': 'Ieșire',
'Lost Password': 'Parolă pierdută',
'Lost password?': 'Parolă pierdută?',
'Main Menu': 'Meniu principal',
'Manage Cache': 'Manage Cache',
'Menu Model': 'Model meniu',
'merge': 'unește',
'Models': 'Modele',
'models': 'modele',
'Modules': 'Module',
'modules': 'module',
'My Sites': 'Site-urile mele',
'Name': 'Nume',
'New': 'Nou',
'new application "%s" created': 'aplicația nouă "%s" a fost creată',
'New password': 'Parola nouă',
'New Record': 'Înregistrare nouă',
'new record inserted': 'înregistrare nouă adăugată',
'next 100 rows': 'următoarele 100 de linii',
'NO': 'NU',
'No databases in this application': 'Aplicație fără bază de date',
'Object or table name': 'Obiect sau nume de tabel',
'Old password': 'Parola veche',
'Online examples': 'Exemple online',
'Or': 'Sau',
'or import from csv file': 'sau importă din fișier csv',
'or provide application url:': 'sau furnizează adresă url:',
'Origin': 'Origine',
'Original/Translation': 'Original/Traducere',
'Other Plugins': 'Alte plugin-uri',
'Other Recipes': 'Alte rețete',
'Overview': 'Prezentare de ansamblu',
'pack all': 'împachetează toate',
'pack compiled': 'pachet compilat',
'Password': 'Parola',
"Password fields don't match": 'Câmpurile de parolă nu se potrivesc',
'Peeking at file': 'Vizualizare fișier',
'please input your password again': 'introduceți parola din nou',
'Plugins': 'Plugin-uri',
'Powered by': 'Pus în mișcare de',
'Preface': 'Prefață',
'previous 100 rows': '100 de linii anterioare',
'Profile': 'Profil',
'Python': 'Python',
'Query': 'Interogare',
'Query:': 'Interogare:',
'Quick Examples': 'Exemple rapide',
'RAM': 'RAM',
'RAM Cache Keys': 'Chei cache RAM',
'Ram Cleared': 'Ram Cleared',
'Recipes': 'Rețete',
'Record': 'înregistrare',
'record does not exist': 'înregistrare inexistentă',
'Record ID': 'ID înregistrare',
'Record id': 'id înregistrare',
'register': 'înregistrare',
'Register': 'Înregistrare',
'Registration identifier': 'Identificator de autentificare',
'Registration key': 'Cheie înregistrare',
'Registration successful': 'Autentificare reușită',
'Remember me (for 30 days)': 'Ține-mă minte (timp de 30 de zile)',
'remove compiled': 'șterge compilate',
'Request reset password': 'Cerere resetare parolă',
'Reset Password key': 'Cheie restare parolă',
'Resolve Conflict file': 'Fișier rezolvare conflict',
'restore': 'restaurare',
'revert': 'revenire',
'Role': 'Rol',
'Rows in Table': 'Linii în tabel',
'Rows selected': 'Linii selectate',
'save': 'salvare',
'Save profile': 'Salvează profil',
'Saved file hash:': 'Hash fișier salvat:',
'Search': 'Căutare',
'Semantic': 'Semantică',
'Services': 'Servicii',
'session expired': 'sesiune expirată',
'shell': 'line de commandă',
'site': 'site',
'Size of cache:': 'Size of cache:',
'some files could not be removed': 'anumite fișiere n-au putut fi șterse',
'starts with': 'începe cu',
'state': 'stare',
'static': 'static',
'Static files': 'Fișiere statice',
'Statistics': 'Statistics',
'Stylesheet': 'Foaie de stiluri',
'Submit': 'Înregistrează',
'submit': 'submit',
'Support': 'Suport',
'Sure you want to delete this object?': 'Sigur ștergeți acest obiect?',
'Table': 'tabel',
'Table name': 'Nume tabel',
'test': 'test',
'Testing application': 'Testare aplicație',
'The "query" is a condition like "db.table1.field1==\'value\'". Something like "db.table1.field1==db.table2.field2" results in a SQL JOIN.': '"Interogarea (query)" este o condiție de tipul "db.tabel1.câmp1==\'valoare\'". Ceva de genul "db.tabel1.câmp1==db.tabel2.câmp2" generează un JOIN SQL.',
'the application logic, each URL path is mapped in one exposed function in the controller': 'logica aplicației, fiecare rută URL este mapată într-o funcție expusă de controlor',
'The Core': 'Nucleul',
'the data representation, define database tables and sets': 'reprezentarea datelor, definește tabelele bazei de date și seturile (de date)',
'The output of the file is a dictionary that was rendered by the view %s': 'Fișierul produce un dicționar care a fost prelucrat de vederea %s',
'the presentations layer, views are also known as templates': 'nivelul de prezentare, vederile sunt de asemenea numite și șabloane',
'The Views': 'Vederile',
'There are no controllers': 'Nu există controlori',
'There are no models': 'Nu există modele',
'There are no modules': 'Nu există module',
'There are no static files': 'Nu există fișiere statice',
'There are no translators, only default language is supported': 'Nu există traduceri, doar limba implicită este suportată',
'There are no views': 'Nu există vederi',
'these files are served without processing, your images go here': 'aceste fișiere sunt servite fără procesare, imaginea se plasează acolo',
'This App': 'Această aplicație',
'This is a copy of the scaffolding application': 'Aceasta este o copie a aplicației schelet',
'This is the %(filename)s template': 'Aceasta este șablonul fișierului %(filename)s',
'Ticket': 'Tichet',
'Time in Cache (h:m:s)': 'Time in Cache (h:m:s)',
'Timestamp': 'Moment în timp (timestamp)',
'to previous version.': 'la versiunea anterioară.',
'too short': 'prea scurt',
'translation strings for the application': 'șiruri de caractere folosite la traducerea aplicației',
'True': 'Adevărat',
'try': 'încearcă',
'try something like': 'încearcă ceva de genul',
'Twitter': 'Twitter',
'Unable to check for upgrades': 'Imposibil de verificat dacă există actualizări',
'unable to create application "%s"': 'imposibil de creat aplicația "%s"',
'unable to delete file "%(filename)s"': 'imposibil de șters fișierul "%(filename)s"',
'Unable to download': 'Imposibil de descărcat',
'Unable to download app': 'Imposibil de descărcat aplicația',
'unable to parse csv file': 'imposibil de analizat fișierul csv',
'unable to uninstall "%s"': 'imposibil de dezinstalat "%s"',
'uncheck all': 'decoșează tot',
'uninstall': 'dezinstalează',
'update': 'actualizează',
'update all languages': 'actualizează toate limbile',
'Update:': 'Actualizare:',
'upload application:': 'incarcă aplicația:',
'Upload existing application': 'Încarcă aplicația existentă',
'upload file:': 'încarcă fișier:',
'Use (...)&(...) for AND, (...)|(...) for OR, and ~(...) for NOT to build more complex queries.': 'Folosiți (...)&(...) pentru AND, (...)|(...) pentru OR, și ~(...) pentru NOT, pentru a crea interogări complexe.',
'User %(id)s Logged-in': 'Utilizator %(id)s autentificat',
'User %(id)s Logged-out': 'Utilizator %(id)s delogat',
'User %(id)s Password changed': 'Parola utilizatorului %(id)s a fost schimbată',
'User %(id)s Password reset': 'Resetare parola utilizator %(id)s',
'User %(id)s Profile updated': 'Profil utilizator %(id)s actualizat',
'User %(id)s Registered': 'Utilizator %(id)s înregistrat',
'User ID': 'ID utilizator',
'value already in database or empty': 'Valoare existentă în baza de date sau vidă',
'Verify Password': 'Verifică parola',
'versioning': 'versiuni',
'Videos': 'Video-uri',
'View': 'Vedere',
'view': 'vedere',
'Views': 'Vederi',
'views': 'vederi',
'web2py is up to date': 'web2py este la zi',
'web2py Recent Tweets': 'Ultimele tweet-uri web2py',
'Welcome': 'Bine ați venit',
'Welcome %s': 'Bine ați venit %s',
'Welcome to web2py': 'Bun venit la web2py',
'Welcome to web2py!': 'Bun venit la web2py!',
'Which called the function %s located in the file %s': 'Care a apelat funcția %s prezentă în fișierul %s',
'YES': 'DA',
'You are successfully running web2py': 'Rulați cu succes web2py',
'You can modify this application and adapt it to your needs': 'Puteți modifica și adapta aplicația nevoilor dvs.',
'You visited the url %s': 'Ați vizitat adresa %s',
}
|
VanirAOSP/external_chromium_org
|
refs/heads/kk44
|
build/android/pylib/base/__init__.py
|
998
|
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
|
DataDog/gunicorn
|
refs/heads/master
|
examples/test.py
|
4
|
# -*- coding: utf-8 -
#
# This file is part of gunicorn released under the MIT license.
# See the NOTICE for more information.
#
# Example code from Eventlet sources
import os
import pprint
from wsgiref.validate import validator
import sys
from gunicorn import __version__
#@validator
def app(environ, start_response):
    """Simplest possible application object"""

    errors = environ['wsgi.errors']
    # pprint.pprint(('ENVIRON', environ), stream=errors)

    data = b'Hello, World!\n'
    status = '200 OK'

    response_headers = [
        ('Content-type', 'text/plain'),
        ('Content-Length', str(len(data))),
        ('X-Gunicorn-Version', __version__),
        ("Test", "test тест"),
    ]
    start_response(status, response_headers)
    return iter([data])
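# --- Usage note (not part of the original example) ---
# With gunicorn installed, this module can be served from the examples/
# directory with, for instance:
#   gunicorn test:app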
|
twitter/pants
|
refs/heads/master
|
tests/python/pants_test/backend/jvm/tasks/jvm_binary_task_test_base.py
|
2
|
# coding=utf-8
# Copyright 2015 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
from __future__ import absolute_import, division, print_function, unicode_literals
import os
from pants.backend.jvm.tasks.classpath_products import ClasspathProducts
from pants.java.jar.jar_dependency_utils import M2Coordinate, ResolvedJar
from pants_test.jvm.jvm_tool_task_test_base import JvmToolTaskTestBase
class JvmBinaryTaskTestBase(JvmToolTaskTestBase):
"""
:API: public
"""
def create_artifact(self, org, name, rev, classifier=None, ext=None, materialize=True):
"""
:API: public
:param string org: The maven dependency `groupId`.
:param string name: The maven dependency `artifactId`.
:param string rev: The maven dependency `version`.
:param string classifier: The maven dependency `classifier`.
:param string ext: There is no direct maven parallel, but the maven `packaging` value of the
depended-on artifact for simple cases, and in more complex cases the
extension of the artifact. For example, 'bundle' packaging implies an
extension of 'jar'. Defaults to 'jar'.
:param bool materialize: `False` to populate the returned resolved_jar with a `pants_path` that
does not exist; defaults to `True` and `touch`es the `pants_path`.
:returns: A resolved jar describing the artifact.
:rtype: :class:`pants.java.jar.ResolvedJar`
"""
coordinate = M2Coordinate(org=org, name=name, rev=rev, classifier=classifier, ext=ext)
cache_path = 'not/a/real/cache/path'
jar_name = coordinate.artifact_filename
pants_path = self.create_workdir_file(jar_name) if materialize else os.path.join(self.pants_workdir,
jar_name)
return ResolvedJar(coordinate=coordinate, cache_path=cache_path, pants_path=pants_path)
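  # A hypothetical call from a test (the coordinates are made up for
  # illustration):
  #   jar = self.create_artifact(org='org.example', name='example-lib', rev='1.0.0')
  #   # jar.pants_path now points at a file touched under the test workdir.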
def iter_files(self, dir_path):
"""Returns an iterator over the files found under the given `dir_path`.
:API: public
:param string dir_path: The path of the directory tree to scan for files.
:returns: An iterator of the relative paths of files found under `dir_path`.
:rtype: :class:`collections.Iterator` of string
"""
for root_dir, _, files in os.walk(dir_path):
for f in files:
yield os.path.relpath(os.path.join(root_dir, f), dir_path)
def ensure_classpath_products(self, context):
"""Gets or creates the classpath products expected by `JvmBinaryTask`.
:API: public
:param context: The pants run context to get/create/associate classpath products with.
:type context: :class:`pants.goal.context.Context`
:returns: The classpath products associated with the given `context`
:rtype: :class:`pants.backend.jvm.tasks.classpath_products.ClasspathProducts`
"""
return context.products.get_data('runtime_classpath',
init_func=ClasspathProducts.init_func(self.pants_workdir))
def ensure_consolidated_classpath_products(self, context):
"""Gets or creates the classpath products expected by `JvmBinaryTask`.
:API: public
:param context: The pants run context to get/create/associate classpath products with.
:type context: :class:`pants.goal.context.Context`
:returns: The classpath products associated with the given `context`
:rtype: :class:`pants.backend.jvm.tasks.classpath_products.ClasspathProducts`
"""
runtime_classpath = self.ensure_classpath_products(context)
return context.products.get_data('consolidated_classpath', runtime_classpath.copy)
|
wfxiang08/django185
|
refs/heads/master
|
tests/admin_inlines/tests.py
|
6
|
from __future__ import unicode_literals
import warnings
from django.contrib.admin import ModelAdmin, TabularInline
from django.contrib.admin.helpers import InlineAdminForm
from django.contrib.admin.tests import AdminSeleniumWebDriverTestCase
from django.contrib.auth.models import Permission, User
from django.contrib.contenttypes.models import ContentType
from django.core.urlresolvers import reverse
from django.test import RequestFactory, TestCase, override_settings
from django.utils.encoding import force_text
from .admin import InnerInline, site as admin_site
from .models import (
Author, BinaryTree, Book, Chapter, Child, ChildModel1, ChildModel2,
Fashionista, FootNote, Holder, Holder2, Holder3, Holder4, Inner, Inner2,
Inner3, Inner4Stacked, Inner4Tabular, Novel, OutfitItem, Parent,
ParentModelWithCustomPk, Person, Poll, Profile, ProfileCollection,
Question, Sighting, SomeChildModel, SomeParentModel, Teacher,
)
INLINE_CHANGELINK_HTML = 'class="inlinechangelink">Change</a>'
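# The fragment above is asserted against rendered change-form HTML in the
# show_change_link tests below.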
@override_settings(PASSWORD_HASHERS=('django.contrib.auth.hashers.SHA1PasswordHasher',),
ROOT_URLCONF="admin_inlines.urls")
class TestInline(TestCase):
fixtures = ['admin-views-users.xml']
def setUp(self):
holder = Holder(dummy=13)
holder.save()
Inner(dummy=42, holder=holder).save()
result = self.client.login(username='super', password='secret')
self.assertEqual(result, True)
self.factory = RequestFactory()
def test_can_delete(self):
"""
can_delete should be passed to inlineformset factory.
"""
holder = Holder.objects.get(dummy=13)
response = self.client.get(
reverse('admin:admin_inlines_holder_change', args=(holder.id,))
)
inner_formset = response.context['inline_admin_formsets'][0].formset
expected = InnerInline.can_delete
actual = inner_formset.can_delete
self.assertEqual(expected, actual, 'can_delete must be equal')
def test_readonly_stacked_inline_label(self):
"""Bug #13174."""
holder = Holder.objects.create(dummy=42)
Inner.objects.create(holder=holder, dummy=42, readonly='')
response = self.client.get(
reverse('admin:admin_inlines_holder_change', args=(holder.id,))
)
self.assertContains(response, '<label>Inner readonly label:</label>')
def test_many_to_many_inlines(self):
"Autogenerated many-to-many inlines are displayed correctly (#13407)"
response = self.client.get(reverse('admin:admin_inlines_author_add'))
# The heading for the m2m inline block uses the right text
self.assertContains(response, '<h2>Author-book relationships</h2>')
# The "add another" label is correct
self.assertContains(response, 'Add another Author-book relationship')
# The '+' is dropped from the autogenerated form prefix (Author_books+)
self.assertContains(response, 'id="id_Author_books-TOTAL_FORMS"')
def test_inline_primary(self):
person = Person.objects.create(firstname='Imelda')
item = OutfitItem.objects.create(name='Shoes')
# Imelda likes shoes, but can't carry her own bags.
data = {
'shoppingweakness_set-TOTAL_FORMS': 1,
'shoppingweakness_set-INITIAL_FORMS': 0,
'shoppingweakness_set-MAX_NUM_FORMS': 0,
'_save': 'Save',
'person': person.id,
'max_weight': 0,
'shoppingweakness_set-0-item': item.id,
}
response = self.client.post(reverse('admin:admin_inlines_fashionista_add'), data)
self.assertEqual(response.status_code, 302)
self.assertEqual(len(Fashionista.objects.filter(person__firstname='Imelda')), 1)
def test_tabular_non_field_errors(self):
"""
Ensure that non_field_errors are displayed correctly, including the
right value for colspan. Refs #13510.
"""
data = {
'title_set-TOTAL_FORMS': 1,
'title_set-INITIAL_FORMS': 0,
'title_set-MAX_NUM_FORMS': 0,
'_save': 'Save',
'title_set-0-title1': 'a title',
'title_set-0-title2': 'a different title',
}
response = self.client.post(reverse('admin:admin_inlines_titlecollection_add'), data)
# Here colspan is "4": two fields (title1 and title2), one hidden field and the delete checkbox.
self.assertContains(response, '<tr><td colspan="4"><ul class="errorlist nonfield"><li>The two titles must be the same</li></ul></td></tr>')
def test_no_parent_callable_lookup(self):
"""Admin inline `readonly_field` shouldn't invoke parent ModelAdmin callable"""
# Identically named callable isn't present in the parent ModelAdmin,
# rendering of the add view shouldn't explode
response = self.client.get(reverse('admin:admin_inlines_novel_add'))
self.assertEqual(response.status_code, 200)
# View should have the child inlines section
self.assertContains(response, '<div class="inline-group" id="chapter_set-group">')
def test_callable_lookup(self):
"""Admin inline should invoke local callable when its name is listed in readonly_fields"""
response = self.client.get(reverse('admin:admin_inlines_poll_add'))
self.assertEqual(response.status_code, 200)
# Add parent object view should have the child inlines section
self.assertContains(response, '<div class="inline-group" id="question_set-group">')
# The right callable should be used for the inline readonly_fields
# column cells
self.assertContains(response, '<p>Callable in QuestionInline</p>')
def test_help_text(self):
"""
Ensure that the inlines' model field help texts are displayed when
using both the stacked and tabular layouts.
Ref #8190.
"""
response = self.client.get(reverse('admin:admin_inlines_holder4_add'))
self.assertContains(response, '<p class="help">Awesome stacked help text is awesome.</p>', 4)
self.assertContains(response, '<img src="/static/admin/img/icon-unknown.gif" class="help help-tooltip" width="10" height="10" alt="(Awesome tabular help text is awesome.)" title="Awesome tabular help text is awesome." />', 1)
# ReadOnly fields
response = self.client.get(reverse('admin:admin_inlines_capofamiglia_add'))
self.assertContains(response, '<img src="/static/admin/img/icon-unknown.gif" class="help help-tooltip" width="10" height="10" alt="(Help text for ReadOnlyInline)" title="Help text for ReadOnlyInline" />', 1)
def test_inline_hidden_field_no_column(self):
"""#18263 -- Make sure hidden fields don't get a column in tabular inlines"""
parent = SomeParentModel.objects.create(name='a')
SomeChildModel.objects.create(name='b', position='0', parent=parent)
SomeChildModel.objects.create(name='c', position='1', parent=parent)
response = self.client.get(reverse('admin:admin_inlines_someparentmodel_change', args=(parent.pk,)))
self.assertNotContains(response, '<td class="field-position">')
self.assertContains(response, (
'<input id="id_somechildmodel_set-1-position" '
'name="somechildmodel_set-1-position" type="hidden" value="1" />'))
def test_non_related_name_inline(self):
"""
Ensure that multiple inlines with related_name='+' have correct form
prefixes. Bug #16838.
"""
response = self.client.get(reverse('admin:admin_inlines_capofamiglia_add'))
self.assertContains(response,
'<input type="hidden" name="-1-0-id" id="id_-1-0-id" />', html=True)
self.assertContains(response,
'<input type="hidden" name="-1-0-capo_famiglia" id="id_-1-0-capo_famiglia" />', html=True)
self.assertContains(response,
'<input id="id_-1-0-name" type="text" class="vTextField" '
'name="-1-0-name" maxlength="100" />', html=True)
self.assertContains(response,
'<input type="hidden" name="-2-0-id" id="id_-2-0-id" />', html=True)
self.assertContains(response,
'<input type="hidden" name="-2-0-capo_famiglia" id="id_-2-0-capo_famiglia" />', html=True)
self.assertContains(response,
'<input id="id_-2-0-name" type="text" class="vTextField" '
'name="-2-0-name" maxlength="100" />', html=True)
@override_settings(USE_L10N=True, USE_THOUSAND_SEPARATOR=True)
def test_localize_pk_shortcut(self):
"""
Ensure that the "View on Site" link is correct for locales that use
thousand separators
"""
holder = Holder.objects.create(pk=123456789, dummy=42)
inner = Inner.objects.create(pk=987654321, holder=holder, dummy=42, readonly='')
response = self.client.get(reverse('admin:admin_inlines_holder_change', args=(holder.id,)))
inner_shortcut = 'r/%s/%s/' % (ContentType.objects.get_for_model(inner).pk, inner.pk)
self.assertContains(response, inner_shortcut)
def test_custom_pk_shortcut(self):
"""
Ensure that the "View on Site" link is correct for models with a
custom primary key field. Bug #18433.
"""
parent = ParentModelWithCustomPk.objects.create(my_own_pk="foo", name="Foo")
child1 = ChildModel1.objects.create(my_own_pk="bar", name="Bar", parent=parent)
child2 = ChildModel2.objects.create(my_own_pk="baz", name="Baz", parent=parent)
response = self.client.get(reverse('admin:admin_inlines_parentmodelwithcustompk_change', args=('foo',)))
child1_shortcut = 'r/%s/%s/' % (ContentType.objects.get_for_model(child1).pk, child1.pk)
child2_shortcut = 'r/%s/%s/' % (ContentType.objects.get_for_model(child2).pk, child2.pk)
self.assertContains(response, child1_shortcut)
self.assertContains(response, child2_shortcut)
def test_create_inlines_on_inherited_model(self):
"""
Ensure that an object can be created with inlines when it inherits
another class. Bug #19524.
"""
data = {
'name': 'Martian',
'sighting_set-TOTAL_FORMS': 1,
'sighting_set-INITIAL_FORMS': 0,
'sighting_set-MAX_NUM_FORMS': 0,
'sighting_set-0-place': 'Zone 51',
'_save': 'Save',
}
response = self.client.post(reverse('admin:admin_inlines_extraterrestrial_add'), data)
self.assertEqual(response.status_code, 302)
self.assertEqual(Sighting.objects.filter(et__name='Martian').count(), 1)
def test_custom_get_extra_form(self):
bt_head = BinaryTree.objects.create(name="Tree Head")
BinaryTree.objects.create(name="First Child", parent=bt_head)
# The maximum number of forms should respect 'get_max_num' on the
# ModelAdmin
max_forms_input = '<input id="id_binarytree_set-MAX_NUM_FORMS" name="binarytree_set-MAX_NUM_FORMS" type="hidden" value="%d" />'
# The total number of forms will remain the same in either case
total_forms_hidden = '<input id="id_binarytree_set-TOTAL_FORMS" name="binarytree_set-TOTAL_FORMS" type="hidden" value="2" />'
response = self.client.get(reverse('admin:admin_inlines_binarytree_add'))
self.assertContains(response, max_forms_input % 3)
self.assertContains(response, total_forms_hidden)
response = self.client.get(reverse('admin:admin_inlines_binarytree_change', args=(bt_head.id,)))
self.assertContains(response, max_forms_input % 2)
self.assertContains(response, total_forms_hidden)
def test_min_num(self):
"""
Ensure that min_num and extra determine number of forms.
"""
class MinNumInline(TabularInline):
model = BinaryTree
min_num = 2
extra = 3
modeladmin = ModelAdmin(BinaryTree, admin_site)
modeladmin.inlines = [MinNumInline]
min_forms = '<input id="id_binarytree_set-MIN_NUM_FORMS" name="binarytree_set-MIN_NUM_FORMS" type="hidden" value="2" />'
total_forms = '<input id="id_binarytree_set-TOTAL_FORMS" name="binarytree_set-TOTAL_FORMS" type="hidden" value="5" />'
request = self.factory.get(reverse('admin:admin_inlines_binarytree_add'))
request.user = User(username='super', is_superuser=True)
response = modeladmin.changeform_view(request)
self.assertContains(response, min_forms)
self.assertContains(response, total_forms)
def test_custom_min_num(self):
"""
Ensure that get_min_num is called and used correctly.
"""
bt_head = BinaryTree.objects.create(name="Tree Head")
BinaryTree.objects.create(name="First Child", parent=bt_head)
class MinNumInline(TabularInline):
model = BinaryTree
extra = 3
def get_min_num(self, request, obj=None, **kwargs):
if obj:
return 5
return 2
modeladmin = ModelAdmin(BinaryTree, admin_site)
modeladmin.inlines = [MinNumInline]
min_forms = '<input id="id_binarytree_set-MIN_NUM_FORMS" name="binarytree_set-MIN_NUM_FORMS" type="hidden" value="%d" />'
total_forms = '<input id="id_binarytree_set-TOTAL_FORMS" name="binarytree_set-TOTAL_FORMS" type="hidden" value="%d" />'
request = self.factory.get(reverse('admin:admin_inlines_binarytree_add'))
request.user = User(username='super', is_superuser=True)
response = modeladmin.changeform_view(request)
self.assertContains(response, min_forms % 2)
self.assertContains(response, total_forms % 5)
request = self.factory.get(reverse('admin:admin_inlines_binarytree_change', args=(bt_head.id,)))
request.user = User(username='super', is_superuser=True)
response = modeladmin.changeform_view(request, object_id=str(bt_head.id))
self.assertContains(response, min_forms % 5)
self.assertContains(response, total_forms % 8)
def test_inline_nonauto_noneditable_pk(self):
response = self.client.get(reverse('admin:admin_inlines_author_add'))
self.assertContains(response,
'<input id="id_nonautopkbook_set-0-rand_pk" name="nonautopkbook_set-0-rand_pk" type="hidden" />',
html=True)
self.assertContains(response,
'<input id="id_nonautopkbook_set-2-0-rand_pk" name="nonautopkbook_set-2-0-rand_pk" type="hidden" />',
html=True)
def test_inline_editable_pk(self):
response = self.client.get(reverse('admin:admin_inlines_author_add'))
self.assertContains(response,
'<input class="vIntegerField" id="id_editablepkbook_set-0-manual_pk" name="editablepkbook_set-0-manual_pk" type="text" />',
html=True, count=1)
self.assertContains(response,
'<input class="vIntegerField" id="id_editablepkbook_set-2-0-manual_pk" name="editablepkbook_set-2-0-manual_pk" type="text" />',
html=True, count=1)
def test_stacked_inline_edit_form_contains_has_original_class(self):
holder = Holder.objects.create(dummy=1)
holder.inner_set.create(dummy=1)
response = self.client.get(reverse('admin:admin_inlines_holder_change', args=(holder.pk,)))
self.assertContains(
response,
'<div class="inline-related has_original" id="inner_set-0">',
count=1
)
self.assertContains(
response,
'<div class="inline-related" id="inner_set-1">',
count=1
)
def test_inlines_show_change_link_registered(self):
"Inlines `show_change_link` for registered models when enabled."
holder = Holder4.objects.create(dummy=1)
item1 = Inner4Stacked.objects.create(dummy=1, holder=holder)
item2 = Inner4Tabular.objects.create(dummy=1, holder=holder)
items = (
('inner4stacked', item1.pk),
('inner4tabular', item2.pk),
)
response = self.client.get(reverse('admin:admin_inlines_holder4_change', args=(holder.pk,)))
self.assertTrue(response.context['inline_admin_formset'].opts.has_registered_model)
for model, pk in items:
url = reverse('admin:admin_inlines_%s_change' % model, args=(pk,))
self.assertContains(response, '<a href="%s" %s' % (url, INLINE_CHANGELINK_HTML))
def test_inlines_show_change_link_unregistered(self):
"Inlines `show_change_link` disabled for unregistered models."
parent = ParentModelWithCustomPk.objects.create(my_own_pk="foo", name="Foo")
ChildModel1.objects.create(my_own_pk="bar", name="Bar", parent=parent)
ChildModel2.objects.create(my_own_pk="baz", name="Baz", parent=parent)
response = self.client.get(reverse('admin:admin_inlines_parentmodelwithcustompk_change', args=('foo',)))
self.assertFalse(response.context['inline_admin_formset'].opts.has_registered_model)
self.assertNotContains(response, INLINE_CHANGELINK_HTML)
def test_tabular_inline_show_change_link_false_registered(self):
"Inlines `show_change_link` disabled by default."
poll = Poll.objects.create(name="New poll")
Question.objects.create(poll=poll)
response = self.client.get(reverse('admin:admin_inlines_poll_change', args=(poll.pk,)))
self.assertTrue(response.context['inline_admin_formset'].opts.has_registered_model)
self.assertNotContains(response, INLINE_CHANGELINK_HTML)
@override_settings(PASSWORD_HASHERS=('django.contrib.auth.hashers.SHA1PasswordHasher',),
ROOT_URLCONF="admin_inlines.urls")
class TestInlineMedia(TestCase):
fixtures = ['admin-views-users.xml']
def setUp(self):
result = self.client.login(username='super', password='secret')
self.assertEqual(result, True)
def test_inline_media_only_base(self):
holder = Holder(dummy=13)
holder.save()
Inner(dummy=42, holder=holder).save()
change_url = reverse('admin:admin_inlines_holder_change', args=(holder.id,))
response = self.client.get(change_url)
self.assertContains(response, 'my_awesome_admin_scripts.js')
def test_inline_media_only_inline(self):
holder = Holder3(dummy=13)
holder.save()
Inner3(dummy=42, holder=holder).save()
change_url = reverse('admin:admin_inlines_holder3_change', args=(holder.id,))
response = self.client.get(change_url)
self.assertContains(response, 'my_awesome_inline_scripts.js')
def test_all_inline_media(self):
holder = Holder2(dummy=13)
holder.save()
Inner2(dummy=42, holder=holder).save()
change_url = reverse('admin:admin_inlines_holder2_change', args=(holder.id,))
response = self.client.get(change_url)
self.assertContains(response, 'my_awesome_admin_scripts.js')
self.assertContains(response, 'my_awesome_inline_scripts.js')
@override_settings(ROOT_URLCONF="admin_inlines.urls")
class TestInlineAdminForm(TestCase):
def test_immutable_content_type(self):
"""Regression for #9362
The problem depends only on InlineAdminForm and its "original"
argument, so we can safely set the other arguments to None/{}. We just
need to check that the content_type argument of Child isn't altered by
the internals of the inline form."""
sally = Teacher.objects.create(name='Sally')
john = Parent.objects.create(name='John')
joe = Child.objects.create(name='Joe', teacher=sally, parent=john)
iaf = InlineAdminForm(None, None, {}, {}, joe)
parent_ct = ContentType.objects.get_for_model(Parent)
self.assertEqual(iaf.original.content_type, parent_ct)
def test_original_content_type_id_deprecated(self):
"""
        #23444 -- Verify a warning is raised when accessing the
        `original_content_type_id` attribute of an `InlineAdminForm` object.
"""
iaf = InlineAdminForm(None, None, {}, {}, None)
poll = Poll.objects.create(name="poll")
iaf2 = InlineAdminForm(None, None, {}, {}, poll)
poll_ct = ContentType.objects.get_for_model(Poll)
with warnings.catch_warnings(record=True) as recorded:
warnings.filterwarnings('always')
with self.assertRaises(AttributeError):
iaf.original_content_type_id
msg = force_text(recorded.pop().message)
self.assertEqual(
msg,
'InlineAdminForm.original_content_type_id is deprecated and will be '
'removed in Django 1.10. If you were using this attribute to construct '
'the "view on site" URL, use the `absolute_url` attribute instead.'
)
self.assertEqual(iaf2.original_content_type_id, poll_ct.id)
@override_settings(PASSWORD_HASHERS=('django.contrib.auth.hashers.SHA1PasswordHasher',),
ROOT_URLCONF="admin_inlines.urls")
class TestInlineProtectedOnDelete(TestCase):
fixtures = ['admin-views-users.xml']
def setUp(self):
result = self.client.login(username='super', password='secret')
self.assertEqual(result, True)
def test_deleting_inline_with_protected_delete_does_not_validate(self):
lotr = Novel.objects.create(name='Lord of the rings')
chapter = Chapter.objects.create(novel=lotr, name='Many Meetings')
foot_note = FootNote.objects.create(chapter=chapter, note='yadda yadda')
change_url = reverse('admin:admin_inlines_novel_change', args=(lotr.id,))
response = self.client.get(change_url)
data = {
'name': lotr.name,
'chapter_set-TOTAL_FORMS': 1,
'chapter_set-INITIAL_FORMS': 1,
'chapter_set-MAX_NUM_FORMS': 1000,
'_save': 'Save',
'chapter_set-0-id': chapter.id,
'chapter_set-0-name': chapter.name,
'chapter_set-0-novel': lotr.id,
'chapter_set-0-DELETE': 'on'
}
response = self.client.post(change_url, data)
self.assertEqual(response.status_code, 200)
self.assertContains(response, "Deleting chapter %s would require deleting "
"the following protected related objects: foot note %s"
% (chapter, foot_note))
@override_settings(ROOT_URLCONF="admin_inlines.urls")
class TestInlinePermissions(TestCase):
"""
Make sure the admin respects permissions for objects that are edited
inline. Refs #8060.
"""
def setUp(self):
self.user = User(username='admin')
self.user.is_staff = True
self.user.is_active = True
self.user.set_password('secret')
self.user.save()
self.author_ct = ContentType.objects.get_for_model(Author)
self.holder_ct = ContentType.objects.get_for_model(Holder2)
self.book_ct = ContentType.objects.get_for_model(Book)
self.inner_ct = ContentType.objects.get_for_model(Inner2)
# User always has permissions to add and change Authors, and Holders,
# the main (parent) models of the inlines. Permissions on the inlines
# vary per test.
permission = Permission.objects.get(codename='add_author', content_type=self.author_ct)
self.user.user_permissions.add(permission)
permission = Permission.objects.get(codename='change_author', content_type=self.author_ct)
self.user.user_permissions.add(permission)
permission = Permission.objects.get(codename='add_holder2', content_type=self.holder_ct)
self.user.user_permissions.add(permission)
permission = Permission.objects.get(codename='change_holder2', content_type=self.holder_ct)
self.user.user_permissions.add(permission)
author = Author.objects.create(pk=1, name='The Author')
book = author.books.create(name='The inline Book')
self.author_change_url = reverse('admin:admin_inlines_author_change', args=(author.id,))
# Get the ID of the automatically created intermediate model for the Author-Book m2m
author_book_auto_m2m_intermediate = Author.books.through.objects.get(author=author, book=book)
self.author_book_auto_m2m_intermediate_id = author_book_auto_m2m_intermediate.pk
holder = Holder2.objects.create(dummy=13)
inner2 = Inner2.objects.create(dummy=42, holder=holder)
self.holder_change_url = reverse('admin:admin_inlines_holder2_change', args=(holder.id,))
self.inner2_id = inner2.id
self.assertEqual(
self.client.login(username='admin', password='secret'),
True)
def test_inline_add_m2m_noperm(self):
response = self.client.get(reverse('admin:admin_inlines_author_add'))
# No change permission on books, so no inline
self.assertNotContains(response, '<h2>Author-book relationships</h2>')
self.assertNotContains(response, 'Add another Author-Book Relationship')
self.assertNotContains(response, 'id="id_Author_books-TOTAL_FORMS"')
def test_inline_add_fk_noperm(self):
response = self.client.get(reverse('admin:admin_inlines_holder2_add'))
# No permissions on Inner2s, so no inline
self.assertNotContains(response, '<h2>Inner2s</h2>')
self.assertNotContains(response, 'Add another Inner2')
self.assertNotContains(response, 'id="id_inner2_set-TOTAL_FORMS"')
def test_inline_change_m2m_noperm(self):
response = self.client.get(self.author_change_url)
# No change permission on books, so no inline
self.assertNotContains(response, '<h2>Author-book relationships</h2>')
self.assertNotContains(response, 'Add another Author-Book Relationship')
self.assertNotContains(response, 'id="id_Author_books-TOTAL_FORMS"')
def test_inline_change_fk_noperm(self):
response = self.client.get(self.holder_change_url)
# No permissions on Inner2s, so no inline
self.assertNotContains(response, '<h2>Inner2s</h2>')
self.assertNotContains(response, 'Add another Inner2')
self.assertNotContains(response, 'id="id_inner2_set-TOTAL_FORMS"')
def test_inline_add_m2m_add_perm(self):
permission = Permission.objects.get(codename='add_book', content_type=self.book_ct)
self.user.user_permissions.add(permission)
response = self.client.get(reverse('admin:admin_inlines_author_add'))
# No change permission on Books, so no inline
self.assertNotContains(response, '<h2>Author-book relationships</h2>')
self.assertNotContains(response, 'Add another Author-Book Relationship')
self.assertNotContains(response, 'id="id_Author_books-TOTAL_FORMS"')
def test_inline_add_fk_add_perm(self):
permission = Permission.objects.get(codename='add_inner2', content_type=self.inner_ct)
self.user.user_permissions.add(permission)
response = self.client.get(reverse('admin:admin_inlines_holder2_add'))
# Add permission on inner2s, so we get the inline
self.assertContains(response, '<h2>Inner2s</h2>')
self.assertContains(response, 'Add another Inner2')
self.assertContains(response, '<input type="hidden" id="id_inner2_set-TOTAL_FORMS" '
'value="3" name="inner2_set-TOTAL_FORMS" />', html=True)
def test_inline_change_m2m_add_perm(self):
permission = Permission.objects.get(codename='add_book', content_type=self.book_ct)
self.user.user_permissions.add(permission)
response = self.client.get(self.author_change_url)
# No change permission on books, so no inline
self.assertNotContains(response, '<h2>Author-book relationships</h2>')
self.assertNotContains(response, 'Add another Author-Book Relationship')
self.assertNotContains(response, 'id="id_Author_books-TOTAL_FORMS"')
self.assertNotContains(response, 'id="id_Author_books-0-DELETE"')
def test_inline_change_m2m_change_perm(self):
permission = Permission.objects.get(codename='change_book', content_type=self.book_ct)
self.user.user_permissions.add(permission)
response = self.client.get(self.author_change_url)
# We have change perm on books, so we can add/change/delete inlines
self.assertContains(response, '<h2>Author-book relationships</h2>')
self.assertContains(response, 'Add another Author-book relationship')
self.assertContains(response, '<input type="hidden" id="id_Author_books-TOTAL_FORMS" '
'value="4" name="Author_books-TOTAL_FORMS" />', html=True)
self.assertContains(response, '<input type="hidden" id="id_Author_books-0-id" '
'value="%i" name="Author_books-0-id" />' % self.author_book_auto_m2m_intermediate_id, html=True)
self.assertContains(response, 'id="id_Author_books-0-DELETE"')
def test_inline_change_fk_add_perm(self):
permission = Permission.objects.get(codename='add_inner2', content_type=self.inner_ct)
self.user.user_permissions.add(permission)
response = self.client.get(self.holder_change_url)
# Add permission on inner2s, so we can add but not modify existing
self.assertContains(response, '<h2>Inner2s</h2>')
self.assertContains(response, 'Add another Inner2')
# 3 extra forms only, not the existing instance form
self.assertContains(response, '<input type="hidden" id="id_inner2_set-TOTAL_FORMS" '
'value="3" name="inner2_set-TOTAL_FORMS" />', html=True)
self.assertNotContains(response, '<input type="hidden" id="id_inner2_set-0-id" '
'value="%i" name="inner2_set-0-id" />' % self.inner2_id, html=True)
def test_inline_change_fk_change_perm(self):
permission = Permission.objects.get(codename='change_inner2', content_type=self.inner_ct)
self.user.user_permissions.add(permission)
response = self.client.get(self.holder_change_url)
# Change permission on inner2s, so we can change existing but not add new
self.assertContains(response, '<h2>Inner2s</h2>')
# Just the one form for existing instances
self.assertContains(response, '<input type="hidden" id="id_inner2_set-TOTAL_FORMS" '
'value="1" name="inner2_set-TOTAL_FORMS" />', html=True)
self.assertContains(response, '<input type="hidden" id="id_inner2_set-0-id" '
'value="%i" name="inner2_set-0-id" />' % self.inner2_id, html=True)
# max-num 0 means we can't add new ones
self.assertContains(response, '<input type="hidden" id="id_inner2_set-MAX_NUM_FORMS" '
'value="0" name="inner2_set-MAX_NUM_FORMS" />', html=True)
def test_inline_change_fk_add_change_perm(self):
permission = Permission.objects.get(codename='add_inner2', content_type=self.inner_ct)
self.user.user_permissions.add(permission)
permission = Permission.objects.get(codename='change_inner2', content_type=self.inner_ct)
self.user.user_permissions.add(permission)
response = self.client.get(self.holder_change_url)
# Add/change perm, so we can add new and change existing
self.assertContains(response, '<h2>Inner2s</h2>')
# One form for existing instance and three extra for new
self.assertContains(response, '<input type="hidden" id="id_inner2_set-TOTAL_FORMS" '
'value="4" name="inner2_set-TOTAL_FORMS" />', html=True)
self.assertContains(response, '<input type="hidden" id="id_inner2_set-0-id" '
'value="%i" name="inner2_set-0-id" />' % self.inner2_id, html=True)
def test_inline_change_fk_change_del_perm(self):
permission = Permission.objects.get(codename='change_inner2', content_type=self.inner_ct)
self.user.user_permissions.add(permission)
permission = Permission.objects.get(codename='delete_inner2', content_type=self.inner_ct)
self.user.user_permissions.add(permission)
response = self.client.get(self.holder_change_url)
# Change/delete perm on inner2s, so we can change/delete existing
self.assertContains(response, '<h2>Inner2s</h2>')
# One form for existing instance only, no new
self.assertContains(response, '<input type="hidden" id="id_inner2_set-TOTAL_FORMS" '
'value="1" name="inner2_set-TOTAL_FORMS" />', html=True)
self.assertContains(response, '<input type="hidden" id="id_inner2_set-0-id" '
'value="%i" name="inner2_set-0-id" />' % self.inner2_id, html=True)
self.assertContains(response, 'id="id_inner2_set-0-DELETE"')
def test_inline_change_fk_all_perms(self):
permission = Permission.objects.get(codename='add_inner2', content_type=self.inner_ct)
self.user.user_permissions.add(permission)
permission = Permission.objects.get(codename='change_inner2', content_type=self.inner_ct)
self.user.user_permissions.add(permission)
permission = Permission.objects.get(codename='delete_inner2', content_type=self.inner_ct)
self.user.user_permissions.add(permission)
response = self.client.get(self.holder_change_url)
# All perms on inner2s, so we can add/change/delete
self.assertContains(response, '<h2>Inner2s</h2>')
# One form for existing instance only, three for new
self.assertContains(response, '<input type="hidden" id="id_inner2_set-TOTAL_FORMS" '
'value="4" name="inner2_set-TOTAL_FORMS" />', html=True)
self.assertContains(response, '<input type="hidden" id="id_inner2_set-0-id" '
'value="%i" name="inner2_set-0-id" />' % self.inner2_id, html=True)
self.assertContains(response, 'id="id_inner2_set-0-DELETE"')
@override_settings(PASSWORD_HASHERS=('django.contrib.auth.hashers.SHA1PasswordHasher',),
ROOT_URLCONF="admin_inlines.urls")
class SeleniumFirefoxTests(AdminSeleniumWebDriverTestCase):
available_apps = ['admin_inlines'] + AdminSeleniumWebDriverTestCase.available_apps
fixtures = ['admin-views-users.xml']
webdriver_class = 'selenium.webdriver.firefox.webdriver.WebDriver'
def test_add_stackeds(self):
"""
Ensure that the "Add another XXX" link correctly adds items to the
stacked formset.
"""
self.admin_login(username='super', password='secret')
self.selenium.get('%s%s' % (self.live_server_url,
reverse('admin:admin_inlines_holder4_add')))
inline_id = '#inner4stacked_set-group'
rows_length = lambda: len(self.selenium.find_elements_by_css_selector(
'%s .dynamic-inner4stacked_set' % inline_id))
self.assertEqual(rows_length(), 3)
add_button = self.selenium.find_element_by_link_text(
'Add another Inner4 stacked')
add_button.click()
self.assertEqual(rows_length(), 4)
def test_delete_stackeds(self):
self.admin_login(username='super', password='secret')
self.selenium.get('%s%s' % (self.live_server_url,
reverse('admin:admin_inlines_holder4_add')))
inline_id = '#inner4stacked_set-group'
rows_length = lambda: len(self.selenium.find_elements_by_css_selector(
'%s .dynamic-inner4stacked_set' % inline_id))
self.assertEqual(rows_length(), 3)
add_button = self.selenium.find_element_by_link_text(
'Add another Inner4 stacked')
add_button.click()
add_button.click()
self.assertEqual(rows_length(), 5, msg="sanity check")
for delete_link in self.selenium.find_elements_by_css_selector(
'%s .inline-deletelink' % inline_id):
delete_link.click()
self.assertEqual(rows_length(), 3)
def test_add_inlines(self):
"""
Ensure that the "Add another XXX" link correctly adds items to the
inline form.
"""
self.admin_login(username='super', password='secret')
self.selenium.get('%s%s' % (self.live_server_url,
reverse('admin:admin_inlines_profilecollection_add')))
# Check that there's only one inline to start with and that it has the
# correct ID.
self.assertEqual(len(self.selenium.find_elements_by_css_selector(
'.dynamic-profile_set')), 1)
self.assertEqual(self.selenium.find_elements_by_css_selector(
'.dynamic-profile_set')[0].get_attribute('id'),
'profile_set-0')
self.assertEqual(len(self.selenium.find_elements_by_css_selector(
'.dynamic-profile_set#profile_set-0 input[name=profile_set-0-first_name]')), 1)
self.assertEqual(len(self.selenium.find_elements_by_css_selector(
'.dynamic-profile_set#profile_set-0 input[name=profile_set-0-last_name]')), 1)
# Add an inline
self.selenium.find_element_by_link_text('Add another Profile').click()
# Check that the inline has been added, that it has the right id, and
# that it contains the right fields.
self.assertEqual(len(self.selenium.find_elements_by_css_selector(
'.dynamic-profile_set')), 2)
self.assertEqual(self.selenium.find_elements_by_css_selector(
'.dynamic-profile_set')[1].get_attribute('id'), 'profile_set-1')
self.assertEqual(len(self.selenium.find_elements_by_css_selector(
'.dynamic-profile_set#profile_set-1 input[name=profile_set-1-first_name]')), 1)
self.assertEqual(len(self.selenium.find_elements_by_css_selector(
'.dynamic-profile_set#profile_set-1 input[name=profile_set-1-last_name]')), 1)
# Let's add another one to be sure
self.selenium.find_element_by_link_text('Add another Profile').click()
self.assertEqual(len(self.selenium.find_elements_by_css_selector(
'.dynamic-profile_set')), 3)
self.assertEqual(self.selenium.find_elements_by_css_selector(
'.dynamic-profile_set')[2].get_attribute('id'), 'profile_set-2')
self.assertEqual(len(self.selenium.find_elements_by_css_selector(
'.dynamic-profile_set#profile_set-2 input[name=profile_set-2-first_name]')), 1)
self.assertEqual(len(self.selenium.find_elements_by_css_selector(
'.dynamic-profile_set#profile_set-2 input[name=profile_set-2-last_name]')), 1)
# Enter some data and click 'Save'
self.selenium.find_element_by_name('profile_set-0-first_name').send_keys('0 first name 1')
self.selenium.find_element_by_name('profile_set-0-last_name').send_keys('0 last name 2')
self.selenium.find_element_by_name('profile_set-1-first_name').send_keys('1 first name 1')
self.selenium.find_element_by_name('profile_set-1-last_name').send_keys('1 last name 2')
self.selenium.find_element_by_name('profile_set-2-first_name').send_keys('2 first name 1')
self.selenium.find_element_by_name('profile_set-2-last_name').send_keys('2 last name 2')
self.selenium.find_element_by_xpath('//input[@value="Save"]').click()
self.wait_page_loaded()
# Check that the objects have been created in the database
self.assertEqual(ProfileCollection.objects.all().count(), 1)
self.assertEqual(Profile.objects.all().count(), 3)
def test_delete_inlines(self):
self.admin_login(username='super', password='secret')
self.selenium.get('%s%s' % (self.live_server_url,
reverse('admin:admin_inlines_profilecollection_add')))
# Add a few inlines
self.selenium.find_element_by_link_text('Add another Profile').click()
self.selenium.find_element_by_link_text('Add another Profile').click()
self.selenium.find_element_by_link_text('Add another Profile').click()
self.selenium.find_element_by_link_text('Add another Profile').click()
self.assertEqual(len(self.selenium.find_elements_by_css_selector(
'#profile_set-group table tr.dynamic-profile_set')), 5)
self.assertEqual(len(self.selenium.find_elements_by_css_selector(
'form#profilecollection_form tr.dynamic-profile_set#profile_set-0')), 1)
self.assertEqual(len(self.selenium.find_elements_by_css_selector(
'form#profilecollection_form tr.dynamic-profile_set#profile_set-1')), 1)
self.assertEqual(len(self.selenium.find_elements_by_css_selector(
'form#profilecollection_form tr.dynamic-profile_set#profile_set-2')), 1)
self.assertEqual(len(self.selenium.find_elements_by_css_selector(
'form#profilecollection_form tr.dynamic-profile_set#profile_set-3')), 1)
self.assertEqual(len(self.selenium.find_elements_by_css_selector(
'form#profilecollection_form tr.dynamic-profile_set#profile_set-4')), 1)
# Click on a few delete buttons
self.selenium.find_element_by_css_selector(
'form#profilecollection_form tr.dynamic-profile_set#profile_set-1 td.delete a').click()
self.selenium.find_element_by_css_selector(
'form#profilecollection_form tr.dynamic-profile_set#profile_set-2 td.delete a').click()
# Verify that they're gone and that the IDs have been re-sequenced
self.assertEqual(len(self.selenium.find_elements_by_css_selector(
'#profile_set-group table tr.dynamic-profile_set')), 3)
self.assertEqual(len(self.selenium.find_elements_by_css_selector(
'form#profilecollection_form tr.dynamic-profile_set#profile_set-0')), 1)
self.assertEqual(len(self.selenium.find_elements_by_css_selector(
'form#profilecollection_form tr.dynamic-profile_set#profile_set-1')), 1)
self.assertEqual(len(self.selenium.find_elements_by_css_selector(
'form#profilecollection_form tr.dynamic-profile_set#profile_set-2')), 1)
def test_alternating_rows(self):
self.admin_login(username='super', password='secret')
self.selenium.get('%s%s' % (self.live_server_url,
reverse('admin:admin_inlines_profilecollection_add')))
# Add a few inlines
self.selenium.find_element_by_link_text('Add another Profile').click()
self.selenium.find_element_by_link_text('Add another Profile').click()
row_selector = 'form#profilecollection_form tr.dynamic-profile_set'
self.assertEqual(len(self.selenium.find_elements_by_css_selector(
"%s.row1" % row_selector)), 2, msg="Expect two row1 styled rows")
self.assertEqual(len(self.selenium.find_elements_by_css_selector(
"%s.row2" % row_selector)), 1, msg="Expect one row2 styled row")
class SeleniumChromeTests(SeleniumFirefoxTests):
webdriver_class = 'selenium.webdriver.chrome.webdriver.WebDriver'
class SeleniumIETests(SeleniumFirefoxTests):
webdriver_class = 'selenium.webdriver.ie.webdriver.WebDriver'
|
TeamEOS/external_chromium_org
|
refs/heads/lp5.0
|
tools/telemetry/telemetry/web_perf/timeline_based_measurement_unittest.py
|
8
|
# Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import os
import unittest
from telemetry import test
from telemetry.core import wpr_modes
from telemetry.timeline import model as model_module
from telemetry.timeline import async_slice
from telemetry.page import page_measurement_unittest_base
from telemetry.page import page_set
from telemetry.page import page as page_module
# pylint: disable=W0401,W0614
from telemetry.page.actions.all_page_actions import *
from telemetry.results import page_measurement_results
from telemetry.unittest import options_for_unittests
from telemetry.web_perf import timeline_based_measurement as tbm_module
from telemetry.web_perf.metrics import timeline_based_metric
class TimelineBasedMetricsTests(unittest.TestCase):
def setUp(self):
model = model_module.TimelineModel()
renderer_thread = model.GetOrCreateProcess(1).GetOrCreateThread(2)
renderer_thread.name = 'CrRendererMain'
# [ X ]
# [ Y ]
renderer_thread.BeginSlice('cat1', 'x.y', 10, 0)
renderer_thread.EndSlice(20, 20)
renderer_thread.async_slices.append(async_slice.AsyncSlice(
'cat', 'Interaction.LogicalName1/is_smooth',
timestamp=0, duration=20,
start_thread=renderer_thread, end_thread=renderer_thread,
thread_start=5, thread_duration=15))
renderer_thread.async_slices.append(async_slice.AsyncSlice(
'cat', 'Interaction.LogicalName2/is_responsive',
timestamp=25, duration=5,
start_thread=renderer_thread, end_thread=renderer_thread,
thread_start=25, thread_duration=5))
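    # The two async slices above become the interaction records LogicalName1
    # (is_smooth, spanning 0-20) and LogicalName2 (is_responsive, spanning
    # 25-30) that the assertions in the tests below depend on.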
model.FinalizeImport()
self.model = model
self.renderer_thread = renderer_thread
def testFindTimelineInteractionRecords(self):
metric = tbm_module._TimelineBasedMetrics( # pylint: disable=W0212
self.model, self.renderer_thread, lambda _: [])
interactions = metric.FindTimelineInteractionRecords()
self.assertEquals(2, len(interactions))
self.assertTrue(interactions[0].is_smooth)
self.assertEquals(0, interactions[0].start)
self.assertEquals(20, interactions[0].end)
self.assertTrue(interactions[1].is_responsive)
self.assertEquals(25, interactions[1].start)
self.assertEquals(30, interactions[1].end)
def testAddResults(self):
results = page_measurement_results.PageMeasurementResults()
class FakeSmoothMetric(timeline_based_metric.TimelineBasedMetric):
def AddResults(self, model, renderer_thread,
interaction_records, results):
results.Add('FakeSmoothMetric', 'ms', 1)
class FakeLoadingMetric(timeline_based_metric.TimelineBasedMetric):
def AddResults(self, model, renderer_thread,
interaction_records, results):
for r in interaction_records:
assert r.logical_name == 'LogicalName2'
results.Add('FakeLoadingMetric', 'ms', 2)
def CreateMetricsForTimelineInteractionRecord(interaction):
res = []
if interaction.is_smooth:
res.append(FakeSmoothMetric())
if interaction.is_responsive:
res.append(FakeLoadingMetric())
return res
metric = tbm_module._TimelineBasedMetrics( # pylint: disable=W0212
self.model, self.renderer_thread,
CreateMetricsForTimelineInteractionRecord)
ps = page_set.PageSet(file_path=os.path.dirname(__file__))
ps.AddPageWithDefaultRunNavigate('http://www.bar.com/')
results.WillMeasurePage(ps.pages[0])
metric.AddResults(results)
results.DidMeasurePage()
v = results.FindAllPageSpecificValuesNamed('LogicalName1-FakeSmoothMetric')
self.assertEquals(len(v), 1)
v = results.FindAllPageSpecificValuesNamed('LogicalName2-FakeLoadingMetric')
self.assertEquals(len(v), 1)
class TestTimelinebasedMeasurementPage(page_module.Page):
def __init__(self, ps, base_dir):
super(TestTimelinebasedMeasurementPage, self).__init__(
'file://interaction_enabled_page.html', ps, base_dir)
def RunSmoothness(self, action_runner):
action_runner.Wait(2)
action_runner.TapElement('#drawer')
action_runner.Wait(1)
class TimelineBasedMeasurementTest(
page_measurement_unittest_base.PageMeasurementUnitTestBase):
def setUp(self):
self._options = options_for_unittests.GetCopy()
self._options.browser_options.wpr_mode = wpr_modes.WPR_OFF
# Disabled due to flakiness: crbug.com/368386
@test.Disabled
def testSmoothnessTimelineBasedMeasurementForSmoke(self):
ps = self.CreatePageSetFromFileInUnittestDataDir(
'interaction_enabled_page.html')
setattr(ps.pages[0], 'RunSmoothness', {
'action': 'wait', 'javascript': 'window.animationDone'})
measurement = tbm_module.TimelineBasedMeasurement()
results = self.RunMeasurement(measurement, ps,
options=self._options)
self.assertEquals(0, len(results.failures))
v = results.FindAllPageSpecificValuesNamed('CenterAnimation-jank')
self.assertEquals(len(v), 1)
v = results.FindAllPageSpecificValuesNamed('DrawerAnimation-jank')
self.assertEquals(len(v), 1)
# Disabled since mainthread_jank metric is not supported on windows platform.
@test.Disabled('win')
def testMainthreadJankTimelineBasedMeasurement(self):
ps = self.CreateEmptyPageSet()
ps.AddPage(TestTimelinebasedMeasurementPage(ps, ps.base_dir))
measurement = tbm_module.TimelineBasedMeasurement()
results = self.RunMeasurement(measurement, ps,
options=self._options)
self.assertEquals(0, len(results.failures))
# In interaction_enabled_page.html, we create a jank loop based on
# window.performance.now() (basically loop for x milliseconds).
# Since window.performance.now() uses wall-time
    # instead of thread time, we set the looping time to 100ms in
# interaction_enabled_page.html and only assert the biggest jank > 50ms here
# to account for the fact that the browser may deschedule during the jank
# loop.
v = results.FindAllPageSpecificValuesNamed(
'JankThreadJSRun-responsive-biggest_jank_thread_time')
self.assertGreaterEqual(v[0].value, 50)
v = results.FindAllPageSpecificValuesNamed(
'JankThreadJSRun-responsive-total_big_jank_thread_time')
self.assertGreaterEqual(v[0].value, 50)
|
madhurrajn/samashthi
|
refs/heads/master
|
lib/django/contrib/sites/__init__.py
|
808
|
default_app_config = 'django.contrib.sites.apps.SitesConfig'
|
czlx0701/MasterThesisTest
|
refs/heads/master
|
libs/tornado/test/concurrent_test.py
|
63
|
#!/usr/bin/env python
#
# Copyright 2012 Facebook
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from __future__ import absolute_import, division, print_function, with_statement
import logging
import re
import socket
import sys
import traceback
from tornado.concurrent import Future, return_future, ReturnValueIgnoredError
from tornado.escape import utf8, to_unicode
from tornado import gen
from tornado.iostream import IOStream
from tornado import stack_context
from tornado.tcpserver import TCPServer
from tornado.testing import AsyncTestCase, LogTrapTestCase, bind_unused_port, gen_test
try:
from concurrent import futures
except ImportError:
futures = None
class ReturnFutureTest(AsyncTestCase):
@return_future
def sync_future(self, callback):
callback(42)
@return_future
def async_future(self, callback):
self.io_loop.add_callback(callback, 42)
@return_future
def immediate_failure(self, callback):
1 / 0
@return_future
def delayed_failure(self, callback):
self.io_loop.add_callback(lambda: 1 / 0)
@return_future
def return_value(self, callback):
# Note that the result of both running the callback and returning
# a value (or raising an exception) is unspecified; with current
# implementations the last event prior to callback resolution wins.
return 42
@return_future
def no_result_future(self, callback):
callback()
def test_immediate_failure(self):
with self.assertRaises(ZeroDivisionError):
# The caller sees the error just like a normal function.
self.immediate_failure(callback=self.stop)
# The callback is not run because the function failed synchronously.
self.io_loop.add_timeout(self.io_loop.time() + 0.05, self.stop)
result = self.wait()
self.assertIs(result, None)
def test_return_value(self):
with self.assertRaises(ReturnValueIgnoredError):
self.return_value(callback=self.stop)
def test_callback_kw(self):
future = self.sync_future(callback=self.stop)
result = self.wait()
self.assertEqual(result, 42)
self.assertEqual(future.result(), 42)
def test_callback_positional(self):
# When the callback is passed in positionally, future_wrap shouldn't
# add another callback in the kwargs.
future = self.sync_future(self.stop)
result = self.wait()
self.assertEqual(result, 42)
self.assertEqual(future.result(), 42)
def test_no_callback(self):
future = self.sync_future()
self.assertEqual(future.result(), 42)
def test_none_callback_kw(self):
# explicitly pass None as callback
future = self.sync_future(callback=None)
self.assertEqual(future.result(), 42)
def test_none_callback_pos(self):
future = self.sync_future(None)
self.assertEqual(future.result(), 42)
def test_async_future(self):
future = self.async_future()
self.assertFalse(future.done())
self.io_loop.add_future(future, self.stop)
future2 = self.wait()
self.assertIs(future, future2)
self.assertEqual(future.result(), 42)
@gen_test
def test_async_future_gen(self):
result = yield self.async_future()
self.assertEqual(result, 42)
def test_delayed_failure(self):
future = self.delayed_failure()
self.io_loop.add_future(future, self.stop)
future2 = self.wait()
self.assertIs(future, future2)
with self.assertRaises(ZeroDivisionError):
future.result()
def test_kw_only_callback(self):
@return_future
def f(**kwargs):
kwargs['callback'](42)
future = f()
self.assertEqual(future.result(), 42)
def test_error_in_callback(self):
self.sync_future(callback=lambda future: 1 / 0)
# The exception gets caught by our StackContext and will be re-raised
# when we wait.
self.assertRaises(ZeroDivisionError, self.wait)
def test_no_result_future(self):
future = self.no_result_future(self.stop)
result = self.wait()
self.assertIs(result, None)
# result of this future is undefined, but not an error
future.result()
def test_no_result_future_callback(self):
future = self.no_result_future(callback=lambda: self.stop())
result = self.wait()
self.assertIs(result, None)
future.result()
@gen_test
def test_future_traceback(self):
@return_future
@gen.engine
def f(callback):
yield gen.Task(self.io_loop.add_callback)
try:
1 / 0
except ZeroDivisionError:
self.expected_frame = traceback.extract_tb(
sys.exc_info()[2], limit=1)[0]
raise
try:
yield f()
self.fail("didn't get expected exception")
except ZeroDivisionError:
tb = traceback.extract_tb(sys.exc_info()[2])
self.assertIn(self.expected_frame, tb)
# The following series of classes demonstrate and test various styles
# of use, with and without generators and futures.
class CapServer(TCPServer):
def handle_stream(self, stream, address):
logging.info("handle_stream")
self.stream = stream
self.stream.read_until(b"\n", self.handle_read)
def handle_read(self, data):
logging.info("handle_read")
data = to_unicode(data)
if data == data.upper():
self.stream.write(b"error\talready capitalized\n")
else:
# data already has \n
self.stream.write(utf8("ok\t%s" % data.upper()))
self.stream.close()
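# The wire protocol implemented by CapServer above (request -> response):
#   b"hello\n"  ->  b"ok\tHELLO\n"
#   b"HELLO\n"  ->  b"error\talready capitalized\n"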
class CapError(Exception):
pass
class BaseCapClient(object):
def __init__(self, port, io_loop):
self.port = port
self.io_loop = io_loop
def process_response(self, data):
status, message = re.match('(.*)\t(.*)\n', to_unicode(data)).groups()
if status == 'ok':
return message
else:
raise CapError(message)
class ManualCapClient(BaseCapClient):
def capitalize(self, request_data, callback=None):
logging.info("capitalize")
self.request_data = request_data
self.stream = IOStream(socket.socket(), io_loop=self.io_loop)
self.stream.connect(('127.0.0.1', self.port),
callback=self.handle_connect)
self.future = Future()
if callback is not None:
self.future.add_done_callback(
stack_context.wrap(lambda future: callback(future.result())))
return self.future
def handle_connect(self):
logging.info("handle_connect")
self.stream.write(utf8(self.request_data + "\n"))
self.stream.read_until(b'\n', callback=self.handle_read)
def handle_read(self, data):
logging.info("handle_read")
self.stream.close()
try:
self.future.set_result(self.process_response(data))
except CapError as e:
self.future.set_exception(e)
class DecoratorCapClient(BaseCapClient):
@return_future
def capitalize(self, request_data, callback):
logging.info("capitalize")
self.request_data = request_data
self.stream = IOStream(socket.socket(), io_loop=self.io_loop)
self.stream.connect(('127.0.0.1', self.port),
callback=self.handle_connect)
self.callback = callback
def handle_connect(self):
logging.info("handle_connect")
self.stream.write(utf8(self.request_data + "\n"))
self.stream.read_until(b'\n', callback=self.handle_read)
def handle_read(self, data):
logging.info("handle_read")
self.stream.close()
self.callback(self.process_response(data))
class GeneratorCapClient(BaseCapClient):
@return_future
@gen.engine
def capitalize(self, request_data, callback):
logging.info('capitalize')
stream = IOStream(socket.socket(), io_loop=self.io_loop)
logging.info('connecting')
yield gen.Task(stream.connect, ('127.0.0.1', self.port))
stream.write(utf8(request_data + '\n'))
logging.info('reading')
data = yield gen.Task(stream.read_until, b'\n')
logging.info('returning')
stream.close()
callback(self.process_response(data))
class ClientTestMixin(object):
def setUp(self):
super(ClientTestMixin, self).setUp()
self.server = CapServer(io_loop=self.io_loop)
sock, port = bind_unused_port()
self.server.add_sockets([sock])
self.client = self.client_class(io_loop=self.io_loop, port=port)
def tearDown(self):
self.server.stop()
super(ClientTestMixin, self).tearDown()
def test_callback(self):
self.client.capitalize("hello", callback=self.stop)
result = self.wait()
self.assertEqual(result, "HELLO")
def test_callback_error(self):
self.client.capitalize("HELLO", callback=self.stop)
self.assertRaisesRegexp(CapError, "already capitalized", self.wait)
def test_future(self):
future = self.client.capitalize("hello")
self.io_loop.add_future(future, self.stop)
self.wait()
self.assertEqual(future.result(), "HELLO")
def test_future_error(self):
future = self.client.capitalize("HELLO")
self.io_loop.add_future(future, self.stop)
self.wait()
self.assertRaisesRegexp(CapError, "already capitalized", future.result)
def test_generator(self):
@gen.engine
def f():
result = yield self.client.capitalize("hello")
self.assertEqual(result, "HELLO")
self.stop()
f()
self.wait()
def test_generator_error(self):
@gen.engine
def f():
with self.assertRaisesRegexp(CapError, "already capitalized"):
yield self.client.capitalize("HELLO")
self.stop()
f()
self.wait()
class ManualClientTest(ClientTestMixin, AsyncTestCase, LogTrapTestCase):
client_class = ManualCapClient
class DecoratorClientTest(ClientTestMixin, AsyncTestCase, LogTrapTestCase):
client_class = DecoratorCapClient
class GeneratorClientTest(ClientTestMixin, AsyncTestCase, LogTrapTestCase):
client_class = GeneratorCapClient
|
jumitche/libstoragemgmt
|
refs/heads/master
|
plugin/ontap/na.py
|
2
|
# Copyright (C) 2012-2014 Red Hat, Inc.
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; If not, see <http://www.gnu.org/licenses/>.
#
# Author: tasleson
import socket
import sys
import six
from xml.etree import ElementTree
import time
from binascii import hexlify
import ssl
from lsm.external.xmltodict import convert_xml_to_dict
from lsm import (LsmError, ErrorNumber)
if six.PY3:
long = int
try:
from urllib.request import (Request,
urlopen,
HTTPPasswordMgrWithDefaultRealm,
HTTPBasicAuthHandler,
HTTPSHandler,
build_opener,
install_opener)
from urllib.error import (URLError, HTTPError)
from urllib.parse import urlparse
except ImportError:
from urllib2 import (Request,
urlopen,
HTTPPasswordMgrWithDefaultRealm,
HTTPBasicAuthHandler,
HTTPSHandler,
build_opener,
install_opener,
URLError,
HTTPError)
from urlparse import urlparse
# Set to an appropriate directory and file to dump the raw response.
xml_debug = ""
def netapp_filer_parse_response(resp):
if xml_debug:
out = open(xml_debug, "wb")
out.write(resp)
out.close()
return convert_xml_to_dict(ElementTree.fromstring(resp))
def param_value(val):
"""
Given a parameter to pass to filer, convert to XML
"""
rc = ""
    if isinstance(val, dict):
for k, v in list(val.items()):
rc += "<%s>%s</%s>" % (k, param_value(v), k)
    elif isinstance(val, list):
for i in val:
rc += param_value(i)
else:
rc = val
return rc
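# For example, param_value({'volume': 'vol0'}) yields '<volume>vol0</volume>',
# and nested structures are flattened recursively, e.g.
# param_value({'rules': {'rule': 'r1'}}) yields '<rules><rule>r1</rule></rules>'.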
def netapp_filer(host, username, password, timeout, command, parameters=None,
use_ssl=False, ssl_verify=False, ca_cert=None):
"""
Issue a command to the NetApp filer.
    Note: Change the default to use_ssl=True before we ship a release version.
"""
proto = 'http'
if use_ssl:
proto = 'https'
url = "%s://%s/servlets/netapp.servlets.admin.XMLrequest_filer" % \
(proto, host)
req = Request(url)
req.add_header('Content-Type', 'text/xml')
password_manager = HTTPPasswordMgrWithDefaultRealm()
password_manager.add_password(None, url, username, password)
auth_manager = HTTPBasicAuthHandler(password_manager)
if use_ssl:
ssl._DEFAULT_CIPHERS += ':RC4-SHA:3DES'
ssl._DEFAULT_CIPHERS = ssl._DEFAULT_CIPHERS.replace(':!3DES','')
if ca_cert:
try:
ssl_ctx = ssl.create_default_context(cafile=ca_cert)
except IOError as ioe:
raise LsmError(ErrorNumber.INVALID_ARGUMENT,
"Failed to load CA file : %s" % str(ioe))
else:
ssl_ctx = ssl.create_default_context()
if ssl_verify == False:
ssl_ctx.check_hostname = False
ssl_ctx.verify_mode = ssl.CERT_NONE
opener = build_opener(HTTPSHandler(context=ssl_ctx), auth_manager)
else:
opener = build_opener(auth_manager)
install_opener(opener)
# build the command and the arguments for it
p = ""
if parameters:
for k, v in list(parameters.items()):
p += "<%s>%s</%s>" % (k, param_value(v), k)
payload = "<%s>\n%s\n</%s>" % (command, p, command)
data = """<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE netapp SYSTEM "file:/etc/netapp_filer.dtd">
<netapp xmlns="http://www.netapp.com/filer/admin" version="1.1">
%s
</netapp>
""" % payload
handler = None
rc = None
try:
handler = urlopen(req, data.encode('utf-8'), float(timeout))
if handler.getcode() == 200:
rc = netapp_filer_parse_response(handler.read())
except HTTPError:
raise
except URLError as ue:
err_msg = str(ue)
if isinstance(ue.reason, socket.timeout):
raise FilerError(Filer.ETIMEOUT, "Connection timeout")
elif "UNSUPPORTED_PROTOCOL" in err_msg or \
"EOF occurred in violation of protocol" in err_msg :
raise LsmError(ErrorNumber.NO_SUPPORT,
"ONTAP SSL version is not supported, "
"please enable TLS on ONTAP filer, "
"check 'man 1 ontap_lsmplugin'")
elif "CERTIFICATE_VERIFY_FAILED" in err_msg:
raise LsmError(ErrorNumber.NETWORK_CONNREFUSED,
"SSL certification verification failed")
else:
raise
except socket.timeout:
raise FilerError(Filer.ETIMEOUT, "Connection timeout")
except ssl.SSLError as sse:
        # The ssl library doesn't give a good way to find the specific reason.
        # We are doing a substring check, which is not ideal, but other than
# throwing a generic error in this case there isn't much we can do
# to be more specific.
if "timed out" in str(sse).lower():
raise FilerError(Filer.ETIMEOUT, "Connection timeout (SSL)")
else:
raise FilerError(Filer.EUNKNOWN,
"SSL error occurred (%s)", str(sse))
finally:
if handler:
handler.close()
return rc
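# A rough usage sketch (host and credentials below are placeholders):
#   result = netapp_filer('filer.example.com', 'root', 'secret', 30.0,
#                         'system-get-info', use_ssl=True)
# should return the parsed response as a dict, with the payload under
# result['netapp']['results'].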
class FilerError(Exception):
"""
Class represents a NetApp bad return code
"""
IGROUP_NOT_CONTAIN_GIVEN_INIT = 9007
IGROUP_ALREADY_HAS_INIT = 9008
NO_SUCH_IGROUP = 9003
# Using the name from NetApp SDK netapp_errno.h
EVDISK_ERROR_VDISK_EXISTS = 9012 # LUN name already in use
EVDISK_ERROR_VDISK_EXPORTED = 9013 # LUN is currently mapped
EVDISK_ERROR_VDISK_NOT_ENABLED = 9014 # LUN is not online
EVDISK_ERROR_VDISK_NOT_DISABLED = 9015 # LUN is not offline
EVDISK_ERROR_NO_SUCH_LUNMAP = 9016 # LUN is already unmapped
EVDISK_ERROR_INITGROUP_MAPS_EXIST = 9029
# LUN maps for this initiator group exist
EVDISK_ERROR_SIZE_TOO_LARGE = 9034 # LUN size too large.
EVDISK_ERROR_RESIZE_TOO_LARGE = 9035 # Re-size amount is too large
    EVDISK_ERROR_NO_SUCH_VOLUME = 9036 # NetApp volume does not exist.
EVDISK_ERROR_SIZE_TOO_SMALL = 9041 # Specified too small a size
EVDISK_ERROR_SIZE_UNCHANGED = 9042 # requested size is the same.
EVDISK_ERROR_INITGROUP_HAS_VDISK = 9023 # Already masked
EOP_DISALLOWED_ON_CLONE_PARENT = 15894 # NetApp volume is clone source.
def __init__(self, errno, reason, *args, **kwargs):
Exception.__init__(self, *args, **kwargs)
self.errno = int(errno)
self.reason = reason
def to_list(v):
"""
    The return values in hash form can either be a single hash item or a list
    of hash items; this code handles both so that callers always get a list.
"""
rc = []
if v is not None:
if isinstance(v, list):
rc = v
else:
rc.append(v)
return rc
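# For example, to_list(None) -> [], to_list({'a': 1}) -> [{'a': 1}] and
# to_list([1, 2]) -> [1, 2], so callers can always iterate over the result.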
# RC4 implementation taken from wikipedia article
# https://en.wikipedia.org/wiki/RC4 pseudo code and
# implementing it in python
def _ksa():
"""
Key-scheduling algorithm (KSA)
"""
key = "#u82fyi8S5\017pPemw"
s = list(range(256))
j = 0
for i in list(range(256)):
j = (j + s[i] + ord(key[i % len(key)])) % 256
s[i], s[j] = s[j], s[i]
return s
def _prga(k):
"""
Pseudo-random generation algorithm
"""
i = 0
j = 0
while True:
i = (i + 1) % 256
j = (j + k[i]) % 256
k[i], k[j] = k[j], k[i]
yield k[(k[i] + k[j]) % 256]
def encode_py(text):
k = _ksa()
encrypted_bytes = bytearray()
r_seq = _prga(k)
for char in text:
encrypted_bytes.append(int(ord(char) ^ next(r_seq)))
return hexlify(encrypted_bytes).decode("utf-8")
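# encode_py() obfuscates a password with the fixed RC4 key above and returns
# the ciphertext as a hex string, e.g. encode_py('secret') gives a
# 12-character hex string (two hex digits per input character).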
class Filer(object):
"""
Class to handle NetApp API calls.
Note: These are using lsm terminology.
"""
EUNKNOWN = 10 # Non-specific error
ENAVOL_NAME_DUPE = 17 # Volume name collision
ENOSPC = 28 # Out of space
ETIMEOUT = 60 # Time-out
EINVALID_ISCSI_NAME = 9006 # Invalid ISCSI IQN
EDUPE_VOLUME_PATH = 9012 # Duplicate volume name
ENO_SUCH_VOLUME = 9017 # lun not found
ESIZE_TOO_LARGE = 9034 # Specified too large a size
ENO_SUCH_FS = 9036 # FS not found
EVOLUME_TOO_SMALL = 9041 # Specified too small a size
EAPILICENSE = 13008 # Unlicensed API
EFSDOESNOTEXIST = 13040 # FS does not exist
EFSOFFLINE = 13042 # FS is offline.
EFSNAMEINVALID = 13044 # FS Name invalid
ENOSPACE = 13062 # Not enough space
ESERVICENOTLICENSED = 13902 # Not licensed
ECLONE_NAME_EXISTS = 14952 # Clone with same name exists
ECLONE_LICENSE_EXPIRED = 14955 # Not licensed
ECLONE_NOT_LICENSED = 14956 # Not licensed
(LSM_VOL_PREFIX, LSM_INIT_PREFIX) = ('lsm_lun_container', 'lsm_init_')
def _invoke(self, command, parameters=None):
rc = netapp_filer(self.host, self.username, self.password,
self.timeout, command, parameters, self.use_ssl,
self.ssl_verify, self.ca_cert)
t = rc['netapp']['results']['attrib']
if t['status'] != 'passed':
raise FilerError(t['errno'], t['reason'])
return rc['netapp']['results']
def __init__(self, host, username, password, timeout, use_ssl=True,
ssl_verify=False, ca_cert=None):
self.host = host
self.username = username
self.password = password
self.timeout = timeout
self.use_ssl = use_ssl
self.ssl_verify = ssl_verify
self.ca_cert = ca_cert
def system_info(self):
rc = self._invoke('system-get-info')
return rc['system-info']
def validate(self):
# TODO: Validate that everything we need to function is available?
self._invoke('system-api-list')
return None
def disks(self):
disks = self._invoke('disk-list-info')
return disks['disk-details']['disk-detail-info']
def aggregates(self, aggr_name=None):
"""
Return a list of aggregates
If aggr_name provided, return [na_aggr]
"""
if aggr_name:
pools = self._invoke('aggr-list-info', {'aggregate': aggr_name})
else:
pools = self._invoke('aggr-list-info')
tmp = pools['aggregates']['aggr-info']
return to_list(tmp)
def aggregate_volume_names(self, aggr_name):
"""
Return a list of volume names that are on an aggregate
"""
vol_names = []
rc = self._invoke('aggr-list-info', {'aggregate': aggr_name})
aggr = rc['aggregates']['aggr-info']
if aggr is not None and aggr['volumes'] is not None:
vols = aggr['volumes']['contained-volume-info']
vol_names = [e['name'] for e in to_list(vols)]
return vol_names
@staticmethod
def lun_build_name(volume_name, file_name):
"""
        Given a volume name and a file name, return the full path.
"""
return '/vol/%s/%s' % (volume_name, file_name)
def luns_get_specific(self, na_lun_name=None, na_volume_name=None):
"""
        Return all logical units, information about a single LUN, or all
        LUNs on a given volume.
"""
if na_lun_name is not None:
luns = self._invoke('lun-list-info', {'path': na_lun_name})
elif na_volume_name is not None:
luns = self._invoke('lun-list-info',
{'volume-name': na_volume_name})
else:
luns = self._invoke('lun-list-info')
return to_list(luns['luns']['lun-info'])
def _get_aggr_info(self):
aggrs = self._invoke('aggr-list-info')
tmp = to_list(aggrs['aggregates']['aggr-info'])
return [x for x in tmp if x['volumes'] is not None]
def luns_get_all(self):
"""
Return all lun-info
"""
try:
return to_list(self._invoke('lun-list-info')['luns']['lun-info'])
except TypeError:
# No LUN found.
return []
def lun_min_size(self):
return self._invoke('lun-get-minsize', {'type': 'image'})['min-size']
def lun_create(self, full_path_name, size_bytes, flag_thin=False):
"""
Creates a lun
        If flag_thin is set to True, 'space-reservation-enabled' is set to
        'false', which means the LUN is created without any space being
        reserved.
"""
params = {'path': full_path_name,
'size': size_bytes}
if flag_thin is True:
params['space-reservation-enabled'] = 'false'
self._invoke('lun-create-by-size', params)
def lun_delete(self, lun_path):
"""
Deletes a lun given a lun path
"""
self._invoke('lun-destroy', {'path': lun_path})
def lun_resize(self, lun_path, size_bytes):
"""
Re-sizes a lun
"""
self._invoke('lun-resize', {'path': lun_path, 'size': size_bytes,
'force': 'true'})
def volume_resize(self, na_vol_name, size_diff_kb):
"""
Given a NetApp volume name and a size change in kb, re-size the
NetApp volume.
"""
params = {'volume': na_vol_name}
if size_diff_kb > 0:
params['new-size'] = '+' + str(size_diff_kb) + 'k'
else:
params['new-size'] = str(size_diff_kb) + 'k'
self._invoke('volume-size', params)
return None
def volumes(self, volume_name=None):
"""
Return a list of NetApp volumes
"""
if not volume_name:
v = self._invoke('volume-list-info')
else:
v = self._invoke('volume-list-info', {'volume': volume_name})
t = v['volumes']['volume-info']
rc = to_list(t)
return rc
def volume_create(self, aggr_name, vol_name, size_in_bytes):
"""
Creates a volume given an aggr_name, volume name and size in bytes.
"""
params = {'containing-aggr-name': aggr_name,
'size': int(size_in_bytes * 1.30),
# There must be a better way to account for this
'volume': vol_name}
self._invoke('volume-create', params)
# Turn off scheduled snapshots
self._invoke('volume-set-option', {'volume': vol_name,
'option-name': 'nosnap',
'option-value': 'on', })
# Turn off auto export!
self.nfs_export_remove(['/vol/' + vol_name])
def volume_clone(self, src_volume, dest_volume, snapshot=None):
"""
Clones a volume given a source volume name, destination volume name
and optional backing snapshot.
"""
params = {'parent-volume': src_volume, 'volume': dest_volume}
if snapshot:
params['parent-snapshot'] = snapshot.name
self._invoke('volume-clone-create', params)
def volume_delete(self, vol_name):
"""
Deletes a volume and everything on it.
"""
online = False
try:
self._invoke('volume-offline', {'name': vol_name})
online = True
except FilerError as f_error:
if f_error.errno != Filer.EFSDOESNOTEXIST:
raise
try:
self._invoke('volume-destroy', {'name': vol_name})
except FilerError:
            # If the volume was online, we will return it to the same status
# Store the original exception information
exception_info = sys.exc_info()
if online:
try:
self._invoke('volume-online', {'name': vol_name})
except FilerError:
pass
six.reraise(*exception_info)
def volume_names(self):
"""
Return a list of volume names
"""
vols = self.volumes()
return [v['name'] for v in vols]
def clone(self, source_path, dest_path, backing_snapshot=None,
ranges=None):
"""
Creates a file clone
"""
params = {'source-path': source_path}
# You can have source == dest, but if you do you can only specify
# source
if source_path != dest_path:
params['destination-path'] = dest_path
if backing_snapshot:
raise FilerError(ErrorNumber.NO_SUPPORT,
"Support for backing luns not implemented "
"for this API version")
# params['snapshot-name']= backing_snapshot
if ranges:
block_ranges = []
for r in ranges:
values = {'block-count': r.block_count,
'destination-block-number': r.dest_block,
'source-block-number': r.src_block}
block_ranges.append({'block-range': values})
params['block-ranges'] = block_ranges
rc = self._invoke('clone-start', params)
c_id = rc['clone-id']
while True:
progress = self._invoke('clone-list-status',
{'clone-id': c_id})
            # According to the spec the output is optional; if it is not
            # present then we are done and good
if 'status' in progress:
progress = progress['status']['ops-info']
if progress['clone-state'] == 'failed':
self._invoke('clone-clear', {'clone-id': c_id})
raise FilerError(progress['error'], progress['reason'])
elif progress['clone-state'] == 'running' \
or progress['clone-state'] == 'fail exit':
# State needs to transition to failed before we can
# clear it!
time.sleep(0.2) # Don't hog cpu
elif progress['clone-state'] == 'completed':
return
else:
raise FilerError(ErrorNumber.NO_SUPPORT,
'Unexpected state=' +
progress['clone-state'])
else:
return
def lun_online(self, lun_path):
self._invoke('lun-online', {'path': lun_path})
def lun_offline(self, lun_path):
self._invoke('lun-offline', {'path': lun_path})
def igroups(self, group_name=None):
rc = []
if group_name:
g = self._invoke('igroup-list-info',
{'initiator-group-name': group_name})
else:
g = self._invoke('igroup-list-info')
if g['initiator-groups']:
rc = to_list(g['initiator-groups']['initiator-group-info'])
return rc
def igroup_create(self, name, igroup_type):
params = {'initiator-group-name': name,
'initiator-group-type': igroup_type}
self._invoke('igroup-create', params)
def igroup_delete(self, name):
self._invoke('igroup-destroy', {'initiator-group-name': name})
def iscsi_initiator_add_auth(self, initiator, user_name, password,
out_user, out_password):
pw = encode_py(password)
args = {'initiator': initiator}
if user_name and len(user_name) and password and len(password):
args.update({'user-name': user_name,
'password': pw, 'auth-type': "CHAP"})
if out_user and len(out_user) and \
out_password and len(out_password):
args.update({'outbound-user-name': out_user,
'outbound-password': out_password})
else:
args.update({'initiator': initiator, 'auth-type': "none"})
self._invoke('iscsi-initiator-add-auth', args)
def igroup_add_initiator(self, ig, initiator):
self._invoke('igroup-add',
{'initiator-group-name': ig, 'initiator': initiator})
def igroup_del_initiator(self, ig, initiator):
self._invoke('igroup-remove',
{'initiator-group-name': ig,
'initiator': initiator,
'force': 'true'})
def lun_map(self, igroup, lun_path):
self._invoke('lun-map', {'initiator-group': igroup, 'path': lun_path})
def lun_unmap(self, igroup, lun_path):
self._invoke(
'lun-unmap', {'initiator-group': igroup, 'path': lun_path})
def lun_map_list_info(self, lun_path):
initiator_groups = []
rc = self._invoke('lun-map-list-info', {'path': lun_path})
if rc['initiator-groups'] is not None:
igi = to_list(rc['initiator-groups'])
for i in igi:
group_name = i['initiator-group-info']['initiator-group-name']
initiator_groups.append(self.igroups(group_name)[0])
return initiator_groups
def lun_initiator_list_map_info(self, initiator_id, initiator_group_name):
"""
Given an initiator_id and initiator group name, return a list of
lun-info
"""
luns = []
rc = self._invoke('lun-initiator-list-map-info',
{'initiator': initiator_id})
if rc['lun-maps']:
lun_name_list = to_list(rc['lun-maps']['lun-map-info'])
# Get all the lun with information about aggr
all_luns = self.luns_get_all()
for l in lun_name_list:
if l['initiator-group'] == initiator_group_name:
for al in all_luns:
if al['path'] == l['path']:
luns.append(al)
return luns
def snapshots(self, volume_name):
rc = []
args = {'target-type': 'volume', 'target-name': volume_name}
ss = self._invoke('snapshot-list-info', args)
if ss['snapshots']:
rc = to_list(ss['snapshots']['snapshot-info'])
return rc
def snapshot_create(self, volume_name, snapshot_name):
self._invoke('snapshot-create', {'volume': volume_name,
'snapshot': snapshot_name})
return [v for v in self.snapshots(volume_name)
if v['name'] == snapshot_name][0]
def snapshot_file_restore_num(self):
"""
Returns the number of executing file restore snapshots.
"""
rc = self._invoke('snapshot-restore-file-info')
if 'sfsr-in-progress' in rc:
return int(rc['sfsr-in-progress'])
return 0
def snapshot_restore_volume(self, fs_name, snapshot_name):
"""
Restores all files on a volume
"""
params = {'snapshot': snapshot_name, 'volume': fs_name}
self._invoke('snapshot-restore-volume', params)
def snapshot_restore_file(self, snapshot_name, restore_path, restore_file):
"""
Restore a list of files
"""
params = {'snapshot': snapshot_name, 'path': restore_path}
if restore_file:
params['restore-path'] = restore_file
self._invoke('snapshot-restore-file', params)
def snapshot_delete(self, volume_name, snapshot_name):
self._invoke('snapshot-delete',
{'volume': volume_name, 'snapshot': snapshot_name})
def export_auth_types(self):
rc = self._invoke('nfs-get-supported-sec-flavors')
return [e['flavor'] for e in
to_list(rc['sec-flavor']['sec-flavor-info'])]
@staticmethod
def _build_list(pylist, list_name, elem_name):
"""
Given a python list, build the appropriate dict that contains the
list items so that it can be converted to xml to be sent on the wire.
"""
return [{list_name: {elem_name: l}} for l in pylist]
@staticmethod
def _build_export_fs_all():
return Filer._build_list(
['true'], 'exports-hostname-info', 'all-hosts')
@staticmethod
def _build_export_fs_list(hosts):
if hosts[0] == '*':
return Filer._build_export_fs_all()
else:
return Filer._build_list(hosts, 'exports-hostname-info', 'name')
@staticmethod
def _build_export_rules(volume_path, export_path, ro_list, rw_list,
root_list, anonuid=None, sec_flavor=None):
"""
Common logic to build up the rules for nfs
"""
# One of the more complicated data structures to push down to the
# controller
rule = {'pathname': volume_path}
if volume_path != export_path:
rule['actual-pathname'] = volume_path
rule['pathname'] = export_path
rule['security-rules'] = {}
rule['security-rules']['security-rule-info'] = {}
r = rule['security-rules']['security-rule-info']
if len(ro_list):
r['read-only'] = Filer._build_export_fs_list(ro_list)
if len(rw_list):
r['read-write'] = Filer._build_export_fs_list(rw_list)
if len(root_list):
r['root'] = Filer._build_export_fs_list(root_list)
if anonuid:
uid = long(anonuid)
if uid != -1 and uid != 0xFFFFFFFFFFFFFFFF:
r['anon'] = str(uid)
if sec_flavor:
r['sec-flavor'] = Filer._build_list(
[sec_flavor], 'sec-flavor-info', 'flavor')
return rule
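    # For instance, _build_export_rules('/vol/v1', '/vol/v1', ['*'], [], [])
    # returns:
    #   {'pathname': '/vol/v1',
    #    'security-rules': {'security-rule-info': {
    #        'read-only': [{'exports-hostname-info': {'all-hosts': 'true'}}]}}}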
def nfs_export_fs2(self, volume_path, export_path, ro_list, rw_list,
root_list, anonuid=None, sec_flavor=None):
"""
NFS export a volume.
"""
rule = self._build_export_rules(
volume_path, export_path, ro_list, rw_list, root_list, anonuid,
sec_flavor)
params = {'persistent': 'true',
'rules': {'exports-rule-info-2': [rule]}, 'verbose': 'true'}
self._invoke('nfs-exportfs-append-rules-2', params)
def nfs_export_fs_modify2(self, volume_path, export_path, ro_list, rw_list,
root_list, anonuid=None, sec_flavor=None):
"""
Modifies an existing rule.
"""
rule = self._build_export_rules(
volume_path, export_path, ro_list, rw_list, root_list, anonuid,
sec_flavor)
params = {
'persistent': 'true', 'rule': {'exports-rule-info-2': [rule]}}
self._invoke('nfs-exportfs-modify-rule-2', params)
def nfs_export_remove(self, export_paths):
"""
Removes an existing export
"""
assert (type(export_paths) is list)
paths = Filer._build_list(export_paths, 'pathname-info', 'name')
self._invoke('nfs-exportfs-delete-rules',
{'pathnames': paths, 'persistent': 'true'})
def nfs_exports(self):
"""
Returns a list of exports (in hash form)
"""
rc = []
exports = self._invoke('nfs-exportfs-list-rules')
if 'rules' in exports and exports['rules']:
rc = to_list(exports['rules']['exports-rule-info'])
return rc
def volume_children(self, volume):
params = {'volume': volume}
rc = self._invoke('volume-list-info', params)
if 'clone-children' in rc['volumes']['volume-info']:
tmp = rc['volumes']['volume-info']['clone-children'][
'clone-child-info']
rc = [c['clone-child-name'] for c in to_list(tmp)]
else:
rc = None
return rc
def volume_split_clone(self, volume):
self._invoke('volume-clone-split-start', {'volume': volume})
def volume_split_status(self):
result = []
rc = self._invoke('volume-clone-split-status')
if 'clone-split-details' in rc:
tmp = rc['clone-split-details']['clone-split-detail-info']
result = [r['name'] for r in to_list(tmp)]
return result
def fcp_list(self):
fcp_list = []
try:
rc = self._invoke('fcp-adapter-list-info')
if 'fcp-config-adapters' in rc:
if 'fcp-config-adapter-info' in rc['fcp-config-adapters']:
fc_config = rc['fcp-config-adapters']
adapters = fc_config['fcp-config-adapter-info']
for f in adapters:
fcp_list.append(dict(addr=f['port-name'],
adapter=f['adapter']))
except FilerError as na:
if na.errno != Filer.EAPILICENSE:
raise
return fcp_list
def iscsi_node_name(self):
try:
rc = self._invoke('iscsi-node-get-name')
if 'node-name' in rc:
return rc['node-name']
except FilerError as na:
if na.errno != Filer.EAPILICENSE:
raise
return None
def interface_get_infos(self):
i_info = {}
rc = self._invoke('net-ifconfig-get')
if 'interface-config-info' in rc:
i_config = rc['interface-config-info']
if 'interface-config-info' in i_config:
tmp = to_list(i_config['interface-config-info'])
for i in tmp:
i_info[i['interface-name']] = i
return i_info
def iscsi_list(self):
i_list = []
# Get interface information
i_info = self.interface_get_infos()
try:
rc = self._invoke('iscsi-portal-list-info')
if 'iscsi-portal-list-entries' in rc:
portal_entries = rc['iscsi-portal-list-entries']
if 'iscsi-portal-list-entry-info' in portal_entries:
tmp = portal_entries['iscsi-portal-list-entry-info']
portals = to_list(tmp)
for p in portals:
mac = i_info[p['interface-name']]['mac-address']
i_list.append(dict(interface=p['interface-name'],
ip=p['ip-address'],
port=p['ip-port'],
mac=mac))
except FilerError as na:
if na.errno != Filer.EAPILICENSE:
raise
return i_list
if __name__ == '__main__':
try:
# TODO: Need some unit test code
pass
except FilerError as fe:
print('Errno=', fe.errno, 'reason=', fe.reason)
|
clebergnu/autotest
|
refs/heads/master
|
client/tests/kvm/tests/ksm_overcommit.py
|
2
|
import logging, time, random, math, os
from autotest_lib.client.common_lib import error
from autotest_lib.client.bin import utils
from autotest_lib.client.virt import virt_utils, virt_test_utils, aexpect
from autotest_lib.client.virt import virt_env_process
def run_ksm_overcommit(test, params, env):
"""
    Test how KSM (Kernel Shared Memory) acts when more than the physical
    memory is used. In the second part we also test how KVM handles a
    situation where the host runs out of memory (it is expected to pause the
    guest system, wait until some process returns memory and bring the guest
    back to life).
@param test: kvm test object.
@param params: Dictionary with test parameters.
    @param env: Dictionary with the test environment.
"""
def _start_allocator(vm, session, timeout):
"""
Execute ksm_overcommit_guest.py on a guest, wait until it is initialized.
@param vm: VM object.
@param session: Remote session to a VM object.
@param timeout: Timeout that will be used to verify if
ksm_overcommit_guest.py started properly.
"""
logging.debug("Starting ksm_overcommit_guest.py on guest %s", vm.name)
session.sendline("python /tmp/ksm_overcommit_guest.py")
try:
session.read_until_last_line_matches(["PASS:", "FAIL:"], timeout)
except aexpect.ExpectProcessTerminatedError, e:
e_msg = ("Command ksm_overcommit_guest.py on vm '%s' failed: %s" %
(vm.name, str(e)))
raise error.TestFail(e_msg)
def _execute_allocator(command, vm, session, timeout):
"""
Execute a given command on ksm_overcommit_guest.py main loop,
indicating the vm the command was executed on.
@param command: Command that will be executed.
@param vm: VM object.
@param session: Remote session to VM object.
@param timeout: Timeout used to verify expected output.
@return: Tuple (match index, data)
"""
logging.debug("Executing '%s' on ksm_overcommit_guest.py loop, "
"vm: %s, timeout: %s", command, vm.name, timeout)
session.sendline(command)
try:
(match, data) = session.read_until_last_line_matches(
["PASS:","FAIL:"],
timeout)
except aexpect.ExpectProcessTerminatedError, e:
e_msg = ("Failed to execute command '%s' on "
"ksm_overcommit_guest.py, vm '%s': %s" %
(command, vm.name, str(e)))
raise error.TestFail(e_msg)
return (match, data)
def get_ksmstat():
"""
        Return the amount of memory shared by KSM, in MB
@return: memory in MB
"""
f = open('/sys/kernel/mm/ksm/pages_sharing')
ksm_pages = int(f.read())
f.close()
return ((ksm_pages*4096)/1e6)
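    # Note: pages_sharing counts pages, and get_ksmstat() assumes 4 KiB pages,
    # so the value returned is (pages * 4096) bytes expressed in megabytes.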
def initialize_guests():
"""
Initialize guests (fill their memories with specified patterns).
"""
logging.info("Phase 1: filling guest memory pages")
for session in lsessions:
vm = lvms[lsessions.index(session)]
logging.debug("Turning off swap on vm %s", vm.name)
session.cmd("swapoff -a", timeout=300)
# Start the allocator
_start_allocator(vm, session, 60 * perf_ratio)
# Execute allocator on guests
for i in range(0, vmsc):
vm = lvms[i]
a_cmd = "mem = MemFill(%d, %s, %s)" % (ksm_size, skeys[i], dkeys[i])
_execute_allocator(a_cmd, vm, lsessions[i], 60 * perf_ratio)
a_cmd = "mem.value_fill(%d)" % skeys[0]
_execute_allocator(a_cmd, vm, lsessions[i], 120 * perf_ratio)
# Let ksm_overcommit_guest.py do its job
# (until shared mem reaches expected value)
shm = 0
j = 0
logging.debug("Target shared meminfo for guest %s: %s", vm.name,
ksm_size)
while ((new_ksm and (shm < (ksm_size*(i+1)))) or
(not new_ksm and (shm < (ksm_size)))):
if j > 64:
logging.debug(virt_test_utils.get_memory_info(lvms))
raise error.TestError("SHM didn't merge the memory until "
"the DL on guest: %s" % vm.name)
st = ksm_size / 200 * perf_ratio
logging.debug("Waiting %ds before proceeding...", st)
time.sleep(st)
if (new_ksm):
shm = get_ksmstat()
else:
shm = vm.get_shared_meminfo()
logging.debug("Shared meminfo for guest %s after "
"iteration %s: %s", vm.name, j, shm)
j += 1
# Keep some reserve
rt = ksm_size / 200 * perf_ratio
logging.debug("Waiting %ds before proceeding...", rt)
time.sleep(rt)
logging.debug(virt_test_utils.get_memory_info(lvms))
logging.info("Phase 1: PASS")
def separate_first_guest():
"""
Separate memory of the first guest by generating special random series
"""
logging.info("Phase 2: Split the pages on the first guest")
a_cmd = "mem.static_random_fill()"
data = _execute_allocator(a_cmd, lvms[0], lsessions[0],
120 * perf_ratio)[1]
r_msg = data.splitlines()[-1]
logging.debug("Return message of static_random_fill: %s", r_msg)
out = int(r_msg.split()[4])
logging.debug("Performance: %dMB * 1000 / %dms = %dMB/s", ksm_size, out,
(ksm_size * 1000 / out))
logging.debug(virt_test_utils.get_memory_info(lvms))
logging.debug("Phase 2: PASS")
def split_guest():
"""
Sequential split of pages on guests up to memory limit
"""
logging.info("Phase 3a: Sequential split of pages on guests up to "
"memory limit")
last_vm = 0
session = None
vm = None
for i in range(1, vmsc):
# Check VMs
for j in range(0, vmsc):
                if not lvms[j].is_alive():
e_msg = ("VM %d died while executing static_random_fill in "
"VM %d on allocator loop" % (j, i))
raise error.TestFail(e_msg)
vm = lvms[i]
session = lsessions[i]
a_cmd = "mem.static_random_fill()"
logging.debug("Executing %s on ksm_overcommit_guest.py loop, "
"vm: %s", a_cmd, vm.name)
session.sendline(a_cmd)
out = ""
try:
logging.debug("Watching host memory while filling vm %s memory",
vm.name)
while not out.startswith("PASS") and not out.startswith("FAIL"):
if not vm.is_alive():
e_msg = ("VM %d died while executing static_random_fill"
" on allocator loop" % i)
raise error.TestFail(e_msg)
free_mem = int(utils.read_from_meminfo("MemFree"))
if (ksm_swap):
free_mem = (free_mem +
int(utils.read_from_meminfo("SwapFree")))
logging.debug("Free memory on host: %d", free_mem)
# We need to keep some memory for python to run.
if (free_mem < 64000) or (ksm_swap and
free_mem < (450000 * perf_ratio)):
vm.monitor.cmd("stop")
for j in range(0, i):
lvms[j].destroy(gracefully = False)
time.sleep(20)
vm.monitor.cmd("c")
logging.debug("Only %s free memory, killing %d guests",
free_mem, (i - 1))
last_vm = i
break
out = session.read_nonblocking(0.1)
time.sleep(2)
except OSError:
logging.debug("Only %s host free memory, killing %d guests",
free_mem, (i - 1))
logging.debug("Stopping %s", vm.name)
vm.monitor.cmd("stop")
for j in range(0, i):
logging.debug("Destroying %s", lvms[j].name)
lvms[j].destroy(gracefully = False)
time.sleep(20)
vm.monitor.cmd("c")
last_vm = i
if last_vm != 0:
break
logging.debug("Memory filled for guest %s", vm.name)
logging.info("Phase 3a: PASS")
logging.info("Phase 3b: Check if memory in max loading guest is right")
for i in range(last_vm + 1, vmsc):
lsessions[i].close()
if i == (vmsc - 1):
logging.debug(virt_test_utils.get_memory_info([lvms[i]]))
logging.debug("Destroying guest %s", lvms[i].name)
lvms[i].destroy(gracefully = False)
# Verify last machine with randomly generated memory
a_cmd = "mem.static_random_verify()"
_execute_allocator(a_cmd, lvms[last_vm], lsessions[last_vm],
(mem / 200 * 50 * perf_ratio))
logging.debug(virt_test_utils.get_memory_info([lvms[last_vm]]))
        lsessions[last_vm].cmd_output("die()", 20)
lvms[last_vm].destroy(gracefully = False)
logging.info("Phase 3b: PASS")
def split_parallel():
"""
        Parallel page splitting
"""
logging.info("Phase 1: parallel page spliting")
# We have to wait until allocator is finished (it waits 5 seconds to
# clean the socket
session = lsessions[0]
vm = lvms[0]
for i in range(1, max_alloc):
lsessions.append(vm.wait_for_login(timeout=360))
session.cmd("swapoff -a", timeout=300)
for i in range(0, max_alloc):
# Start the allocator
_start_allocator(vm, lsessions[i], 60 * perf_ratio)
logging.info("Phase 1: PASS")
logging.info("Phase 2a: Simultaneous merging")
logging.debug("Memory used by allocator on guests = %dMB",
(ksm_size / max_alloc))
for i in range(0, max_alloc):
a_cmd = "mem = MemFill(%d, %s, %s)" % ((ksm_size / max_alloc),
skeys[i], dkeys[i])
_execute_allocator(a_cmd, vm, lsessions[i], 60 * perf_ratio)
a_cmd = "mem.value_fill(%d)" % (skeys[0])
_execute_allocator(a_cmd, vm, lsessions[i], 90 * perf_ratio)
# Wait until ksm_overcommit_guest.py merges the pages (3 * ksm_size / 3)
shm = 0
i = 0
logging.debug("Target shared memory size: %s", ksm_size)
while (shm < ksm_size):
if i > 64:
logging.debug(virt_test_utils.get_memory_info(lvms))
raise error.TestError("SHM didn't merge the memory until DL")
wt = ksm_size / 200 * perf_ratio
logging.debug("Waiting %ds before proceed...", wt)
time.sleep(wt)
if (new_ksm):
shm = get_ksmstat()
else:
shm = vm.get_shared_meminfo()
logging.debug("Shared meminfo after attempt %s: %s", i, shm)
i += 1
logging.debug(virt_test_utils.get_memory_info([vm]))
logging.info("Phase 2a: PASS")
logging.info("Phase 2b: Simultaneous spliting")
# Actual splitting
for i in range(0, max_alloc):
a_cmd = "mem.static_random_fill()"
data = _execute_allocator(a_cmd, vm, lsessions[i],
90 * perf_ratio)[1]
data = data.splitlines()[-1]
logging.debug(data)
out = int(data.split()[4])
logging.debug("Performance: %dMB * 1000 / %dms = %dMB/s",
(ksm_size / max_alloc), out,
(ksm_size * 1000 / out / max_alloc))
logging.debug(virt_test_utils.get_memory_info([vm]))
logging.info("Phase 2b: PASS")
logging.info("Phase 2c: Simultaneous verification")
for i in range(0, max_alloc):
a_cmd = "mem.static_random_verify()"
data = _execute_allocator(a_cmd, vm, lsessions[i],
(mem / 200 * 50 * perf_ratio))[1]
logging.info("Phase 2c: PASS")
logging.info("Phase 2d: Simultaneous merging")
        # Actual merging (refill with the shared value)
for i in range(0, max_alloc):
a_cmd = "mem.value_fill(%d)" % skeys[0]
data = _execute_allocator(a_cmd, vm, lsessions[i],
120 * perf_ratio)[1]
logging.debug(virt_test_utils.get_memory_info([vm]))
logging.info("Phase 2d: PASS")
logging.info("Phase 2e: Simultaneous verification")
for i in range(0, max_alloc):
a_cmd = "mem.value_check(%d)" % skeys[0]
data = _execute_allocator(a_cmd, vm, lsessions[i],
(mem / 200 * 50 * perf_ratio))[1]
logging.info("Phase 2e: PASS")
logging.info("Phase 2f: Simultaneous spliting last 96B")
for i in range(0, max_alloc):
a_cmd = "mem.static_random_fill(96)"
data = _execute_allocator(a_cmd, vm, lsessions[i],
60 * perf_ratio)[1]
data = data.splitlines()[-1]
out = int(data.split()[4])
logging.debug("Performance: %dMB * 1000 / %dms = %dMB/s",
ksm_size/max_alloc, out,
(ksm_size * 1000 / out / max_alloc))
logging.debug(virt_test_utils.get_memory_info([vm]))
logging.info("Phase 2f: PASS")
logging.info("Phase 2g: Simultaneous verification last 96B")
for i in range(0, max_alloc):
a_cmd = "mem.static_random_verify(96)"
(match, data) = _execute_allocator(a_cmd, vm, lsessions[i],
(mem / 200 * 50 * perf_ratio))
logging.debug(virt_test_utils.get_memory_info([vm]))
logging.info("Phase 2g: PASS")
logging.debug("Cleaning up...")
for i in range(0, max_alloc):
lsessions[i].cmd_output("die()", 20)
session.close()
vm.destroy(gracefully = False)
# Main test code
logging.info("Starting phase 0: Initialization")
new_ksm = False
if (os.path.exists("/sys/kernel/mm/ksm/run")):
utils.run("echo 50 > /sys/kernel/mm/ksm/sleep_millisecs")
utils.run("echo 5000 > /sys/kernel/mm/ksm/pages_to_scan")
utils.run("echo 1 > /sys/kernel/mm/ksm/run")
e_up = "/sys/kernel/mm/transparent_hugepage/enabled"
e_rh = "/sys/kernel/mm/redhat_transparent_hugepage/enabled"
if os.path.exists(e_up):
utils.run("echo 'never' > %s" % e_up)
if os.path.exists(e_rh):
utils.run("echo 'never' > %s" % e_rh)
new_ksm = True
else:
try:
utils.run("modprobe ksm")
utils.run("ksmctl start 5000 100")
except error.CmdError, e:
raise error.TestFail("Failed to load KSM: %s" % e)
# host_reserve: mem reserve kept for the host system to run
host_reserve = int(params.get("ksm_host_reserve", -1))
if (host_reserve == -1):
        # default host_reserve = currently used host memory + one_minimal_guest(128MB)
# later we add 64MB per additional guest
host_reserve = ((utils.memtotal() - utils.read_from_meminfo("MemFree"))
/ 1024 + 128)
# using default reserve
_host_reserve = True
else:
_host_reserve = False
# guest_reserve: mem reserve kept to avoid guest OS to kill processes
guest_reserve = int(params.get("ksm_guest_reserve", -1))
if (guest_reserve == -1):
# default guest_reserve = minimal_system_mem(256MB)
# later we add tmpfs overhead
guest_reserve = 256
# using default reserve
_guest_reserve = True
else:
_guest_reserve = False
max_vms = int(params.get("max_vms", 2))
overcommit = float(params.get("ksm_overcommit_ratio", 2.0))
max_alloc = int(params.get("ksm_parallel_ratio", 1))
# vmsc: count of all used VMs
vmsc = int(overcommit) + 1
vmsc = max(vmsc, max_vms)
if (params['ksm_mode'] == "serial"):
max_alloc = vmsc
if _host_reserve:
# First round of additional guest reserves
host_reserve += vmsc * 64
_host_reserve = vmsc
host_mem = (int(utils.memtotal()) / 1024 - host_reserve)
ksm_swap = False
if params.get("ksm_swap") == "yes":
ksm_swap = True
# Performance ratio
perf_ratio = params.get("ksm_perf_ratio")
if perf_ratio:
perf_ratio = float(perf_ratio)
else:
perf_ratio = 1
if (params['ksm_mode'] == "parallel"):
vmsc = 1
overcommit = 1
mem = host_mem
# 32bit system adjustment
if not params['image_name'].endswith("64"):
logging.debug("Probably i386 guest architecture, "
"max allocator mem = 2G")
# Guest can have more than 2G but
# kvm mem + 1MB (allocator itself) can't
if (host_mem > 3100):
mem = 3100
if os.popen("uname -i").readline().startswith("i386"):
logging.debug("Host is i386 architecture, max guest mem is 2G")
# Guest system with qemu overhead (64M) can't have more than 2G
if mem > 3100 - 64:
mem = 3100 - 64
else:
# mem: Memory of the guest systems. Maximum must be less than
# host's physical ram
mem = int(overcommit * host_mem / vmsc)
# 32bit system adjustment
if not params['image_name'].endswith("64"):
logging.debug("Probably i386 guest architecture, "
"max allocator mem = 2G")
# Guest can have more than 2G but
# kvm mem + 1MB (allocator itself) can't
if mem - guest_reserve - 1 > 3100:
vmsc = int(math.ceil((host_mem * overcommit) /
(3100 + guest_reserve)))
if _host_reserve:
host_reserve += (vmsc - _host_reserve) * 64
host_mem -= (vmsc - _host_reserve) * 64
_host_reserve = vmsc
mem = int(math.floor(host_mem * overcommit / vmsc))
if os.popen("uname -i").readline().startswith("i386"):
logging.debug("Host is i386 architecture, max guest mem is 2G")
# Guest system with qemu overhead (64M) can't have more than 2G
if mem > 3100 - 64:
vmsc = int(math.ceil((host_mem * overcommit) /
(3100 - 64.0)))
if _host_reserve:
host_reserve += (vmsc - _host_reserve) * 64
host_mem -= (vmsc - _host_reserve) * 64
_host_reserve = vmsc
mem = int(math.floor(host_mem * overcommit / vmsc))
# 0.055 represents OS + TMPFS additional reserve per guest ram MB
if _guest_reserve:
guest_reserve += math.ceil(mem * 0.055)
swap = int(utils.read_from_meminfo("SwapTotal")) / 1024
logging.debug("Overcommit = %f", overcommit)
logging.debug("True overcommit = %f ", (float(vmsc * mem) /
float(host_mem)))
logging.debug("Host memory = %dM", host_mem)
logging.debug("Guest memory = %dM", mem)
logging.debug("Using swap = %s", ksm_swap)
logging.debug("Swap = %dM", swap)
logging.debug("max_vms = %d", max_vms)
logging.debug("Count of all used VMs = %d", vmsc)
logging.debug("Performance_ratio = %f", perf_ratio)
# Generate unique keys for random series
skeys = []
dkeys = []
for i in range(0, max(vmsc, max_alloc)):
key = random.randrange(0, 255)
while key in skeys:
key = random.randrange(0, 255)
skeys.append(key)
key = random.randrange(0, 999)
while key in dkeys:
key = random.randrange(0, 999)
dkeys.append(key)
logging.debug("skeys: %s", skeys)
logging.debug("dkeys: %s", dkeys)
lvms = []
lsessions = []
# As we don't know the number and memory amount of VMs in advance,
# we need to specify and create them here
vm_name = params.get("main_vm")
params['mem'] = mem
params['vms'] = vm_name
# Associate pidfile name
params['pid_' + vm_name] = virt_utils.generate_tmp_file_name(vm_name,
'pid')
if not params.get('extra_params'):
params['extra_params'] = ' '
params['extra_params_' + vm_name] = params.get('extra_params')
params['extra_params_' + vm_name] += (" -pidfile %s" %
(params.get('pid_' + vm_name)))
params['extra_params'] = params.get('extra_params_'+vm_name)
# ksm_size: amount of memory used by allocator
ksm_size = mem - guest_reserve
logging.debug("Memory used by allocator on guests = %dM", ksm_size)
# Creating the first guest
virt_env_process.preprocess_vm(test, params, env, vm_name)
lvms.append(env.get_vm(vm_name))
if not lvms[0]:
raise error.TestError("VM object not found in environment")
if not lvms[0].is_alive():
raise error.TestError("VM seems to be dead; Test requires a living "
"VM")
logging.debug("Booting first guest %s", lvms[0].name)
lsessions.append(lvms[0].wait_for_login(timeout=360))
# Associate vm PID
try:
tmp = open(params.get('pid_' + vm_name), 'r')
params['pid_' + vm_name] = int(tmp.readline())
except:
raise error.TestFail("Could not get PID of %s" % (vm_name))
# Creating other guest systems
for i in range(1, vmsc):
vm_name = "vm" + str(i + 1)
params['pid_' + vm_name] = virt_utils.generate_tmp_file_name(vm_name,
'pid')
params['extra_params_' + vm_name] = params.get('extra_params')
params['extra_params_' + vm_name] += (" -pidfile %s" %
(params.get('pid_' + vm_name)))
params['extra_params'] = params.get('extra_params_' + vm_name)
# Last VM is later used to run more allocators simultaneously
lvms.append(lvms[0].clone(vm_name, params))
env.register_vm(vm_name, lvms[i])
params['vms'] += " " + vm_name
logging.debug("Booting guest %s", lvms[i].name)
lvms[i].create()
if not lvms[i].is_alive():
raise error.TestError("VM %s seems to be dead; Test requires a"
"living VM" % lvms[i].name)
lsessions.append(lvms[i].wait_for_login(timeout=360))
try:
tmp = open(params.get('pid_' + vm_name), 'r')
params['pid_' + vm_name] = int(tmp.readline())
except:
raise error.TestFail("Could not get PID of %s" % (vm_name))
# Let guests rest a little bit :-)
st = vmsc * 2 * perf_ratio
logging.debug("Waiting %ds before proceed", st)
time.sleep(vmsc * 2 * perf_ratio)
logging.debug(virt_test_utils.get_memory_info(lvms))
# Copy ksm_overcommit_guest.py into guests
pwd = os.path.join(os.environ['AUTODIR'],'tests/kvm')
vksmd_src = os.path.join(pwd, "scripts/ksm_overcommit_guest.py")
dst_dir = "/tmp"
for vm in lvms:
vm.copy_files_to(vksmd_src, dst_dir)
logging.info("Phase 0: PASS")
if params['ksm_mode'] == "parallel":
logging.info("Starting KSM test parallel mode")
split_parallel()
logging.info("KSM test parallel mode: PASS")
elif params['ksm_mode'] == "serial":
logging.info("Starting KSM test serial mode")
initialize_guests()
separate_first_guest()
split_guest()
logging.info("KSM test serial mode: PASS")
|
zhanqxun/cv_fish
|
refs/heads/master
|
win32comext/directsound/test/ds_record.py
|
4
|
import pywintypes
import struct
import win32event, win32api
import os
import win32com.directsound.directsound as ds
def wav_header_pack(wfx, datasize):
return struct.pack('<4sl4s4slhhllhh4sl', 'RIFF', 36 + datasize,
'WAVE', 'fmt ', 16,
wfx.wFormatTag, wfx.nChannels, wfx.nSamplesPerSec,
wfx.nAvgBytesPerSec, wfx.nBlockAlign,
wfx.wBitsPerSample, 'data', datasize);
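# wav_header_pack() builds the canonical 44-byte RIFF/WAVE header: the 'RIFF'
# chunk size (36 + datasize), a 16-byte 'fmt ' chunk describing the PCM
# format, and the 'data' chunk header carrying the payload size.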
d = ds.DirectSoundCaptureCreate(None, None)
sdesc = ds.DSCBUFFERDESC()
sdesc.dwBufferBytes = 352800 # 2 seconds
sdesc.lpwfxFormat = pywintypes.WAVEFORMATEX()
sdesc.lpwfxFormat.wFormatTag = pywintypes.WAVE_FORMAT_PCM
sdesc.lpwfxFormat.nChannels = 2
sdesc.lpwfxFormat.nSamplesPerSec = 44100
sdesc.lpwfxFormat.nAvgBytesPerSec = 176400
sdesc.lpwfxFormat.nBlockAlign = 4
sdesc.lpwfxFormat.wBitsPerSample = 16
print sdesc
print d
buffer = d.CreateCaptureBuffer(sdesc)
event = win32event.CreateEvent(None, 0, 0, None)
notify = buffer.QueryInterface(ds.IID_IDirectSoundNotify)
notify.SetNotificationPositions((ds.DSBPN_OFFSETSTOP, event))
buffer.Start(0)
win32event.WaitForSingleObject(event, -1)
data = buffer.Update(0, 352800)
fname=os.path.join(win32api.GetTempPath(), 'test_directsound_record.wav')
f = open(fname, 'wb')
f.write(wav_header_pack(sdesc.lpwfxFormat, 352800))
f.write(data)
f.close()
|
robwarm/gpaw-symm
|
refs/heads/master
|
gpaw/kpt_descriptor.py
|
1
|
# Copyright (C) 2003 CAMP
# Please see the accompanying LICENSE file for further information.
"""K-point/spin combination-descriptors
This module contains classes for defining combinations of two indices:
* Index k for irreducible kpoints in the 1st Brillouin zone.
* Index s for spin up/down if spin-polarized (otherwise ignored).
"""
import numpy as np
from ase.units import Bohr
from ase.dft.kpoints import monkhorst_pack, get_monkhorst_pack_size_and_offset
from gpaw.symmetry import Symmetry
from gpaw.kpoint import KPoint
import gpaw.mpi as mpi
import _gpaw
def to1bz(bzk_kc, cell_cv):
"""Wrap k-points to 1. BZ.
Return k-points wrapped to the 1. BZ.
bzk_kc: (n,3) ndarray
Array of k-points in units of the reciprocal lattice vectors.
cell_cv: (3,3) ndarray
Unit cell.
"""
B_cv = 2.0 * np.pi * np.linalg.inv(cell_cv).T
K_kv = np.dot(bzk_kc, B_cv)
N_xc = np.indices((3, 3, 3)).reshape((3, 27)).T - 1
G_xv = np.dot(N_xc, B_cv)
bz1k_kc = bzk_kc.copy()
# Find the closest reciprocal lattice vector:
for k, K_v in enumerate(K_kv):
# If a k-point has the same distance to several reciprocal
# lattice vectors, we don't want to pick a random one on the
# basis of numerical noise, so we round off the differences
        # between the shortest distances to 6 decimals and choose the
# one with the lowest index.
d = ((G_xv - K_v)**2).sum(1)
x = (d - d.min()).round(6).argmin()
bz1k_kc[k] -= N_xc[x]
return bz1k_kc
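# For instance, in a simple cubic cell a k-point at [0.75, 0, 0] is closer to
# the reciprocal lattice point [1, 0, 0] than to the origin, so it is wrapped
# to [-0.25, 0, 0].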
class KPointDescriptor:
"""Descriptor-class for k-points."""
def __init__(self, kpts, nspins=1, collinear=True, usefractrans=False):
"""Construct descriptor object for kpoint/spin combinations (ks-pair).
Parameters
----------
kpts: None, sequence of 3 ints, or (n,3)-shaped array
Specification of the k-point grid. None=Gamma, list of
ints=Monkhorst-Pack, ndarray=user specified.
nspins: int
Number of spins.
usefractrans: bool
            Switch for the use of non-symmorphic symmetries, i.e. symmetries
            with fractional translations. False by default (experimental!)
Attributes
=================== =================================================
``N_c`` Number of k-points in the different directions.
``nspins`` Number of spins in total.
``mynspins`` Number of spins on this CPU.
``nibzkpts`` Number of irreducible kpoints in 1st BZ.
``nks`` Number of k-point/spin combinations in total.
``mynks`` Number of k-point/spin combinations on this CPU.
``gamma`` Boolean indicator for gamma point calculation.
``comm`` MPI-communicator for kpoint distribution.
``weight_k`` Weights of each k-point
``ibzk_kc`` Unknown
``sym_k`` Unknown
``time_reversal_k`` Unknown
``bz2ibz_k`` Unknown
``ibz2bz_k`` Unknown
``bz2bz_ks`` Unknown
``symmetry`` Object representing symmetries
=================== =================================================
"""
if kpts is None:
self.bzk_kc = np.zeros((1, 3))
self.N_c = np.array((1, 1, 1), dtype=int)
self.offset_c = np.zeros(3)
elif isinstance(kpts[0], int):
self.bzk_kc = monkhorst_pack(kpts)
self.N_c = np.array(kpts, dtype=int)
self.offset_c = np.zeros(3)
else:
self.bzk_kc = np.array(kpts, float)
try:
self.N_c, self.offset_c = \
get_monkhorst_pack_size_and_offset(self.bzk_kc)
except ValueError:
self.N_c = None
self.offset_c = None
self.collinear = collinear
self.nspins = nspins
self.nbzkpts = len(self.bzk_kc)
# Gamma-point calculation?
self.usefractrans = usefractrans
self.gamma = (self.nbzkpts == 1 and np.allclose(self.bzk_kc[0], 0.0))
self.set_symmetry(None, None, usesymm=None)
self.set_communicator(mpi.serial_comm)
if self.gamma:
self.description = '1 k-point (Gamma)'
else:
self.description = '%d k-points' % self.nbzkpts
if self.N_c is not None:
self.description += (': %d x %d x %d Monkhorst-Pack grid' %
tuple(self.N_c))
if self.offset_c.any():
self.description += ' + ['
for x in self.offset_c:
if x != 0 and abs(round(1 / x) - 1 / x) < 1e-12:
self.description += '1/%d,' % round(1 / x)
else:
self.description += '%f,' % x
self.description = self.description[:-1] + ']'
def __len__(self):
"""Return number of k-point/spin combinations of local CPU."""
return self.mynks
def set_symmetry(self, atoms, setups, magmom_av=None,
usesymm=False, N_c=None, comm=None):
"""Create symmetry object and construct irreducible Brillouin zone.
atoms: Atoms object
Defines atom positions and types and also unit cell and
boundary conditions.
setups: instance of class Setups
PAW setups for the atoms.
magmom_av: ndarray
Initial magnetic moments.
usesymm: bool
Symmetry flag.
N_c: three int's or None
If not None: Check also symmetry of grid.
"""
if atoms is not None:
for c, periodic in enumerate(atoms.pbc):
if not periodic and not np.allclose(self.bzk_kc[:, c], 0.0):
raise ValueError('K-points can only be used with PBCs!')
self.cell_cv = atoms.cell / Bohr
if magmom_av is None:
magmom_av = np.zeros((len(atoms), 3))
magmom_av[:, 2] = atoms.get_initial_magnetic_moments()
magmom_av = magmom_av.round(decimals=3) # round off
id_a = zip(setups.id_a, *magmom_av.T)
# Construct a Symmetry instance containing the identity operation
# only
self.symmetry = Symmetry(id_a, atoms.cell / Bohr, atoms.pbc, fractrans=self.usefractrans)
self.usefractrans = self.symmetry.usefractrans
else:
self.symmetry = None
if self.gamma or usesymm is None:
# Point group and time-reversal symmetry neglected
self.weight_k = np.ones(self.nbzkpts) / self.nbzkpts
self.ibzk_kc = self.bzk_kc.copy()
self.sym_k = np.zeros(self.nbzkpts, int)
self.time_reversal_k = np.zeros(self.nbzkpts, bool)
self.bz2ibz_k = np.arange(self.nbzkpts)
self.ibz2bz_k = np.arange(self.nbzkpts)
self.bz2bz_ks = np.arange(self.nbzkpts)[:, np.newaxis]
else:
if usesymm:
# Find symmetry operations of atoms
self.symmetry.analyze(atoms.get_scaled_positions())
if N_c is not None:
if self.usefractrans:
## adjust N_c to symmetries
# the factor (denominator) the grid must follow
factor = np.ones(3, float)
indexes = np.where(np.abs(self.symmetry.ft_sc) > 1e-3)
for i in range(len(indexes[0])):
# find smallest common denominator
a = factor[indexes[1][i]]
b = np.rint(1. / self.symmetry.ft_sc[indexes[0][i]][indexes[1][i]])
factor[indexes[1][i]] = a * b
while b != 0:
rem = a % b
a = b
b = rem
factor[indexes[1][i]] /= a
Nnew_c = np.array(np.rint(N_c / factor) * factor, int)
# make sure new grid is not less dense
Nnew_c = np.array(np.where(Nnew_c >= N_c, Nnew_c, Nnew_c + factor), int)
N_c = Nnew_c
else:
## adjust symmetries to grid
self.symmetry.prune_symmetries_grid(N_c)
(self.ibzk_kc, self.weight_k,
self.sym_k,
self.time_reversal_k,
self.bz2ibz_k,
self.ibz2bz_k,
self.bz2bz_ks) = self.symmetry.reduce(self.bzk_kc, comm)
if setups is not None:
setups.set_symmetry(self.symmetry)
# Number of irreducible k-points and k-point/spin combinations.
self.nibzkpts = len(self.ibzk_kc)
if self.collinear:
self.nks = self.nibzkpts * self.nspins
else:
self.nks = self.nibzkpts
return N_c
def set_communicator(self, comm):
"""Set k-point communicator."""
# Ranks < self.rank0 have mynks0 k-point/spin combinations and
# ranks >= self.rank0 have mynks0+1 k-point/spin combinations.
mynks0, x = divmod(self.nks, comm.size)
self.rank0 = comm.size - x
self.comm = comm
# My number and offset of k-point/spin combinations
self.mynks = self.get_count()
self.ks0 = self.get_offset()
if self.nspins == 2 and comm.size == 1: # NCXXXXXXXX
# Avoid duplicating k-points in local list of k-points.
self.ibzk_qc = self.ibzk_kc.copy()
self.weight_q = self.weight_k
else:
self.ibzk_qc = np.vstack((self.ibzk_kc,
self.ibzk_kc))[self.get_slice()]
self.weight_q = np.hstack((self.weight_k,
self.weight_k))[self.get_slice()]
def copy(self, comm=mpi.serial_comm):
"""Create a copy with shared symmetry object."""
kd = KPointDescriptor(self.bzk_kc, self.nspins)
kd.weight_k = self.weight_k
kd.ibzk_kc = self.ibzk_kc
kd.sym_k = self.sym_k
kd.time_reversal_k = self.time_reversal_k
kd.bz2ibz_k = self.bz2ibz_k
kd.ibz2bz_k = self.ibz2bz_k
kd.bz2bz_ks = self.bz2bz_ks
kd.symmetry = self.symmetry
kd.nibzkpts = self.nibzkpts
kd.nks = self.nks
kd.set_communicator(comm)
return kd
def create_k_points(self, gd):
"""Return a list of KPoints."""
sdisp_cd = gd.sdisp_cd
kpt_u = []
for ks in range(self.ks0, self.ks0 + self.mynks):
s, k = divmod(ks, self.nibzkpts)
q = (ks - self.ks0) % self.nibzkpts
if self.collinear:
weight = self.weight_k[k] * 2 / self.nspins
else:
weight = self.weight_k[k]
if self.gamma:
phase_cd = np.ones((3, 2), complex)
else:
phase_cd = np.exp(2j * np.pi *
sdisp_cd * self.ibzk_kc[k, :, np.newaxis])
kpt_u.append(KPoint(weight, s, k, q, phase_cd))
return kpt_u
def collect(self, a_ux, broadcast=True):
"""Collect distributed data to all."""
if self.comm.rank == 0 or broadcast:
xshape = a_ux.shape[1:]
a_skx = np.empty((self.nspins, self.nibzkpts) + xshape, a_ux.dtype)
a_Ux = a_skx.reshape((-1,) + xshape)
else:
a_skx = None
if self.comm.rank > 0:
self.comm.send(a_ux, 0)
else:
u1 = self.get_count(0)
a_Ux[0:u1] = a_ux
requests = []
for rank in range(1, self.comm.size):
u2 = u1 + self.get_count(rank)
requests.append(self.comm.receive(a_Ux[u1:u2], rank,
block=False))
u1 = u2
assert u1 == len(a_Ux)
self.comm.waitall(requests)
if broadcast:
self.comm.broadcast(a_Ux, 0)
return a_skx
def transform_wave_function(self, psit_G, k, index_G=None, phase_G=None):
"""Transform wave function from IBZ to BZ.
k is the index of the desired k-point in the full BZ.
"""
s = self.sym_k[k]
time_reversal = self.time_reversal_k[k]
op_cc = np.linalg.inv(self.symmetry.op_scc[s]).round().astype(int)
# Identity
if (np.abs(op_cc - np.eye(3, dtype=int)) < 1e-10).all():
if time_reversal:
return psit_G.conj()
else:
return psit_G
# General point group symmetry
else:
ik = self.bz2ibz_k[k]
kibz_c = self.ibzk_kc[ik]
b_g = np.zeros_like(psit_G)
kbz_c = np.dot(self.symmetry.op_scc[s], kibz_c)
if index_G is not None:
assert index_G.shape == psit_G.shape == phase_G.shape,\
'Shape mismatch %s vs %s vs %s' % (index_G.shape,
psit_G.shape,
phase_G.shape)
_gpaw.symmetrize_with_index(psit_G, b_g, index_G, phase_G)
else:
_gpaw.symmetrize_wavefunction(psit_G, b_g, op_cc.copy(),
np.ascontiguousarray(kibz_c),
kbz_c)
if time_reversal:
return b_g.conj()
else:
return b_g
def get_transform_wavefunction_index(self, nG, k):
"""Get the "wavefunction transform index".
This is a permutation of the numbers 1, 2, .. N which
associates k + q to some k, and where N is the total
number of grid points as specified by nG which is a
3D tuple.
Returns index_G and phase_G which are one-dimensional
arrays on the grid."""
s = self.sym_k[k]
op_cc = np.linalg.inv(self.symmetry.op_scc[s]).round().astype(int)
# General point group symmetry
if (np.abs(op_cc - np.eye(3, dtype=int)) < 1e-10).all():
nG0 = np.prod(nG)
index_G = np.arange(nG0).reshape(nG)
phase_G = np.ones(nG)
else:
ik = self.bz2ibz_k[k]
kibz_c = self.ibzk_kc[ik]
index_G = np.zeros(nG, dtype=int)
phase_G = np.zeros(nG, dtype=complex)
kbz_c = np.dot(self.symmetry.op_scc[s], kibz_c)
_gpaw.symmetrize_return_index(index_G, phase_G, op_cc.copy(),
np.ascontiguousarray(kibz_c),
kbz_c)
return index_G, phase_G
#def find_k_plus_q(self, q_c, k_x=None):
def find_k_plus_q(self, q_c, kpts_k=None):
"""Find the indices of k+q for all kpoints in the Brillouin zone.
In case that k+q is outside the BZ, the k-point inside the BZ
corresponding to k+q is given.
Parameters
----------
q_c: ndarray
Coordinates for the q-vector in units of the reciprocal
lattice vectors.
kpts_k: list of ints
Restrict search to specified k-points.
"""
k_x = kpts_k
if k_x is None:
return self.find_k_plus_q(q_c, range(self.nbzkpts))
i_x = []
for k in k_x:
kpt_c = self.bzk_kc[k] + q_c
d_kc = kpt_c - self.bzk_kc
d_k = abs(d_kc - d_kc.round()).sum(1)
i = d_k.argmin()
if d_k[i] > 1e-8:
raise RuntimeError('Could not find k+q!')
i_x.append(i)
return i_x
def get_bz_q_points(self, first=False):
"""Return the q=k1-k2. q-mesh is always Gamma-centered."""
shift_c = 0.5 * ((self.N_c + 1) % 2) / self.N_c
bzq_qc = monkhorst_pack(self.N_c) + shift_c
if first:
return to1bz(bzq_qc, self.cell_cv)
else:
return bzq_qc
def get_ibz_q_points(self, bzq_qc, op_scc):
"""Return ibz q points and the corresponding symmetry operations that
work for k-mesh as well."""
ibzq_qc_tmp = []
ibzq_qc_tmp.append(bzq_qc[-1])
weight_tmp = [0]
for i, op_cc in enumerate(op_scc):
if np.abs(op_cc - np.eye(3)).sum() < 1e-8:
identity_iop = i
break
ibzq_q_tmp = {}
iop_q = {}
timerev_q = {}
diff_qc = {}
for i in range(len(bzq_qc) - 1, -1, -1): # loop opposite to kpoint
try:
ibzk, iop, timerev, diff_c = self.find_ibzkpt(
op_scc, ibzq_qc_tmp, bzq_qc[i])
find = False
for ii, iop1 in enumerate(self.sym_k):
if iop1 == iop and self.time_reversal_k[ii] == timerev:
find = True
break
                if not find:
                    raise ValueError('cannot find k!')
ibzq_q_tmp[i] = ibzk
weight_tmp[ibzk] += 1.
iop_q[i] = iop
timerev_q[i] = timerev
diff_qc[i] = diff_c
except ValueError:
ibzq_qc_tmp.append(bzq_qc[i])
weight_tmp.append(1.)
ibzq_q_tmp[i] = len(ibzq_qc_tmp) - 1
iop_q[i] = identity_iop
timerev_q[i] = False
diff_qc[i] = np.zeros(3)
# reverse the order.
nq = len(ibzq_qc_tmp)
ibzq_qc = np.zeros((nq, 3))
ibzq_q = np.zeros(len(bzq_qc), dtype=int)
for i in range(nq):
ibzq_qc[i] = ibzq_qc_tmp[nq - i - 1]
for i in range(len(bzq_qc)):
ibzq_q[i] = nq - ibzq_q_tmp[i] - 1
self.q_weights = np.array(weight_tmp[::-1]) / len(bzq_qc)
return ibzq_qc, ibzq_q, iop_q, timerev_q, diff_qc
def find_ibzkpt(self, symrel, ibzk_kc, bzk_c):
"""Find index in IBZ and related symmetry operations."""
find = False
ibzkpt = 0
iop = 0
timerev = False
for sign in (1, -1):
for ioptmp, op in enumerate(symrel):
for i, ibzk in enumerate(ibzk_kc):
diff_c = bzk_c - sign * np.dot(op, ibzk)
if (np.abs(diff_c - diff_c.round()) < 1e-8).all():
ibzkpt = i
iop = ioptmp
find = True
if sign == -1:
timerev = True
break
                if find:
                    break
            if find:
                break
        if not find:
            raise ValueError('Cannot find corresponding IBZ kpoint!')
return ibzkpt, iop, timerev, diff_c.round()
def where_is_q(self, q_c, bzq_qc):
"""Find the index of q points in BZ."""
d_qc = q_c - bzq_qc
d_q = abs(d_qc - d_qc.round()).sum(1)
q = d_q.argmin()
if d_q[q] > 1e-8:
raise RuntimeError('Could not find q!')
return q
def get_count(self, rank=None):
"""Return the number of ks-pairs which belong to a given rank."""
if rank is None:
rank = self.comm.rank
assert rank in xrange(self.comm.size)
mynks0 = self.nks // self.comm.size
mynks = mynks0
if rank >= self.rank0:
mynks += 1
return mynks
def get_offset(self, rank=None):
"""Return the offset of the first ks-pair on a given rank."""
if rank is None:
rank = self.comm.rank
assert rank in xrange(self.comm.size)
mynks0 = self.nks // self.comm.size
ks0 = rank * mynks0
if rank >= self.rank0:
ks0 += rank - self.rank0
return ks0
def get_rank_and_index(self, s, k):
"""Find rank and local index of k-point/spin combination."""
u = self.where_is(s, k)
rank, myu = self.who_has(u)
return rank, myu
def get_slice(self, rank=None):
"""Return the slice of global ks-pairs which belong to a given rank."""
if rank is None:
rank = self.comm.rank
assert rank in xrange(self.comm.size)
mynks, ks0 = self.get_count(rank), self.get_offset(rank)
uslice = slice(ks0, ks0 + mynks)
return uslice
def get_indices(self, rank=None):
"""Return the global ks-pair indices which belong to a given rank."""
uslice = self.get_slice(rank)
return np.arange(*uslice.indices(self.nks))
def get_ranks(self):
"""Return array of ranks as a function of global ks-pair indices."""
ranks = np.empty(self.nks, dtype=int)
for rank in range(self.comm.size):
uslice = self.get_slice(rank)
ranks[uslice] = rank
assert (ranks >= 0).all() and (ranks < self.comm.size).all()
return ranks
def who_has(self, u):
"""Convert global index to rank information and local index."""
mynks0 = self.nks // self.comm.size
if u < mynks0 * self.rank0:
rank, myu = divmod(u, mynks0)
else:
rank, myu = divmod(u - mynks0 * self.rank0, mynks0 + 1)
rank += self.rank0
return rank, myu
def global_index(self, myu, rank=None):
"""Convert rank information and local index to global index."""
if rank is None:
rank = self.comm.rank
assert rank in xrange(self.comm.size)
ks0 = self.get_offset(rank)
u = ks0 + myu
return u
def what_is(self, u):
"""Convert global index to corresponding kpoint/spin combination."""
s, k = divmod(u, self.nibzkpts)
return s, k
def where_is(self, s, k):
"""Convert kpoint/spin combination to the global index thereof."""
u = k + self.nibzkpts * s
return u
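# Illustrative sketch (comments only): the global ks-pair index packs spin
# and IBZ k-point as u = k + nibzkpts * s, so what_is and where_is are exact
# inverses; the numbers below are assumptions for the example.
#
#   u = kd.where_is(s=1, k=4)   # -> 4 + kd.nibzkpts
#   s, k = kd.what_is(u)        # -> (1, 4) again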
#def get_size_of_global_array(self):
# return (self.nspins*self.nibzkpts,)
#
#def ...
class KPointDescriptorOld:
"""Descriptor-class for ordered lists of kpoint/spin combinations
TODO
"""
def __init__(self, nspins, nibzkpts, comm=None, gamma=True, dtype=float):
"""Construct descriptor object for kpoint/spin combinations (ks-pair).
Parameters:
nspins: int
Number of spins.
nibzkpts: int
Number of irreducible kpoints in 1st Brillouin zone.
comm: MPI-communicator
Communicator for kpoint-groups.
gamma: bool
Boolean indicator for a gamma-point-only calculation.
dtype: NumPy dtype
Data type appropriate for the wave functions.
Note that if comm.size is greater than the number of spins, then
the kpoints cannot all be located at the gamma point and therefore
the gamma boolean loses its significance.
Attributes:
============ ======================================================
``nspins`` Number of spins.
``nibzkpts`` Number of irreducible kpoints in 1st Brillouin zone.
``nks`` Number of k-point/spin combinations in total.
``mynks`` Number of k-point/spin combinations on this CPU.
``gamma`` Boolean indicator for gamma point calculation.
``dtype`` Data type appropriate for wave functions.
``beg`` Beginning of ks-pair indices in group (inclusive).
``end`` End of ks-pair indices in group (exclusive).
``step`` Stride for ks-pair indices between ``beg`` and ``end``.
``comm`` MPI-communicator for kpoint distribution.
============ ======================================================
"""
if comm is None:
comm = mpi.serial_comm
self.comm = comm
self.rank = self.comm.rank
self.nspins = nspins
self.nibzkpts = nibzkpts
self.nks = self.nibzkpts * self.nspins
# XXX Check from distribute_cpus in mpi/__init__.py line 239 rev. 4187
if self.nks % self.comm.size != 0:
raise RuntimeError('Cannot distribute %d k-point/spin ' \
'combinations to %d processors' % \
(self.nks, self.comm.size))
self.mynks = self.nks // self.comm.size
# TODO Move code from PAW.initialize in paw.py lines 319-328 rev. 4187
self.gamma = gamma
self.dtype = dtype
uslice = self.get_slice()
self.beg, self.end, self.step = uslice.indices(self.nks)
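# Illustrative sketch (comments only): this older descriptor insists on an
# even distribution, so nspins * nibzkpts must be divisible by comm.size and
# every rank owns a contiguous slice.  The communicator size and the counts
# below are assumptions for the example.
#
#   kd = KPointDescriptorOld(nspins=2, nibzkpts=4, comm=comm)  # nks = 8
#   kd.get_slice(rank=1)   # -> slice(2, 4) when comm.size == 4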
#XXX u is global kpoint index
def __len__(self):
return self.mynks
def get_rank_and_index(self, s, k):
"""Find rank and local index of k-point/spin combination."""
u = self.where_is(s, k)
rank, myu = self.who_has(u)
return rank, myu
def get_slice(self, rank=None):
"""Return the slice of global ks-pairs which belong to a given rank."""
if rank is None:
rank = self.comm.rank
assert rank in xrange(self.comm.size)
ks0 = rank * self.mynks
uslice = slice(ks0, ks0 + self.mynks)
return uslice
def get_indices(self, rank=None):
"""Return the global ks-pair indices which belong to a given rank."""
uslice = self.get_slice(rank)
return np.arange(*uslice.indices(self.nks))
def get_ranks(self):
"""Return array of ranks as a function of global ks-pair indices."""
ranks = np.empty(self.nks, dtype=int)
for rank in range(self.comm.size):
uslice = self.get_slice(rank)
ranks[uslice] = rank
assert (ranks >= 0).all() and (ranks < self.comm.size).all()
return ranks
def who_has(self, u):
"""Convert global index to rank information and local index."""
rank, myu = divmod(u, self.mynks)
return rank, myu
def global_index(self, myu, rank=None):
"""Convert rank information and local index to global index."""
if rank is None:
rank = self.comm.rank
u = rank * self.mynks + myu
return u
def what_is(self, u):
"""Convert global index to corresponding kpoint/spin combination."""
s, k = divmod(u, self.nibzkpts)
return s, k
def where_is(self, s, k):
"""Convert kpoint/spin combination to the global index thereof."""
u = k + self.nibzkpts * s
return u
#def get_size_of_global_array(self):
# return (self.nspins*self.nibzkpts,)
#
#def ...
|
wafaast/afefuc-project
|
refs/heads/master
|
src/afefuc.py
|
2
|
#!/usr/bin/env python
'''
Created on Apr 25, 2013
@author: Bartosz Alchimowicz
'''
import sys
import signal
sys.path.append('../src-ui')
from PyQt4 import QtCore, QtGui
from gui.MainWindowWrapper import MainWindowWrapper
if __name__ == "__main__":
signal.signal(signal.SIGINT, signal.SIG_DFL)
app = QtGui.QApplication(sys.argv)
app.setStyleSheet("QLineEdit { background-color: white }\nQLineEdit[readOnly=\"true\"] { color: gray }");
myapp = MainWindowWrapper(application = app)
myapp.show()
sys.exit(app.exec_())
|
firerszd/kbengine
|
refs/heads/master
|
kbe/src/lib/python/Lib/xmlrpc/client.py
|
69
|
#
# XML-RPC CLIENT LIBRARY
# $Id$
#
# an XML-RPC client interface for Python.
#
# the marshalling and response parser code can also be used to
# implement XML-RPC servers.
#
# Notes:
# this version is designed to work with Python 2.1 or newer.
#
# History:
# 1999-01-14 fl Created
# 1999-01-15 fl Changed dateTime to use localtime
# 1999-01-16 fl Added Binary/base64 element, default to RPC2 service
# 1999-01-19 fl Fixed array data element (from Skip Montanaro)
# 1999-01-21 fl Fixed dateTime constructor, etc.
# 1999-02-02 fl Added fault handling, handle empty sequences, etc.
# 1999-02-10 fl Fixed problem with empty responses (from Skip Montanaro)
# 1999-06-20 fl Speed improvements, pluggable parsers/transports (0.9.8)
# 2000-11-28 fl Changed boolean to check the truth value of its argument
# 2001-02-24 fl Added encoding/Unicode/SafeTransport patches
# 2001-02-26 fl Added compare support to wrappers (0.9.9/1.0b1)
# 2001-03-28 fl Make sure response tuple is a singleton
# 2001-03-29 fl Don't require empty params element (from Nicholas Riley)
# 2001-06-10 fl Folded in _xmlrpclib accelerator support (1.0b2)
# 2001-08-20 fl Base xmlrpclib.Error on built-in Exception (from Paul Prescod)
# 2001-09-03 fl Allow Transport subclass to override getparser
# 2001-09-10 fl Lazy import of urllib, cgi, xmllib (20x import speedup)
# 2001-10-01 fl Remove containers from memo cache when done with them
# 2001-10-01 fl Use faster escape method (80% dumps speedup)
# 2001-10-02 fl More dumps microtuning
# 2001-10-04 fl Make sure import expat gets a parser (from Guido van Rossum)
# 2001-10-10 sm Allow long ints to be passed as ints if they don't overflow
# 2001-10-17 sm Test for int and long overflow (allows use on 64-bit systems)
# 2001-11-12 fl Use repr() to marshal doubles (from Paul Felix)
# 2002-03-17 fl Avoid buffered read when possible (from James Rucker)
# 2002-04-07 fl Added pythondoc comments
# 2002-04-16 fl Added __str__ methods to datetime/binary wrappers
# 2002-05-15 fl Added error constants (from Andrew Kuchling)
# 2002-06-27 fl Merged with Python CVS version
# 2002-10-22 fl Added basic authentication (based on code from Phillip Eby)
# 2003-01-22 sm Add support for the bool type
# 2003-02-27 gvr Remove apply calls
# 2003-04-24 sm Use cStringIO if available
# 2003-04-25 ak Add support for nil
# 2003-06-15 gn Add support for time.struct_time
# 2003-07-12 gp Correct marshalling of Faults
# 2003-10-31 mvl Add multicall support
# 2004-08-20 mvl Bump minimum supported Python version to 2.1
#
# Copyright (c) 1999-2002 by Secret Labs AB.
# Copyright (c) 1999-2002 by Fredrik Lundh.
#
# [email protected]
# http://www.pythonware.com
#
# --------------------------------------------------------------------
# The XML-RPC client interface is
#
# Copyright (c) 1999-2002 by Secret Labs AB
# Copyright (c) 1999-2002 by Fredrik Lundh
#
# By obtaining, using, and/or copying this software and/or its
# associated documentation, you agree that you have read, understood,
# and will comply with the following terms and conditions:
#
# Permission to use, copy, modify, and distribute this software and
# its associated documentation for any purpose and without fee is
# hereby granted, provided that the above copyright notice appears in
# all copies, and that both that copyright notice and this permission
# notice appear in supporting documentation, and that the name of
# Secret Labs AB or the author not be used in advertising or publicity
# pertaining to distribution of the software without specific, written
# prior permission.
#
# SECRET LABS AB AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH REGARD
# TO THIS SOFTWARE, INCLUDING ALL IMPLIED WARRANTIES OF MERCHANT-
# ABILITY AND FITNESS. IN NO EVENT SHALL SECRET LABS AB OR THE AUTHOR
# BE LIABLE FOR ANY SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY
# DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS,
# WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS
# ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE
# OF THIS SOFTWARE.
# --------------------------------------------------------------------
"""
An XML-RPC client interface for Python.
The marshalling and response parser code can also be used to
implement XML-RPC servers.
Exported exceptions:
Error Base class for client errors
ProtocolError Indicates an HTTP protocol error
ResponseError Indicates a broken response package
Fault Indicates an XML-RPC fault package
Exported classes:
ServerProxy Represents a logical connection to an XML-RPC server
MultiCall Executor of boxcared xmlrpc requests
DateTime dateTime wrapper for an ISO 8601 string or time tuple or
localtime integer value to generate a "dateTime.iso8601"
XML-RPC value
Binary binary data wrapper
Marshaller Generate an XML-RPC params chunk from a Python data structure
Unmarshaller Unmarshal an XML-RPC response from incoming XML event message
Transport Handles an HTTP transaction to an XML-RPC server
SafeTransport Handles an HTTPS transaction to an XML-RPC server
Exported constants:
(none)
Exported functions:
getparser Create instance of the fastest available parser & attach
to an unmarshalling object
dumps Convert an argument tuple or a Fault instance to an XML-RPC
request (or response, if the methodresponse option is used).
loads Convert an XML-RPC packet to unmarshalled data plus a method
name (None if not present).
"""
import base64
import sys
import time
from datetime import datetime
import http.client
import urllib.parse
from xml.parsers import expat
import errno
from io import BytesIO
try:
import gzip
except ImportError:
gzip = None  # Python can be built without zlib/gzip support
# --------------------------------------------------------------------
# Internal stuff
def escape(s):
s = s.replace("&", "&")
s = s.replace("<", "<")
return s.replace(">", ">",)
# used in User-Agent header sent
__version__ = sys.version[:3]
# xmlrpc integer limits
MAXINT = 2**31-1
MININT = -2**31
# --------------------------------------------------------------------
# Error constants (from Dan Libby's specification at
# http://xmlrpc-epi.sourceforge.net/specs/rfc.fault_codes.php)
# Ranges of errors
PARSE_ERROR = -32700
SERVER_ERROR = -32600
APPLICATION_ERROR = -32500
SYSTEM_ERROR = -32400
TRANSPORT_ERROR = -32300
# Specific errors
NOT_WELLFORMED_ERROR = -32700
UNSUPPORTED_ENCODING = -32701
INVALID_ENCODING_CHAR = -32702
INVALID_XMLRPC = -32600
METHOD_NOT_FOUND = -32601
INVALID_METHOD_PARAMS = -32602
INTERNAL_ERROR = -32603
# --------------------------------------------------------------------
# Exceptions
##
# Base class for all kinds of client-side errors.
class Error(Exception):
"""Base class for client errors."""
def __str__(self):
return repr(self)
##
# Indicates an HTTP-level protocol error. This is raised by the HTTP
# transport layer, if the server returns an error code other than 200
# (OK).
#
# @param url The target URL.
# @param errcode The HTTP error code.
# @param errmsg The HTTP error message.
# @param headers The HTTP header dictionary.
class ProtocolError(Error):
"""Indicates an HTTP protocol error."""
def __init__(self, url, errcode, errmsg, headers):
Error.__init__(self)
self.url = url
self.errcode = errcode
self.errmsg = errmsg
self.headers = headers
def __repr__(self):
return (
"<ProtocolError for %s: %s %s>" %
(self.url, self.errcode, self.errmsg)
)
##
# Indicates a broken XML-RPC response package. This exception is
# raised by the unmarshalling layer, if the XML-RPC response is
# malformed.
class ResponseError(Error):
"""Indicates a broken response package."""
pass
##
# Indicates an XML-RPC fault response package. This exception is
# raised by the unmarshalling layer, if the XML-RPC response contains
# a fault string. This exception can also be used as a class, to
# generate a fault XML-RPC message.
#
# @param faultCode The XML-RPC fault code.
# @param faultString The XML-RPC fault string.
class Fault(Error):
"""Indicates an XML-RPC fault package."""
def __init__(self, faultCode, faultString, **extra):
Error.__init__(self)
self.faultCode = faultCode
self.faultString = faultString
def __repr__(self):
return "<Fault %s: %r>" % (self.faultCode, self.faultString)
# --------------------------------------------------------------------
# Special values
##
# Backwards compatibility
boolean = Boolean = bool
##
# Wrapper for XML-RPC DateTime values. This converts a time value to
# the format used by XML-RPC.
# <p>
# The value can be given as a datetime object, as a string in the
# format "yyyymmddThh:mm:ss", as a 9-item time tuple (as returned by
# time.localtime()), or an integer value (as returned by time.time()).
# The wrapper uses time.localtime() to convert an integer to a time
# tuple.
#
# @param value The time, given as a datetime object, an ISO 8601 string,
# a time tuple, or an integer time value.
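# Illustrative sketch (comments only): each accepted input form ends up as
# the "yyyymmddThh:mm:ss" text that is sent on the wire; the timestamp is an
# assumption for the example.
#
#   DateTime(datetime(2002, 10, 2, 8, 0, 0))   # from a datetime object
#   DateTime("20021002T08:00:00")              # from an ISO 8601 string
#   DateTime(time.localtime())                 # from a 9-item time tuple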
# Issue #13305: different format codes across platforms
_day0 = datetime(1, 1, 1)
if _day0.strftime('%Y') == '0001': # Mac OS X
def _iso8601_format(value):
return value.strftime("%Y%m%dT%H:%M:%S")
elif _day0.strftime('%4Y') == '0001': # Linux
def _iso8601_format(value):
return value.strftime("%4Y%m%dT%H:%M:%S")
else:
def _iso8601_format(value):
return value.strftime("%Y%m%dT%H:%M:%S").zfill(17)
del _day0
def _strftime(value):
if isinstance(value, datetime):
return _iso8601_format(value)
if not isinstance(value, (tuple, time.struct_time)):
if value == 0:
value = time.time()
value = time.localtime(value)
return "%04d%02d%02dT%02d:%02d:%02d" % value[:6]
class DateTime:
"""DateTime wrapper for an ISO 8601 string or time tuple or
localtime integer value to generate 'dateTime.iso8601' XML-RPC
value.
"""
def __init__(self, value=0):
if isinstance(value, str):
self.value = value
else:
self.value = _strftime(value)
def make_comparable(self, other):
if isinstance(other, DateTime):
s = self.value
o = other.value
elif isinstance(other, datetime):
s = self.value
o = _iso8601_format(other)
elif isinstance(other, str):
s = self.value
o = other
elif hasattr(other, "timetuple"):
s = self.timetuple()
o = other.timetuple()
else:
otype = (hasattr(other, "__class__")
and other.__class__.__name__
or type(other))
raise TypeError("Can't compare %s and %s" %
(self.__class__.__name__, otype))
return s, o
def __lt__(self, other):
s, o = self.make_comparable(other)
return s < o
def __le__(self, other):
s, o = self.make_comparable(other)
return s <= o
def __gt__(self, other):
s, o = self.make_comparable(other)
return s > o
def __ge__(self, other):
s, o = self.make_comparable(other)
return s >= o
def __eq__(self, other):
s, o = self.make_comparable(other)
return s == o
def __ne__(self, other):
s, o = self.make_comparable(other)
return s != o
def timetuple(self):
return time.strptime(self.value, "%Y%m%dT%H:%M:%S")
##
# Get date/time value.
#
# @return Date/time value, as an ISO 8601 string.
def __str__(self):
return self.value
def __repr__(self):
return "<DateTime %r at %x>" % (self.value, id(self))
def decode(self, data):
self.value = str(data).strip()
def encode(self, out):
out.write("<value><dateTime.iso8601>")
out.write(self.value)
out.write("</dateTime.iso8601></value>\n")
def _datetime(data):
# decode xml element contents into a DateTime structure.
value = DateTime()
value.decode(data)
return value
def _datetime_type(data):
return datetime.strptime(data, "%Y%m%dT%H:%M:%S")
##
# Wrapper for binary data. This can be used to transport any kind
# of binary data over XML-RPC, using BASE64 encoding.
#
# @param data An 8-bit string containing arbitrary data.
class Binary:
"""Wrapper for binary data."""
def __init__(self, data=None):
if data is None:
data = b""
else:
if not isinstance(data, (bytes, bytearray)):
raise TypeError("expected bytes or bytearray, not %s" %
data.__class__.__name__)
data = bytes(data) # Make a copy of the bytes!
self.data = data
##
# Get buffer contents.
#
# @return Buffer contents, as an 8-bit string.
def __str__(self):
return str(self.data, "latin-1") # XXX encoding?!
def __eq__(self, other):
if isinstance(other, Binary):
other = other.data
return self.data == other
def __ne__(self, other):
if isinstance(other, Binary):
other = other.data
return self.data != other
def decode(self, data):
self.data = base64.decodebytes(data)
def encode(self, out):
out.write("<value><base64>\n")
encoded = base64.encodebytes(self.data)
out.write(encoded.decode('ascii'))
out.write("</base64></value>\n")
def _binary(data):
# decode xml element contents into a Binary structure
value = Binary()
value.decode(data)
return value
WRAPPERS = (DateTime, Binary)
# --------------------------------------------------------------------
# XML parsers
class ExpatParser:
# fast expat parser for Python 2.0 and later.
def __init__(self, target):
self._parser = parser = expat.ParserCreate(None, None)
self._target = target
parser.StartElementHandler = target.start
parser.EndElementHandler = target.end
parser.CharacterDataHandler = target.data
encoding = None
target.xml(encoding, None)
def feed(self, data):
self._parser.Parse(data, 0)
def close(self):
self._parser.Parse("", 1) # end of data
del self._target, self._parser # get rid of circular references
# --------------------------------------------------------------------
# XML-RPC marshalling and unmarshalling code
##
# XML-RPC marshaller.
#
# @param encoding Default encoding for 8-bit strings. The default
# value is None (interpreted as UTF-8).
# @see dumps
class Marshaller:
"""Generate an XML-RPC params chunk from a Python data structure.
Create a Marshaller instance for each set of parameters, and use
the "dumps" method to convert your data (represented as a tuple)
to an XML-RPC params chunk. To write a fault response, pass a
Fault instance instead. You may prefer to use the "dumps" module
function for this purpose.
"""
# by the way, if you don't understand what's going on in here,
# that's perfectly ok.
def __init__(self, encoding=None, allow_none=False):
self.memo = {}
self.data = None
self.encoding = encoding
self.allow_none = allow_none
dispatch = {}
def dumps(self, values):
out = []
write = out.append
dump = self.__dump
if isinstance(values, Fault):
# fault instance
write("<fault>\n")
dump({'faultCode': values.faultCode,
'faultString': values.faultString},
write)
write("</fault>\n")
else:
# parameter block
# FIXME: the xml-rpc specification allows us to leave out
# the entire <params> block if there are no parameters.
# however, changing this may break older code (including
# old versions of xmlrpclib.py), so this is better left as
# is for now. See @XMLRPC3 for more information. /F
write("<params>\n")
for v in values:
write("<param>\n")
dump(v, write)
write("</param>\n")
write("</params>\n")
result = "".join(out)
return result
def __dump(self, value, write):
try:
f = self.dispatch[type(value)]
except KeyError:
# check if this object can be marshalled as a structure
if not hasattr(value, '__dict__'):
raise TypeError("cannot marshal %s objects" % type(value))
# check if this class is a sub-class of a basic type,
# because we don't know how to marshal these types
# (e.g. a string sub-class)
for type_ in type(value).__mro__:
if type_ in self.dispatch.keys():
raise TypeError("cannot marshal %s objects" % type(value))
# XXX(twouters): using "_arbitrary_instance" as key as a quick-fix
# for the p3yk merge, this should probably be fixed more neatly.
f = self.dispatch["_arbitrary_instance"]
f(self, value, write)
def dump_nil (self, value, write):
if not self.allow_none:
raise TypeError("cannot marshal None unless allow_none is enabled")
write("<value><nil/></value>")
dispatch[type(None)] = dump_nil
def dump_bool(self, value, write):
write("<value><boolean>")
write(value and "1" or "0")
write("</boolean></value>\n")
dispatch[bool] = dump_bool
def dump_long(self, value, write):
if value > MAXINT or value < MININT:
raise OverflowError("int exceeds XML-RPC limits")
write("<value><int>")
write(str(int(value)))
write("</int></value>\n")
dispatch[int] = dump_long
# backward compatible
dump_int = dump_long
def dump_double(self, value, write):
write("<value><double>")
write(repr(value))
write("</double></value>\n")
dispatch[float] = dump_double
def dump_unicode(self, value, write, escape=escape):
write("<value><string>")
write(escape(value))
write("</string></value>\n")
dispatch[str] = dump_unicode
def dump_bytes(self, value, write):
write("<value><base64>\n")
encoded = base64.encodebytes(value)
write(encoded.decode('ascii'))
write("</base64></value>\n")
dispatch[bytes] = dump_bytes
dispatch[bytearray] = dump_bytes
def dump_array(self, value, write):
i = id(value)
if i in self.memo:
raise TypeError("cannot marshal recursive sequences")
self.memo[i] = None
dump = self.__dump
write("<value><array><data>\n")
for v in value:
dump(v, write)
write("</data></array></value>\n")
del self.memo[i]
dispatch[tuple] = dump_array
dispatch[list] = dump_array
def dump_struct(self, value, write, escape=escape):
i = id(value)
if i in self.memo:
raise TypeError("cannot marshal recursive dictionaries")
self.memo[i] = None
dump = self.__dump
write("<value><struct>\n")
for k, v in value.items():
write("<member>\n")
if not isinstance(k, str):
raise TypeError("dictionary key must be string")
write("<name>%s</name>\n" % escape(k))
dump(v, write)
write("</member>\n")
write("</struct></value>\n")
del self.memo[i]
dispatch[dict] = dump_struct
def dump_datetime(self, value, write):
write("<value><dateTime.iso8601>")
write(_strftime(value))
write("</dateTime.iso8601></value>\n")
dispatch[datetime] = dump_datetime
def dump_instance(self, value, write):
# check for special wrappers
if value.__class__ in WRAPPERS:
self.write = write
value.encode(self)
del self.write
else:
# store instance attributes as a struct (really?)
self.dump_struct(value.__dict__, write)
dispatch[DateTime] = dump_instance
dispatch[Binary] = dump_instance
# XXX(twouters): using "_arbitrary_instance" as key as a quick-fix
# for the p3yk merge, this should probably be fixed more neatly.
dispatch["_arbitrary_instance"] = dump_instance
##
# XML-RPC unmarshaller.
#
# @see loads
class Unmarshaller:
"""Unmarshal an XML-RPC response, based on incoming XML event
messages (start, data, end). Call close() to get the resulting
data structure.
Note that this reader is fairly tolerant, and gladly accepts bogus
XML-RPC data without complaining (but not bogus XML).
"""
# and again, if you don't understand what's going on in here,
# that's perfectly ok.
def __init__(self, use_datetime=False, use_builtin_types=False):
self._type = None
self._stack = []
self._marks = []
self._data = []
self._methodname = None
self._encoding = "utf-8"
self.append = self._stack.append
self._use_datetime = use_builtin_types or use_datetime
self._use_bytes = use_builtin_types
def close(self):
# return response tuple and target method
if self._type is None or self._marks:
raise ResponseError()
if self._type == "fault":
raise Fault(**self._stack[0])
return tuple(self._stack)
def getmethodname(self):
return self._methodname
#
# event handlers
def xml(self, encoding, standalone):
self._encoding = encoding
# FIXME: assert standalone == 1 ???
def start(self, tag, attrs):
# prepare to handle this element
if tag == "array" or tag == "struct":
self._marks.append(len(self._stack))
self._data = []
self._value = (tag == "value")
def data(self, text):
self._data.append(text)
def end(self, tag):
# call the appropriate end tag handler
try:
f = self.dispatch[tag]
except KeyError:
pass # unknown tag ?
else:
return f(self, "".join(self._data))
#
# accelerator support
def end_dispatch(self, tag, data):
# dispatch data
try:
f = self.dispatch[tag]
except KeyError:
pass # unknown tag ?
else:
return f(self, data)
#
# element decoders
dispatch = {}
def end_nil (self, data):
self.append(None)
self._value = 0
dispatch["nil"] = end_nil
def end_boolean(self, data):
if data == "0":
self.append(False)
elif data == "1":
self.append(True)
else:
raise TypeError("bad boolean value")
self._value = 0
dispatch["boolean"] = end_boolean
def end_int(self, data):
self.append(int(data))
self._value = 0
dispatch["i4"] = end_int
dispatch["i8"] = end_int
dispatch["int"] = end_int
def end_double(self, data):
self.append(float(data))
self._value = 0
dispatch["double"] = end_double
def end_string(self, data):
if self._encoding:
data = data.decode(self._encoding)
self.append(data)
self._value = 0
dispatch["string"] = end_string
dispatch["name"] = end_string # struct keys are always strings
def end_array(self, data):
mark = self._marks.pop()
# map arrays to Python lists
self._stack[mark:] = [self._stack[mark:]]
self._value = 0
dispatch["array"] = end_array
def end_struct(self, data):
mark = self._marks.pop()
# map structs to Python dictionaries
dict = {}
items = self._stack[mark:]
for i in range(0, len(items), 2):
dict[items[i]] = items[i+1]
self._stack[mark:] = [dict]
self._value = 0
dispatch["struct"] = end_struct
def end_base64(self, data):
value = Binary()
value.decode(data.encode("ascii"))
if self._use_bytes:
value = value.data
self.append(value)
self._value = 0
dispatch["base64"] = end_base64
def end_dateTime(self, data):
value = DateTime()
value.decode(data)
if self._use_datetime:
value = _datetime_type(data)
self.append(value)
dispatch["dateTime.iso8601"] = end_dateTime
def end_value(self, data):
# if we stumble upon a value element with no internal
# elements, treat it as a string element
if self._value:
self.end_string(data)
dispatch["value"] = end_value
def end_params(self, data):
self._type = "params"
dispatch["params"] = end_params
def end_fault(self, data):
self._type = "fault"
dispatch["fault"] = end_fault
def end_methodName(self, data):
if self._encoding:
data = data.decode(self._encoding)
self._methodname = data
self._type = "methodName" # no params
dispatch["methodName"] = end_methodName
## Multicall support
#
class _MultiCallMethod:
# some lesser magic to store calls made to a MultiCall object
# for batch execution
def __init__(self, call_list, name):
self.__call_list = call_list
self.__name = name
def __getattr__(self, name):
return _MultiCallMethod(self.__call_list, "%s.%s" % (self.__name, name))
def __call__(self, *args):
self.__call_list.append((self.__name, args))
class MultiCallIterator:
"""Iterates over the results of a multicall. Exceptions are
raised in response to xmlrpc faults."""
def __init__(self, results):
self.results = results
def __getitem__(self, i):
item = self.results[i]
if type(item) == type({}):
raise Fault(item['faultCode'], item['faultString'])
elif type(item) == type([]):
return item[0]
else:
raise ValueError("unexpected type in multicall result")
class MultiCall:
"""server -> a object used to boxcar method calls
server should be a ServerProxy object.
Methods can be added to the MultiCall using normal
method call syntax e.g.:
multicall = MultiCall(server_proxy)
multicall.add(2,3)
multicall.get_address("Guido")
To execute the multicall, call the MultiCall object e.g.:
add_result, address = multicall()
"""
def __init__(self, server):
self.__server = server
self.__call_list = []
def __repr__(self):
return "<MultiCall at %x>" % id(self)
__str__ = __repr__
def __getattr__(self, name):
return _MultiCallMethod(self.__call_list, name)
def __call__(self):
marshalled_list = []
for name, args in self.__call_list:
marshalled_list.append({'methodName' : name, 'params' : args})
return MultiCallIterator(self.__server.system.multicall(marshalled_list))
# --------------------------------------------------------------------
# convenience functions
FastMarshaller = FastParser = FastUnmarshaller = None
##
# Create a parser object, and connect it to an unmarshalling instance.
# This function picks the fastest available XML parser.
#
# return A (parser, unmarshaller) tuple.
def getparser(use_datetime=False, use_builtin_types=False):
"""getparser() -> parser, unmarshaller
Create an instance of the fastest available parser, and attach it
to an unmarshalling object. Return both objects.
"""
if FastParser and FastUnmarshaller:
if use_builtin_types:
mkdatetime = _datetime_type
mkbytes = base64.decodebytes
elif use_datetime:
mkdatetime = _datetime_type
mkbytes = _binary
else:
mkdatetime = _datetime
mkbytes = _binary
target = FastUnmarshaller(True, False, mkbytes, mkdatetime, Fault)
parser = FastParser(target)
else:
target = Unmarshaller(use_datetime=use_datetime, use_builtin_types=use_builtin_types)
if FastParser:
parser = FastParser(target)
else:
parser = ExpatParser(target)
return parser, target
##
# Convert a Python tuple or a Fault instance to an XML-RPC packet.
#
# @def dumps(params, **options)
# @param params A tuple or Fault instance.
# @keyparam methodname If given, create a methodCall request for
# this method name.
# @keyparam methodresponse If given, create a methodResponse packet.
# If used with a tuple, the tuple must be a singleton (that is,
# it must contain exactly one element).
# @keyparam encoding The packet encoding.
# @return A string containing marshalled data.
def dumps(params, methodname=None, methodresponse=None, encoding=None,
allow_none=False):
"""data [,options] -> marshalled data
Convert an argument tuple or a Fault instance to an XML-RPC
request (or response, if the methodresponse option is used).
In addition to the data object, the following options can be given
as keyword arguments:
methodname: the method name for a methodCall packet
methodresponse: true to create a methodResponse packet.
If this option is used with a tuple, the tuple must be
a singleton (i.e. it can contain only one element).
encoding: the packet encoding (default is UTF-8)
All byte strings in the data structure are assumed to use the
packet encoding. Unicode strings are automatically converted,
where necessary.
"""
assert isinstance(params, (tuple, Fault)), "argument must be tuple or Fault instance"
if isinstance(params, Fault):
methodresponse = 1
elif methodresponse and isinstance(params, tuple):
assert len(params) == 1, "response tuple must be a singleton"
if not encoding:
encoding = "utf-8"
if FastMarshaller:
m = FastMarshaller(encoding)
else:
m = Marshaller(encoding, allow_none)
data = m.dumps(params)
if encoding != "utf-8":
xmlheader = "<?xml version='1.0' encoding='%s'?>\n" % str(encoding)
else:
xmlheader = "<?xml version='1.0'?>\n" # utf-8 is default
# standard XML-RPC wrappings
if methodname:
# a method call
if not isinstance(methodname, str):
methodname = methodname.encode(encoding)
data = (
xmlheader,
"<methodCall>\n"
"<methodName>", methodname, "</methodName>\n",
data,
"</methodCall>\n"
)
elif methodresponse:
# a method response, or a fault structure
data = (
xmlheader,
"<methodResponse>\n",
data,
"</methodResponse>\n"
)
else:
return data # return as is
return "".join(data)
##
# Convert an XML-RPC packet to a Python object. If the XML-RPC packet
# represents a fault condition, this function raises a Fault exception.
#
# @param data An XML-RPC packet, given as an 8-bit string.
# @return A tuple containing the unpacked data, and the method name
# (None if not present).
# @see Fault
def loads(data, use_datetime=False, use_builtin_types=False):
"""data -> unmarshalled data, method name
Convert an XML-RPC packet to unmarshalled data plus a method
name (None if not present).
If the XML-RPC packet represents a fault condition, this function
raises a Fault exception.
"""
p, u = getparser(use_datetime=use_datetime, use_builtin_types=use_builtin_types)
p.feed(data)
p.close()
return u.close(), u.getmethodname()
##
# Encode a string using the gzip content encoding, as specified by the
# Content-Encoding: gzip
# in the HTTP header, as described in RFC 1952
#
# @param data the unencoded data
# @return the encoded data
def gzip_encode(data):
"""data -> gzip encoded data
Encode data using the gzip content encoding as described in RFC 1952
"""
if not gzip:
raise NotImplementedError
f = BytesIO()
gzf = gzip.GzipFile(mode="wb", fileobj=f, compresslevel=1)
gzf.write(data)
gzf.close()
encoded = f.getvalue()
f.close()
return encoded
##
# Decode a string using the gzip content encoding, as specified by the
# Content-Encoding: gzip
# in the HTTP header, as described in RFC 1952
#
# @param data The encoded data
# @return the unencoded data
# @raises ValueError if data is not correctly coded.
def gzip_decode(data):
"""gzip encoded data -> unencoded data
Decode data using the gzip content encoding as described in RFC 1952
"""
if not gzip:
raise NotImplementedError
f = BytesIO(data)
gzf = gzip.GzipFile(mode="rb", fileobj=f)
try:
decoded = gzf.read()
except OSError:
raise ValueError("invalid data")
f.close()
gzf.close()
return decoded
##
# Return a decoded file-like object for the gzip encoding
# as described in RFC 1952.
#
# @param response A stream supporting a read() method
# @return a file-like object that the decoded data can be read() from
class GzipDecodedResponse(gzip.GzipFile if gzip else object):
"""a file-like object to decode a response encoded with the gzip
method, as described in RFC 1952.
"""
def __init__(self, response):
#response doesn't support tell() and read(), required by
#GzipFile
if not gzip:
raise NotImplementedError
self.io = BytesIO(response.read())
gzip.GzipFile.__init__(self, mode="rb", fileobj=self.io)
def close(self):
gzip.GzipFile.close(self)
self.io.close()
# --------------------------------------------------------------------
# request dispatcher
class _Method:
# some magic to bind an XML-RPC method to an RPC server.
# supports "nested" methods (e.g. examples.getStateName)
def __init__(self, send, name):
self.__send = send
self.__name = name
def __getattr__(self, name):
return _Method(self.__send, "%s.%s" % (self.__name, name))
def __call__(self, *args):
return self.__send(self.__name, args)
##
# Standard transport class for XML-RPC over HTTP.
# <p>
# You can create custom transports by subclassing this method, and
# overriding selected methods.
class Transport:
"""Handles an HTTP transaction to an XML-RPC server."""
# client identifier (may be overridden)
user_agent = "Python-xmlrpc/%s" % __version__
#if true, we'll request gzip encoding
accept_gzip_encoding = True
# if positive, encode request using gzip if it exceeds this threshold
# note that many servers will get confused, so only use it if you know
# that they can decode such a request
encode_threshold = None #None = don't encode
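# Illustrative sketch (comments only): a subclass can opt in to gzip request
# encoding by setting the threshold; the 1400-byte figure is an assumption,
# not a recommendation, and the server must be able to decode such requests.
#
#   class GzipTransport(Transport):
#       encode_threshold = 1400   # gzip-encode request bodies larger than this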
def __init__(self, use_datetime=False, use_builtin_types=False):
self._use_datetime = use_datetime
self._use_builtin_types = use_builtin_types
self._connection = (None, None)
self._extra_headers = []
##
# Send a complete request, and parse the response.
# Retry request if a cached connection has disconnected.
#
# @param host Target host.
# @param handler Target RPC handler.
# @param request_body XML-RPC request body.
# @param verbose Debugging flag.
# @return Parsed response.
def request(self, host, handler, request_body, verbose=False):
#retry request once if cached connection has gone cold
for i in (0, 1):
try:
return self.single_request(host, handler, request_body, verbose)
except OSError as e:
if i or e.errno not in (errno.ECONNRESET, errno.ECONNABORTED,
errno.EPIPE):
raise
except http.client.BadStatusLine: #close after we sent request
if i:
raise
def single_request(self, host, handler, request_body, verbose=False):
# issue XML-RPC request
try:
http_conn = self.send_request(host, handler, request_body, verbose)
resp = http_conn.getresponse()
if resp.status == 200:
self.verbose = verbose
return self.parse_response(resp)
except Fault:
raise
except Exception:
#All unexpected errors leave connection in
# a strange state, so we clear it.
self.close()
raise
#We got an error response.
#Discard any response data and raise exception
if resp.getheader("content-length", ""):
resp.read()
raise ProtocolError(
host + handler,
resp.status, resp.reason,
dict(resp.getheaders())
)
##
# Create parser.
#
# @return A 2-tuple containing a parser and an unmarshaller.
def getparser(self):
# get parser and unmarshaller
return getparser(use_datetime=self._use_datetime,
use_builtin_types=self._use_builtin_types)
##
# Get authorization info from host parameter
# Host may be a string, or a (host, x509-dict) tuple; if a string,
# it is checked for a "user:pw@host" format, and a "Basic
# Authentication" header is added if appropriate.
#
# @param host Host descriptor (URL or (URL, x509 info) tuple).
# @return A 3-tuple containing (actual host, extra headers,
# x509 info). The header and x509 fields may be None.
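# Illustrative sketch (comments only): credentials embedded in the host part
# are stripped off and turned into a Basic Authorization header; the
# transport instance, host and password below are assumptions.
#
#   host, headers, x509 = transport.get_host_info('user:secret@example.com')
#   # host -> 'example.com', headers -> [('Authorization', 'Basic ...')]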
def get_host_info(self, host):
x509 = {}
if isinstance(host, tuple):
host, x509 = host
auth, host = urllib.parse.splituser(host)
if auth:
auth = urllib.parse.unquote_to_bytes(auth)
auth = base64.encodebytes(auth).decode("utf-8")
auth = "".join(auth.split()) # get rid of whitespace
extra_headers = [
("Authorization", "Basic " + auth)
]
else:
extra_headers = []
return host, extra_headers, x509
##
# Connect to server.
#
# @param host Target host.
# @return An HTTPConnection object
def make_connection(self, host):
#return an existing connection if possible. This allows
#HTTP/1.1 keep-alive.
if self._connection and host == self._connection[0]:
return self._connection[1]
# create a HTTP connection object from a host descriptor
chost, self._extra_headers, x509 = self.get_host_info(host)
self._connection = host, http.client.HTTPConnection(chost)
return self._connection[1]
##
# Clear any cached connection object.
# Used in the event of socket errors.
#
def close(self):
if self._connection[1]:
self._connection[1].close()
self._connection = (None, None)
##
# Send HTTP request.
#
# @param host Host descriptor (URL or (URL, x509 info) tuple).
# @param handler Target RPC handler (a path relative to host)
# @param request_body The XML-RPC request body
# @param debug Enable debugging if debug is true.
# @return An HTTPConnection.
def send_request(self, host, handler, request_body, debug):
connection = self.make_connection(host)
headers = self._extra_headers[:]
if debug:
connection.set_debuglevel(1)
if self.accept_gzip_encoding and gzip:
connection.putrequest("POST", handler, skip_accept_encoding=True)
headers.append(("Accept-Encoding", "gzip"))
else:
connection.putrequest("POST", handler)
headers.append(("Content-Type", "text/xml"))
headers.append(("User-Agent", self.user_agent))
self.send_headers(connection, headers)
self.send_content(connection, request_body)
return connection
##
# Send request headers.
# This function provides a useful hook for subclassing
#
# @param connection httpConnection.
# @param headers list of key,value pairs for HTTP headers
def send_headers(self, connection, headers):
for key, val in headers:
connection.putheader(key, val)
##
# Send request body.
# This function provides a useful hook for subclassing
#
# @param connection httpConnection.
# @param request_body XML-RPC request body.
def send_content(self, connection, request_body):
#optionally encode the request
if (self.encode_threshold is not None and
self.encode_threshold < len(request_body) and
gzip):
connection.putheader("Content-Encoding", "gzip")
request_body = gzip_encode(request_body)
connection.putheader("Content-Length", str(len(request_body)))
connection.endheaders(request_body)
##
# Parse response.
#
# @param file Stream.
# @return Response tuple and target method.
def parse_response(self, response):
# read response data from httpresponse, and parse it
# Check for new http response object, otherwise it is a file object.
if hasattr(response, 'getheader'):
if response.getheader("Content-Encoding", "") == "gzip":
stream = GzipDecodedResponse(response)
else:
stream = response
else:
stream = response
p, u = self.getparser()
while 1:
data = stream.read(1024)
if not data:
break
if self.verbose:
print("body:", repr(data))
p.feed(data)
if stream is not response:
stream.close()
p.close()
return u.close()
##
# Standard transport class for XML-RPC over HTTPS.
class SafeTransport(Transport):
"""Handles an HTTPS transaction to an XML-RPC server."""
# FIXME: mostly untested
def make_connection(self, host):
if self._connection and host == self._connection[0]:
return self._connection[1]
if not hasattr(http.client, "HTTPSConnection"):
raise NotImplementedError(
"your version of http.client doesn't support HTTPS")
# create a HTTPS connection object from a host descriptor
# host may be a string, or a (host, x509-dict) tuple
chost, self._extra_headers, x509 = self.get_host_info(host)
self._connection = host, http.client.HTTPSConnection(chost,
None, **(x509 or {}))
return self._connection[1]
##
# Standard server proxy. This class establishes a virtual connection
# to an XML-RPC server.
# <p>
# This class is available as ServerProxy and Server. New code should
# use ServerProxy, to avoid confusion.
#
# @def ServerProxy(uri, **options)
# @param uri The connection point on the server.
# @keyparam transport A transport factory, compatible with the
# standard transport class.
# @keyparam encoding The default encoding used for 8-bit strings
# (default is UTF-8).
# @keyparam verbose Use a true value to enable debugging output.
# (printed to standard output).
# @see Transport
class ServerProxy:
"""uri [,options] -> a logical connection to an XML-RPC server
uri is the connection point on the server, given as
scheme://host/target.
The standard implementation always supports the "http" scheme. If
SSL socket support is available (Python 2.0), it also supports
"https".
If the target part and the slash preceding it are both omitted,
"/RPC2" is assumed.
The following options can be given as keyword arguments:
transport: a transport factory
encoding: the request encoding (default is UTF-8)
All 8-bit strings passed to the server proxy are assumed to use
the given encoding.
"""
def __init__(self, uri, transport=None, encoding=None, verbose=False,
allow_none=False, use_datetime=False, use_builtin_types=False):
# establish a "logical" server connection
# get the url
type, uri = urllib.parse.splittype(uri)
if type not in ("http", "https"):
raise OSError("unsupported XML-RPC protocol")
self.__host, self.__handler = urllib.parse.splithost(uri)
if not self.__handler:
self.__handler = "/RPC2"
if transport is None:
if type == "https":
handler = SafeTransport
else:
handler = Transport
transport = handler(use_datetime=use_datetime,
use_builtin_types=use_builtin_types)
self.__transport = transport
self.__encoding = encoding or 'utf-8'
self.__verbose = verbose
self.__allow_none = allow_none
def __close(self):
self.__transport.close()
def __request(self, methodname, params):
# call a method on the remote server
request = dumps(params, methodname, encoding=self.__encoding,
allow_none=self.__allow_none).encode(self.__encoding)
response = self.__transport.request(
self.__host,
self.__handler,
request,
verbose=self.__verbose
)
if len(response) == 1:
response = response[0]
return response
def __repr__(self):
return (
"<ServerProxy for %s%s>" %
(self.__host, self.__handler)
)
__str__ = __repr__
def __getattr__(self, name):
# magic method dispatcher
return _Method(self.__request, name)
# note: to call a remote object with a non-standard name, use
# result = getattr(server, "strange-python-name")(args)
def __call__(self, attr):
"""A workaround to get special attributes on the ServerProxy
without interfering with the magic __getattr__
"""
if attr == "close":
return self.__close
elif attr == "transport":
return self.__transport
raise AttributeError("Attribute %r not found" % (attr,))
# compatibility
Server = ServerProxy
# --------------------------------------------------------------------
# test code
if __name__ == "__main__":
# simple test program (from the XML-RPC specification)
# local server, available from Lib/xmlrpc/server.py
server = ServerProxy("http://localhost:8000")
try:
print(server.currentTime.getCurrentTime())
except Error as v:
print("ERROR", v)
multi = MultiCall(server)
multi.getData()
multi.pow(2,9)
multi.add(1,2)
try:
for response in multi():
print(response)
except Error as v:
print("ERROR", v)
|
kkdd/arangodb
|
refs/heads/devel
|
3rdParty/V8-4.3.61/build/gyp/test/mac/gyptest-objc-gc.py
|
90
|
#!/usr/bin/env python
# Copyright (c) 2012 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""
Verifies that GC objc settings are handled correctly.
"""
import TestGyp
import sys
if sys.platform == 'darwin':
# set |match| to ignore build stderr output.
test = TestGyp.TestGyp(formats=['ninja', 'make', 'xcode'],
match = lambda a, b: True)
CHDIR = 'objc-gc'
test.run_gyp('test.gyp', chdir=CHDIR)
build_error_code = {
'xcode': [1, 65], # 1 for xcode 3, 65 for xcode 4 (see `man sysexits`)
'make': 2,
'ninja': 1,
}[test.format]
test.build('test.gyp', 'gc_exe_fails', chdir=CHDIR, status=build_error_code)
test.build(
'test.gyp', 'gc_off_exe_req_lib', chdir=CHDIR, status=build_error_code)
test.build('test.gyp', 'gc_req_exe', chdir=CHDIR)
test.run_built_executable('gc_req_exe', chdir=CHDIR, stdout="gc on: 1\n")
test.build('test.gyp', 'gc_exe_req_lib', chdir=CHDIR)
test.run_built_executable('gc_exe_req_lib', chdir=CHDIR, stdout="gc on: 1\n")
test.build('test.gyp', 'gc_exe', chdir=CHDIR)
test.run_built_executable('gc_exe', chdir=CHDIR, stdout="gc on: 1\n")
test.build('test.gyp', 'gc_off_exe', chdir=CHDIR)
test.run_built_executable('gc_off_exe', chdir=CHDIR, stdout="gc on: 0\n")
test.pass_test()
|
danicampora/micropython
|
refs/heads/master
|
tests/basics/dict_clear.py
|
118
|
d = {1: 2, 3: 4}
print(len(d))
d.clear()
print(d)
d[2] = 42
print(d)
|
nozuono/calibre-webserver
|
refs/heads/master
|
src/calibre/gui2/metadata/diff.py
|
4
|
#!/usr/bin/env python
# vim:fileencoding=utf-8
from __future__ import (unicode_literals, division, absolute_import,
print_function)
__license__ = 'GPL v3'
__copyright__ = '2013, Kovid Goyal <kovid at kovidgoyal.net>'
import os
from collections import OrderedDict, namedtuple
from functools import partial
from future_builtins import zip
from PyQt4.Qt import (
QDialog, QWidget, QGridLayout, QLineEdit, QLabel, QToolButton, QIcon,
QVBoxLayout, QDialogButtonBox, QApplication, pyqtSignal, QFont, QPixmap,
QSize, QPainter, Qt, QColor, QPen, QSizePolicy, QScrollArea, QFrame)
from calibre import fit_image
from calibre.ebooks.metadata import title_sort, authors_to_sort_string
from calibre.gui2 import pixmap_to_data, gprefs
from calibre.gui2.comments_editor import Editor
from calibre.gui2.languages import LanguagesEdit as LE
from calibre.gui2.metadata.basic_widgets import PubdateEdit, RatingEdit
from calibre.ptempfile import PersistentTemporaryFile
from calibre.utils.date import UNDEFINED_DATE
Widgets = namedtuple('Widgets', 'new old label button')
# Widgets {{{
class LineEdit(QLineEdit):
changed = pyqtSignal()
def __init__(self, field, is_new, parent, metadata, extra):
QLineEdit.__init__(self, parent)
self.is_new = is_new
self.field = field
self.metadata = metadata
if not is_new:
self.setReadOnly(True)
self.textChanged.connect(self.changed)
def from_mi(self, mi):
val = mi.get(self.field, default='') or ''
ism = self.metadata['is_multiple']
if ism:
if not val:
val = ''
else:
val = ism['list_to_ui'].join(val)
self.setText(val)
self.setCursorPosition(0)
def to_mi(self, mi):
val = unicode(self.text()).strip()
ism = self.metadata['is_multiple']
if ism:
if not val:
val = []
else:
val = [x.strip() for x in val.split(ism['list_to_ui']) if x.strip()]
mi.set(self.field, val)
if self.field == 'title':
mi.set('title_sort', title_sort(val, lang=mi.language))
elif self.field == 'authors':
mi.set('author_sort', authors_to_sort_string(val))
@dynamic_property
def current_val(self):
def fget(self):
return unicode(self.text())
def fset(self, val):
self.setText(val)
self.setCursorPosition(0)
return property(fget=fget, fset=fset)
@property
def is_blank(self):
val = self.current_val.strip()
if self.field in {'title', 'authors'}:
return val in {'', _('Unknown')}
return not val
def same_as(self, other):
return self.current_val == other.current_val
class LanguagesEdit(LE):
changed = pyqtSignal()
def __init__(self, field, is_new, parent, metadata, extra):
LE.__init__(self, parent=parent)
self.is_new = is_new
self.field = field
self.metadata = metadata
self.textChanged.connect(self.changed)
if not is_new:
self.lineEdit().setReadOnly(True)
@dynamic_property
def current_val(self):
def fget(self):
return self.lang_codes
def fset(self, val):
self.lang_codes = val
return property(fget=fget, fset=fset)
def from_mi(self, mi):
self.lang_codes = mi.languages
def to_mi(self, mi):
mi.languages = self.lang_codes
@property
def is_blank(self):
return not self.current_val
def same_as(self, other):
return self.current_val == other.current_val
class RatingsEdit(RatingEdit):
changed = pyqtSignal()
def __init__(self, field, is_new, parent, metadata, extra):
RatingEdit.__init__(self, parent)
self.is_new = is_new
self.field = field
self.metadata = metadata
self.valueChanged.connect(self.changed)
if not is_new:
self.setReadOnly(True)
def from_mi(self, mi):
val = (mi.get(self.field, default=0) or 0)/2
self.setValue(val)
def to_mi(self, mi):
mi.set(self.field, self.value() * 2)
@property
def is_blank(self):
return self.value() == 0
def same_as(self, other):
return self.current_val == other.current_val
class DateEdit(PubdateEdit):
changed = pyqtSignal()
def __init__(self, field, is_new, parent, metadata, extra):
PubdateEdit.__init__(self, parent, create_clear_button=False)
self.is_new = is_new
self.field = field
self.metadata = metadata
self.setDisplayFormat(extra)
self.dateTimeChanged.connect(self.changed)
if not is_new:
self.setReadOnly(True)
def from_mi(self, mi):
self.current_val = mi.get(self.field, default=None)
def to_mi(self, mi):
mi.set(self.field, self.current_val)
@property
def is_blank(self):
return self.current_val.year <= UNDEFINED_DATE.year
def same_as(self, other):
return self.text() == other.text()
class SeriesEdit(LineEdit):
def from_mi(self, mi):
series = mi.get(self.field, default='')
series_index = mi.get(self.field + '_index', default=1.0)
val = ''
if series:
val = '%s [%s]' % (series, mi.format_series_index(series_index))
self.setText(val)
self.setCursorPosition(0)
def to_mi(self, mi):
val = unicode(self.text()).strip()
try:
series_index = float(val.rpartition('[')[-1].rstrip(']').strip())
except:
series_index = 1.0
series = val.rpartition('[')[0].strip() or None
mi.set(self.field, series)
mi.set(self.field + '_index', series_index)
class IdentifiersEdit(LineEdit):
def from_mi(self, mi):
val = ('%s:%s' % (k, v) for k, v in mi.identifiers.iteritems())
self.setText(', '.join(val))
self.setCursorPosition(0)
def to_mi(self, mi):
parts = (x.strip() for x in self.current_val.split(',') if x.strip())
val = {x.partition(':')[0].strip():x.partition(':')[-1].strip() for x in parts}
mi.set_identifiers({k:v for k, v in val.iteritems() if k and v})
class CommentsEdit(Editor):
changed = pyqtSignal()
def __init__(self, field, is_new, parent, metadata, extra):
Editor.__init__(self, parent, one_line_toolbar=False)
self.set_minimum_height_for_editor(150)
self.is_new = is_new
self.field = field
self.metadata = metadata
self.hide_tabs()
if not is_new:
self.hide_toolbars()
self.set_readonly(True)
@dynamic_property
def current_val(self):
def fget(self):
return self.html
def fset(self, val):
self.html = val or ''
self.changed.emit()
return property(fget=fget, fset=fset)
def from_mi(self, mi):
val = mi.get(self.field, default='')
self.current_val = val
def to_mi(self, mi):
mi.set(self.field, self.current_val)
def sizeHint(self):
return QSize(450, 200)
@property
def is_blank(self):
return not self.current_val.strip()
def same_as(self, other):
return self.current_val == other.current_val
class CoverView(QWidget):
changed = pyqtSignal()
def __init__(self, field, is_new, parent, metadata, extra):
QWidget.__init__(self, parent)
self.is_new = is_new
self.field = field
self.metadata = metadata
self.pixmap = None
self.blank = QPixmap(I('blank.png'))
self.setSizePolicy(QSizePolicy.Preferred, QSizePolicy.GrowFlag|QSizePolicy.ExpandFlag)
self.sizePolicy().setHeightForWidth(True)
@property
def is_blank(self):
return self.pixmap is None
@dynamic_property
def current_val(self):
def fget(self):
return self.pixmap
def fset(self, val):
self.pixmap = val
self.changed.emit()
self.update()
return property(fget=fget, fset=fset)
def from_mi(self, mi):
p = getattr(mi, 'cover', None)
if p and os.path.exists(p):
pmap = QPixmap()
with open(p, 'rb') as f:
pmap.loadFromData(f.read())
if not pmap.isNull():
self.pixmap = pmap
self.update()
self.changed.emit()
return
cd = getattr(mi, 'cover_data', (None, None))
if cd and cd[1]:
pmap = QPixmap()
pmap.loadFromData(cd[1])
if not pmap.isNull():
self.pixmap = pmap
self.update()
self.changed.emit()
return
self.pixmap = None
self.update()
self.changed.emit()
def to_mi(self, mi):
mi.cover, mi.cover_data = None, (None, None)
if self.pixmap is not None and not self.pixmap.isNull():
with PersistentTemporaryFile('.jpg') as pt:
pt.write(pixmap_to_data(self.pixmap))
mi.cover = pt.name
def same_as(self, other):
return self.current_val == other.current_val
def sizeHint(self):
return QSize(225, 300)
def paintEvent(self, event):
pmap = self.blank if self.pixmap is None or self.pixmap.isNull() else self.pixmap
target = self.rect()
scaled, width, height = fit_image(pmap.width(), pmap.height(), target.width(), target.height())
target.setRect(target.x(), target.y(), width, height)
p = QPainter(self)
p.setRenderHints(QPainter.Antialiasing | QPainter.SmoothPixmapTransform)
p.drawPixmap(target, pmap)
if self.pixmap is not None and not self.pixmap.isNull():
sztgt = target.adjusted(0, 0, 0, -4)
f = p.font()
f.setBold(True)
p.setFont(f)
sz = u'\u00a0%d x %d\u00a0'%(self.pixmap.width(), self.pixmap.height())
flags = Qt.AlignBottom|Qt.AlignRight|Qt.TextSingleLine
szrect = p.boundingRect(sztgt, flags, sz)
p.fillRect(szrect.adjusted(0, 0, 0, 4), QColor(0, 0, 0, 200))
p.setPen(QPen(QColor(255,255,255)))
p.drawText(sztgt, flags, sz)
p.end()
# }}}
class CompareSingle(QWidget):
def __init__(
self, field_metadata, parent=None, revert_tooltip=None,
datetime_fmt='MMMM yyyy', blank_as_equal=True,
fields=('title', 'authors', 'series', 'tags', 'rating', 'publisher', 'pubdate', 'identifiers', 'languages', 'comments', 'cover')):
QWidget.__init__(self, parent)
self.l = l = QGridLayout()
l.setContentsMargins(0, 0, 0, 0)
self.setLayout(l)
revert_tooltip = revert_tooltip or _('Revert %s')
self.current_mi = None
self.changed_font = QFont(QApplication.font())
self.changed_font.setBold(True)
self.changed_font.setItalic(True)
self.blank_as_equal = blank_as_equal
self.widgets = OrderedDict()
row = 0
for field in fields:
m = field_metadata[field]
dt = m['datatype']
extra = None
if 'series' in {field, dt}:
cls = SeriesEdit
elif field == 'identifiers':
cls = IdentifiersEdit
elif field == 'languages':
cls = LanguagesEdit
elif 'comments' in {field, dt}:
cls = CommentsEdit
elif 'rating' in {field, dt}:
cls = RatingsEdit
elif dt == 'datetime':
extra = datetime_fmt
cls = DateEdit
elif field == 'cover':
cls = CoverView
elif dt in {'text', 'enum'}:
cls = LineEdit
else:
continue
neww = cls(field, True, self, m, extra)
neww.changed.connect(partial(self.changed, field))
oldw = cls(field, False, self, m, extra)
newl = QLabel('&%s:' % m['name'])
newl.setBuddy(neww)
button = QToolButton(self)
button.setIcon(QIcon(I('back.png')))
button.clicked.connect(partial(self.revert, field))
button.setToolTip(revert_tooltip % m['name'])
self.widgets[field] = Widgets(neww, oldw, newl, button)
for i, w in enumerate((newl, neww, button, oldw)):
c = i if i < 2 else i + 1
if w is oldw:
c += 1
l.addWidget(w, row, c)
row += 1
self.sep = f = QFrame(self)
f.setFrameShape(f.VLine)
l.addWidget(f, 0, 2, row, 1)
self.sep2 = f = QFrame(self)
f.setFrameShape(f.VLine)
l.addWidget(f, 0, 4, row, 1)
if 'comments' in self.widgets and not gprefs.get('diff_widget_show_comments_controls', True):
self.widgets['comments'].new.hide_toolbars()
def save_comments_controls_state(self):
if 'comments' in self.widgets:
vis = self.widgets['comments'].new.toolbars_visible
if vis != gprefs.get('diff_widget_show_comments_controls', True):
gprefs.set('diff_widget_show_comments_controls', vis)
def changed(self, field):
w = self.widgets[field]
if not w.new.same_as(w.old) and (not self.blank_as_equal or not w.new.is_blank):
w.label.setFont(self.changed_font)
else:
w.label.setFont(QApplication.font())
def revert(self, field):
widgets = self.widgets[field]
neww, oldw = widgets[:2]
neww.current_val = oldw.current_val
def __call__(self, oldmi, newmi):
self.current_mi = newmi
self.initial_vals = {}
for field, widgets in self.widgets.iteritems():
widgets.old.from_mi(oldmi)
widgets.new.from_mi(newmi)
self.initial_vals[field] = widgets.new.current_val
def apply_changes(self):
changed = False
for field, widgets in self.widgets.iteritems():
val = widgets.new.current_val
if val != self.initial_vals[field]:
widgets.new.to_mi(self.current_mi)
changed = True
return changed
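# A minimal usage sketch (an assumption drawn from the class above, not part of the
# original module): CompareSingle is first called with the (old, new) Metadata pair
# and later asked to fold any accepted edits back into the new Metadata object.
#
#   compare = CompareSingle(field_metadata, parent=some_dialog)
#   compare(oldmi, newmi)            # populate both columns
#   if compare.apply_changes():      # True if the user changed any field
#       ...                          # compare.current_mi now holds the edited metadata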
class CompareMany(QDialog):
def __init__(self, ids, get_metadata, field_metadata, parent=None,
window_title=None,
reject_button_tooltip=None,
accept_all_tooltip=None,
reject_all_tooltip=None,
revert_tooltip=None,
intro_msg=None,
action_button=None,
**kwargs):
QDialog.__init__(self, parent)
self.l = l = QVBoxLayout()
self.setLayout(l)
self.setWindowIcon(QIcon(I('auto_author_sort.png')))
self.get_metadata = get_metadata
self.ids = list(ids)
self.total = len(self.ids)
self.accepted = OrderedDict()
self.window_title = window_title or _('Compare metadata')
if intro_msg:
self.la = la = QLabel(intro_msg)
la.setWordWrap(True)
l.addWidget(la)
self.compare_widget = CompareSingle(field_metadata, parent=parent, revert_tooltip=revert_tooltip, **kwargs)
self.sa = sa = QScrollArea()
l.addWidget(sa)
sa.setWidget(self.compare_widget)
sa.setWidgetResizable(True)
self.bb = bb = QDialogButtonBox(QDialogButtonBox.Cancel)
bb.rejected.connect(self.reject)
if self.total > 1:
self.aarb = b = bb.addButton(_('&Accept all remaining'), bb.YesRole)
b.setIcon(QIcon(I('ok.png')))
if accept_all_tooltip:
b.setToolTip(accept_all_tooltip)
b.clicked.connect(self.accept_all_remaining)
self.rarb = b = bb.addButton(_('Re&ject all remaining'), bb.NoRole)
b.setIcon(QIcon(I('minus.png')))
if reject_all_tooltip:
b.setToolTip(reject_all_tooltip)
b.clicked.connect(self.reject_all_remaining)
self.sb = b = bb.addButton(_('&Reject'), bb.ActionRole)
b.clicked.connect(partial(self.next_item, False))
b.setIcon(QIcon(I('minus.png')))
if reject_button_tooltip:
b.setToolTip(reject_button_tooltip)
if action_button is not None:
self.acb = b = bb.addButton(action_button[0], bb.ActionRole)
b.setIcon(QIcon(action_button[1]))
self.action_button_action = action_button[2]
b.clicked.connect(self.action_button_clicked)
self.nb = b = bb.addButton(_('&Next') if self.total > 1 else _('&OK'), bb.ActionRole)
b.setIcon(QIcon(I('forward.png' if self.total > 1 else 'ok.png')))
b.clicked.connect(partial(self.next_item, True))
b.setDefault(True)
l.addWidget(bb)
self.next_item(True)
desktop = QApplication.instance().desktop()
geom = desktop.availableGeometry(parent or self)
width = max(700, min(950, geom.width()-50))
height = max(650, min(1000, geom.height()-100))
self.resize(QSize(width, height))
geom = gprefs.get('diff_dialog_geom', None)
if geom is not None:
self.restoreGeometry(geom)
b.setFocus(Qt.OtherFocusReason)
def action_button_clicked(self):
self.action_button_action(self.ids[0])
def accept(self):
gprefs.set('diff_dialog_geom', bytearray(self.saveGeometry()))
self.compare_widget.save_comments_controls_state()
super(CompareMany, self).accept()
def reject(self):
gprefs.set('diff_dialog_geom', bytearray(self.saveGeometry()))
self.compare_widget.save_comments_controls_state()
super(CompareMany, self).reject()
@property
def current_mi(self):
return self.compare_widget.current_mi
def next_item(self, accept):
if not self.ids:
return self.accept()
if self.current_mi is not None:
changed = self.compare_widget.apply_changes()
if self.current_mi is not None:
old_id = self.ids.pop(0)
self.accepted[old_id] = (changed, self.current_mi) if accept else (False, None)
if not self.ids:
return self.accept()
self.setWindowTitle(self.window_title + _(' [%(num)d of %(tot)d]') % dict(
num=(self.total - len(self.ids) + 1), tot=self.total))
oldmi, newmi = self.get_metadata(self.ids[0])
self.compare_widget(oldmi, newmi)
def accept_all_remaining(self):
self.next_item(True)
for id_ in self.ids:
oldmi, newmi = self.get_metadata(id_)
self.accepted[id_] = (False, newmi)
self.ids = []
self.accept()
def reject_all_remaining(self):
self.next_item(False)
for id_ in self.ids:
oldmi, newmi = self.get_metadata(id_)
self.accepted[id_] = (False, None)
self.ids = []
self.accept()
if __name__ == '__main__':
app = QApplication([])
from calibre.library import db
db = db()
ids = sorted(db.all_ids(), reverse=True)
ids = tuple(zip(ids[0::2], ids[1::2]))
gm = partial(db.get_metadata, index_is_id=True, get_cover=True, cover_as_data=True)
get_metadata = lambda x:map(gm, ids[x])
d = CompareMany(list(xrange(len(ids))), get_metadata, db.field_metadata)
if d.exec_() == d.Accepted:
for changed, mi in d.accepted.itervalues():
if changed and mi is not None:
print (mi)
|
windedge/odoomrp-wip
|
refs/heads/8.0
|
mrp_bom_catch_product_code/models/mrp_bom.py
|
27
|
# -*- encoding: utf-8 -*-
##############################################################################
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see http://www.gnu.org/licenses/.
#
##############################################################################
from openerp import models, api
class MrpBom(models.Model):
_inherit = 'mrp.bom'
@api.multi
def onchange_product_tmpl_id(self, product_tmpl_id, product_qty=0):
res = super(MrpBom, self).onchange_product_tmpl_id(
product_tmpl_id, product_qty=product_qty)
if product_tmpl_id:
product_tmpl = self.env['product.template'].browse(product_tmpl_id)
res['value'].update({'code': product_tmpl.default_code})
return res
@api.one
@api.onchange('product_id')
def onchange_product_id(self):
if self.product_id:
self.code = self.product_id.default_code
@api.model
def create(self, values):
if values.get('product_id'):
product = self.env['product.product'].browse(
values.get('product_id'))
values['code'] = ('%s%s') % (values.get('code', ''),
product.default_code or '')
elif values.get('product_tmpl_id'):
product = self.env['product.template'].browse(
values.get('product_tmpl_id'))
values['code'] = ('%s%s') % (values.get('code', ''),
product.default_code or '')
return super(MrpBom, self).create(values)
@api.one
def write(self, values):
product_obj = self.env['product.product']
template_obj = self.env['product.template']
if 'code' in values and not values.get('code'):
product = (product_obj.browse(values.get('product_id')) or
self.product_id)
if not product:
product = (
template_obj.browse(values.get('product_tmpl_id')) or
self.product_tmpl_id)
values['code'] = product.default_code or ''
return super(MrpBom, self).write(values)
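# Illustrative sketch only (the record values below are assumptions, not taken from
# real data): create() appends the product's default_code to any code already given,
# so a BoM created for a template whose default_code is 'PRD-001' would end up as:
#
#   bom = self.env['mrp.bom'].create({
#       'product_tmpl_id': tmpl.id,   # tmpl.default_code == 'PRD-001' (hypothetical)
#       'code': 'BOM-',
#   })
#   # bom.code == 'BOM-PRD-001'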
|
conan-io/conan
|
refs/heads/develop
|
conans/test/functional/workspace/workspace_test.py
|
1
|
import os
import platform
import unittest
from textwrap import dedent
import pytest
import six
import time
from parameterized.parameterized import parameterized
from conans.client import tools
from conans.errors import ConanException
from conans.model.workspace import Workspace
from conans.test.utils.test_files import temp_folder
from conans.test.utils.tools import TestClient, GenConanfile
from conans.util.files import load, save
conanfile_build = """from conans import ConanFile, CMake
class Pkg(ConanFile):
settings = "os", "compiler", "arch", "build_type"
requires = {deps}
generators = "cmake", "cmake_multi"
exports_sources = "src/*"
def build(self):
cmake = CMake(self)
cmake.configure(source_folder="src")
cmake.build()
def package(self):
self.copy("*.h", src="src", dst="include")
self.copy("*.lib", dst="lib", keep_path=False)
self.copy("*.a", dst="lib", keep_path=False)
def package_info(self):
self.cpp_info.libs = ["hello{name}"]
"""
hello_cpp = """#include <iostream>
#include "hello{name}.h"
{includes}
void hello{name}(){{
{calls}
#ifdef NDEBUG
std::cout << "Hello World {name} Release!" <<std::endl;
#else
std::cout << "Hello World {name} Debug!" <<std::endl;
#endif
}}
"""
main_cpp = """#include "helloA.h"
int main(){
helloA();
}
"""
hello_h = """#pragma once
#ifdef WIN32
#define HELLO_EXPORT __declspec(dllexport)
#else
#define HELLO_EXPORT
#endif
HELLO_EXPORT void hello{name}();
"""
cmake = """set(CMAKE_CXX_COMPILER_WORKS 1)
project(Hello CXX)
cmake_minimum_required(VERSION 2.8.12)
include(${{CMAKE_CURRENT_BINARY_DIR}}/conanbuildinfo.cmake)
conan_basic_setup()
add_library(hello{name} hello.cpp)
target_link_libraries(hello{name} ${{CONAN_LIBS}})
"""
cmake_multi = """set(CMAKE_CXX_COMPILER_WORKS 1)
project(Hello CXX)
cmake_minimum_required(VERSION 2.8.12)
include(${{CMAKE_CURRENT_BINARY_DIR}}/conanbuildinfo_multi.cmake)
conan_basic_setup()
add_library(hello{name} hello.cpp)
conan_target_link_libraries(hello{name})
"""
cmake_targets = """set(CMAKE_CXX_COMPILER_WORKS 1)
project(Hello CXX)
cmake_minimum_required(VERSION 2.8.12)
include(${{CMAKE_CURRENT_BINARY_DIR}}/conanbuildinfo.cmake)
conan_basic_setup(TARGETS)
add_library(hello{name} hello.cpp)
target_link_libraries(hello{name} {dep})
"""
class WorkspaceTest(unittest.TestCase):
def test_parse(self):
folder = temp_folder()
path = os.path.join(folder, "conanws.yml")
project = "root: Hellob/0.1@lasote/stable"
save(path, project)
with six.assertRaisesRegex(self, ConanException,
"Root Hellob/0.1@lasote/stable is not defined as editable"):
Workspace(path, None)
project = dedent("""
editables:
HelloB/0.1@lasote/stable:
path: B
random: something
root: HelloB/0.1@lasote/stable
""")
save(path, project)
with six.assertRaisesRegex(self, ConanException,
"Workspace unrecognized fields: {'random': 'something'}"):
Workspace(path, None)
project = dedent("""
editables:
HelloB/0.1@lasote/stable:
path: B
root: HelloB/0.1@lasote/stable
random: something
""")
save(path, project)
with six.assertRaisesRegex(self, ConanException,
"Workspace unrecognized fields: {'random': 'something'}"):
Workspace(path, None)
project = dedent("""
editables:
HelloB/0.1@lasote/stable:
root: HelloB/0.1@lasote/stable
""")
save(path, project)
with six.assertRaisesRegex(self, ConanException,
"Workspace editable HelloB/0.1@lasote/stable "
"does not define path"):
Workspace(path, None)
project = dedent("""
editables:
HelloB/0.1@lasote/stable:
layout: layout
root: HelloB/0.1@lasote/stable
""")
save(path, project)
with six.assertRaisesRegex(self, ConanException,
"Workspace editable HelloB/0.1@lasote/stable "
"does not define path"):
Workspace(path, None)
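    # For reference, a minimal well-formed workspace file (as exercised by the
    # tests below) needs an "editables" mapping where every entry defines a
    # "path", plus a "root" entry naming one or more of those editables, e.g.
    #
    #   editables:
    #     HelloB/0.1@lasote/stable:
    #       path: B
    #   root: HelloB/0.1@lasote/stable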
@pytest.mark.tool_compiler
def test_simple(self):
client = TestClient()
def files(name, depend=None):
deps = ('"Hello%s/0.1@lasote/stable"' % depend) if depend else "None"
return {"conanfile.py": conanfile_build.format(deps=deps, name=name)}
client.save(files("C"), path=os.path.join(client.current_folder, "C"))
client.save(files("B", "C"), path=os.path.join(client.current_folder, "B"))
client.save(files("A", "B"), path=os.path.join(client.current_folder, "A"))
project = dedent("""
editables:
HelloB/0.1@lasote/stable:
path: B
HelloC/0.1@lasote/stable:
path: C
HelloA/0.1@lasote/stable:
path: A
layout: layout
root: HelloA/0.1@lasote/stable
""")
layout = dedent("""
[build_folder]
""")
client.save({"conanws.yml": project,
"layout": layout})
client.run("workspace install conanws.yml")
self.assertIn("HelloA/0.1@lasote/stable from user folder - Editable", client.out)
self.assertIn("HelloB/0.1@lasote/stable from user folder - Editable", client.out)
self.assertIn("HelloC/0.1@lasote/stable from user folder - Editable", client.out)
for sub in ("A", "B", "C"):
for f in ("conanbuildinfo.cmake", "conaninfo.txt", "conanbuildinfo.txt"):
self.assertTrue(os.path.exists(os.path.join(client.current_folder, sub, f)))
@parameterized.expand([("csv",), ("list",), (("abbreviated_list"))])
@pytest.mark.tool_cmake
def test_multiple_roots(self, root_attribute_format):
# https://github.com/conan-io/conan/issues/4720
client = TestClient()
def files(name, depend=None):
deps = ('"Hello%s/0.1@lasote/stable"' % depend) if depend else "None"
return {"conanfile.py": conanfile_build.format(deps=deps, name=name)}
client.save(files("D"), path=os.path.join(client.current_folder, "D"))
client.save(files("C", "D"), path=os.path.join(client.current_folder, "C"))
client.save(files("A", "C"), path=os.path.join(client.current_folder, "A"))
client.save(files("B", "D"), path=os.path.join(client.current_folder, "B"))
# https://github.com/conan-io/conan/issues/5155
roots = ["HelloA/0.1@lasote/stable", "HelloB/0.1@lasote/stable"]
root_attribute = {
"csv": ", ".join(roots),
"list": "".join(["\n - %s" % r for r in roots]),
"abbreviated_list": str(roots),
}[root_attribute_format]
project = dedent("""
editables:
HelloD/0.1@lasote/stable:
path: D
HelloB/0.1@lasote/stable:
path: B
HelloC/0.1@lasote/stable:
path: C
HelloA/0.1@lasote/stable:
path: A
layout: layout
root: {root_attribute}
""").format(root_attribute=root_attribute)
layout = dedent("""
[build_folder]
""")
client.save({"conanws.yml": project,
"layout": layout})
client.run("workspace install conanws.yml")
self.assertIn("HelloA/0.1@lasote/stable from user folder - Editable", client.out)
self.assertIn("HelloB/0.1@lasote/stable from user folder - Editable", client.out)
self.assertIn("HelloC/0.1@lasote/stable from user folder - Editable", client.out)
self.assertIn("HelloD/0.1@lasote/stable from user folder - Editable", client.out)
a_cmake = client.load(os.path.join("A", "conanbuildinfo.cmake"))
self.assertIn("set(CONAN_LIBS helloC helloD ${CONAN_LIBS})", a_cmake)
b_cmake = client.load(os.path.join("B", "conanbuildinfo.cmake"))
self.assertIn("set(CONAN_LIBS helloD ${CONAN_LIBS})", b_cmake)
@pytest.mark.tool_compiler
def test_transitivity(self):
# https://github.com/conan-io/conan/issues/4720
client = TestClient()
def files(name, depend=None):
if isinstance(depend, list):
deps = ", ".join(["'Hello%s/0.1@lasote/stable'" % d for d in depend])
else:
deps = ('"Hello%s/0.1@lasote/stable"' % depend) if depend else "None"
return {"conanfile.py": conanfile_build.format(deps=deps, name=name)}
client.save(files("D"), path=os.path.join(client.current_folder, "D"))
client.save(files("C", "D"), path=os.path.join(client.current_folder, "C"))
client.save(files("B", "C"), path=os.path.join(client.current_folder, "B"))
client.save(files("A", ["D", "C", "B"]), path=os.path.join(client.current_folder, "A"))
project = dedent("""
editables:
HelloD/0.1@lasote/stable:
path: D
HelloB/0.1@lasote/stable:
path: B
HelloC/0.1@lasote/stable:
path: C
HelloA/0.1@lasote/stable:
path: A
layout: layout
root: HelloA/0.1@lasote/stable
""")
layout = dedent("""
[build_folder]
""")
client.save({"conanws.yml": project,
"layout": layout})
client.run("workspace install conanws.yml")
self.assertIn("HelloA/0.1@lasote/stable from user folder - Editable", client.out)
self.assertIn("HelloB/0.1@lasote/stable from user folder - Editable", client.out)
self.assertIn("HelloC/0.1@lasote/stable from user folder - Editable", client.out)
self.assertIn("HelloD/0.1@lasote/stable from user folder - Editable", client.out)
a_cmake = client.load(os.path.join("A", "conanbuildinfo.cmake"))
self.assertIn("set(CONAN_LIBS helloB helloC helloD ${CONAN_LIBS})", a_cmake)
b_cmake = client.load(os.path.join("B", "conanbuildinfo.cmake"))
self.assertIn("set(CONAN_LIBS helloC helloD ${CONAN_LIBS})", b_cmake)
@pytest.mark.tool_cmake
def test_missing_layout_cmake(self):
# Specifying cmake generator without layout file raised exception
# https://github.com/conan-io/conan/issues/4752
client = TestClient()
def files(name, depend=None):
if isinstance(depend, list):
deps = ", ".join(["'Hello%s/0.1@lasote/stable'" % d for d in depend])
else:
deps = ('"Hello%s/0.1@lasote/stable"' % depend) if depend else "None"
return {"conanfile.py": conanfile_build.format(deps=deps, name=name)}
client.save(files("D"), path=os.path.join(client.current_folder, "D"))
client.save(files("C", "D"), path=os.path.join(client.current_folder, "C"))
project = dedent("""
editables:
HelloD/0.1@lasote/stable:
path: D
HelloC/0.1@lasote/stable:
path: C
workspace_generator: cmake
root: HelloC/0.1@lasote/stable
""")
client.save({"conanws.yml": project})
client.run("workspace install conanws.yml")
self.assertIn("HelloD/0.1@lasote/stable from user folder - Editable", client.out)
self.assertIn("HelloD/0.1@lasote/stable from user folder - Editable", client.out)
@pytest.mark.tool_cmake
def test_simple_build(self):
client = TestClient()
def files(name, depend=None):
includes = ('#include "hello%s.h"' % depend) if depend else ""
calls = ('hello%s();' % depend) if depend else ""
deps = ('"Hello%s/0.1@lasote/stable"' % depend) if depend else "None"
return {"conanfile.py": conanfile_build.format(deps=deps, name=name),
"src/hello%s.h" % name: hello_h.format(name=name),
"src/hello.cpp": hello_cpp.format(name=name, includes=includes, calls=calls),
"src/CMakeLists.txt": cmake.format(name=name)}
client.save(files("C"), path=os.path.join(client.current_folder, "C"))
client.save(files("B", "C"), path=os.path.join(client.current_folder, "B"))
a = files("A", "B")
a["src/CMakeLists.txt"] += ("add_executable(app main.cpp)\n"
"target_link_libraries(app helloA)\n")
a["src/main.cpp"] = main_cpp
client.save(a, path=os.path.join(client.current_folder, "A"))
project = dedent("""
editables:
HelloB/0.1@lasote/stable:
path: B
HelloC/0.1@lasote/stable:
path: C
HelloA/0.1@lasote/stable:
path: A
layout: layout
root: HelloA/0.1@lasote/stable
""")
layout = dedent("""
[build_folder]
build/{{settings.build_type}}
[includedirs]
src
[libdirs]
build/{{settings.build_type}}/lib
""")
client.save({"conanws.yml": project,
"layout": layout})
client.run("workspace install conanws.yml")
client.run("workspace install conanws.yml -s build_type=Debug")
self.assertIn("HelloA/0.1@lasote/stable from user folder - Editable", client.out)
self.assertIn("HelloB/0.1@lasote/stable from user folder - Editable", client.out)
self.assertIn("HelloC/0.1@lasote/stable from user folder - Editable", client.out)
build_type = "Release"
client.run("build C -bf=C/build/%s" % build_type)
client.run("build B -bf=B/build/%s" % build_type)
client.run("build A -bf=A/build/%s" % build_type)
cmd_release = os.path.normpath("./A/build/Release/bin/app")
cmd_debug = os.path.normpath("./A/build/Debug/bin/app")
client.run_command(cmd_release)
self.assertIn("Hello World C Release!", client.out)
self.assertIn("Hello World B Release!", client.out)
self.assertIn("Hello World A Release!", client.out)
build_type = "Debug"
client.run("build C -bf=C/build/%s" % build_type)
client.run("build B -bf=B/build/%s" % build_type)
client.run("build A -bf=A/build/%s" % build_type)
client.run_command(cmd_debug)
self.assertIn("Hello World C Debug!", client.out)
self.assertIn("Hello World B Debug!", client.out)
self.assertIn("Hello World A Debug!", client.out)
@pytest.mark.tool_cmake
def test_simple_out_of_source_build(self):
client = TestClient()
def files(name, depend=None):
includes = ('#include "hello%s.h"' % depend) if depend else ""
calls = ('hello%s();' % depend) if depend else ""
deps = ('"Hello%s/0.1@lasote/stable"' % depend) if depend else "None"
return {"conanfile.py": conanfile_build.format(deps=deps, name=name),
"src/hello%s.h" % name: hello_h.format(name=name),
"src/hello.cpp": hello_cpp.format(name=name, includes=includes, calls=calls),
"src/CMakeLists.txt": cmake.format(name=name)}
client.save(files("C"), path=os.path.join(client.current_folder, "HelloC"))
client.save(files("B", "C"), path=os.path.join(client.current_folder, "HelloB"))
a = files("A", "B")
a["src/CMakeLists.txt"] += ("add_executable(app main.cpp)\n"
"target_link_libraries(app helloA)\n")
a["src/main.cpp"] = main_cpp
client.save(a, path=os.path.join(client.current_folder, "HelloA"))
project = dedent("""
editables:
HelloB/0.1@lasote/stable:
path: HelloB
HelloC/0.1@lasote/stable:
path: HelloC
HelloA/0.1@lasote/stable:
path: HelloA
layout: layout
root: HelloA/0.1@lasote/stable
""")
layout = dedent("""
[build_folder]
../build/{{reference.name}}/{{settings.build_type}}
[includedirs]
src
[libdirs]
../build/{{reference.name}}/{{settings.build_type}}/lib
""")
client.save({"conanws.yml": project,
"layout": layout})
client.run("workspace install conanws.yml")
client.run("workspace install conanws.yml -s build_type=Debug")
self.assertIn("HelloA/0.1@lasote/stable from user folder - Editable", client.out)
self.assertIn("HelloB/0.1@lasote/stable from user folder - Editable", client.out)
self.assertIn("HelloC/0.1@lasote/stable from user folder - Editable", client.out)
build_type = "Release"
client.run("build HelloC -bf=build/HelloC/%s" % build_type)
client.run("build HelloB -bf=build/HelloB/%s" % build_type)
client.run("build HelloA -bf=build/HelloA/%s" % build_type)
cmd_release = os.path.normpath("./build/HelloA/Release/bin/app")
cmd_debug = os.path.normpath("./build/HelloA/Debug/bin/app")
client.run_command(cmd_release)
self.assertIn("Hello World C Release!", client.out)
self.assertIn("Hello World B Release!", client.out)
self.assertIn("Hello World A Release!", client.out)
build_type = "Debug"
client.run("build HelloC -bf=build/HelloC/%s" % build_type)
client.run("build HelloB -bf=build/HelloB/%s" % build_type)
client.run("build HelloA -bf=build/HelloA/%s" % build_type)
client.run_command(cmd_debug)
self.assertIn("Hello World C Debug!", client.out)
self.assertIn("Hello World B Debug!", client.out)
self.assertIn("Hello World A Debug!", client.out)
@pytest.mark.tool_cmake
def test_complete_single_conf_build(self):
client = TestClient()
def files(name, depend=None):
includes = ('#include "hello%s.h"' % depend) if depend else ""
calls = ('hello%s();' % depend) if depend else ""
deps = ('"Hello%s/0.1@lasote/stable"' % depend) if depend else "None"
return {"conanfile.py": conanfile_build.format(deps=deps, name=name),
"src/hello%s.h" % name: hello_h.format(name=name),
"src/hello.cpp": hello_cpp.format(name=name, includes=includes, calls=calls),
"src/CMakeLists.txt": cmake.format(name=name)}
client.save(files("C"), path=os.path.join(client.current_folder, "C"))
client.save(files("B", "C"), path=os.path.join(client.current_folder, "B"))
a = files("A", "B")
a["src/CMakeLists.txt"] += ("add_executable(app main.cpp)\n"
"target_link_libraries(app helloA)\n")
a["src/main.cpp"] = main_cpp
client.save(a, path=os.path.join(client.current_folder, "A"))
project = dedent("""
editables:
HelloB/0.1@lasote/stable:
path: B
HelloC/0.1@lasote/stable:
path: C
HelloA/0.1@lasote/stable:
path: A
layout: layout
workspace_generator: cmake
root: HelloA/0.1@lasote/stable
""")
layout = dedent("""
[build_folder]
build/{{settings.build_type}}
[source_folder]
src
[includedirs]
src
[libdirs]
build/{{settings.build_type}}/lib
""")
metacmake = dedent("""
cmake_minimum_required(VERSION 3.3)
project(MyProject CXX)
include(${CMAKE_BINARY_DIR}/conanworkspace.cmake)
conan_workspace_subdirectories()
""")
client.save({"conanws.yml": project,
"layout": layout,
"CMakeLists.txt": metacmake})
base_release = os.path.join(client.current_folder, "build_release")
base_debug = os.path.join(client.current_folder, "build_debug")
with client.chdir("build_release"):
client.run("workspace install ../conanws.yml")
with client.chdir("build_debug"):
client.run("workspace install ../conanws.yml -s build_type=Debug")
generator = "Visual Studio 15 Win64" if platform.system() == "Windows" else "Unix Makefiles"
with client.chdir(base_release):
client.run_command('cmake .. -G "%s" -DCMAKE_BUILD_TYPE=Release' % generator)
client.run_command('cmake --build . --config Release')
cmd_release = os.path.normpath("./A/build/Release/bin/app")
cmd_debug = os.path.normpath("./A/build/Debug/bin/app")
client.run_command(cmd_release)
self.assertIn("Hello World C Release!", client.out)
self.assertIn("Hello World B Release!", client.out)
self.assertIn("Hello World A Release!", client.out)
time.sleep(1)
tools.replace_in_file(os.path.join(client.current_folder, "C/src/hello.cpp"),
"Hello World", "Bye Moon", output=client.out)
time.sleep(1)
client.run_command('cmake --build . --config Release', cwd=base_release)
client.run_command(cmd_release)
self.assertIn("Bye Moon C Release!", client.out)
self.assertIn("Hello World B Release!", client.out)
self.assertIn("Hello World A Release!", client.out)
time.sleep(1)
tools.replace_in_file(os.path.join(client.current_folder, "B/src/hello.cpp"),
"Hello World", "Bye Moon", output=client.out)
time.sleep(1)
client.run_command('cmake --build . --config Release', cwd=base_release)
client.run_command(cmd_release)
self.assertIn("Bye Moon C Release!", client.out)
self.assertIn("Bye Moon B Release!", client.out)
self.assertIn("Hello World A Release!", client.out)
self.assertNotIn("Debug", client.out)
client.run_command('cmake .. -G "%s" -DCMAKE_BUILD_TYPE=Debug' % generator, cwd=base_debug)
client.run_command('cmake --build . --config Debug', cwd=base_debug)
client.run_command(cmd_debug)
self.assertIn("Bye Moon C Debug!", client.out)
self.assertIn("Bye Moon B Debug!", client.out)
self.assertIn("Hello World A Debug!", client.out)
time.sleep(1)
tools.replace_in_file(os.path.join(client.current_folder, "C/src/hello.cpp"),
"Bye Moon", "Hello World", output=client.out)
time.sleep(1)
client.run_command('cmake --build . --config Debug', cwd=base_debug)
client.run_command(cmd_debug)
self.assertIn("Hello World C Debug!", client.out)
self.assertIn("Bye Moon B Debug!", client.out)
self.assertIn("Hello World A Debug!", client.out)
self.assertNotIn("Release", client.out)
@pytest.mark.skipif(platform.system() != "Windows", reason="only windows")
@pytest.mark.tool_cmake
def test_complete_multi_conf_build(self):
client = TestClient()
def files(name, depend=None):
includes = ('#include "hello%s.h"' % depend) if depend else ""
calls = ('hello%s();' % depend) if depend else ""
deps = ('"Hello%s/0.1@lasote/stable"' % depend) if depend else "None"
return {"conanfile.py": conanfile_build.format(deps=deps, name=name),
"src/hello%s.h" % name: hello_h.format(name=name),
"src/hello.cpp": hello_cpp.format(name=name, includes=includes, calls=calls),
"src/CMakeLists.txt": cmake_multi.format(name=name)}
client.save(files("C"), path=os.path.join(client.current_folder, "C"))
client.save(files("B", "C"), path=os.path.join(client.current_folder, "B"))
a = files("A", "B")
a["src/CMakeLists.txt"] += ("add_executable(app main.cpp)\n"
"target_link_libraries(app helloA)\n")
a["src/main.cpp"] = main_cpp
client.save(a, path=os.path.join(client.current_folder, "A"))
project = dedent("""
editables:
HelloB/0.1@lasote/stable:
path: B
HelloC/0.1@lasote/stable:
path: C
HelloA/0.1@lasote/stable:
path: A
layout: layout
workspace_generator: cmake
root: HelloA/0.1@lasote/stable
""")
layout = dedent("""
[build_folder]
build
[source_folder]
src
[includedirs]
src
[libdirs]
build/{{settings.build_type}}
""")
metacmake = dedent("""
cmake_minimum_required(VERSION 3.3)
project(MyProject CXX)
include(${CMAKE_BINARY_DIR}/conanworkspace.cmake)
conan_workspace_subdirectories()
""")
client.save({"conanws.yml": project,
"layout": layout,
"CMakeLists.txt": metacmake})
build = os.path.join(client.current_folder, "build")
with client.chdir("build"):
client.run("workspace install ../conanws.yml")
client.run("workspace install ../conanws.yml -s build_type=Debug")
generator = "Visual Studio 15 Win64"
with client.chdir(build):
client.run_command('cmake .. -G "%s" -DCMAKE_BUILD_TYPE=Release' % generator)
client.run_command('cmake --build . --config Release')
cmd_release = os.path.normpath("./A/build/Release/app")
cmd_debug = os.path.normpath("./A/build/Debug/app")
client.run_command(cmd_release)
self.assertIn("Hello World C Release!", client.out)
self.assertIn("Hello World B Release!", client.out)
self.assertIn("Hello World A Release!", client.out)
tools.replace_in_file(os.path.join(client.current_folder, "C/src/hello.cpp"),
"Hello World", "Bye Moon", output=client.out)
with client.chdir(build):
client.run_command('cmake --build . --config Release')
client.run_command(cmd_release)
self.assertIn("Bye Moon C Release!", client.out)
self.assertIn("Hello World B Release!", client.out)
self.assertIn("Hello World A Release!", client.out)
tools.replace_in_file(os.path.join(client.current_folder, "B/src/hello.cpp"),
"Hello World", "Bye Moon", output=client.out)
with client.chdir(build):
client.run_command('cmake --build . --config Release')
client.run_command(cmd_release)
self.assertIn("Bye Moon C Release!", client.out)
self.assertIn("Bye Moon B Release!", client.out)
self.assertIn("Hello World A Release!", client.out)
self.assertNotIn("Debug", client.out)
client.run_command('cmake .. -G "%s" -DCMAKE_BUILD_TYPE=Debug' % generator, cwd=build)
# CMake configure will find the Release libraries, as we are in cmake-multi mode
# Need to reset the output after that
client.run_command('cmake --build . --config Debug', cwd=build)
client.run_command(cmd_debug)
self.assertIn("Bye Moon C Debug!", client.out)
self.assertIn("Bye Moon B Debug!", client.out)
self.assertIn("Hello World A Debug!", client.out)
tools.replace_in_file(os.path.join(client.current_folder, "C/src/hello.cpp"),
"Bye Moon", "Hello World", output=client.out)
client.run_command('cmake --build . --config Debug', cwd=build)
client.run_command(cmd_debug)
self.assertIn("Hello World C Debug!", client.out)
self.assertIn("Bye Moon B Debug!", client.out)
self.assertIn("Hello World A Debug!", client.out)
self.assertNotIn("Release", client.out)
def test_build_requires(self):
# https://github.com/conan-io/conan/issues/3075
client = TestClient()
tool = """from conans import ConanFile
class Tool(ConanFile):
def package_info(self):
self.cpp_info.libs = ["MyToolLib"]
"""
client.save({"conanfile.py": tool})
client.run("create . Tool/0.1@user/testing")
conanfile = """from conans import ConanFile
import os
class Pkg(ConanFile):
requires = {deps}
build_requires = "Tool/0.1@user/testing"
generators = "cmake"
"""
def files(name, depend=None):
deps = ('"Hello%s/0.1@lasote/stable"' % depend) if depend else "None"
return {"conanfile.py": conanfile.format(deps=deps, name=name)}
client.save(files("C"), path=os.path.join(client.current_folder, "C"))
client.save(files("B", "C"), path=os.path.join(client.current_folder, "B"))
client.save(files("A", "B"), path=os.path.join(client.current_folder, "A"))
project = dedent("""
editables:
HelloB/0.1@lasote/stable:
path: B
HelloC/0.1@lasote/stable:
path: C
HelloA/0.1@lasote/stable:
path: A
layout: layout
root: HelloA/0.1@lasote/stable
""")
layout = dedent("""
[build_folder]
build
""")
client.save({"conanws.yml": project,
"layout": layout})
client.run("workspace install conanws.yml")
self.assertIn("HelloC/0.1@lasote/stable: Applying build-requirement: Tool/0.1@user/testing",
client.out)
self.assertIn("HelloB/0.1@lasote/stable: Applying build-requirement: Tool/0.1@user/testing",
client.out)
self.assertIn("HelloA/0.1@lasote/stable: Applying build-requirement: Tool/0.1@user/testing",
client.out)
for sub in ("A", "B", "C"):
conanbuildinfo = client.load(os.path.join(sub, "build", "conanbuildinfo.cmake"))
self.assertIn("set(CONAN_LIBS_TOOL MyToolLib)", conanbuildinfo)
def test_use_build_requires_editable(self):
client = TestClient()
toolconanfile = """from conans import ConanFile
class Tool(ConanFile):
def package_info(self):
self.cpp_info.libs = ["MyToolLib"]
"""
conanfile = """from conans import ConanFile
import os
class Pkg(ConanFile):
requires = {deps}
build_requires = "Tool/0.1@user/testing"
generators = "cmake"
"""
def files(name, depend=None):
deps = ('"Hello%s/0.1@lasote/stable"' % depend) if depend else "None"
return {"conanfile.py": conanfile.format(deps=deps, name=name)}
client.save({"conanfile.py": toolconanfile},
path=os.path.join(client.current_folder, "Tool"))
client.save(files("A"), path=os.path.join(client.current_folder, "A"))
project = dedent("""
editables:
HelloA/0.1@lasote/stable:
path: A
Tool/0.1@user/testing:
path: Tool
layout: layout
root: HelloA/0.1@lasote/stable
""")
layout = dedent("""
[build_folder]
build
""")
client.save({"conanws.yml": project,
"layout": layout})
client.run("workspace install conanws.yml")
self.assertIn("HelloA/0.1@lasote/stable: Applying build-requirement: Tool/0.1@user/testing",
client.out)
conanbuildinfo = client.load(os.path.join("A", "build", "conanbuildinfo.cmake"))
self.assertIn("set(CONAN_LIBS_TOOL MyToolLib)", conanbuildinfo)
@pytest.mark.tool_compiler
def test_per_package_layout(self):
client = TestClient()
def files(name, depend=None):
deps = ('"Hello%s/0.1@lasote/stable"' % depend) if depend else "None"
return {"conanfile.py": conanfile_build.format(deps=deps, name=name)}
client.save(files("C"), path=os.path.join(client.current_folder, "C"))
client.save(files("B", "C"), path=os.path.join(client.current_folder, "B"))
client.save(files("A", "B"), path=os.path.join(client.current_folder, "A"))
project = dedent("""
editables:
HelloB/0.1@lasote/stable:
path: B
layout: B/layoutB
HelloC/0.1@lasote/stable:
path: C
layout: C/layoutC
HelloA/0.1@lasote/stable:
path: A
layout: A/layoutA
root: HelloA/0.1@lasote/stable
""")
layout = dedent("""
[build_folder]
build
[includedirs]
myinclude{}
""")
client.save({"conanws.yml": project,
"A/layoutA": layout.format("A"),
"B/layoutB": layout.format("B"),
"C/layoutC": layout.format("C")})
client.run("workspace install conanws.yml")
self.assertIn("HelloA/0.1@lasote/stable from user folder - Editable", client.out)
self.assertIn("HelloB/0.1@lasote/stable from user folder - Editable", client.out)
self.assertIn("HelloC/0.1@lasote/stable from user folder - Editable", client.out)
cmake = client.load(os.path.join("A", "build", "conanbuildinfo.cmake"))
self.assertIn("myincludeC", cmake)
self.assertIn("myincludeB", cmake)
@pytest.mark.tool_compiler
def test_generators(self):
client = TestClient()
def files(name, depend=None):
deps = ('"Hello%s/0.1@lasote/stable"' % depend) if depend else "None"
return {"conanfile.py": conanfile_build.format(deps=deps, name=name)}
client.save(files("C"), path=os.path.join(client.current_folder, "C"))
client.save(files("B", "C"), path=os.path.join(client.current_folder, "B"))
client.save(files("A", "B"), path=os.path.join(client.current_folder, "A"))
project = dedent("""
editables:
HelloB/0.1@lasote/stable:
path: B
generators: [make, qmake]
HelloC/0.1@lasote/stable:
path: C
HelloA/0.1@lasote/stable:
path: A
generators: visual_studio
layout: layout
generators: cmake
workspace_generator: cmake
root: HelloA/0.1@lasote/stable
""")
layout = dedent("""
[build_folder]
""")
client.save({"conanws.yml": project,
"layout": layout})
client.run("workspace install conanws.yml")
self.assertIn("HelloA/0.1@lasote/stable from user folder - Editable", client.out)
self.assertIn("HelloB/0.1@lasote/stable from user folder - Editable", client.out)
self.assertIn("HelloC/0.1@lasote/stable from user folder - Editable", client.out)
self.assertTrue(os.path.exists(os.path.join(client.current_folder, "B",
"conanbuildinfo.mak")))
self.assertTrue(os.path.exists(os.path.join(client.current_folder, "B",
"conanbuildinfo.pri")))
self.assertTrue(os.path.exists(os.path.join(client.current_folder, "A",
"conanbuildinfo.props")))
self.assertTrue(os.path.exists(os.path.join(client.current_folder, "C",
"conanbuildinfo.cmake")))
self.assertTrue(os.path.exists(os.path.join(client.current_folder,
"conanworkspace.cmake")))
@pytest.mark.tool_cmake
def test_gen_subdirectories(self):
client = TestClient()
def files(name, depend=None):
deps = ('"Hello%s/0.1@lasote/stable"' % depend) if depend else "None"
return {"conanfile.py": conanfile_build.format(deps=deps, name=name)}
client.save(files("C"), path=os.path.join(client.current_folder, "C"))
client.save(files("B", "C"), path=os.path.join(client.current_folder, "B"))
client.save(files("A", "B"), path=os.path.join(client.current_folder, "A"))
project = dedent("""
editables:
HelloB/0.1@lasote/stable:
path: B
HelloC/0.1@lasote/stable:
path: C
HelloA/0.1@lasote/stable:
path: A
layout: layout
workspace_generator: cmake
root: HelloA/0.1@lasote/stable
""")
layout = dedent("""
[build_folder]
[source_folder]
""")
client.save({"conanws.yml": project,
"layout": layout})
client.run("workspace install conanws.yml")
self.assertIn("HelloA/0.1@lasote/stable from user folder - Editable", client.out)
self.assertIn("HelloB/0.1@lasote/stable from user folder - Editable", client.out)
self.assertIn("HelloC/0.1@lasote/stable from user folder - Editable", client.out)
conanws_cmake = client.load("conanworkspace.cmake")
self.assertIn("macro(conan_workspace_subdirectories)", conanws_cmake)
for p in ("HelloC", "HelloB", "HelloA"):
self.assertIn("add_subdirectory(${PACKAGE_%s_SRC} ${PACKAGE_%s_BUILD})" % (p, p),
conanws_cmake)
def test_default_filename(self):
client = TestClient()
path_to_editable = os.path.join(client.current_folder, "A")
project = dedent("""
editables:
HelloA/0.1@lasote/stable:
path: {path_to_editable}
layout: layout
workspace_generator: cmake
root: HelloA/0.1@lasote/stable
""".format(path_to_editable=path_to_editable))
conanfile = dedent("""
from conans import ConanFile
class Lib(ConanFile):
pass
""")
layout = dedent("""
[build_folder]
[source_folder]
""")
client.save({"conanfile.py": conanfile}, path=path_to_editable)
ws_folder = temp_folder()
client.save({os.path.join(ws_folder, Workspace.default_filename): project,
os.path.join(ws_folder, "layout"): layout})
# For the right folder, it works
client.run('workspace install "{}"'.format(ws_folder))
conanws_cmake = client.load("conanworkspace.cmake")
self.assertIn("macro(conan_workspace_subdirectories)", conanws_cmake)
# For a non existing folder, it will try to load the default filename (it fails)
non_existing = temp_folder()
client.run('workspace install "{}"'.format(non_existing), assert_error=True)
trial_path = os.path.join(non_existing, Workspace.default_filename)
self.assertIn("ERROR: Couldn't load workspace file in {}".format(trial_path), client.out)
# For an existing file, it will try to use it (will fail because of the format)
invalid_file = os.path.join(ws_folder, "layout")
client.run('workspace install "{}"'.format(invalid_file), assert_error=True)
self.assertIn("ERROR: There was an error parsing", client.out)
# For an existing folder, without the default file (it will fail looking for it)
no_default_file = os.path.join(client.current_folder)
client.run('workspace install "{}"'.format(no_default_file), assert_error=True)
trial_path = os.path.join(no_default_file, Workspace.default_filename)
self.assertIn("ERROR: Couldn't load workspace file in {}".format(trial_path), client.out)
def test_install_folder(self):
project = dedent("""
editables:
HelloA/0.1@lasote/stable:
path: A
layout: layout
workspace_generator: cmake
root: HelloA/0.1@lasote/stable
""")
conanfile = dedent("""
from conans import ConanFile
class Lib(ConanFile):
pass
""")
layout = dedent("""
[build_folder]
[source_folder]
""")
client = TestClient()
client.save({"conanfile.py": conanfile},
path=os.path.join(client.current_folder, "A"))
client.save({"conanws.yml": project,
"layout": layout})
client.run("workspace install conanws.yml --install-folder=ws_install")
self.assertTrue(os.path.exists(os.path.join(client.current_folder, "ws_install",
"conanworkspace.cmake")))
def test_install_folder_rebuilt_requirements(self):
# https://github.com/conan-io/conan/issues/6046
client = TestClient()
tool = dedent("""
from conans import ConanFile
class Tool(ConanFile):
def package_info(self):
self.cpp_info.libs = ["MyToolLib"]
""")
client.save({"conanfile.py": tool})
client.run("export . Tool/0.1@user/testing")
client.save({"conanfile.py": GenConanfile().with_name("HelloB").with_version("0.1")},
path=os.path.join(client.current_folder, "B"))
client.save({"conanfile.py": GenConanfile().with_name("HelloA").with_version(
"0.1").with_build_requires("Tool/0.1@user/testing").with_require("HelloB/0.1")},
path=os.path.join(client.current_folder, "A"))
project = dedent("""
editables:
HelloB/0.1:
path: B
HelloA/0.1:
path: A
layout: layout
root: HelloA/0.1
workspace_generator: cmake
""")
layout = dedent("""
[build_folder]
build
""")
client.save({"conanws.yml": project,
"layout": layout})
client.run(
"workspace install conanws.yml --install-folder=ws_install --build Tool/0.1@user/testing")
self.assertTrue(os.path.exists(os.path.join(client.current_folder, "ws_install",
"conanworkspace.cmake")))
def test_missing_subarguments(self):
client = TestClient()
client.run("workspace", assert_error=True)
self.assertIn("ERROR: Exiting with code: 2", client.out)
|
daviddupont69/CouchPotatoServer
|
refs/heads/develop
|
couchpotato/core/plugins/base.py
|
3
|
from StringIO import StringIO
from couchpotato.core.event import fireEvent, addEvent
from couchpotato.core.helpers.encoding import tryUrlencode, ss, toSafeString, \
toUnicode, sp
from couchpotato.core.helpers.variable import getExt, md5, isLocalIP
from couchpotato.core.logger import CPLog
from couchpotato.environment import Env
from multipartpost import MultipartPostHandler
from tornado import template
from tornado.web import StaticFileHandler
from urlparse import urlparse
import cookielib
import glob
import gzip
import inspect
import math
import os.path
import re
import time
import traceback
import urllib2
log = CPLog(__name__)
class Plugin(object):
_class_name = None
plugin_path = None
enabled_option = 'enabled'
auto_register_static = True
_needs_shutdown = False
_running = None
user_agent = 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10.8; rv:24.0) Gecko/20130519 Firefox/24.0'
http_last_use = {}
http_time_between_calls = 0
http_failed_request = {}
http_failed_disabled = {}
def __new__(typ, *args, **kwargs):
new_plugin = super(Plugin, typ).__new__(typ)
new_plugin.registerPlugin()
return new_plugin
def registerPlugin(self):
addEvent('app.do_shutdown', self.doShutdown)
addEvent('plugin.running', self.isRunning)
self._running = []
if self.auto_register_static:
self.registerStatic(inspect.getfile(self.__class__))
def conf(self, attr, value = None, default = None, section = None):
class_name = self.getName().lower().split(':')
return Env.setting(attr, section = section if section else class_name[0].lower(), value = value, default = default)
def getName(self):
return self._class_name or self.__class__.__name__
def setName(self, name):
self._class_name = name
def renderTemplate(self, parent_file, templ, **params):
t = template.Template(open(os.path.join(os.path.dirname(parent_file), templ), 'r').read())
return t.generate(**params)
def registerStatic(self, plugin_file, add_to_head = True):
# Register plugin path
self.plugin_path = os.path.dirname(plugin_file)
static_folder = toUnicode(os.path.join(self.plugin_path, 'static'))
if not os.path.isdir(static_folder):
return
# Get plugin_name from PluginName
s1 = re.sub('(.)([A-Z][a-z]+)', r'\1_\2', self.__class__.__name__)
class_name = re.sub('([a-z0-9])([A-Z])', r'\1_\2', s1).lower()
# View path
path = 'static/plugin/%s/' % (class_name)
# Add handler to Tornado
Env.get('app').add_handlers(".*$", [(Env.get('web_base') + path + '(.*)', StaticFileHandler, {'path': static_folder})])
# Register for HTML <HEAD>
if add_to_head:
for f in glob.glob(os.path.join(self.plugin_path, 'static', '*')):
ext = getExt(f)
if ext in ['js', 'css']:
fireEvent('register_%s' % ('script' if ext in 'js' else 'style'), path + os.path.basename(f), f)
def createFile(self, path, content, binary = False):
path = ss(path)
self.makeDir(os.path.dirname(path))
try:
f = open(path, 'w+' if not binary else 'w+b')
f.write(content)
f.close()
os.chmod(path, Env.getPermission('file'))
except Exception, e:
            log.error('Unable to write to file "%s": %s', (path, e))
def makeDir(self, path):
path = ss(path)
try:
if not os.path.isdir(path):
os.makedirs(path, Env.getPermission('folder'))
return True
except Exception, e:
log.error('Unable to create folder "%s": %s', (path, e))
return False
# http request
def urlopen(self, url, timeout = 30, params = None, headers = None, opener = None, multipart = False, show_error = True):
url = urllib2.quote(ss(url), safe = "%/:=&?~#+!$,;'@()*[]")
if not headers: headers = {}
if not params: params = {}
# Fill in some headers
parsed_url = urlparse(url)
host = '%s%s' % (parsed_url.hostname, (':' + str(parsed_url.port) if parsed_url.port else ''))
headers['Referer'] = headers.get('Referer', '%s://%s' % (parsed_url.scheme, host))
headers['Host'] = headers.get('Host', host)
headers['User-Agent'] = headers.get('User-Agent', self.user_agent)
headers['Accept-encoding'] = headers.get('Accept-encoding', 'gzip')
headers['Connection'] = headers.get('Connection', 'keep-alive')
headers['Cache-Control'] = headers.get('Cache-Control', 'max-age=0')
# Don't try for failed requests
if self.http_failed_disabled.get(host, 0) > 0:
if self.http_failed_disabled[host] > (time.time() - 900):
                log.info2('Disabled calls to %s for 15 minutes because of too many failed requests.', host)
                if not show_error:
                    raise Exception('Disabled calls to %s for 15 minutes because of too many failed requests' % host)
else:
return ''
else:
del self.http_failed_request[host]
del self.http_failed_disabled[host]
self.wait(host)
try:
# Make sure opener has the correct headers
if opener:
opener.add_headers = headers
if multipart:
log.info('Opening multipart url: %s, params: %s', (url, [x for x in params.iterkeys()] if isinstance(params, dict) else 'with data'))
request = urllib2.Request(url, params, headers)
if opener:
opener.add_handler(MultipartPostHandler())
else:
cookies = cookielib.CookieJar()
opener = urllib2.build_opener(urllib2.HTTPCookieProcessor(cookies), MultipartPostHandler)
response = opener.open(request, timeout = timeout)
else:
log.info('Opening url: %s, params: %s', (url, [x for x in params.iterkeys()] if isinstance(params, dict) else 'with data'))
if isinstance(params, (str, unicode)) and len(params) > 0:
data = params
else:
data = tryUrlencode(params) if len(params) > 0 else None
request = urllib2.Request(url, data, headers)
if opener:
response = opener.open(request, timeout = timeout)
else:
response = urllib2.urlopen(request, timeout = timeout)
# unzip if needed
if response.info().get('Content-Encoding') == 'gzip':
buf = StringIO(response.read())
f = gzip.GzipFile(fileobj = buf)
data = f.read()
f.close()
else:
data = response.read()
response.close()
self.http_failed_request[host] = 0
except IOError:
if show_error:
log.error('Failed opening url in %s: %s %s', (self.getName(), url, traceback.format_exc(1)))
# Save failed requests by hosts
try:
if not self.http_failed_request.get(host):
self.http_failed_request[host] = 1
else:
self.http_failed_request[host] += 1
# Disable temporarily
if self.http_failed_request[host] > 5 and not isLocalIP(host):
self.http_failed_disabled[host] = time.time()
except:
log.debug('Failed logging failed requests for %s: %s', (url, traceback.format_exc()))
raise
self.http_last_use[host] = time.time()
return data
def wait(self, host = ''):
now = time.time()
last_use = self.http_last_use.get(host, 0)
wait = math.ceil(last_use - now + self.http_time_between_calls)
if wait > 0:
log.debug('Waiting for %s, %d seconds', (self.getName(), wait))
time.sleep(last_use - now + self.http_time_between_calls)
def beforeCall(self, handler):
self.isRunning('%s.%s' % (self.getName(), handler.__name__))
def afterCall(self, handler):
self.isRunning('%s.%s' % (self.getName(), handler.__name__), False)
def doShutdown(self):
self.shuttingDown(True)
return True
def shuttingDown(self, value = None):
if value is None:
return self._needs_shutdown
self._needs_shutdown = value
def isRunning(self, value = None, boolean = True):
if value is None:
return self._running
if boolean:
self._running.append(value)
else:
try:
self._running.remove(value)
except:
log.error("Something went wrong when finishing the plugin function. Could not find the 'is_running' key")
def getCache(self, cache_key, url = None, **kwargs):
cache_key_md5 = md5(cache_key)
cache = Env.get('cache').get(cache_key_md5)
if cache:
if not Env.get('dev'): log.debug('Getting cache %s', cache_key)
return cache
if url:
try:
cache_timeout = 300
if kwargs.get('cache_timeout'):
cache_timeout = kwargs.get('cache_timeout')
del kwargs['cache_timeout']
data = self.urlopen(url, **kwargs)
if data:
self.setCache(cache_key, data, timeout = cache_timeout)
return data
except:
if not kwargs.get('show_error', True):
raise
return ''
def setCache(self, cache_key, value, timeout = 300):
cache_key_md5 = md5(cache_key)
log.debug('Setting cache %s', cache_key)
Env.get('cache').set(cache_key_md5, value, timeout)
return value
def createNzbName(self, data, movie):
tag = self.cpTag(movie)
return '%s%s' % (toSafeString(toUnicode(data.get('name'))[:127 - len(tag)]), tag)
def createFileName(self, data, filedata, movie):
name = sp(os.path.join(self.createNzbName(data, movie)))
if data.get('protocol') == 'nzb' and 'DOCTYPE nzb' not in filedata and '</nzb>' not in filedata:
return '%s.%s' % (name, 'rar')
return '%s.%s' % (name, data.get('protocol'))
def cpTag(self, movie):
if Env.setting('enabled', 'renamer'):
return '.cp(' + movie['library'].get('identifier') + ')' if movie['library'].get('identifier') else ''
return ''
def isDisabled(self):
return not self.isEnabled()
def isEnabled(self):
return self.conf(self.enabled_option) or self.conf(self.enabled_option) is None
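# A hedged usage sketch (the provider name and URL are made up, not part of this
# project): subclasses typically fetch remote data through getCache(), which wraps
# urlopen() together with the shared cache and the per-host rate limiting above.
#
#   class ExampleProvider(Plugin):
#       http_time_between_calls = 1  # at most one request per second per host
#
#       def search(self):
#           return self.getCache('example.search',
#               url = 'http://example.com/api', cache_timeout = 600)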
|
sideeffects/pycparser
|
refs/heads/master
|
pycparser/ply/yacc.py
|
465
|
# -----------------------------------------------------------------------------
# ply: yacc.py
#
# Copyright (C) 2001-2011,
# David M. Beazley (Dabeaz LLC)
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
# * Neither the name of the David Beazley or Dabeaz LLC may be used to
# endorse or promote products derived from this software without
# specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
# -----------------------------------------------------------------------------
#
# This implements an LR parser that is constructed from grammar rules defined
# as Python functions. The grammar is specified by supplying the BNF inside
# Python documentation strings. The inspiration for this technique was borrowed
# from John Aycock's Spark parsing system. PLY might be viewed as a cross between
# Spark and the GNU bison utility.
#
# The current implementation is only somewhat object-oriented. The
# LR parser itself is defined in terms of an object (which allows multiple
# parsers to co-exist). However, most of the variables used during table
# construction are defined in terms of global variables. Users shouldn't
# notice unless they are trying to define multiple parsers at the same
# time using threads (in which case they should have their head examined).
#
# This implementation supports both SLR and LALR(1) parsing. LALR(1)
# support was originally implemented by Elias Ioup ([email protected]),
# using the algorithm found in Aho, Sethi, and Ullman "Compilers: Principles,
# Techniques, and Tools" (The Dragon Book). LALR(1) has since been replaced
# by the more efficient DeRemer and Pennello algorithm.
#
# :::::::: WARNING :::::::
#
# Construction of LR parsing tables is fairly complicated and expensive.
# To make this module run fast, a *LOT* of work has been put into
# optimization---often at the expense of readability and what some might
# consider to be good Python "coding style." Modify the code at your
# own risk!
# ----------------------------------------------------------------------------
__version__ = "3.4"
__tabversion__ = "3.2" # Table version
#-----------------------------------------------------------------------------
# === User configurable parameters ===
#
# Change these to modify the default behavior of yacc (if you wish)
#-----------------------------------------------------------------------------
yaccdebug = 1 # Debugging mode. If set, yacc generates a
                               # 'parser.out' file in the current directory
debug_file = 'parser.out' # Default name of the debugging file
tab_module = 'parsetab' # Default name of the table module
default_lr = 'LALR' # Default LR table generation method
error_count = 3 # Number of symbols that must be shifted to leave recovery mode
yaccdevel = 0 # Set to True if developing yacc. This turns off optimized
# implementations of certain functions.
resultlimit = 40 # Size limit of results when running in debug mode.
pickle_protocol = 0 # Protocol to use when writing pickle files
import re, types, sys, os.path
# Compatibility function for python 2.6/3.0
if sys.version_info[0] < 3:
def func_code(f):
return f.func_code
else:
def func_code(f):
return f.__code__
# Compatibility
try:
MAXINT = sys.maxint
except AttributeError:
MAXINT = sys.maxsize
# Python 2.x/3.0 compatibility.
def load_ply_lex():
if sys.version_info[0] < 3:
import lex
else:
import ply.lex as lex
return lex
# This object is a stand-in for a logging object created by the
# logging module. PLY will use this by default to create things
# such as the parser.out file. If a user wants more detailed
# information, they can create their own logging object and pass
# it into PLY.
class PlyLogger(object):
def __init__(self,f):
self.f = f
def debug(self,msg,*args,**kwargs):
self.f.write((msg % args) + "\n")
info = debug
def warning(self,msg,*args,**kwargs):
self.f.write("WARNING: "+ (msg % args) + "\n")
def error(self,msg,*args,**kwargs):
self.f.write("ERROR: " + (msg % args) + "\n")
critical = debug
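# Hedged example (assumes the standard ply.yacc() entry point defined later in this
# file): callers who want parser diagnostics routed somewhere other than stderr can
# pass their own PlyLogger (or any object with debug/info/warning/error methods):
#
#   import sys
#   parser = yacc(debug=True,
#                 debuglog=PlyLogger(open('grammar_debug.txt', 'w')),
#                 errorlog=PlyLogger(sys.stderr))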
# Null logger is used when no output is generated. Does nothing.
class NullLogger(object):
def __getattribute__(self,name):
return self
def __call__(self,*args,**kwargs):
return self
# Exception raised for yacc-related errors
class YaccError(Exception): pass
# Format the result message that the parser produces when running in debug mode.
def format_result(r):
repr_str = repr(r)
if '\n' in repr_str: repr_str = repr(repr_str)
if len(repr_str) > resultlimit:
repr_str = repr_str[:resultlimit]+" ..."
result = "<%s @ 0x%x> (%s)" % (type(r).__name__,id(r),repr_str)
return result
# Format stack entries when the parser is running in debug mode
def format_stack_entry(r):
repr_str = repr(r)
if '\n' in repr_str: repr_str = repr(repr_str)
if len(repr_str) < 16:
return repr_str
else:
return "<%s @ 0x%x>" % (type(r).__name__,id(r))
#-----------------------------------------------------------------------------
# === LR Parsing Engine ===
#
# The following classes are used for the LR parser itself. These are not
# used during table construction and are independent of the actual LR
# table generation algorithm
#-----------------------------------------------------------------------------
# This class is used to hold non-terminal grammar symbols during parsing.
# It normally has the following attributes set:
# .type = Grammar symbol type
# .value = Symbol value
# .lineno = Starting line number
# .endlineno = Ending line number (optional, set automatically)
# .lexpos = Starting lex position
# .endlexpos = Ending lex position (optional, set automatically)
class YaccSymbol:
def __str__(self): return self.type
def __repr__(self): return str(self)
# This class is a wrapper around the objects actually passed to each
# grammar rule. Index lookup and assignment actually assign the
# .value attribute of the underlying YaccSymbol object.
# The lineno() method returns the line number of a given
# item (or 0 if not defined). The linespan() method returns
# a tuple of (startline,endline) representing the range of lines
# for a symbol. The lexspan() method returns a tuple (lexpos,endlexpos)
# representing the range of positional information for a symbol.
class YaccProduction:
def __init__(self,s,stack=None):
self.slice = s
self.stack = stack
self.lexer = None
self.parser= None
def __getitem__(self,n):
if n >= 0: return self.slice[n].value
else: return self.stack[n].value
def __setitem__(self,n,v):
self.slice[n].value = v
def __getslice__(self,i,j):
return [s.value for s in self.slice[i:j]]
def __len__(self):
return len(self.slice)
def lineno(self,n):
return getattr(self.slice[n],"lineno",0)
def set_lineno(self,n,lineno):
self.slice[n].lineno = lineno
def linespan(self,n):
startline = getattr(self.slice[n],"lineno",0)
endline = getattr(self.slice[n],"endlineno",startline)
return startline,endline
def lexpos(self,n):
return getattr(self.slice[n],"lexpos",0)
def lexspan(self,n):
startpos = getattr(self.slice[n],"lexpos",0)
endpos = getattr(self.slice[n],"endlexpos",startpos)
return startpos,endpos
def error(self):
raise SyntaxError
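# In user code, each grammar rule function receives a YaccProduction instance.
# An illustrative sketch of how the indexing and position helpers above are
# typically used (hypothetical rule, for illustration only):
#
#     def p_expr_plus(p):
#         'expr : expr PLUS term'
#         p[0] = p[1] + p[3]               # .value of expr and term
#         start, end = p.linespan(1)       # line range covered by the left expr
#         p.set_lineno(0, p.lineno(1))     # propagate a line number to the result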
# -----------------------------------------------------------------------------
# == LRParser ==
#
# The LR Parsing engine.
# -----------------------------------------------------------------------------
class LRParser:
def __init__(self,lrtab,errorf):
self.productions = lrtab.lr_productions
self.action = lrtab.lr_action
self.goto = lrtab.lr_goto
self.errorfunc = errorf
def errok(self):
self.errorok = 1
def restart(self):
del self.statestack[:]
del self.symstack[:]
sym = YaccSymbol()
sym.type = '$end'
self.symstack.append(sym)
self.statestack.append(0)
def parse(self,input=None,lexer=None,debug=0,tracking=0,tokenfunc=None):
if debug or yaccdevel:
if isinstance(debug,int):
debug = PlyLogger(sys.stderr)
return self.parsedebug(input,lexer,debug,tracking,tokenfunc)
elif tracking:
return self.parseopt(input,lexer,debug,tracking,tokenfunc)
else:
return self.parseopt_notrack(input,lexer,debug,tracking,tokenfunc)
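    # The three implementations below are selected by the arguments passed to
    # parse(). Illustrative calls (parser obtained from the yacc() entry point):
    #
    #     result = parser.parse(data)                  # fastest path: parseopt_notrack()
    #     result = parser.parse(data, tracking=True)   # position tracking: parseopt()
    #     result = parser.parse(data, debug=True)      # full trace: parsedebug()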
# !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
# parsedebug().
#
# This is the debugging enabled version of parse(). All changes made to the
# parsing engine should be made here. For the non-debugging version,
# copy this code to a method parseopt() and delete all of the sections
# enclosed in:
#
# #--! DEBUG
# statements
# #--! DEBUG
#
# !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
def parsedebug(self,input=None,lexer=None,debug=None,tracking=0,tokenfunc=None):
lookahead = None # Current lookahead symbol
lookaheadstack = [ ] # Stack of lookahead symbols
actions = self.action # Local reference to action table (to avoid lookup on self.)
goto = self.goto # Local reference to goto table (to avoid lookup on self.)
prod = self.productions # Local reference to production list (to avoid lookup on self.)
pslice = YaccProduction(None) # Production object passed to grammar rules
errorcount = 0 # Used during error recovery
# --! DEBUG
debug.info("PLY: PARSE DEBUG START")
# --! DEBUG
# If no lexer was given, we will try to use the lex module
if not lexer:
lex = load_ply_lex()
lexer = lex.lexer
# Set up the lexer and parser objects on pslice
pslice.lexer = lexer
pslice.parser = self
# If input was supplied, pass to lexer
if input is not None:
lexer.input(input)
if tokenfunc is None:
# Tokenize function
get_token = lexer.token
else:
get_token = tokenfunc
# Set up the state and symbol stacks
statestack = [ ] # Stack of parsing states
self.statestack = statestack
symstack = [ ] # Stack of grammar symbols
self.symstack = symstack
pslice.stack = symstack # Put in the production
errtoken = None # Err token
# The start state is assumed to be (0,$end)
statestack.append(0)
sym = YaccSymbol()
sym.type = "$end"
symstack.append(sym)
state = 0
while 1:
# Get the next symbol on the input. If a lookahead symbol
# is already set, we just use that. Otherwise, we'll pull
# the next token off of the lookaheadstack or from the lexer
# --! DEBUG
debug.debug('')
debug.debug('State : %s', state)
# --! DEBUG
if not lookahead:
if not lookaheadstack:
lookahead = get_token() # Get the next token
else:
lookahead = lookaheadstack.pop()
if not lookahead:
lookahead = YaccSymbol()
lookahead.type = "$end"
# --! DEBUG
debug.debug('Stack : %s',
("%s . %s" % (" ".join([xx.type for xx in symstack][1:]), str(lookahead))).lstrip())
# --! DEBUG
# Check the action table
ltype = lookahead.type
t = actions[state].get(ltype)
if t is not None:
if t > 0:
# shift a symbol on the stack
statestack.append(t)
state = t
# --! DEBUG
debug.debug("Action : Shift and goto state %s", t)
# --! DEBUG
symstack.append(lookahead)
lookahead = None
# Decrease error count on successful shift
if errorcount: errorcount -=1
continue
if t < 0:
# reduce a symbol on the stack, emit a production
p = prod[-t]
pname = p.name
plen = p.len
# Get production function
sym = YaccSymbol()
sym.type = pname # Production name
sym.value = None
# --! DEBUG
if plen:
debug.info("Action : Reduce rule [%s] with %s and goto state %d", p.str, "["+",".join([format_stack_entry(_v.value) for _v in symstack[-plen:]])+"]",-t)
else:
debug.info("Action : Reduce rule [%s] with %s and goto state %d", p.str, [],-t)
# --! DEBUG
if plen:
targ = symstack[-plen-1:]
targ[0] = sym
# --! TRACKING
if tracking:
t1 = targ[1]
sym.lineno = t1.lineno
sym.lexpos = t1.lexpos
t1 = targ[-1]
sym.endlineno = getattr(t1,"endlineno",t1.lineno)
sym.endlexpos = getattr(t1,"endlexpos",t1.lexpos)
# --! TRACKING
# !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
# The code enclosed in this section is duplicated
# below as a performance optimization. Make sure
# changes get made in both locations.
pslice.slice = targ
try:
# Call the grammar rule with our special slice object
del symstack[-plen:]
del statestack[-plen:]
p.callable(pslice)
# --! DEBUG
debug.info("Result : %s", format_result(pslice[0]))
# --! DEBUG
symstack.append(sym)
state = goto[statestack[-1]][pname]
statestack.append(state)
except SyntaxError:
# If an error was set. Enter error recovery state
lookaheadstack.append(lookahead)
symstack.pop()
statestack.pop()
state = statestack[-1]
sym.type = 'error'
lookahead = sym
errorcount = error_count
self.errorok = 0
continue
# !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
else:
# --! TRACKING
if tracking:
sym.lineno = lexer.lineno
sym.lexpos = lexer.lexpos
# --! TRACKING
targ = [ sym ]
# !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
# The code enclosed in this section is duplicated
# above as a performance optimization. Make sure
# changes get made in both locations.
pslice.slice = targ
try:
# Call the grammar rule with our special slice object
p.callable(pslice)
# --! DEBUG
debug.info("Result : %s", format_result(pslice[0]))
# --! DEBUG
symstack.append(sym)
state = goto[statestack[-1]][pname]
statestack.append(state)
except SyntaxError:
# If an error was set. Enter error recovery state
lookaheadstack.append(lookahead)
symstack.pop()
statestack.pop()
state = statestack[-1]
sym.type = 'error'
lookahead = sym
errorcount = error_count
self.errorok = 0
continue
# !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
if t == 0:
n = symstack[-1]
result = getattr(n,"value",None)
# --! DEBUG
debug.info("Done : Returning %s", format_result(result))
debug.info("PLY: PARSE DEBUG END")
# --! DEBUG
return result
if t == None:
# --! DEBUG
debug.error('Error : %s',
("%s . %s" % (" ".join([xx.type for xx in symstack][1:]), str(lookahead))).lstrip())
# --! DEBUG
# We have some kind of parsing error here. To handle
# this, we are going to push the current token onto
# the tokenstack and replace it with an 'error' token.
# If there are any synchronization rules, they may
# catch it.
#
                # In addition to pushing the error token, we call
# the user defined p_error() function if this is the
# first syntax error. This function is only called if
# errorcount == 0.
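                #
                # A typical user-supplied p_error() hook looks roughly like the
                # sketch below (illustrative only). During recovery, the globals
                # errok, token and restart set up further down are also available:
                #
                #     def p_error(tok):
                #         if tok:
                #             print("Syntax error at token %s" % tok.type)
                #         else:
                #             print("Syntax error at EOF")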
if errorcount == 0 or self.errorok:
errorcount = error_count
self.errorok = 0
errtoken = lookahead
if errtoken.type == "$end":
errtoken = None # End of file!
if self.errorfunc:
global errok,token,restart
errok = self.errok # Set some special functions available in error recovery
token = get_token
restart = self.restart
if errtoken and not hasattr(errtoken,'lexer'):
errtoken.lexer = lexer
tok = self.errorfunc(errtoken)
del errok, token, restart # Delete special functions
if self.errorok:
# User must have done some kind of panic
# mode recovery on their own. The
# returned token is the next lookahead
lookahead = tok
errtoken = None
continue
else:
if errtoken:
if hasattr(errtoken,"lineno"): lineno = lookahead.lineno
else: lineno = 0
if lineno:
sys.stderr.write("yacc: Syntax error at line %d, token=%s\n" % (lineno, errtoken.type))
else:
sys.stderr.write("yacc: Syntax error, token=%s" % errtoken.type)
else:
sys.stderr.write("yacc: Parse error in input. EOF\n")
return
else:
errorcount = error_count
# case 1: the statestack only has 1 entry on it. If we're in this state, the
# entire parse has been rolled back and we're completely hosed. The token is
# discarded and we just keep going.
if len(statestack) <= 1 and lookahead.type != "$end":
lookahead = None
errtoken = None
state = 0
# Nuke the pushback stack
del lookaheadstack[:]
continue
# case 2: the statestack has a couple of entries on it, but we're
# at the end of the file. nuke the top entry and generate an error token
# Start nuking entries on the stack
if lookahead.type == "$end":
# Whoa. We're really hosed here. Bail out
return
if lookahead.type != 'error':
sym = symstack[-1]
if sym.type == 'error':
# Hmmm. Error is on top of stack, we'll just nuke input
# symbol and continue
lookahead = None
continue
t = YaccSymbol()
t.type = 'error'
if hasattr(lookahead,"lineno"):
t.lineno = lookahead.lineno
t.value = lookahead
lookaheadstack.append(lookahead)
lookahead = t
else:
symstack.pop()
statestack.pop()
state = statestack[-1] # Potential bug fix
continue
# Call an error function here
raise RuntimeError("yacc: internal parser error!!!\n")
# !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
# parseopt().
#
# Optimized version of parse() method. DO NOT EDIT THIS CODE DIRECTLY.
# Edit the debug version above, then copy any modifications to the method
# below while removing #--! DEBUG sections.
# !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
def parseopt(self,input=None,lexer=None,debug=0,tracking=0,tokenfunc=None):
lookahead = None # Current lookahead symbol
lookaheadstack = [ ] # Stack of lookahead symbols
actions = self.action # Local reference to action table (to avoid lookup on self.)
goto = self.goto # Local reference to goto table (to avoid lookup on self.)
prod = self.productions # Local reference to production list (to avoid lookup on self.)
pslice = YaccProduction(None) # Production object passed to grammar rules
errorcount = 0 # Used during error recovery
# If no lexer was given, we will try to use the lex module
if not lexer:
lex = load_ply_lex()
lexer = lex.lexer
# Set up the lexer and parser objects on pslice
pslice.lexer = lexer
pslice.parser = self
# If input was supplied, pass to lexer
if input is not None:
lexer.input(input)
if tokenfunc is None:
# Tokenize function
get_token = lexer.token
else:
get_token = tokenfunc
# Set up the state and symbol stacks
statestack = [ ] # Stack of parsing states
self.statestack = statestack
symstack = [ ] # Stack of grammar symbols
self.symstack = symstack
pslice.stack = symstack # Put in the production
errtoken = None # Err token
# The start state is assumed to be (0,$end)
statestack.append(0)
sym = YaccSymbol()
sym.type = '$end'
symstack.append(sym)
state = 0
while 1:
# Get the next symbol on the input. If a lookahead symbol
# is already set, we just use that. Otherwise, we'll pull
# the next token off of the lookaheadstack or from the lexer
if not lookahead:
if not lookaheadstack:
lookahead = get_token() # Get the next token
else:
lookahead = lookaheadstack.pop()
if not lookahead:
lookahead = YaccSymbol()
lookahead.type = '$end'
# Check the action table
ltype = lookahead.type
t = actions[state].get(ltype)
if t is not None:
if t > 0:
# shift a symbol on the stack
statestack.append(t)
state = t
symstack.append(lookahead)
lookahead = None
# Decrease error count on successful shift
if errorcount: errorcount -=1
continue
if t < 0:
# reduce a symbol on the stack, emit a production
p = prod[-t]
pname = p.name
plen = p.len
# Get production function
sym = YaccSymbol()
sym.type = pname # Production name
sym.value = None
if plen:
targ = symstack[-plen-1:]
targ[0] = sym
# --! TRACKING
if tracking:
t1 = targ[1]
sym.lineno = t1.lineno
sym.lexpos = t1.lexpos
t1 = targ[-1]
sym.endlineno = getattr(t1,"endlineno",t1.lineno)
sym.endlexpos = getattr(t1,"endlexpos",t1.lexpos)
# --! TRACKING
# !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
# The code enclosed in this section is duplicated
# below as a performance optimization. Make sure
# changes get made in both locations.
pslice.slice = targ
try:
# Call the grammar rule with our special slice object
del symstack[-plen:]
del statestack[-plen:]
p.callable(pslice)
symstack.append(sym)
state = goto[statestack[-1]][pname]
statestack.append(state)
except SyntaxError:
# If an error was set. Enter error recovery state
lookaheadstack.append(lookahead)
symstack.pop()
statestack.pop()
state = statestack[-1]
sym.type = 'error'
lookahead = sym
errorcount = error_count
self.errorok = 0
continue
# !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
else:
# --! TRACKING
if tracking:
sym.lineno = lexer.lineno
sym.lexpos = lexer.lexpos
# --! TRACKING
targ = [ sym ]
# !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
# The code enclosed in this section is duplicated
# above as a performance optimization. Make sure
# changes get made in both locations.
pslice.slice = targ
try:
# Call the grammar rule with our special slice object
p.callable(pslice)
symstack.append(sym)
state = goto[statestack[-1]][pname]
statestack.append(state)
except SyntaxError:
# If an error was set. Enter error recovery state
lookaheadstack.append(lookahead)
symstack.pop()
statestack.pop()
state = statestack[-1]
sym.type = 'error'
lookahead = sym
errorcount = error_count
self.errorok = 0
continue
# !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
if t == 0:
n = symstack[-1]
return getattr(n,"value",None)
if t == None:
# We have some kind of parsing error here. To handle
# this, we are going to push the current token onto
# the tokenstack and replace it with an 'error' token.
# If there are any synchronization rules, they may
# catch it.
#
                # In addition to pushing the error token, we call
# the user defined p_error() function if this is the
# first syntax error. This function is only called if
# errorcount == 0.
if errorcount == 0 or self.errorok:
errorcount = error_count
self.errorok = 0
errtoken = lookahead
if errtoken.type == '$end':
errtoken = None # End of file!
if self.errorfunc:
global errok,token,restart
errok = self.errok # Set some special functions available in error recovery
token = get_token
restart = self.restart
if errtoken and not hasattr(errtoken,'lexer'):
errtoken.lexer = lexer
tok = self.errorfunc(errtoken)
del errok, token, restart # Delete special functions
if self.errorok:
# User must have done some kind of panic
# mode recovery on their own. The
# returned token is the next lookahead
lookahead = tok
errtoken = None
continue
else:
if errtoken:
if hasattr(errtoken,"lineno"): lineno = lookahead.lineno
else: lineno = 0
if lineno:
sys.stderr.write("yacc: Syntax error at line %d, token=%s\n" % (lineno, errtoken.type))
else:
sys.stderr.write("yacc: Syntax error, token=%s" % errtoken.type)
else:
sys.stderr.write("yacc: Parse error in input. EOF\n")
return
else:
errorcount = error_count
# case 1: the statestack only has 1 entry on it. If we're in this state, the
# entire parse has been rolled back and we're completely hosed. The token is
# discarded and we just keep going.
if len(statestack) <= 1 and lookahead.type != '$end':
lookahead = None
errtoken = None
state = 0
# Nuke the pushback stack
del lookaheadstack[:]
continue
# case 2: the statestack has a couple of entries on it, but we're
# at the end of the file. nuke the top entry and generate an error token
# Start nuking entries on the stack
if lookahead.type == '$end':
# Whoa. We're really hosed here. Bail out
return
if lookahead.type != 'error':
sym = symstack[-1]
if sym.type == 'error':
# Hmmm. Error is on top of stack, we'll just nuke input
# symbol and continue
lookahead = None
continue
t = YaccSymbol()
t.type = 'error'
if hasattr(lookahead,"lineno"):
t.lineno = lookahead.lineno
t.value = lookahead
lookaheadstack.append(lookahead)
lookahead = t
else:
symstack.pop()
statestack.pop()
state = statestack[-1] # Potential bug fix
continue
# Call an error function here
raise RuntimeError("yacc: internal parser error!!!\n")
# !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
# parseopt_notrack().
#
# Optimized version of parseopt() with line number tracking removed.
# DO NOT EDIT THIS CODE DIRECTLY. Copy the optimized version and remove
# code in the #--! TRACKING sections
# !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
def parseopt_notrack(self,input=None,lexer=None,debug=0,tracking=0,tokenfunc=None):
lookahead = None # Current lookahead symbol
lookaheadstack = [ ] # Stack of lookahead symbols
actions = self.action # Local reference to action table (to avoid lookup on self.)
goto = self.goto # Local reference to goto table (to avoid lookup on self.)
prod = self.productions # Local reference to production list (to avoid lookup on self.)
pslice = YaccProduction(None) # Production object passed to grammar rules
errorcount = 0 # Used during error recovery
# If no lexer was given, we will try to use the lex module
if not lexer:
lex = load_ply_lex()
lexer = lex.lexer
# Set up the lexer and parser objects on pslice
pslice.lexer = lexer
pslice.parser = self
# If input was supplied, pass to lexer
if input is not None:
lexer.input(input)
if tokenfunc is None:
# Tokenize function
get_token = lexer.token
else:
get_token = tokenfunc
# Set up the state and symbol stacks
statestack = [ ] # Stack of parsing states
self.statestack = statestack
symstack = [ ] # Stack of grammar symbols
self.symstack = symstack
pslice.stack = symstack # Put in the production
errtoken = None # Err token
# The start state is assumed to be (0,$end)
statestack.append(0)
sym = YaccSymbol()
sym.type = '$end'
symstack.append(sym)
state = 0
while 1:
# Get the next symbol on the input. If a lookahead symbol
# is already set, we just use that. Otherwise, we'll pull
# the next token off of the lookaheadstack or from the lexer
if not lookahead:
if not lookaheadstack:
lookahead = get_token() # Get the next token
else:
lookahead = lookaheadstack.pop()
if not lookahead:
lookahead = YaccSymbol()
lookahead.type = '$end'
# Check the action table
ltype = lookahead.type
t = actions[state].get(ltype)
if t is not None:
if t > 0:
# shift a symbol on the stack
statestack.append(t)
state = t
symstack.append(lookahead)
lookahead = None
# Decrease error count on successful shift
if errorcount: errorcount -=1
continue
if t < 0:
# reduce a symbol on the stack, emit a production
p = prod[-t]
pname = p.name
plen = p.len
# Get production function
sym = YaccSymbol()
sym.type = pname # Production name
sym.value = None
if plen:
targ = symstack[-plen-1:]
targ[0] = sym
# !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
# The code enclosed in this section is duplicated
# below as a performance optimization. Make sure
# changes get made in both locations.
pslice.slice = targ
try:
# Call the grammar rule with our special slice object
del symstack[-plen:]
del statestack[-plen:]
p.callable(pslice)
symstack.append(sym)
state = goto[statestack[-1]][pname]
statestack.append(state)
except SyntaxError:
# If an error was set. Enter error recovery state
lookaheadstack.append(lookahead)
symstack.pop()
statestack.pop()
state = statestack[-1]
sym.type = 'error'
lookahead = sym
errorcount = error_count
self.errorok = 0
continue
# !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
else:
targ = [ sym ]
# !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
# The code enclosed in this section is duplicated
# above as a performance optimization. Make sure
# changes get made in both locations.
pslice.slice = targ
try:
# Call the grammar rule with our special slice object
p.callable(pslice)
symstack.append(sym)
state = goto[statestack[-1]][pname]
statestack.append(state)
except SyntaxError:
# If an error was set. Enter error recovery state
lookaheadstack.append(lookahead)
symstack.pop()
statestack.pop()
state = statestack[-1]
sym.type = 'error'
lookahead = sym
errorcount = error_count
self.errorok = 0
continue
# !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
if t == 0:
n = symstack[-1]
return getattr(n,"value",None)
if t == None:
# We have some kind of parsing error here. To handle
# this, we are going to push the current token onto
# the tokenstack and replace it with an 'error' token.
# If there are any synchronization rules, they may
# catch it.
#
                # In addition to pushing the error token, we call
# the user defined p_error() function if this is the
# first syntax error. This function is only called if
# errorcount == 0.
if errorcount == 0 or self.errorok:
errorcount = error_count
self.errorok = 0
errtoken = lookahead
if errtoken.type == '$end':
errtoken = None # End of file!
if self.errorfunc:
global errok,token,restart
errok = self.errok # Set some special functions available in error recovery
token = get_token
restart = self.restart
if errtoken and not hasattr(errtoken,'lexer'):
errtoken.lexer = lexer
tok = self.errorfunc(errtoken)
del errok, token, restart # Delete special functions
if self.errorok:
# User must have done some kind of panic
# mode recovery on their own. The
# returned token is the next lookahead
lookahead = tok
errtoken = None
continue
else:
if errtoken:
if hasattr(errtoken,"lineno"): lineno = lookahead.lineno
else: lineno = 0
if lineno:
sys.stderr.write("yacc: Syntax error at line %d, token=%s\n" % (lineno, errtoken.type))
else:
sys.stderr.write("yacc: Syntax error, token=%s" % errtoken.type)
else:
sys.stderr.write("yacc: Parse error in input. EOF\n")
return
else:
errorcount = error_count
# case 1: the statestack only has 1 entry on it. If we're in this state, the
# entire parse has been rolled back and we're completely hosed. The token is
# discarded and we just keep going.
if len(statestack) <= 1 and lookahead.type != '$end':
lookahead = None
errtoken = None
state = 0
# Nuke the pushback stack
del lookaheadstack[:]
continue
# case 2: the statestack has a couple of entries on it, but we're
# at the end of the file. nuke the top entry and generate an error token
# Start nuking entries on the stack
if lookahead.type == '$end':
# Whoa. We're really hosed here. Bail out
return
if lookahead.type != 'error':
sym = symstack[-1]
if sym.type == 'error':
# Hmmm. Error is on top of stack, we'll just nuke input
# symbol and continue
lookahead = None
continue
t = YaccSymbol()
t.type = 'error'
if hasattr(lookahead,"lineno"):
t.lineno = lookahead.lineno
t.value = lookahead
lookaheadstack.append(lookahead)
lookahead = t
else:
symstack.pop()
statestack.pop()
state = statestack[-1] # Potential bug fix
continue
# Call an error function here
raise RuntimeError("yacc: internal parser error!!!\n")
# -----------------------------------------------------------------------------
# === Grammar Representation ===
#
# The following functions, classes, and variables are used to represent and
# manipulate the rules that make up a grammar.
# -----------------------------------------------------------------------------
import re
# regex matching identifiers
_is_identifier = re.compile(r'^[a-zA-Z0-9_-]+$')
# -----------------------------------------------------------------------------
# class Production:
#
# This class stores the raw information about a single production or grammar rule.
# A grammar rule refers to a specification such as this:
#
# expr : expr PLUS term
#
# Here are the basic attributes defined on all productions
#
# name - Name of the production. For example 'expr'
# prod - A list of symbols on the right side ['expr','PLUS','term']
# prec - Production precedence level
# number - Production number.
# func - Function that executes on reduce
# file - File where production function is defined
# lineno - Line number where production function is defined
#
# The following attributes are also computed:
#
# len - Length of the production (number of symbols on right hand side)
# usyms - Set of unique symbols found in the production
# -----------------------------------------------------------------------------
class Production(object):
reduced = 0
def __init__(self,number,name,prod,precedence=('right',0),func=None,file='',line=0):
self.name = name
self.prod = tuple(prod)
self.number = number
self.func = func
self.callable = None
self.file = file
self.line = line
self.prec = precedence
# Internal settings used during table construction
self.len = len(self.prod) # Length of the production
# Create a list of unique production symbols used in the production
self.usyms = [ ]
for s in self.prod:
if s not in self.usyms:
self.usyms.append(s)
# List of all LR items for the production
self.lr_items = []
self.lr_next = None
# Create a string representation
if self.prod:
self.str = "%s -> %s" % (self.name," ".join(self.prod))
else:
self.str = "%s -> <empty>" % self.name
def __str__(self):
return self.str
def __repr__(self):
return "Production("+str(self)+")"
def __len__(self):
return len(self.prod)
def __nonzero__(self):
return 1
def __getitem__(self,index):
return self.prod[index]
# Return the nth lr_item from the production (or None if at the end)
def lr_item(self,n):
if n > len(self.prod): return None
p = LRItem(self,n)
# Precompute the list of productions immediately following. Hack. Remove later
try:
p.lr_after = Prodnames[p.prod[n+1]]
except (IndexError,KeyError):
p.lr_after = []
try:
p.lr_before = p.prod[n-1]
except IndexError:
p.lr_before = None
return p
# Bind the production function name to a callable
def bind(self,pdict):
if self.func:
self.callable = pdict[self.func]
# This class serves as a minimal standin for Production objects when
# reading table data from files. It only contains information
# actually used by the LR parsing engine, plus some additional
# debugging information.
class MiniProduction(object):
def __init__(self,str,name,len,func,file,line):
self.name = name
self.len = len
self.func = func
self.callable = None
self.file = file
self.line = line
self.str = str
def __str__(self):
return self.str
def __repr__(self):
return "MiniProduction(%s)" % self.str
# Bind the production function name to a callable
def bind(self,pdict):
if self.func:
self.callable = pdict[self.func]
# -----------------------------------------------------------------------------
# class LRItem
#
# This class represents a specific stage of parsing a production rule. For
# example:
#
# expr : expr . PLUS term
#
# In the above, the "." represents the current location of the parse. Here
# are the basic attributes:
#
# name - Name of the production. For example 'expr'
# prod - A list of symbols on the right side ['expr','.', 'PLUS','term']
# number - Production number.
#
# lr_next    - Next LR item. For example, if this item is 'expr -> expr . PLUS term',
#              then lr_next refers to 'expr -> expr PLUS . term'
# lr_index - LR item index (location of the ".") in the prod list.
# lookaheads - LALR lookahead symbols for this item
# len - Length of the production (number of symbols on right hand side)
# lr_after - List of all productions that immediately follow
# lr_before - Grammar symbol immediately before
# -----------------------------------------------------------------------------
class LRItem(object):
def __init__(self,p,n):
self.name = p.name
self.prod = list(p.prod)
self.number = p.number
self.lr_index = n
self.lookaheads = { }
self.prod.insert(n,".")
self.prod = tuple(self.prod)
self.len = len(self.prod)
self.usyms = p.usyms
def __str__(self):
if self.prod:
s = "%s -> %s" % (self.name," ".join(self.prod))
else:
s = "%s -> <empty>" % self.name
return s
def __repr__(self):
return "LRItem("+str(self)+")"
# -----------------------------------------------------------------------------
# rightmost_terminal()
#
# Return the rightmost terminal from a list of symbols. Used in add_production()
# -----------------------------------------------------------------------------
def rightmost_terminal(symbols, terminals):
i = len(symbols) - 1
while i >= 0:
if symbols[i] in terminals:
return symbols[i]
i -= 1
return None
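# Illustrative examples (assuming terminals contains 'PLUS' and 'NUMBER'):
#
#     rightmost_terminal(['expr', 'PLUS', 'term'], terminals)   # -> 'PLUS'
#     rightmost_terminal(['expr', 'term'], terminals)           # -> None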
# -----------------------------------------------------------------------------
# === GRAMMAR CLASS ===
#
# The following class represents the contents of the specified grammar along
# with various computed properties such as first sets, follow sets, LR items, etc.
# This data is used for critical parts of the table generation process later.
# -----------------------------------------------------------------------------
class GrammarError(YaccError): pass
class Grammar(object):
def __init__(self,terminals):
self.Productions = [None] # A list of all of the productions. The first
# entry is always reserved for the purpose of
# building an augmented grammar
self.Prodnames = { } # A dictionary mapping the names of nonterminals to a list of all
# productions of that nonterminal.
self.Prodmap = { } # A dictionary that is only used to detect duplicate
# productions.
self.Terminals = { } # A dictionary mapping the names of terminal symbols to a
# list of the rules where they are used.
for term in terminals:
self.Terminals[term] = []
self.Terminals['error'] = []
self.Nonterminals = { } # A dictionary mapping names of nonterminals to a list
# of rule numbers where they are used.
self.First = { } # A dictionary of precomputed FIRST(x) symbols
self.Follow = { } # A dictionary of precomputed FOLLOW(x) symbols
self.Precedence = { } # Precedence rules for each terminal. Contains tuples of the
# form ('right',level) or ('nonassoc', level) or ('left',level)
        self.UsedPrecedence = { }     # Precedence rules that were actually used by the grammar.
# This is only used to provide error checking and to generate
# a warning about unused precedence rules.
self.Start = None # Starting symbol for the grammar
def __len__(self):
return len(self.Productions)
def __getitem__(self,index):
return self.Productions[index]
# -----------------------------------------------------------------------------
# set_precedence()
#
# Sets the precedence for a given terminal. assoc is the associativity such as
# 'left','right', or 'nonassoc'. level is a numeric level.
#
# -----------------------------------------------------------------------------
def set_precedence(self,term,assoc,level):
assert self.Productions == [None],"Must call set_precedence() before add_production()"
if term in self.Precedence:
raise GrammarError("Precedence already specified for terminal '%s'" % term)
if assoc not in ['left','right','nonassoc']:
raise GrammarError("Associativity must be one of 'left','right', or 'nonassoc'")
self.Precedence[term] = (assoc,level)
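    # Illustrative use, mirroring a typical precedence table:
    #
    #     g = Grammar(['PLUS', 'TIMES', 'NUMBER'])
    #     g.set_precedence('PLUS',  'left', 1)
    #     g.set_precedence('TIMES', 'left', 2)   # binds tighter than PLUS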
# -----------------------------------------------------------------------------
# add_production()
#
# Given an action function, this function assembles a production rule and
# computes its precedence level.
#
# The production rule is supplied as a list of symbols. For example,
# a rule such as 'expr : expr PLUS term' has a production name of 'expr' and
# symbols ['expr','PLUS','term'].
#
    # Precedence is determined by the precedence of the rightmost terminal symbol
    # in the production or the precedence of a terminal specified by %prec.
#
# A variety of error checks are performed to make sure production symbols
# are valid and that %prec is used correctly.
# -----------------------------------------------------------------------------
def add_production(self,prodname,syms,func=None,file='',line=0):
if prodname in self.Terminals:
raise GrammarError("%s:%d: Illegal rule name '%s'. Already defined as a token" % (file,line,prodname))
if prodname == 'error':
raise GrammarError("%s:%d: Illegal rule name '%s'. error is a reserved word" % (file,line,prodname))
if not _is_identifier.match(prodname):
raise GrammarError("%s:%d: Illegal rule name '%s'" % (file,line,prodname))
# Look for literal tokens
for n,s in enumerate(syms):
if s[0] in "'\"":
try:
c = eval(s)
if (len(c) > 1):
raise GrammarError("%s:%d: Literal token %s in rule '%s' may only be a single character" % (file,line,s, prodname))
if not c in self.Terminals:
self.Terminals[c] = []
syms[n] = c
continue
except SyntaxError:
pass
if not _is_identifier.match(s) and s != '%prec':
raise GrammarError("%s:%d: Illegal name '%s' in rule '%s'" % (file,line,s, prodname))
# Determine the precedence level
if '%prec' in syms:
if syms[-1] == '%prec':
raise GrammarError("%s:%d: Syntax error. Nothing follows %%prec" % (file,line))
if syms[-2] != '%prec':
raise GrammarError("%s:%d: Syntax error. %%prec can only appear at the end of a grammar rule" % (file,line))
precname = syms[-1]
prodprec = self.Precedence.get(precname,None)
if not prodprec:
raise GrammarError("%s:%d: Nothing known about the precedence of '%s'" % (file,line,precname))
else:
self.UsedPrecedence[precname] = 1
del syms[-2:] # Drop %prec from the rule
else:
# If no %prec, precedence is determined by the rightmost terminal symbol
precname = rightmost_terminal(syms,self.Terminals)
prodprec = self.Precedence.get(precname,('right',0))
# See if the rule is already in the rulemap
map = "%s -> %s" % (prodname,syms)
if map in self.Prodmap:
m = self.Prodmap[map]
raise GrammarError("%s:%d: Duplicate rule %s. " % (file,line, m) +
"Previous definition at %s:%d" % (m.file, m.line))
# From this point on, everything is valid. Create a new Production instance
pnumber = len(self.Productions)
if not prodname in self.Nonterminals:
self.Nonterminals[prodname] = [ ]
# Add the production number to Terminals and Nonterminals
for t in syms:
if t in self.Terminals:
self.Terminals[t].append(pnumber)
else:
if not t in self.Nonterminals:
self.Nonterminals[t] = [ ]
self.Nonterminals[t].append(pnumber)
# Create a production and add it to the list of productions
p = Production(pnumber,prodname,syms,prodprec,func,file,line)
self.Productions.append(p)
self.Prodmap[map] = p
# Add to the global productions list
try:
self.Prodnames[prodname].append(p)
except KeyError:
self.Prodnames[prodname] = [ p ]
return 0
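    # Illustrative use. The rules
    #
    #     expr : expr PLUS term
    #     expr : MINUS expr %prec UMINUS
    #
    # would be added roughly as follows (function names and file/line values
    # are hypothetical):
    #
    #     g.add_production('expr', ['expr', 'PLUS', 'term'], 'p_expr_plus', 'calc.py', 12)
    #     g.add_production('expr', ['MINUS', 'expr', '%prec', 'UMINUS'], 'p_expr_uminus', 'calc.py', 17)
    #
    # The first rule takes the precedence of PLUS, its rightmost terminal; the
    # second takes whatever precedence was registered for UMINUS via set_precedence().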
# -----------------------------------------------------------------------------
# set_start()
#
# Sets the starting symbol and creates the augmented grammar. Production
# rule 0 is S' -> start where start is the start symbol.
# -----------------------------------------------------------------------------
def set_start(self,start=None):
if not start:
start = self.Productions[1].name
if start not in self.Nonterminals:
raise GrammarError("start symbol %s undefined" % start)
self.Productions[0] = Production(0,"S'",[start])
self.Nonterminals[start].append(0)
self.Start = start
# -----------------------------------------------------------------------------
# find_unreachable()
#
# Find all of the nonterminal symbols that can't be reached from the starting
# symbol. Returns a list of nonterminals that can't be reached.
# -----------------------------------------------------------------------------
def find_unreachable(self):
# Mark all symbols that are reachable from a symbol s
def mark_reachable_from(s):
if reachable[s]:
# We've already reached symbol s.
return
reachable[s] = 1
for p in self.Prodnames.get(s,[]):
for r in p.prod:
mark_reachable_from(r)
reachable = { }
for s in list(self.Terminals) + list(self.Nonterminals):
reachable[s] = 0
mark_reachable_from( self.Productions[0].prod[0] )
return [s for s in list(self.Nonterminals)
if not reachable[s]]
# -----------------------------------------------------------------------------
# infinite_cycles()
#
# This function looks at the various parsing rules and tries to detect
# infinite recursion cycles (grammar rules where there is no possible way
# to derive a string of only terminals).
# -----------------------------------------------------------------------------
def infinite_cycles(self):
terminates = {}
# Terminals:
for t in self.Terminals:
terminates[t] = 1
terminates['$end'] = 1
# Nonterminals:
# Initialize to false:
for n in self.Nonterminals:
terminates[n] = 0
# Then propagate termination until no change:
while 1:
some_change = 0
for (n,pl) in self.Prodnames.items():
# Nonterminal n terminates iff any of its productions terminates.
for p in pl:
# Production p terminates iff all of its rhs symbols terminate.
for s in p.prod:
if not terminates[s]:
# The symbol s does not terminate,
# so production p does not terminate.
p_terminates = 0
break
else:
# didn't break from the loop,
# so every symbol s terminates
# so production p terminates.
p_terminates = 1
if p_terminates:
# symbol n terminates!
if not terminates[n]:
terminates[n] = 1
some_change = 1
# Don't need to consider any more productions for this n.
break
if not some_change:
break
infinite = []
for (s,term) in terminates.items():
if not term:
if not s in self.Prodnames and not s in self.Terminals and s != 'error':
# s is used-but-not-defined, and we've already warned of that,
# so it would be overkill to say that it's also non-terminating.
pass
else:
infinite.append(s)
return infinite
# -----------------------------------------------------------------------------
# undefined_symbols()
#
    # Find all symbols that were used in the grammar, but not defined as tokens or
    # grammar rules. Returns a list of tuples (sym, prod) where sym is the symbol
# and prod is the production where the symbol was used.
# -----------------------------------------------------------------------------
def undefined_symbols(self):
result = []
for p in self.Productions:
if not p: continue
for s in p.prod:
if not s in self.Prodnames and not s in self.Terminals and s != 'error':
result.append((s,p))
return result
# -----------------------------------------------------------------------------
# unused_terminals()
#
# Find all terminals that were defined, but not used by the grammar. Returns
# a list of all symbols.
# -----------------------------------------------------------------------------
def unused_terminals(self):
unused_tok = []
for s,v in self.Terminals.items():
if s != 'error' and not v:
unused_tok.append(s)
return unused_tok
# ------------------------------------------------------------------------------
# unused_rules()
#
# Find all grammar rules that were defined, but not used (maybe not reachable)
# Returns a list of productions.
# ------------------------------------------------------------------------------
def unused_rules(self):
unused_prod = []
for s,v in self.Nonterminals.items():
if not v:
p = self.Prodnames[s][0]
unused_prod.append(p)
return unused_prod
# -----------------------------------------------------------------------------
# unused_precedence()
#
# Returns a list of tuples (term,precedence) corresponding to precedence
# rules that were never used by the grammar. term is the name of the terminal
# on which precedence was applied and precedence is a string such as 'left' or
# 'right' corresponding to the type of precedence.
# -----------------------------------------------------------------------------
def unused_precedence(self):
unused = []
for termname in self.Precedence:
if not (termname in self.Terminals or termname in self.UsedPrecedence):
unused.append((termname,self.Precedence[termname][0]))
return unused
# -------------------------------------------------------------------------
# _first()
#
# Compute the value of FIRST1(beta) where beta is a tuple of symbols.
#
    # During execution of compute_first(), the result may be incomplete.
# Afterward (e.g., when called from compute_follow()), it will be complete.
# -------------------------------------------------------------------------
def _first(self,beta):
# We are computing First(x1,x2,x3,...,xn)
result = [ ]
for x in beta:
x_produces_empty = 0
# Add all the non-<empty> symbols of First[x] to the result.
for f in self.First[x]:
if f == '<empty>':
x_produces_empty = 1
else:
if f not in result: result.append(f)
if x_produces_empty:
# We have to consider the next x in beta,
# i.e. stay in the loop.
pass
else:
# We don't have to consider any further symbols in beta.
break
else:
# There was no 'break' from the loop,
# so x_produces_empty was true for all x in beta,
# so beta produces empty as well.
result.append('<empty>')
return result
# -------------------------------------------------------------------------
# compute_first()
#
# Compute the value of FIRST1(X) for all symbols
# -------------------------------------------------------------------------
def compute_first(self):
if self.First:
return self.First
# Terminals:
for t in self.Terminals:
self.First[t] = [t]
self.First['$end'] = ['$end']
# Nonterminals:
# Initialize to the empty set:
for n in self.Nonterminals:
self.First[n] = []
# Then propagate symbols until no change:
while 1:
some_change = 0
for n in self.Nonterminals:
for p in self.Prodnames[n]:
for f in self._first(p.prod):
if f not in self.First[n]:
self.First[n].append( f )
some_change = 1
if not some_change:
break
return self.First
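    # Illustrative example. For the grammar
    #
    #     expr : expr PLUS term
    #          | term
    #     term : NUMBER
    #
    # compute_first() yields First(term) = ['NUMBER'] and First(expr) = ['NUMBER'].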
# ---------------------------------------------------------------------
# compute_follow()
#
# Computes all of the follow sets for every non-terminal symbol. The
# follow set is the set of all symbols that might follow a given
# non-terminal. See the Dragon book, 2nd Ed. p. 189.
# ---------------------------------------------------------------------
def compute_follow(self,start=None):
# If already computed, return the result
if self.Follow:
return self.Follow
# If first sets not computed yet, do that first.
if not self.First:
self.compute_first()
# Add '$end' to the follow list of the start symbol
for k in self.Nonterminals:
self.Follow[k] = [ ]
if not start:
start = self.Productions[1].name
self.Follow[start] = [ '$end' ]
while 1:
didadd = 0
for p in self.Productions[1:]:
# Here is the production set
for i in range(len(p.prod)):
B = p.prod[i]
if B in self.Nonterminals:
# Okay. We got a non-terminal in a production
fst = self._first(p.prod[i+1:])
hasempty = 0
for f in fst:
if f != '<empty>' and f not in self.Follow[B]:
self.Follow[B].append(f)
didadd = 1
if f == '<empty>':
hasempty = 1
if hasempty or i == (len(p.prod)-1):
# Add elements of follow(a) to follow(b)
for f in self.Follow[p.name]:
if f not in self.Follow[B]:
self.Follow[B].append(f)
didadd = 1
if not didadd: break
return self.Follow
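    # Continuing the illustrative grammar above (start symbol 'expr'):
    #
    #     Follow(expr) = ['$end', 'PLUS']
    #     Follow(term) = ['$end', 'PLUS']    # inherited from Follow(expr)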
# -----------------------------------------------------------------------------
# build_lritems()
#
# This function walks the list of productions and builds a complete set of the
# LR items. The LR items are stored in two ways: First, they are uniquely
# numbered and placed in the list _lritems. Second, a linked list of LR items
# is built for each production. For example:
#
# E -> E PLUS E
#
# Creates the list
#
# [E -> . E PLUS E, E -> E . PLUS E, E -> E PLUS . E, E -> E PLUS E . ]
# -----------------------------------------------------------------------------
def build_lritems(self):
for p in self.Productions:
lastlri = p
i = 0
lr_items = []
while 1:
if i > len(p):
lri = None
else:
lri = LRItem(p,i)
# Precompute the list of productions immediately following
try:
lri.lr_after = self.Prodnames[lri.prod[i+1]]
except (IndexError,KeyError):
lri.lr_after = []
try:
lri.lr_before = lri.prod[i-1]
except IndexError:
lri.lr_before = None
lastlri.lr_next = lri
if not lri: break
lr_items.append(lri)
lastlri = lri
i += 1
p.lr_items = lr_items
# -----------------------------------------------------------------------------
# == Class LRTable ==
#
# This class represents a basic table of LR parsing information.
# Methods for generating the tables are not defined here. They are defined
# in the derived class LRGeneratedTable.
# -----------------------------------------------------------------------------
class VersionError(YaccError): pass
class LRTable(object):
def __init__(self):
self.lr_action = None
self.lr_goto = None
self.lr_productions = None
self.lr_method = None
def read_table(self,module):
if isinstance(module,types.ModuleType):
parsetab = module
else:
if sys.version_info[0] < 3:
exec("import %s as parsetab" % module)
else:
env = { }
exec("import %s as parsetab" % module, env, env)
parsetab = env['parsetab']
if parsetab._tabversion != __tabversion__:
raise VersionError("yacc table file version is out of date")
self.lr_action = parsetab._lr_action
self.lr_goto = parsetab._lr_goto
self.lr_productions = []
for p in parsetab._lr_productions:
self.lr_productions.append(MiniProduction(*p))
self.lr_method = parsetab._lr_method
return parsetab._lr_signature
def read_pickle(self,filename):
try:
import cPickle as pickle
except ImportError:
import pickle
in_f = open(filename,"rb")
tabversion = pickle.load(in_f)
if tabversion != __tabversion__:
raise VersionError("yacc table file version is out of date")
self.lr_method = pickle.load(in_f)
signature = pickle.load(in_f)
self.lr_action = pickle.load(in_f)
self.lr_goto = pickle.load(in_f)
productions = pickle.load(in_f)
self.lr_productions = []
for p in productions:
self.lr_productions.append(MiniProduction(*p))
in_f.close()
return signature
# Bind all production function names to callable objects in pdict
def bind_callables(self,pdict):
for p in self.lr_productions:
p.bind(pdict)
# -----------------------------------------------------------------------------
# === LR Generator ===
#
# The following classes and functions are used to generate LR parsing tables on
# a grammar.
# -----------------------------------------------------------------------------
# -----------------------------------------------------------------------------
# digraph()
# traverse()
#
# The following two functions are used to compute set valued functions
# of the form:
#
# F(x) = F'(x) U U{F(y) | x R y}
#
# This is used to compute the values of Read() sets as well as FOLLOW sets
# in LALR(1) generation.
#
# Inputs: X - An input set
# R - A relation
# FP - Set-valued function
# ------------------------------------------------------------------------------
def digraph(X,R,FP):
N = { }
for x in X:
N[x] = 0
stack = []
F = { }
for x in X:
if N[x] == 0: traverse(x,N,stack,F,X,R,FP)
return F
def traverse(x,N,stack,F,X,R,FP):
stack.append(x)
d = len(stack)
N[x] = d
F[x] = FP(x) # F(X) <- F'(x)
rel = R(x) # Get y's related to x
for y in rel:
if N[y] == 0:
traverse(y,N,stack,F,X,R,FP)
N[x] = min(N[x],N[y])
for a in F.get(y,[]):
if a not in F[x]: F[x].append(a)
if N[x] == d:
N[stack[-1]] = MAXINT
F[stack[-1]] = F[x]
element = stack.pop()
while element != x:
N[stack[-1]] = MAXINT
F[stack[-1]] = F[x]
element = stack.pop()
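# Illustrative use of digraph(): F(x) collects FP(x) together with F(y) for
# every y with x R y:
#
#     X  = ['a', 'b']
#     R  = lambda x: ['b'] if x == 'a' else []
#     FP = lambda x: [x.upper()]
#     digraph(X, R, FP)    # -> {'a': ['A', 'B'], 'b': ['B']}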
class LALRError(YaccError): pass
# -----------------------------------------------------------------------------
# == LRGeneratedTable ==
#
# This class implements the LR table generation algorithm. There are no
# public methods except for write()
# -----------------------------------------------------------------------------
class LRGeneratedTable(LRTable):
def __init__(self,grammar,method='LALR',log=None):
if method not in ['SLR','LALR']:
raise LALRError("Unsupported method %s" % method)
self.grammar = grammar
self.lr_method = method
# Set up the logger
if not log:
log = NullLogger()
self.log = log
# Internal attributes
self.lr_action = {} # Action table
self.lr_goto = {} # Goto table
self.lr_productions = grammar.Productions # Copy of grammar Production array
self.lr_goto_cache = {} # Cache of computed gotos
self.lr0_cidhash = {} # Cache of closures
self._add_count = 0 # Internal counter used to detect cycles
        # Diagnostic information filled in by the table generator
self.sr_conflict = 0
self.rr_conflict = 0
self.conflicts = [] # List of conflicts
self.sr_conflicts = []
self.rr_conflicts = []
# Build the tables
self.grammar.build_lritems()
self.grammar.compute_first()
self.grammar.compute_follow()
self.lr_parse_table()
# Compute the LR(0) closure operation on I, where I is a set of LR(0) items.
def lr0_closure(self,I):
self._add_count += 1
# Add everything in I to J
J = I[:]
didadd = 1
while didadd:
didadd = 0
for j in J:
for x in j.lr_after:
if getattr(x,"lr0_added",0) == self._add_count: continue
# Add B --> .G to J
J.append(x.lr_next)
x.lr0_added = self._add_count
didadd = 1
return J
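    # An illustrative closure (lr0_closure() above): with the small expression
    # grammar used earlier, the closure of [ S' -> . expr ] also contains
    #
    #     expr -> . expr PLUS term
    #     expr -> . term
    #     term -> . NUMBER
    #
    # i.e. an item with the dot at the start of every production of a
    # nonterminal that appears immediately after a dot.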
# Compute the LR(0) goto function goto(I,X) where I is a set
# of LR(0) items and X is a grammar symbol. This function is written
# in a way that guarantees uniqueness of the generated goto sets
# (i.e. the same goto set will never be returned as two different Python
# objects). With uniqueness, we can later do fast set comparisons using
# id(obj) instead of element-wise comparison.
def lr0_goto(self,I,x):
# First we look for a previously cached entry
g = self.lr_goto_cache.get((id(I),x),None)
if g: return g
# Now we generate the goto set in a way that guarantees uniqueness
# of the result
s = self.lr_goto_cache.get(x,None)
if not s:
s = { }
self.lr_goto_cache[x] = s
gs = [ ]
for p in I:
n = p.lr_next
if n and n.lr_before == x:
s1 = s.get(id(n),None)
if not s1:
s1 = { }
s[id(n)] = s1
gs.append(n)
s = s1
g = s.get('$end',None)
if not g:
if gs:
g = self.lr0_closure(gs)
s['$end'] = g
else:
s['$end'] = gs
self.lr_goto_cache[(id(I),x)] = g
return g
# Compute the LR(0) sets of item function
def lr0_items(self):
C = [ self.lr0_closure([self.grammar.Productions[0].lr_next]) ]
i = 0
for I in C:
self.lr0_cidhash[id(I)] = i
i += 1
        # Loop over the items in C and each grammar symbol
i = 0
while i < len(C):
I = C[i]
i += 1
# Collect all of the symbols that could possibly be in the goto(I,X) sets
asyms = { }
for ii in I:
for s in ii.usyms:
asyms[s] = None
for x in asyms:
g = self.lr0_goto(I,x)
if not g: continue
if id(g) in self.lr0_cidhash: continue
self.lr0_cidhash[id(g)] = len(C)
C.append(g)
return C
# -----------------------------------------------------------------------------
# ==== LALR(1) Parsing ====
#
# LALR(1) parsing is almost exactly the same as SLR except that instead of
# relying upon Follow() sets when performing reductions, a more selective
# lookahead set that incorporates the state of the LR(0) machine is utilized.
# Thus, we mainly just have to focus on calculating the lookahead sets.
#
    # The method used here is due to DeRemer and Pennello (1982).
#
    # DeRemer, F. L., and T. J. Pennello: "Efficient Computation of LALR(1)
# Lookahead Sets", ACM Transactions on Programming Languages and Systems,
# Vol. 4, No. 4, Oct. 1982, pp. 615-649
#
# Further details can also be found in:
#
# J. Tremblay and P. Sorenson, "The Theory and Practice of Compiler Writing",
# McGraw-Hill Book Company, (1985).
#
# -----------------------------------------------------------------------------
# -----------------------------------------------------------------------------
# compute_nullable_nonterminals()
#
# Creates a dictionary containing all of the non-terminals that might produce
# an empty production.
# -----------------------------------------------------------------------------
def compute_nullable_nonterminals(self):
nullable = {}
num_nullable = 0
while 1:
for p in self.grammar.Productions[1:]:
if p.len == 0:
nullable[p.name] = 1
continue
for t in p.prod:
if not t in nullable: break
else:
nullable[p.name] = 1
if len(nullable) == num_nullable: break
num_nullable = len(nullable)
return nullable
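    # Illustrative example. Given
    #
    #     opt_semi : SEMI
    #              |               (an empty production)
    #
    # 'opt_semi' is nullable, and so is any nonterminal whose entire right-hand
    # side consists of nullable symbols.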
# -----------------------------------------------------------------------------
# find_nonterminal_trans(C)
#
    # Given a set of LR(0) items, this function finds all of the non-terminal
# transitions. These are transitions in which a dot appears immediately before
# a non-terminal. Returns a list of tuples of the form (state,N) where state
# is the state number and N is the nonterminal symbol.
#
# The input C is the set of LR(0) items.
# -----------------------------------------------------------------------------
def find_nonterminal_transitions(self,C):
trans = []
for state in range(len(C)):
for p in C[state]:
if p.lr_index < p.len - 1:
t = (state,p.prod[p.lr_index+1])
if t[1] in self.grammar.Nonterminals:
if t not in trans: trans.append(t)
state = state + 1
return trans
# -----------------------------------------------------------------------------
# dr_relation()
#
# Computes the DR(p,A) relationships for non-terminal transitions. The input
# is a tuple (state,N) where state is a number and N is a nonterminal symbol.
#
# Returns a list of terminals.
# -----------------------------------------------------------------------------
def dr_relation(self,C,trans,nullable):
dr_set = { }
state,N = trans
terms = []
g = self.lr0_goto(C[state],N)
for p in g:
if p.lr_index < p.len - 1:
a = p.prod[p.lr_index+1]
if a in self.grammar.Terminals:
if a not in terms: terms.append(a)
# This extra bit is to handle the start state
if state == 0 and N == self.grammar.Productions[0].prod[0]:
terms.append('$end')
return terms
# -----------------------------------------------------------------------------
# reads_relation()
#
# Computes the READS() relation (p,A) READS (t,C).
# -----------------------------------------------------------------------------
def reads_relation(self,C, trans, empty):
# Look for empty transitions
rel = []
state, N = trans
g = self.lr0_goto(C[state],N)
j = self.lr0_cidhash.get(id(g),-1)
for p in g:
if p.lr_index < p.len - 1:
a = p.prod[p.lr_index + 1]
if a in empty:
rel.append((j,a))
return rel
# -----------------------------------------------------------------------------
# compute_lookback_includes()
#
# Determines the lookback and includes relations
#
# LOOKBACK:
#
# This relation is determined by running the LR(0) state machine forward.
# For example, starting with a production "N : . A B C", we run it forward
# to obtain "N : A B C ." We then build a relationship between this final
# state and the starting state. These relationships are stored in a dictionary
# lookdict.
#
# INCLUDES:
#
# Computes the INCLUDE() relation (p,A) INCLUDES (p',B).
#
# This relation is used to determine non-terminal transitions that occur
# inside of other non-terminal transition states. (p,A) INCLUDES (p', B)
# if the following holds:
#
# B -> LAT, where T -> epsilon and p' -L-> p
#
# L is essentially a prefix (which may be empty), T is a suffix that must be
# able to derive an empty string. State p' must lead to state p with the string L.
#
# -----------------------------------------------------------------------------
def compute_lookback_includes(self,C,trans,nullable):
lookdict = {} # Dictionary of lookback relations
includedict = {} # Dictionary of include relations
# Make a dictionary of non-terminal transitions
dtrans = {}
for t in trans:
dtrans[t] = 1
# Loop over all transitions and compute lookbacks and includes
for state,N in trans:
lookb = []
includes = []
for p in C[state]:
if p.name != N: continue
# Okay, we have a name match. We now follow the production all the way
# through the state machine until we get the . on the right hand side
lr_index = p.lr_index
j = state
while lr_index < p.len - 1:
lr_index = lr_index + 1
t = p.prod[lr_index]
# Check to see if this symbol and state are a non-terminal transition
if (j,t) in dtrans:
                        # Yes. Okay, there is some chance that this is an includes relation;
# the only way to know for certain is whether the rest of the
# production derives empty
li = lr_index + 1
while li < p.len:
if p.prod[li] in self.grammar.Terminals: break # No forget it
if not p.prod[li] in nullable: break
li = li + 1
else:
# Appears to be a relation between (j,t) and (state,N)
includes.append((j,t))
g = self.lr0_goto(C[j],t) # Go to next set
j = self.lr0_cidhash.get(id(g),-1) # Go to next state
# When we get here, j is the final state, now we have to locate the production
for r in C[j]:
if r.name != p.name: continue
if r.len != p.len: continue
i = 0
# This loop is comparing a production ". A B C" with "A B C ."
while i < r.lr_index:
if r.prod[i] != p.prod[i+1]: break
i = i + 1
else:
lookb.append((j,r))
for i in includes:
if not i in includedict: includedict[i] = []
includedict[i].append((state,N))
lookdict[(state,N)] = lookb
return lookdict,includedict
# -----------------------------------------------------------------------------
# compute_read_sets()
#
# Given a set of LR(0) items, this function computes the read sets.
#
# Inputs: C = Set of LR(0) items
# ntrans = Set of nonterminal transitions
# nullable = Set of empty transitions
#
# Returns a set containing the read sets
# -----------------------------------------------------------------------------
def compute_read_sets(self,C, ntrans, nullable):
FP = lambda x: self.dr_relation(C,x,nullable)
R = lambda x: self.reads_relation(C,x,nullable)
F = digraph(ntrans,R,FP)
return F
# -----------------------------------------------------------------------------
# compute_follow_sets()
#
# Given a set of LR(0) items, a set of non-terminal transitions, a readset,
# and an include set, this function computes the follow sets
#
# Follow(p,A) = Read(p,A) U U {Follow(p',B) | (p,A) INCLUDES (p',B)}
#
# Inputs:
# ntrans = Set of nonterminal transitions
# readsets = Readset (previously computed)
# inclsets = Include sets (previously computed)
#
# Returns a set containing the follow sets
# -----------------------------------------------------------------------------
def compute_follow_sets(self,ntrans,readsets,inclsets):
FP = lambda x: readsets[x]
R = lambda x: inclsets.get(x,[])
F = digraph(ntrans,R,FP)
return F
# -----------------------------------------------------------------------------
# add_lookaheads()
#
# Attaches the lookahead symbols to grammar rules.
#
# Inputs: lookbacks - Set of lookback relations
# followset - Computed follow set
#
# This function directly attaches the lookaheads to productions contained
# in the lookbacks set
# -----------------------------------------------------------------------------
def add_lookaheads(self,lookbacks,followset):
for trans,lb in lookbacks.items():
# Loop over productions in lookback
for state,p in lb:
if not state in p.lookaheads:
p.lookaheads[state] = []
f = followset.get(trans,[])
for a in f:
if a not in p.lookaheads[state]: p.lookaheads[state].append(a)
# -----------------------------------------------------------------------------
# add_lalr_lookaheads()
#
# This function does all of the work of adding lookahead information for use
# with LALR parsing
# -----------------------------------------------------------------------------
def add_lalr_lookaheads(self,C):
# Determine all of the nullable nonterminals
nullable = self.compute_nullable_nonterminals()
# Find all non-terminal transitions
trans = self.find_nonterminal_transitions(C)
# Compute read sets
readsets = self.compute_read_sets(C,trans,nullable)
# Compute lookback/includes relations
lookd, included = self.compute_lookback_includes(C,trans,nullable)
# Compute LALR FOLLOW sets
followsets = self.compute_follow_sets(trans,readsets,included)
# Add all of the lookaheads
self.add_lookaheads(lookd,followsets)
# -----------------------------------------------------------------------------
# lr_parse_table()
#
# This function constructs the parse tables for SLR or LALR
# -----------------------------------------------------------------------------
def lr_parse_table(self):
Productions = self.grammar.Productions
Precedence = self.grammar.Precedence
goto = self.lr_goto # Goto array
action = self.lr_action # Action array
log = self.log # Logger for output
actionp = { } # Action production array (temporary)
log.info("Parsing method: %s", self.lr_method)
# Step 1: Construct C = { I0, I1, ... IN}, collection of LR(0) items
# This determines the number of states
C = self.lr0_items()
if self.lr_method == 'LALR':
self.add_lalr_lookaheads(C)
# Build the parser table, state by state
st = 0
for I in C:
# Loop over each production in I
actlist = [ ] # List of actions
st_action = { }
st_actionp = { }
st_goto = { }
log.info("")
log.info("state %d", st)
log.info("")
for p in I:
log.info(" (%d) %s", p.number, str(p))
log.info("")
for p in I:
if p.len == p.lr_index + 1:
if p.name == "S'":
# Start symbol. Accept!
st_action["$end"] = 0
st_actionp["$end"] = p
else:
# We are at the end of a production. Reduce!
if self.lr_method == 'LALR':
laheads = p.lookaheads[st]
else:
laheads = self.grammar.Follow[p.name]
for a in laheads:
actlist.append((a,p,"reduce using rule %d (%s)" % (p.number,p)))
r = st_action.get(a,None)
if r is not None:
# Whoa. Have a shift/reduce or reduce/reduce conflict
if r > 0:
# Need to decide on shift or reduce here
# By default we favor shifting. Need to add
# some precedence rules here.
sprec,slevel = Productions[st_actionp[a].number].prec
rprec,rlevel = Precedence.get(a,('right',0))
if (slevel < rlevel) or ((slevel == rlevel) and (rprec == 'left')):
# We really need to reduce here.
st_action[a] = -p.number
st_actionp[a] = p
if not slevel and not rlevel:
log.info(" ! shift/reduce conflict for %s resolved as reduce",a)
self.sr_conflicts.append((st,a,'reduce'))
Productions[p.number].reduced += 1
elif (slevel == rlevel) and (rprec == 'nonassoc'):
st_action[a] = None
else:
# Hmmm. Guess we'll keep the shift
if not rlevel:
log.info(" ! shift/reduce conflict for %s resolved as shift",a)
self.sr_conflicts.append((st,a,'shift'))
elif r < 0:
# Reduce/reduce conflict. In this case, we favor the rule
# that was defined first in the grammar file
oldp = Productions[-r]
pp = Productions[p.number]
if oldp.line > pp.line:
st_action[a] = -p.number
st_actionp[a] = p
chosenp,rejectp = pp,oldp
Productions[p.number].reduced += 1
Productions[oldp.number].reduced -= 1
else:
chosenp,rejectp = oldp,pp
self.rr_conflicts.append((st,chosenp,rejectp))
log.info(" ! reduce/reduce conflict for %s resolved using rule %d (%s)", a,st_actionp[a].number, st_actionp[a])
else:
raise LALRError("Unknown conflict in state %d" % st)
else:
st_action[a] = -p.number
st_actionp[a] = p
Productions[p.number].reduced += 1
else:
i = p.lr_index
a = p.prod[i+1] # Get symbol right after the "."
if a in self.grammar.Terminals:
g = self.lr0_goto(I,a)
j = self.lr0_cidhash.get(id(g),-1)
if j >= 0:
# We are in a shift state
actlist.append((a,p,"shift and go to state %d" % j))
r = st_action.get(a,None)
if r is not None:
# Whoa, we have a shift/reduce or shift/shift conflict
if r > 0:
if r != j:
raise LALRError("Shift/shift conflict in state %d" % st)
elif r < 0:
# Do a precedence check.
# - if precedence of reduce rule is higher, we reduce.
# - if precedence of reduce is same and left assoc, we reduce.
# - otherwise we shift
rprec,rlevel = Productions[st_actionp[a].number].prec
sprec,slevel = Precedence.get(a,('right',0))
if (slevel > rlevel) or ((slevel == rlevel) and (rprec == 'right')):
# We decide to shift here... highest precedence to shift
Productions[st_actionp[a].number].reduced -= 1
st_action[a] = j
st_actionp[a] = p
if not rlevel:
log.info(" ! shift/reduce conflict for %s resolved as shift",a)
self.sr_conflicts.append((st,a,'shift'))
elif (slevel == rlevel) and (rprec == 'nonassoc'):
st_action[a] = None
else:
# Hmmm. Guess we'll keep the reduce
if not slevel and not rlevel:
log.info(" ! shift/reduce conflict for %s resolved as reduce",a)
self.sr_conflicts.append((st,a,'reduce'))
else:
raise LALRError("Unknown conflict in state %d" % st)
else:
st_action[a] = j
st_actionp[a] = p
# Print the actions associated with each terminal
_actprint = { }
for a,p,m in actlist:
if a in st_action:
if p is st_actionp[a]:
log.info(" %-15s %s",a,m)
_actprint[(a,m)] = 1
log.info("")
# Print the actions that were not used. (debugging)
not_used = 0
for a,p,m in actlist:
if a in st_action:
if p is not st_actionp[a]:
if not (a,m) in _actprint:
log.debug(" ! %-15s [ %s ]",a,m)
not_used = 1
_actprint[(a,m)] = 1
if not_used:
log.debug("")
# Construct the goto table for this state
nkeys = { }
for ii in I:
for s in ii.usyms:
if s in self.grammar.Nonterminals:
nkeys[s] = None
for n in nkeys:
g = self.lr0_goto(I,n)
j = self.lr0_cidhash.get(id(g),-1)
if j >= 0:
st_goto[n] = j
log.info(" %-30s shift and go to state %d",n,j)
action[st] = st_action
actionp[st] = st_actionp
goto[st] = st_goto
st += 1
# -----------------------------------------------------------------------------
# write()
#
# This function writes the LR parsing tables to a file
# -----------------------------------------------------------------------------
def write_table(self,modulename,outputdir='',signature=""):
basemodulename = modulename.split(".")[-1]
filename = os.path.join(outputdir,basemodulename) + ".py"
try:
f = open(filename,"w")
f.write("""
# %s
# This file is automatically generated. Do not edit.
_tabversion = %r
_lr_method = %r
_lr_signature = %r
""" % (filename, __tabversion__, self.lr_method, signature))
# Change smaller to 0 to go back to original tables
smaller = 1
# Factor out names to try and make smaller
if smaller:
items = { }
for s,nd in self.lr_action.items():
for name,v in nd.items():
i = items.get(name)
if not i:
i = ([],[])
items[name] = i
i[0].append(s)
i[1].append(v)
f.write("\n_lr_action_items = {")
for k,v in items.items():
f.write("%r:([" % k)
for i in v[0]:
f.write("%r," % i)
f.write("],[")
for i in v[1]:
f.write("%r," % i)
f.write("]),")
f.write("}\n")
f.write("""
_lr_action = { }
for _k, _v in _lr_action_items.items():
for _x,_y in zip(_v[0],_v[1]):
if not _x in _lr_action: _lr_action[_x] = { }
_lr_action[_x][_k] = _y
del _lr_action_items
""")
else:
f.write("\n_lr_action = { ");
for k,v in self.lr_action.items():
f.write("(%r,%r):%r," % (k[0],k[1],v))
f.write("}\n");
if smaller:
# Factor out names to try and make smaller
items = { }
for s,nd in self.lr_goto.items():
for name,v in nd.items():
i = items.get(name)
if not i:
i = ([],[])
items[name] = i
i[0].append(s)
i[1].append(v)
f.write("\n_lr_goto_items = {")
for k,v in items.items():
f.write("%r:([" % k)
for i in v[0]:
f.write("%r," % i)
f.write("],[")
for i in v[1]:
f.write("%r," % i)
f.write("]),")
f.write("}\n")
f.write("""
_lr_goto = { }
for _k, _v in _lr_goto_items.items():
for _x,_y in zip(_v[0],_v[1]):
if not _x in _lr_goto: _lr_goto[_x] = { }
_lr_goto[_x][_k] = _y
del _lr_goto_items
""")
else:
f.write("\n_lr_goto = { ");
for k,v in self.lr_goto.items():
f.write("(%r,%r):%r," % (k[0],k[1],v))
f.write("}\n");
# Write production table
f.write("_lr_productions = [\n")
for p in self.lr_productions:
if p.func:
f.write(" (%r,%r,%d,%r,%r,%d),\n" % (p.str,p.name, p.len, p.func,p.file,p.line))
else:
f.write(" (%r,%r,%d,None,None,None),\n" % (str(p),p.name, p.len))
f.write("]\n")
f.close()
except IOError:
e = sys.exc_info()[1]
sys.stderr.write("Unable to create '%s'\n" % filename)
sys.stderr.write(str(e)+"\n")
return
# -----------------------------------------------------------------------------
# pickle_table()
#
# This function pickles the LR parsing tables to a supplied file object
# -----------------------------------------------------------------------------
def pickle_table(self,filename,signature=""):
try:
import cPickle as pickle
except ImportError:
import pickle
outf = open(filename,"wb")
pickle.dump(__tabversion__,outf,pickle_protocol)
pickle.dump(self.lr_method,outf,pickle_protocol)
pickle.dump(signature,outf,pickle_protocol)
pickle.dump(self.lr_action,outf,pickle_protocol)
pickle.dump(self.lr_goto,outf,pickle_protocol)
outp = []
for p in self.lr_productions:
if p.func:
outp.append((p.str,p.name, p.len, p.func,p.file,p.line))
else:
outp.append((str(p),p.name,p.len,None,None,None))
pickle.dump(outp,outf,pickle_protocol)
outf.close()
# -----------------------------------------------------------------------------
# === INTROSPECTION ===
#
# The following functions and classes are used to implement the PLY
# introspection features followed by the yacc() function itself.
# -----------------------------------------------------------------------------
# -----------------------------------------------------------------------------
# get_caller_module_dict()
#
# This function returns a dictionary containing all of the symbols defined within
# a caller further down the call stack. This is used to get the environment
# associated with the yacc() call if none was provided.
# -----------------------------------------------------------------------------
def get_caller_module_dict(levels):
try:
raise RuntimeError
except RuntimeError:
e,b,t = sys.exc_info()
f = t.tb_frame
while levels > 0:
f = f.f_back
levels -= 1
ldict = f.f_globals.copy()
if f.f_globals != f.f_locals:
ldict.update(f.f_locals)
return ldict
# -----------------------------------------------------------------------------
# parse_grammar()
#
# This takes a raw grammar rule string and parses it into production data
# -----------------------------------------------------------------------------
def parse_grammar(doc,file,line):
grammar = []
# Split the doc string into lines
pstrings = doc.splitlines()
lastp = None
dline = line
for ps in pstrings:
dline += 1
p = ps.split()
if not p: continue
try:
if p[0] == '|':
# This is a continuation of a previous rule
if not lastp:
raise SyntaxError("%s:%d: Misplaced '|'" % (file,dline))
prodname = lastp
syms = p[1:]
else:
prodname = p[0]
lastp = prodname
syms = p[2:]
assign = p[1]
if assign != ':' and assign != '::=':
raise SyntaxError("%s:%d: Syntax error. Expected ':'" % (file,dline))
grammar.append((file,dline,prodname,syms))
except SyntaxError:
raise
except Exception:
raise SyntaxError("%s:%d: Syntax error in rule '%s'" % (file,dline,ps.strip()))
return grammar
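# Illustrative sketch (editor's addition, not part of PLY): given a rule
# function whose docstring reads
#
#     expression : expression PLUS term
#                | term
#
# parse_grammar(doc, file, line) returns one tuple per alternative, roughly:
#
#     [(file, line+1, 'expression', ['expression', 'PLUS', 'term']),
#      (file, line+2, 'expression', ['term'])]
#
# The exact line offsets depend on how the docstring is laid out in the
# source file; the production name and symbol lists follow directly from the
# splitting logic above.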
# -----------------------------------------------------------------------------
# ParserReflect()
#
# This class represents information extracted for building a parser including
# start symbol, error function, tokens, precedence list, action functions,
# etc.
# -----------------------------------------------------------------------------
class ParserReflect(object):
def __init__(self,pdict,log=None):
self.pdict = pdict
self.start = None
self.error_func = None
self.tokens = None
self.files = {}
self.grammar = []
self.error = 0
if log is None:
self.log = PlyLogger(sys.stderr)
else:
self.log = log
# Get all of the basic information
def get_all(self):
self.get_start()
self.get_error_func()
self.get_tokens()
self.get_precedence()
self.get_pfunctions()
# Validate all of the information
def validate_all(self):
self.validate_start()
self.validate_error_func()
self.validate_tokens()
self.validate_precedence()
self.validate_pfunctions()
self.validate_files()
return self.error
# Compute a signature over the grammar
def signature(self):
try:
from hashlib import md5
except ImportError:
from md5 import md5
try:
sig = md5()
if self.start:
sig.update(self.start.encode('latin-1'))
if self.prec:
sig.update("".join(["".join(p) for p in self.prec]).encode('latin-1'))
if self.tokens:
sig.update(" ".join(self.tokens).encode('latin-1'))
for f in self.pfuncs:
if f[3]:
sig.update(f[3].encode('latin-1'))
except (TypeError,ValueError):
pass
return sig.digest()
# -----------------------------------------------------------------------------
# validate_file()
#
# This method checks to see if there are duplicated p_rulename() functions
# in the parser module file. Without this function, it is really easy for
# users to make mistakes by cutting and pasting code fragments (and it's a real
# bugger to try and figure out why the resulting parser doesn't work). Therefore,
# we just do a little regular expression pattern matching of def statements
# to try and detect duplicates.
# -----------------------------------------------------------------------------
def validate_files(self):
# Match def p_funcname(
fre = re.compile(r'\s*def\s+(p_[a-zA-Z_0-9]*)\(')
for filename in self.files.keys():
base,ext = os.path.splitext(filename)
if ext != '.py': return 1 # No idea. Assume it's okay.
try:
f = open(filename)
lines = f.readlines()
f.close()
except IOError:
continue
counthash = { }
for linen,l in enumerate(lines):
linen += 1
m = fre.match(l)
if m:
name = m.group(1)
prev = counthash.get(name)
if not prev:
counthash[name] = linen
else:
self.log.warning("%s:%d: Function %s redefined. Previously defined on line %d", filename,linen,name,prev)
# Get the start symbol
def get_start(self):
self.start = self.pdict.get('start')
# Validate the start symbol
def validate_start(self):
if self.start is not None:
if not isinstance(self.start,str):
self.log.error("'start' must be a string")
# Look for error handler
def get_error_func(self):
self.error_func = self.pdict.get('p_error')
# Validate the error function
def validate_error_func(self):
if self.error_func:
if isinstance(self.error_func,types.FunctionType):
ismethod = 0
elif isinstance(self.error_func, types.MethodType):
ismethod = 1
else:
self.log.error("'p_error' defined, but is not a function or method")
self.error = 1
return
eline = func_code(self.error_func).co_firstlineno
efile = func_code(self.error_func).co_filename
self.files[efile] = 1
if (func_code(self.error_func).co_argcount != 1+ismethod):
self.log.error("%s:%d: p_error() requires 1 argument",efile,eline)
self.error = 1
# Get the tokens map
def get_tokens(self):
tokens = self.pdict.get("tokens",None)
if not tokens:
self.log.error("No token list is defined")
self.error = 1
return
if not isinstance(tokens,(list, tuple)):
self.log.error("tokens must be a list or tuple")
self.error = 1
return
if not tokens:
self.log.error("tokens is empty")
self.error = 1
return
self.tokens = tokens
# Validate the tokens
def validate_tokens(self):
# Validate the tokens.
if 'error' in self.tokens:
self.log.error("Illegal token name 'error'. Is a reserved word")
self.error = 1
return
terminals = {}
for n in self.tokens:
if n in terminals:
self.log.warning("Token '%s' multiply defined", n)
terminals[n] = 1
# Get the precedence map (if any)
def get_precedence(self):
self.prec = self.pdict.get("precedence",None)
# Validate and parse the precedence map
def validate_precedence(self):
preclist = []
if self.prec:
if not isinstance(self.prec,(list,tuple)):
self.log.error("precedence must be a list or tuple")
self.error = 1
return
for level,p in enumerate(self.prec):
if not isinstance(p,(list,tuple)):
self.log.error("Bad precedence table")
self.error = 1
return
if len(p) < 2:
self.log.error("Malformed precedence entry %s. Must be (assoc, term, ..., term)",p)
self.error = 1
return
assoc = p[0]
if not isinstance(assoc,str):
self.log.error("precedence associativity must be a string")
self.error = 1
return
for term in p[1:]:
if not isinstance(term,str):
self.log.error("precedence items must be strings")
self.error = 1
return
preclist.append((term,assoc,level+1))
self.preclist = preclist
# Get all p_functions from the grammar
def get_pfunctions(self):
p_functions = []
for name, item in self.pdict.items():
if name[:2] != 'p_': continue
if name == 'p_error': continue
if isinstance(item,(types.FunctionType,types.MethodType)):
line = func_code(item).co_firstlineno
file = func_code(item).co_filename
p_functions.append((line,file,name,item.__doc__))
# Sort all of the actions by line number
p_functions.sort()
self.pfuncs = p_functions
# Validate all of the p_functions
def validate_pfunctions(self):
grammar = []
# Check for non-empty symbols
if len(self.pfuncs) == 0:
self.log.error("no rules of the form p_rulename are defined")
self.error = 1
return
for line, file, name, doc in self.pfuncs:
func = self.pdict[name]
if isinstance(func, types.MethodType):
reqargs = 2
else:
reqargs = 1
if func_code(func).co_argcount > reqargs:
self.log.error("%s:%d: Rule '%s' has too many arguments",file,line,func.__name__)
self.error = 1
elif func_code(func).co_argcount < reqargs:
self.log.error("%s:%d: Rule '%s' requires an argument",file,line,func.__name__)
self.error = 1
elif not func.__doc__:
self.log.warning("%s:%d: No documentation string specified in function '%s' (ignored)",file,line,func.__name__)
else:
try:
parsed_g = parse_grammar(doc,file,line)
for g in parsed_g:
grammar.append((name, g))
except SyntaxError:
e = sys.exc_info()[1]
self.log.error(str(e))
self.error = 1
# Looks like a valid grammar rule
# Mark the file in which defined.
self.files[file] = 1
# Secondary validation step that looks for p_ definitions that are not functions
# or functions that look like they might be grammar rules.
for n,v in self.pdict.items():
if n[0:2] == 'p_' and isinstance(v, (types.FunctionType, types.MethodType)): continue
if n[0:2] == 't_': continue
if n[0:2] == 'p_' and n != 'p_error':
self.log.warning("'%s' not defined as a function", n)
if ((isinstance(v,types.FunctionType) and func_code(v).co_argcount == 1) or
(isinstance(v,types.MethodType) and func_code(v).co_argcount == 2)):
try:
doc = v.__doc__.split(" ")
if doc[1] == ':':
self.log.warning("%s:%d: Possible grammar rule '%s' defined without p_ prefix",
func_code(v).co_filename, func_code(v).co_firstlineno,n)
except Exception:
pass
self.grammar = grammar
# -----------------------------------------------------------------------------
# yacc(module)
#
# Build a parser
# -----------------------------------------------------------------------------
def yacc(method='LALR', debug=yaccdebug, module=None, tabmodule=tab_module, start=None,
check_recursion=1, optimize=0, write_tables=1, debugfile=debug_file,outputdir='',
debuglog=None, errorlog = None, picklefile=None):
global parse # Reference to the parsing method of the last built parser
# If pickling is enabled, table files are not created
if picklefile:
write_tables = 0
if errorlog is None:
errorlog = PlyLogger(sys.stderr)
# Get the module dictionary used for the parser
if module:
_items = [(k,getattr(module,k)) for k in dir(module)]
pdict = dict(_items)
else:
pdict = get_caller_module_dict(2)
# Collect parser information from the dictionary
pinfo = ParserReflect(pdict,log=errorlog)
pinfo.get_all()
if pinfo.error:
raise YaccError("Unable to build parser")
# Check signature against table files (if any)
signature = pinfo.signature()
# Read the tables
try:
lr = LRTable()
if picklefile:
read_signature = lr.read_pickle(picklefile)
else:
read_signature = lr.read_table(tabmodule)
if optimize or (read_signature == signature):
try:
lr.bind_callables(pinfo.pdict)
parser = LRParser(lr,pinfo.error_func)
parse = parser.parse
return parser
except Exception:
e = sys.exc_info()[1]
errorlog.warning("There was a problem loading the table file: %s", repr(e))
except VersionError:
e = sys.exc_info()
errorlog.warning(str(e))
except Exception:
pass
if debuglog is None:
if debug:
debuglog = PlyLogger(open(debugfile,"w"))
else:
debuglog = NullLogger()
debuglog.info("Created by PLY version %s (http://www.dabeaz.com/ply)", __version__)
errors = 0
# Validate the parser information
if pinfo.validate_all():
raise YaccError("Unable to build parser")
if not pinfo.error_func:
errorlog.warning("no p_error() function is defined")
# Create a grammar object
grammar = Grammar(pinfo.tokens)
# Set precedence level for terminals
for term, assoc, level in pinfo.preclist:
try:
grammar.set_precedence(term,assoc,level)
except GrammarError:
e = sys.exc_info()[1]
errorlog.warning("%s",str(e))
# Add productions to the grammar
for funcname, gram in pinfo.grammar:
file, line, prodname, syms = gram
try:
grammar.add_production(prodname,syms,funcname,file,line)
except GrammarError:
e = sys.exc_info()[1]
errorlog.error("%s",str(e))
errors = 1
# Set the grammar start symbols
try:
if start is None:
grammar.set_start(pinfo.start)
else:
grammar.set_start(start)
except GrammarError:
e = sys.exc_info()[1]
errorlog.error(str(e))
errors = 1
if errors:
raise YaccError("Unable to build parser")
# Verify the grammar structure
undefined_symbols = grammar.undefined_symbols()
for sym, prod in undefined_symbols:
errorlog.error("%s:%d: Symbol '%s' used, but not defined as a token or a rule",prod.file,prod.line,sym)
errors = 1
unused_terminals = grammar.unused_terminals()
if unused_terminals:
debuglog.info("")
debuglog.info("Unused terminals:")
debuglog.info("")
for term in unused_terminals:
errorlog.warning("Token '%s' defined, but not used", term)
debuglog.info(" %s", term)
# Print out all productions to the debug log
if debug:
debuglog.info("")
debuglog.info("Grammar")
debuglog.info("")
for n,p in enumerate(grammar.Productions):
debuglog.info("Rule %-5d %s", n, p)
# Find unused non-terminals
unused_rules = grammar.unused_rules()
for prod in unused_rules:
errorlog.warning("%s:%d: Rule '%s' defined, but not used", prod.file, prod.line, prod.name)
if len(unused_terminals) == 1:
errorlog.warning("There is 1 unused token")
if len(unused_terminals) > 1:
errorlog.warning("There are %d unused tokens", len(unused_terminals))
if len(unused_rules) == 1:
errorlog.warning("There is 1 unused rule")
if len(unused_rules) > 1:
errorlog.warning("There are %d unused rules", len(unused_rules))
if debug:
debuglog.info("")
debuglog.info("Terminals, with rules where they appear")
debuglog.info("")
terms = list(grammar.Terminals)
terms.sort()
for term in terms:
debuglog.info("%-20s : %s", term, " ".join([str(s) for s in grammar.Terminals[term]]))
debuglog.info("")
debuglog.info("Nonterminals, with rules where they appear")
debuglog.info("")
nonterms = list(grammar.Nonterminals)
nonterms.sort()
for nonterm in nonterms:
debuglog.info("%-20s : %s", nonterm, " ".join([str(s) for s in grammar.Nonterminals[nonterm]]))
debuglog.info("")
if check_recursion:
unreachable = grammar.find_unreachable()
for u in unreachable:
errorlog.warning("Symbol '%s' is unreachable",u)
infinite = grammar.infinite_cycles()
for inf in infinite:
errorlog.error("Infinite recursion detected for symbol '%s'", inf)
errors = 1
unused_prec = grammar.unused_precedence()
for term, assoc in unused_prec:
errorlog.error("Precedence rule '%s' defined for unknown symbol '%s'", assoc, term)
errors = 1
if errors:
raise YaccError("Unable to build parser")
# Run the LRGeneratedTable on the grammar
if debug:
errorlog.debug("Generating %s tables", method)
lr = LRGeneratedTable(grammar,method,debuglog)
if debug:
num_sr = len(lr.sr_conflicts)
# Report shift/reduce and reduce/reduce conflicts
if num_sr == 1:
errorlog.warning("1 shift/reduce conflict")
elif num_sr > 1:
errorlog.warning("%d shift/reduce conflicts", num_sr)
num_rr = len(lr.rr_conflicts)
if num_rr == 1:
errorlog.warning("1 reduce/reduce conflict")
elif num_rr > 1:
errorlog.warning("%d reduce/reduce conflicts", num_rr)
# Write out conflicts to the output file
if debug and (lr.sr_conflicts or lr.rr_conflicts):
debuglog.warning("")
debuglog.warning("Conflicts:")
debuglog.warning("")
for state, tok, resolution in lr.sr_conflicts:
debuglog.warning("shift/reduce conflict for %s in state %d resolved as %s", tok, state, resolution)
already_reported = {}
for state, rule, rejected in lr.rr_conflicts:
if (state,id(rule),id(rejected)) in already_reported:
continue
debuglog.warning("reduce/reduce conflict in state %d resolved using rule (%s)", state, rule)
debuglog.warning("rejected rule (%s) in state %d", rejected,state)
errorlog.warning("reduce/reduce conflict in state %d resolved using rule (%s)", state, rule)
errorlog.warning("rejected rule (%s) in state %d", rejected, state)
already_reported[state,id(rule),id(rejected)] = 1
warned_never = []
for state, rule, rejected in lr.rr_conflicts:
if not rejected.reduced and (rejected not in warned_never):
debuglog.warning("Rule (%s) is never reduced", rejected)
errorlog.warning("Rule (%s) is never reduced", rejected)
warned_never.append(rejected)
# Write the table file if requested
if write_tables:
lr.write_table(tabmodule,outputdir,signature)
# Write a pickled version of the tables
if picklefile:
lr.pickle_table(picklefile,signature)
# Build the parser
lr.bind_callables(pinfo.pdict)
parser = LRParser(lr,pinfo.error_func)
parse = parser.parse
return parser
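# ---------------------------------------------------------------------------
# Illustrative usage sketch (editor's addition, not part of PLY itself).
# It assumes a companion lexer module named `calclex` that defines the token
# list and a lexer; everything else relies only on the yacc() API above.
#
#     import ply.yacc as yacc
#     from calclex import tokens          # hypothetical lexer module
#
#     def p_expression_plus(p):
#         'expression : expression PLUS NUMBER'
#         p[0] = p[1] + p[3]
#
#     def p_expression_number(p):
#         'expression : NUMBER'
#         p[0] = p[1]
#
#     def p_error(p):
#         print("Syntax error at %r" % (p,))
#
#     parser = yacc.yacc()                # builds the LALR tables defined above
#     result = parser.parse('1 + 2')      # tokenized by the calclex lexer
# ---------------------------------------------------------------------------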
|
drglove/SickRage
|
refs/heads/master
|
sickbeard/metadata/ps3.py
|
13
|
# Author: Nic Wolfe <[email protected]>
# URL: http://code.google.com/p/sickbeard/
#
# This file is part of SickRage.
#
# SickRage is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# SickRage is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with SickRage. If not, see <http://www.gnu.org/licenses/>.
import os
import generic
from sickbeard import encodingKludge as ek
class PS3Metadata(generic.GenericMetadata):
"""
Metadata generation class for Sony PS3.
The following file structure is used:
show_root/cover.jpg (poster)
show_root/Season ##/filename.ext (*)
show_root/Season ##/filename.ext.cover.jpg (episode thumb)
"""
def __init__(self,
show_metadata=False,
episode_metadata=False,
fanart=False,
poster=False,
banner=False,
episode_thumbnails=False,
season_posters=False,
season_banners=False,
season_all_poster=False,
season_all_banner=False):
generic.GenericMetadata.__init__(self,
show_metadata,
episode_metadata,
fanart,
poster,
banner,
episode_thumbnails,
season_posters,
season_banners,
season_all_poster,
season_all_banner)
self.name = "Sony PS3"
self.poster_name = "cover.jpg"
# web-ui metadata template
self.eg_show_metadata = "<i>not supported</i>"
self.eg_episode_metadata = "<i>not supported</i>"
self.eg_fanart = "<i>not supported</i>"
self.eg_poster = "cover.jpg"
self.eg_banner = "<i>not supported</i>"
self.eg_episode_thumbnails = "Season##\\<i>filename</i>.ext.cover.jpg"
self.eg_season_posters = "<i>not supported</i>"
self.eg_season_banners = "<i>not supported</i>"
self.eg_season_all_poster = "<i>not supported</i>"
self.eg_season_all_banner = "<i>not supported</i>"
# Override with empty methods for unsupported features
def retrieveShowMetadata(self, folder):
# no show metadata is generated, so we abort this lookup function
return (None, None, None)
def create_show_metadata(self, show_obj, force=False):
pass
def update_show_indexer_metadata(self, show_obj):
pass
def get_show_file_path(self, show_obj):
pass
def create_episode_metadata(self, ep_obj, force=False):
pass
def create_fanart(self, show_obj):
pass
def create_banner(self, show_obj):
pass
def create_season_posters(self, show_obj):
pass
def create_season_banners(self, ep_obj):
pass
def create_season_all_poster(self, show_obj):
pass
def create_season_all_banner(self, show_obj):
pass
def get_episode_thumb_path(self, ep_obj):
"""
Returns the path where the episode thumbnail should be stored. Defaults to
the same path as the episode file but with a .cover.jpg extension.
ep_obj: a TVEpisode instance for which to create the thumbnail
"""
if ek.ek(os.path.isfile, ep_obj.location):
tbn_filename = ep_obj.location + ".cover.jpg"
else:
return None
return tbn_filename
# present a standard "interface" from the module
metadata_class = PS3Metadata
|
lllucius/climacast
|
refs/heads/master
|
aws-lambda-lxml/3.8.0/py27/lxml/usedoctest.py
|
149
|
"""Doctest module for XML comparison.
Usage::
>>> import lxml.usedoctest
>>> # now do your XML doctests ...
See `lxml.doctestcompare`
"""
from lxml import doctestcompare
doctestcompare.temp_install(del_module=__name__)
|
mtrpires/pySpidy
|
refs/heads/master
|
search_bot.py
|
1
|
# -*- coding: utf-8
# @mtrpires - http://github.com/mtrpires
from crawler_functions import changePage
from crawler_functions import createCSV
from crawler_functions import downloadHTML
from crawler_functions import fetchLinks
from crawler_functions import findContent
from crawler_functions import findResults
from crawler_functions import numPages
from crawler_functions import setSearchParams
from crawler_functions import storeInfo
from time import sleep
from random import uniform
# Google base search URL
baseURL = "https://www.google.com/search?"
# Initial params
kind = "Revista"
site = "revistaepoca.globo.com"
searchTerm = "Eike Batista"
dateMin = "05/01/2012"
dateMax = "05/31/2013"
perPage = 10
start = 0
# Gets the encoded URL to start the search
params = setSearchParams(site, searchTerm, dateMin, dateMax, perPage, start)
# Downloads the first page from Google
currentHTML = downloadHTML(baseURL, params)
# Saves the number of results. This number is
# used to calculate, roughly, the amount of pages.
results = findResults(currentHTML)
pages = numPages(results)
# creates the CSV with the toprow
# createCSV()
# empty list where MediaObjects will live.
objectList = []
# The search routine. It goes from page one
# until results/10 + 1 pages. Ex. 213 results will
# render 22 pages: 21 with 10 results and a last one with 3.
# This is only an estimate. Google itself sometimes is
# not 100% sure how many results it gets.
page_number = 1
for page in range(pages-start/10):
print("Page: %s" % page_number)
# Random sleep
randomSleep = uniform(2, 5)
# Populates content list with Google Results
# from the HTML Soup
contentList = findContent(currentHTML)
# Append to the list of objects all relevant information
# from all the links in that page.
objectList.append(storeInfo(contentList, kind))
# To avoid annoying Google, we wait for a random
# short interval.
print("Catching breath for %s seconds." % randomSleep)
sleep(randomSleep)
# Go to the next page
divider_text = '-' * 79
print("Changing page.\n\n%s\n" % divider_text)
params = changePage(params)
# Downloads the content of the next page and converts
# them into a BeautifulSoup object.
currentHTML = downloadHTML(baseURL, changePage(params))
page_number += 1
# Uses the objectList to download all the URLs and
# populate the CSV with relevant information.
#fetchLinks(objectList)
print("The end.")
|
CodethinkLabs/python-consonant
|
refs/heads/master
|
consonant/web/services.py
|
1
|
# Copyright (C) 2014 Codethink Limited.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
"""Consonant web service implementations."""
import copy
import json
import yaml
from twisted.internet import reactor
from twisted.web.server import Site
from twisted.web.resource import Resource
import consonant
class PageContext(object):
"""Provides contextual information for pages handling different URLs."""
def __init__(self):
self.store = None
self.ref = None
self.commit = None
self.klass = None
self.object = None
self.property = None
def extend(self, **kwargs):
"""Return a copy of the context, with additional members set."""
new_context = copy.copy(self)
for key, val in kwargs.iteritems():
setattr(new_context, key, val)
return new_context
def resolve_commit(self):
"""Return the commit to use for accessing the store."""
if self.commit:
return self.commit
else:
return self.store.ref(self.ref).head
class Page(Resource):
"""Base class for URL handlers."""
def __init__(self, context):
Resource.__init__(self)
self.context = context
self.put_children()
def put_children(self):
"""Construction hook to add statically routed subpages."""
pass
def respond(self, request, data, content_type=None):
"""Convert data to return an appropriate response to a request."""
# allow cross-domain requests to this web service
request.setHeader('Access-Control-Allow-Origin', '*')
# parse the accept header if there is one
if request.getHeader('Accept'):
if ';' in request.getHeader('Accept'):
accepted_types = request.getHeader('Accept').split(';')[0]
else:
accepted_types = request.getHeader('Accept')
accepted_types = [x.strip() for x in accepted_types.split(',')]
else:
accepted_types = []
if content_type is not None:
if not accepted_types or content_type in accepted_types:
request.setHeader('Content-Type', content_type)
return data
else:
# the content type and the accept type don't match
request.setResponseCode(406)
return ''
else:
if any(t in accepted_types for t in ('application/json', '*/*')):
request.setHeader('Content-Type', 'application/json')
return json.dumps(
data, cls=consonant.util.converters.JSONObjectEncoder)
elif 'application/x-yaml' in accepted_types:
request.setHeader('Content-Type', 'application/x-yaml')
return yaml.dump(data, default_flow_style=False)
else:
# the accept header is unsupported, we only support
# JSON and YAML
request.setResponseCode(406)
return ''
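# Illustrative behaviour (editor's addition): with data = {'name': 'store'}
# and no explicit content_type, a request carrying "Accept: application/json"
# or "Accept: */*" gets a JSON body, "Accept: application/x-yaml" gets YAML,
# and any other Accept value (including a missing Accept header in this
# branch) results in a 406 response, exactly as coded above.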
class RefPage(Page):
"""Renders /, /refs/:ref."""
def __init__(self, context):
Page.__init__(self, context)
def render_GET(self, request):
"""Return a response for a /refs/:ref request."""
ref = self.context.store.ref(self.context.ref)
return self.respond(request, ref)
def put_children(self):
"""Define subpages for /."""
self.putChild('name', NamePage(self.context))
self.putChild('schema', SchemaPage(self.context))
self.putChild('services', ServicesPage(self.context))
self.putChild('classes', ClassesPage(self.context))
self.putChild('objects', ObjectsPage(self.context))
self.putChild('refs', RefsPage(self.context))
self.putChild('commits', CommitsPage(self.context))
self.putChild('transactions', TransactionsPage(self.context))
class NamePage(Page):
"""Renders /name and /ref/:ref/name."""
def render_GET(self, request):
"""Return a response for a /name request."""
commit = self.context.resolve_commit()
name = self.context.store.name(commit)
return self.respond(request, name)
class SchemaPage(Page):
"""Renders /schema and /ref/:ref/schema."""
def render_GET(self, request):
"""Return a response for a /schema request."""
commit = self.context.resolve_commit()
schema = self.context.store.schema(commit)
return self.respond(request, schema)
class ServicesPage(Page):
"""Renders /services and /ref/:ref/services."""
def render_GET(self, request):
"""Return a response for a /services request."""
commit = self.context.resolve_commit()
services = self.context.store.services(commit)
return self.respond(request, services)
class ClassesPage(Page):
"""Renders /classes and /ref/:ref/classes."""
def render_GET(self, request):
"""Return a response for a /classes request."""
commit = self.context.resolve_commit()
classes = self.context.store.classes(commit)
return self.respond(request, classes)
def getChild(self, name, request):
"""Return a subpage to handle /classes/:name."""
commit = self.context.resolve_commit()
klass = self.context.store.klass(commit, name)
context = self.context.extend(klass=klass)
return ClassPage(context)
class ClassPage(Page):
"""Renders /classes/:class and /ref/:ref/classes/:class."""
def render_GET(self, request):
"""Return a response for a /classes/:class request."""
return self.respond(request, self.context.klass)
def put_children(self):
"""Define a subpage for /classes/:class/objects."""
self.putChild('objects', ObjectsPage(self.context))
class ObjectsPage(Page):
"""Renders /objects, /classes/:class/objects, /ref/:ref/objects etc."""
def render_GET(self, request):
"""Return a response for an /objects request."""
commit = self.context.resolve_commit()
objects = self.context.store.objects(commit, self.context.klass)
return self.respond(request, objects)
def getChild(self, name, request):
"""Return a subpage to handle /objects or /class/:name/objects."""
commit = self.context.resolve_commit()
object = self.context.store.object(commit, name, self.context.klass)
return ObjectPage(self.context.extend(object=object))
class ObjectPage(Page):
"""Renders /objects/:uuid, /classes/:class/objects/:uuid etc."""
def render_GET(self, request):
"""Return a response for an /object/:uuid request."""
return self.respond(request, self.context.object)
def put_children(self):
"""Define subpages for /objects/:uuid."""
self.putChild('properties', PropertiesPage(self.context))
self.putChild('class', ClassNamePage(self.context))
class ClassNamePage(Page):
"""Renders /objects/:uuid/class etc."""
isLeaf = True
def render_GET(self, request):
"""Return a response for a /object/:uuid/class request."""
return self.respond(request, self.context.object.klass.name)
class PropertiesPage(Page):
"""Renders /objects/:uuid/properties etc."""
def render_GET(self, request):
"""Return a response for a /object/:uuid/properties request."""
properties = dict((n, p.value) for n, p in self.context.object)
return self.respond(request, properties)
def getChild(self, name, request):
"""Return a subpage to handle /objects/:uuid/properties/:name."""
property = self.context.object.properties[name]
return PropertyPage(self.context.extend(property=property))
class PropertyPage(Page):
"""Renders /objects/:uuid/properties/:property etc."""
isLeaf = True
def render_GET(self, request):
"""Return a response for a /object/:uuid/properties/:name request."""
if isinstance(self.context.property,
consonant.store.properties.RawProperty):
commit = self.context.resolve_commit()
data = self.context.store.raw_property_data(
commit, self.context.object, self.context.property.name)
return self.respond(request, data, self.context.property.value)
else:
return self.respond(request, self.context.property.value)
class RefsPage(Page):
"""Renders /refs and /refs/:ref/refs."""
def render_GET(self, request):
"""Return a response for a /refs request."""
refs = self.context.store.refs()
return self.respond(request, refs)
def getChild(self, name, request):
"""Return a subpage to handle /refs/:name."""
context = self.context.extend(ref=name)
return RefPage(context)
class CommitsPage(Page):
"""Renders /commits, /refs/:ref/commits etc."""
def getChild(self, name, request):
"""Return a subpage to handle /commits/:sha1."""
commit = self.context.store.commit(name)
context = self.context.extend(commit=commit)
return CommitPage(context)
class CommitPage(Page):
"""Renders /commits/:sha1, /refs/:ref/commits/:sha1 etc."""
def render_GET(self, request):
"""Return a response for a /commit/:sha1 request."""
return self.respond(request, self.context.commit)
def put_children(self):
"""Define subpages for /commit/:sha1."""
self.putChild('name', NamePage(self.context))
self.putChild('schema', SchemaPage(self.context))
self.putChild('services', ServicesPage(self.context))
self.putChild('classes', ClassesPage(self.context))
self.putChild('objects', ObjectsPage(self.context))
self.putChild('refs', RefsPage(self.context.extend(commit=None)))
self.putChild('commits', CommitsPage(self.context.extend(commit=None)))
class TransactionsPage(Page):
"""Renders /transactions."""
def render_POST(self, request):
"""Try to apply a submitted transaction and return a response."""
if not request.getHeader('Content-Type') == 'multipart/mixed':
request.setResponseCode(406)
return ''
else:
body = request.content.read()
parser = consonant.transaction.parser.TransactionParser()
transaction = parser.parse(body)
self.context.store.apply_transaction(transaction)
return ''
class SimpleWebService(object):
"""A simple Consonant web service.
This web service implementation does not support authentication or
application-specific hooks.
"""
def __init__(self, store):
self.store = store
def run(self, port):
"""Serve a Consonant web service over the given port."""
context = PageContext().extend(store=self.store, ref='master')
resource = RefPage(context)
factory = Site(resource)
reactor.listenTCP(port, factory)
reactor.run()
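# Illustrative usage sketch (editor's addition): `store` below stands for any
# Consonant store object exposing the methods used above (ref(), commit(),
# name(), schema(), classes(), objects(), ...); obtaining one is outside the
# scope of this module.
#
#     service = SimpleWebService(store)
#     service.run(8989)       # serves the web API until the reactor stops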
|
salguarnieri/intellij-community
|
refs/heads/master
|
python/testData/deprecation/deprecatedModule.py
|
83
|
import warnings
warnings.warn("the deprecated module is deprecated; use a non-deprecated module instead",
DeprecationWarning, 2)
|
Maximilian-Reuter/SickRage-1
|
refs/heads/master
|
lib/hachoir_metadata/metadata_item.py
|
94
|
from hachoir_core.tools import makeUnicode, normalizeNewline
from hachoir_core.error import HACHOIR_ERRORS
from hachoir_metadata import config
from hachoir_metadata.setter import normalizeString
MIN_PRIORITY = 100
MAX_PRIORITY = 999
QUALITY_FASTEST = 0.0
QUALITY_FAST = 0.25
QUALITY_NORMAL = 0.5
QUALITY_GOOD = 0.75
QUALITY_BEST = 1.0
class DataValue:
def __init__(self, value, text):
self.value = value
self.text = text
class Data:
def __init__(self, key, priority, description,
text_handler=None, type=None, filter=None, conversion=None):
"""
handler is only used if the value is neither str nor unicode; prototype:
def handler(value) -> str/unicode
"""
assert MIN_PRIORITY <= priority <= MAX_PRIORITY
assert isinstance(description, unicode)
self.metadata = None
self.key = key
self.description = description
self.values = []
if type and not isinstance(type, (tuple, list)):
type = (type,)
self.type = type
self.text_handler = text_handler
self.filter = filter
self.priority = priority
self.conversion = conversion
def _createItem(self, value, text=None):
if text is None:
if isinstance(value, unicode):
text = value
elif self.text_handler:
text = self.text_handler(value)
assert isinstance(text, unicode)
else:
text = makeUnicode(value)
return DataValue(value, text)
def add(self, value):
if isinstance(value, tuple):
if len(value) != 2:
raise ValueError("Data.add() only accept tuple of 2 elements: (value,text)")
value, text = value
else:
text = None
# Skip value 'None'
if value is None:
return
if isinstance(value, (str, unicode)):
value = normalizeString(value)
if not value:
return
# Apply the conversion callback if one is set; otherwise plain str values
# are decoded to unicode using charset ISO-8859-1 below
if self.conversion:
try:
new_value = self.conversion(self.metadata, self.key, value)
except HACHOIR_ERRORS, err:
self.metadata.warning("Error during conversion of %r value: %s" % (
self.key, err))
return
if new_value is None:
dest_types = " or ".join(str(item.__name__) for item in self.type)
self.metadata.warning("Unable to convert %s=%r (%s) to %s" % (
self.key, value, type(value).__name__, dest_types))
return
if isinstance(new_value, tuple):
if text:
value = new_value[0]
else:
value, text = new_value
else:
value = new_value
elif isinstance(value, str):
value = unicode(value, "ISO-8859-1")
if self.type and not isinstance(value, self.type):
dest_types = " or ".join(str(item.__name__) for item in self.type)
self.metadata.warning("Key %r: value %r type (%s) is not %s" % (
self.key, value, type(value).__name__, dest_types))
return
# Skip empty strings
if isinstance(value, unicode):
value = normalizeNewline(value)
if config.MAX_STR_LENGTH \
and config.MAX_STR_LENGTH < len(value):
value = value[:config.MAX_STR_LENGTH] + "(...)"
# Skip duplicates
if value in self:
return
# Use filter
if self.filter and not self.filter(value):
self.metadata.warning("Skip value %s=%r (filter)" % (self.key, value))
return
# For strings, if you have "verylongtext" and "verylo",
# keep the longer value
if isinstance(value, unicode):
for index, item in enumerate(self.values):
item = item.value
if not isinstance(item, unicode):
continue
if value.startswith(item):
# Find longer value, replace the old one
self.values[index] = self._createItem(value, text)
return
if item.startswith(value):
# Find truncated value, skip it
return
# Add new value
self.values.append(self._createItem(value, text))
def __len__(self):
return len(self.values)
def __getitem__(self, index):
return self.values[index]
def __contains__(self, value):
for item in self.values:
if value == item.value:
return True
return False
def __cmp__(self, other):
return cmp(self.priority, other.priority)
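# Illustrative sketch (editor's addition): a standalone Data item with no
# type/filter/conversion, so self.metadata is never consulted.
#
#     d = Data("author", 100, u"Author")
#     d.add(u"Alice")
#     d.add(u"Ali")           # skipped: prefix of an already stored value
#     d.add(u"Alice Smith")   # replaces u"Alice" (the longer value wins)
#     [item.text for item in d]   # -> [u'Alice Smith']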
|
jostep/tensorflow
|
refs/heads/master
|
tensorflow/python/kernel_tests/softsign_op_test.py
|
91
|
# Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for Softsign and SoftsignGrad."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
from tensorflow.python.framework import constant_op
from tensorflow.python.ops import gradient_checker
from tensorflow.python.ops import nn_ops
import tensorflow.python.ops.nn_grad # pylint: disable=unused-import
from tensorflow.python.platform import test
class SoftsignTest(test.TestCase):
def _npSoftsign(self, np_features):
return np_features / (1 + np.abs(np_features))
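# A couple of worked values for reference (editor's note): softsign(1) = 1/2,
# softsign(-3) = -3/4, and the output always stays strictly inside (-1, 1).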
def _testSoftsign(self, np_features, use_gpu=False):
np_softsign = self._npSoftsign(np_features)
with self.test_session(use_gpu=use_gpu):
softsign = nn_ops.softsign(np_features)
tf_softsign = softsign.eval()
self.assertAllClose(np_softsign, tf_softsign)
self.assertShapeEqual(np_softsign, softsign)
def testNumbers(self):
for t in [np.float, np.double]:
self._testSoftsign(
np.array([[-9, 7, -5, 3, -1], [1, -3, 5, -7, 9]]).astype(t),
use_gpu=False)
self._testSoftsign(
np.array([[-9, 7, -5, 3, -1], [1, -3, 5, -7, 9]]).astype(t),
use_gpu=True)
def testGradient(self):
with self.test_session():
x = constant_op.constant(
[-0.9, -0.7, -0.5, -0.3, -0.1, 0.1, 0.3, 0.5, 0.7, 0.9],
shape=[2, 5],
name="x")
y = nn_ops.softsign(x, name="softsign")
x_init = np.asarray(
[[-0.9, -0.7, -0.5, -0.3, -0.1], [0.1, 0.3, 0.5, 0.7, 0.9]],
dtype=np.float32,
order="F")
err = gradient_checker.compute_gradient_error(
x, [2, 5], y, [2, 5], x_init_value=x_init)
print("softsign (float) gradient err = ", err)
self.assertLess(err, 1e-4)
def testWarnInts(self):
# NOTE(irving): Actually I don't know how to intercept the warning, but
# let's make sure it runs. I promised I've looked, and there was a warning.
with self.test_session():
nn_ops.softsign(constant_op.constant(7)).eval()
if __name__ == "__main__":
test.main()
|
nagyistoce/euca2ools
|
refs/heads/master
|
euca2ools/commands/ec2/deletesubnet.py
|
5
|
# Copyright 2013-2014 Eucalyptus Systems, Inc.
#
# Redistribution and use of this software in source and binary forms,
# with or without modification, are permitted provided that the following
# conditions are met:
#
# Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
#
# Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
from requestbuilder import Arg
from euca2ools.commands.ec2 import EC2Request
class DeleteSubnet(EC2Request):
DESCRIPTION = 'Delete a VPC subnet'
ARGS = [Arg('SubnetId', metavar='SUBNET',
help='ID of the subnet to delete (required)')]
|
himleyb85/django
|
refs/heads/master
|
django/db/migrations/optimizer.py
|
127
|
from __future__ import unicode_literals
class MigrationOptimizer(object):
"""
Powers the optimization process, where you provide a list of Operations
and you are returned a list of equal or shorter length - operations
are merged into one if possible.
For example, a CreateModel and an AddField can be optimized into a
new CreateModel, and CreateModel and DeleteModel can be optimized into
nothing.
"""
def optimize(self, operations, app_label=None):
"""
Main optimization entry point. Pass in a list of Operation instances,
get out a new list of Operation instances.
Unfortunately, due to the scope of the optimization (two combinable
operations might be separated by several hundred others), this can't be
done as a peephole optimization with checks/output implemented on
the Operations themselves; instead, the optimizer looks at each
individual operation and scans forwards in the list to see if there
are any matches, stopping at boundaries - operations which can't
be optimized over (RunSQL, operations on the same field/model, etc.)
The inner loop is run until the starting list is the same as the result
list, and then the result is returned. This means that operation
optimization must be stable and always return an equal or shorter list.
The app_label argument is optional, but if you pass it you'll get more
efficient optimization.
"""
# Internal tracking variable for test assertions about # of loops
self._iterations = 0
while True:
result = self.optimize_inner(operations, app_label)
self._iterations += 1
if result == operations:
return result
operations = result
def optimize_inner(self, operations, app_label=None):
"""
Inner optimization loop.
"""
new_operations = []
for i, operation in enumerate(operations):
# Compare it to each operation after it
for j, other in enumerate(operations[i + 1:]):
in_between = operations[i + 1:i + j + 1]
result = operation.reduce(other, in_between, app_label)
if isinstance(result, list):
# Optimize! Add result, then remaining others, then return
new_operations.extend(result)
new_operations.extend(in_between)
new_operations.extend(operations[i + j + 2:])
return new_operations
if not result:
# We can't optimize across `other`.
new_operations.append(operation)
break
else:
new_operations.append(operation)
return new_operations
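# Illustrative sketch (editor's addition); the model and field names are made
# up for the example, but only the public migrations API is used:
#
#     from django.db import models
#     from django.db.migrations import operations
#
#     ops = [
#         operations.CreateModel('Author', [('id', models.AutoField(primary_key=True))]),
#         operations.AddField('Author', 'age', models.IntegerField(null=True)),
#     ]
#     MigrationOptimizer().optimize(ops, app_label='books')
#     # -> a single CreateModel('Author', ...) that already contains the field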
|
elijh/bitmask_client
|
refs/heads/master
|
src/leap/bitmask/services/eip/eipspec.py
|
8
|
# -*- coding: utf-8 -*-
# eipspec.py
# Copyright (C) 2013 LEAP
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
# Schemas dict
# To add a schema for a version you should follow the form:
# { '1': schema_v1, '2': schema_v2, ... etc }
# so for instance, to add the '2' version, you should do:
# eipservice_config_spec['2'] = schema_v2
eipservice_config_spec = {}
eipservice_config_spec['1'] = {
'description': 'sample eip service config',
'type': 'object',
'properties': {
'serial': {
'type': int,
'default': 1,
'required': ["True"]
},
'version': {
'type': int,
'default': 1,
'required': ["True"]
},
'clusters': {
'type': list,
'default': [
{"label": {
"en": "Location Unknown"},
"name": "location_unknown"}]
},
'gateways': {
'type': list,
'default': [
{"capabilities": {
"adblock": True,
"filter_dns": True,
"ports": ["80", "53", "443", "1194"],
"protocols": ["udp", "tcp"],
"transport": ["openvpn"],
"user_ips": False},
"cluster": "location_unknown",
"host": "location.example.org",
"ip_address": "127.0.0.1"}]
},
'locations': {
'type': dict,
'default': {}
},
'openvpn_configuration': {
'type': dict,
'default': {
"auth": None,
"cipher": None,
"tls-cipher": None}
}
}
}
def get_schema(version):
"""
Returns the schema corresponding to the version given.
:param version: the version of the schema to get.
:type version: str
:rtype: dict or None if the version is not supported.
"""
schema = eipservice_config_spec.get(version, None)
return schema
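# Illustrative usage (editor's addition):
#
#     get_schema('1')    # -> the version-1 schema dict defined above
#     get_schema('2')    # -> None until a schema_v2 is registered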
|
torwag/micropython
|
refs/heads/master
|
tests/bench/loop_count-5.1-while_down_ne_localvar.py
|
102
|
import bench
def test(num):
zero = 0
while num != zero:
num -= 1
bench.run(test)
|
mapr-demos/wifi-sensor-demo
|
refs/heads/master
|
python/mpu6050/mpuserver.py
|
1
|
import micropython
micropython.alloc_emergency_exception_buf(100)
from machine import Pin, reset, disable_irq, enable_irq
import gc
from mpu6050 import MPU
import socket
import select
import time
default_port = 8000
default_irq_pin = 4
default_write_interval = 10
default_gc_interval = 1000
def tojson(values):
inner = []
for item in values:
msg = ('['
+ (', '.join(str(x) for x in item))
+ ']')
inner.append(msg)
return ('[' + ','.join(inner) + ']\n')
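# For example, tojson([[1, 2], [3.5]]) returns '[[1, 2],[3.5]]\n' -- each inner
# list is rendered with ', ' separators and the outer list joins them with ','.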
class MPUServer(object):
def __init__(self, mpu,
port=default_port,
write_interval=default_write_interval,
gc_interval=default_gc_interval,
irq_pin=default_irq_pin):
self.mpu = mpu
self.port = port
self.write_interval = write_interval
self.gc_interval = gc_interval
self.irq_pin = irq_pin
self.last_isr = 0
self.flag_reset_gyro = False
self.init_pins()
self.init_socket()
self.mpu.calibrate()
def __repr__(self):
return '<{} @ {}>'.format(self.__class__.__name__, self.port)
def init_pins(self):
self.pin_irq = Pin(self.irq_pin, Pin.IN, Pin.PULL_UP)
self.pin_irq.irq(handler=self.isr, trigger=Pin.IRQ_FALLING)
def init_socket(self):
self.sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
def isr(self, pin):
# debounce
if time.ticks_diff(time.ticks_ms(), self.last_isr) < 10:
return
print('! reset gyro request')
self.flag_reset_gyro = True
self.last_isr = time.ticks_ms()
def serve(self):
print('starting mpu server on port {}'.format(self.port))
lastgc = lastsent = lastread = time.ticks_ms()
while True:
now = time.ticks_ms()
write_dt = time.ticks_diff(now, lastsent)
read_dt = time.ticks_diff(now, lastread)
gc_dt = time.ticks_diff(now, lastgc)
time.sleep_ms(max(0, 1-read_dt))
if self.flag_reset_gyro:
self.mpu.filter.reset_gyro()
self.flag_reset_gyro = False
values = self.mpu.read_position()
lastread = now
if write_dt >= self.write_interval:
lastsent = time.ticks_ms()
self.sock.sendto(tojson(values), ('192.168.4.2', 8000))
if gc_dt >= self.gc_interval:
gc.collect()
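# Illustrative wiring (a sketch only; the MPU constructor arguments and the
# receiving host are assumptions, not part of this file):
#
#     mpu = MPU()                                    # hypothetical construction
#     server = MPUServer(mpu, port=8000, irq_pin=4)
#     server.serve()                                 # blocks, streaming JSON over UDP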
|
polarise/RP-python
|
refs/heads/master
|
intron_scores_by_flank.py
|
1
|
#!/home/paulk/software/bin/python
from __future__ import division
from sys import argv,exit,stderr
from subprocess import Popen,PIPE
from random import choice
from re import search
def PrintStatic(line):
stderr.write("\r%s" %line.ljust(50)+" "*20)
stderr.flush()
def intron_length(region1,region2,pos1,pos2):
c1,st1_sp1,sd1 = region1.split(':')
c2,st2_sp2,sd2 = region2.split(':')
st1,sp1 = map(int,st1_sp1.split('-'))
st2,sp2 = map(int,st2_sp2.split('-'))
if c1 != c2: raise ValueError("Conflict in chromosome names")
if sd1 != sd2: raise ValueError("Conflict in strands")
if sd1 == '+': return abs((st1+pos1) - (sp2-pos2))
elif sd1 == '-': return abs((st2+pos2) - (sp1-pos1))
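# For example, on the '+' strand:
#     intron_length("chr1:100-200:+", "chr1:300-400:+", 3, 3)
# computes abs((100 + 3) - (400 - 3)) == 294.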
f = open("resources/Homo_sapiens.GRCh37.66.gtf.upstream_introns")
data = dict()
i = 0
for row in f:
data['row_%s' % i] = row.strip().split('\t')
i += 1
f.close()
f = open("resources/Homo_sapiens.GRCh37.66.gtf.downstream_introns")
i = 0
for row in f:
data['row_%s' % i] += row.strip().split('\t')
i += 1
f.close()
up_scores = dict()
f = open("upstream_intron.full")
for row in f:
l = row.strip().split('\t')
# PrintStatic(str(l))
if len(l) != 5: continue
up_scores[l[0],l[1]] = l[4]
f.close()
down_scores = dict()
f = open("downstream_intron.full")
for row in f:
l = row.strip().split('\t')
# PrintStatic(str(l))
if len(l) != 5: continue
down_scores[l[0],l[1]] = l[4]
f.close()
print "up5SS\tup3SS\tup_len\tup3_score\tdown5SS\tdown3SS\tdown_len\tdown3_score"
c = 0
for d in data:
l = data[d]
if c > 5: break
if len(l) != 8: continue
try:
up_score = up_scores[l[0],l[1]]
except KeyError:
try:
up_score = up_scores[l[1],l[0]]
except KeyError:
up_score = 'NA'
try:
down_score = down_scores[l[4],l[5]]
except KeyError:
try:
down_score = down_scores[l[5],l[4]]
except KeyError:
down_score = 'NA'
print "\t".join(map(str,[l[0],l[1],intron_length(l[0],l[1],3,3),up_score,l[4],l[5],intron_length(l[4],l[5],3,3),down_score]))
c += 0
|
joshuajharris/dotfiles
|
refs/heads/master
|
Alfred.alfredpreferences/workflows/user.workflow.2E020B45-B449-45A0-A753-DD2F58A2AA27/slackfred-snooze.py
|
1
|
import sys
import argparse
from workflow import Workflow, web, PasswordNotFound
import json
def slack_keys():
wf = Workflow()
try:
slack_keys = wf.get_password('slack_api_key')
except PasswordNotFound:
wf.add_item(title='No API key set. Please run slt',
valid=False)
wf.send_feedback()
return 0
keys = slack_keys.split(",")
return keys
def slack_list(keys):
wf = Workflow()
slack_snooze = []
for key in keys:
api_key = str(key)
slack_auth = web.get('https://slack.com/api/auth.test?token=' + api_key + '&pretty=1').json()
if slack_auth['ok'] is False:
wf.add_item(title='Authentication failed. Check your API key',
valid=False)
wf.send_feedback()
break
else:
slack_dnd = web.get('https://slack.com/api/dnd.info?token={token}&pretty=1'.format(token=api_key)).json()
if slack_dnd['snooze_enabled'] is True:
slack_snooze.append({'team': slack_auth['team'], 'status': 'Snoozed'})
else:
slack_snooze.append({'team': slack_auth['team'], 'status': 'Active'})
return slack_snooze
def search_slack_names(slack_list):
elements = []
elements.append(slack_list['team'])
name_sort = sorted(elements, key=len)
return u' '.join(name_sort)
def main(wf):
parser = argparse.ArgumentParser()
parser.add_argument('--snooze', dest='snooze', nargs='?', default=None)
parser.add_argument('query', nargs='?', default=60)
args = parser.parse_args(wf.args)
if args.snooze:
query = args.snooze
carrot = query.find('>')
team = query[7:(carrot-1)]
snooze_time = query[carrot+2:]
for key in slack_keys():
api_key = str(key)
slack_auth = web.get('https://slack.com/api/auth.test?token=' + api_key + '&pretty=1').json()
if slack_auth['ok'] is True and slack_auth['team'] == team:
if snooze_time != '0':
dnd_url = 'https://slack.com/api/dnd.setSnooze?token={0}&num_minutes={1}'.format(api_key, snooze_time)
web.get(dnd_url)
else:
dnd_url = 'https://slack.com/api/dnd.endSnooze?token={0}'.format(api_key)
web.get(dnd_url)
if len(wf.args):
query = wf.args[0]
def wrapper():
return slack_list(keys=slack_keys())
slack_snooze = wf.cached_data('slacksnooze', wrapper, max_age=5)
if query:
slack_snooze = wf.filter(query, slack_snooze, key=search_slack_names)
if len(slack_snooze) == 0:
wf.add_item(title='Enter time in minutes',
arg=query,
valid=True)
else:
for team in slack_snooze:
wf.add_item(title='{0} -- Status: {1}'.format(team['team'], team['status']),
autocomplete='Snooze {0} > '.format(team['team']),
arg=query,
valid=True)
wf.send_feedback()
if __name__==u"__main__":
wf = Workflow()
sys.exit(wf.run(main))
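# Illustrative query format (workspace name made up): entering
# "Snooze MyTeam > 30" in Alfred snoozes the MyTeam workspace for 30 minutes,
# while "Snooze MyTeam > 0" ends an active snooze.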
|
blueyed/pytest_django
|
refs/heads/master
|
tests/urls.py
|
2
|
try:
from django.conf.urls import patterns # Django >1.4
except ImportError:
from django.conf.urls.defaults import patterns # Django 1.3
urlpatterns = patterns(
'',
(r'admin-required/', 'tests.views.admin_required_view'),
)
|
Xangis/django-ckeditor
|
refs/heads/master
|
ckeditor/views.py
|
1
|
from datetime import datetime
import mimetypes
import os
import re
import StringIO
from urlparse import urlparse, urlunparse
from django.conf import settings
from django.core.files.storage import default_storage
from django.core.files.uploadedfile import InMemoryUploadedFile
from django.http import HttpResponse
from django.shortcuts import render_to_response
from django.template import RequestContext
try:
from PIL import Image, ImageOps
except ImportError:
import Image
import ImageOps
try:
from django.views.decorators.csrf import csrf_exempt
except ImportError:
# monkey patch this with a dummy decorator which just returns the
    # same function (for compatibility with pre-1.1 Djangos)
def csrf_exempt(fn):
return fn
THUMBNAIL_SIZE = (75, 75)
def get_available_name(name):
"""
Returns a filename that's free on the target storage system, and
available for new content to be written to.
"""
dir_name, file_name = os.path.split(name)
file_root, file_ext = os.path.splitext(file_name)
# If the filename already exists, keep adding an underscore (before the
# file extension, if one exists) to the filename until the generated
# filename doesn't exist.
while default_storage.exists(name):
file_root += '_'
# file_ext includes the dot.
name = os.path.join(dir_name, file_root + file_ext)
return name
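# For example, if 'uploads/pic.png' already exists on the storage backend this
# returns 'uploads/pic_.png' (and 'uploads/pic__.png' if that exists too).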
def get_thumb_filename(file_name):
"""
Generate thumb filename by adding _thumb to end of
filename before . (if present)
"""
return '%s_thumb%s' % os.path.splitext(file_name)
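# For example, get_thumb_filename('photos/img.png') returns 'photos/img_thumb.png'.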
def get_mimetype(extension):
mimetypes.init()
return mimetypes.types_map[extension.lower()]
def create_thumbnail(filename):
thumbnail_filename = get_thumb_filename(filename)
thumbnail_format = get_mimetype(os.path.splitext(filename)[1])
pil_format = thumbnail_format.split('/')[1]
image = default_storage.open(filename)
image = Image.open(image)
# Convert to RGB if necessary
# Thanks to Limodou on DjangoSnippets.org
# http://www.djangosnippets.org/snippets/20/
if image.mode not in ('L', 'RGB'):
image = image.convert('RGB')
# scale and crop to thumbnail
imagefit = ImageOps.fit(image, THUMBNAIL_SIZE, Image.ANTIALIAS)
thumbnail_io = StringIO.StringIO()
imagefit.save(thumbnail_io, format=pil_format)
thumbnail = InMemoryUploadedFile(thumbnail_io, None, thumbnail_filename, thumbnail_format,
thumbnail_io.len, None)
return default_storage.save(thumbnail_filename, thumbnail)
def get_media_url(path):
"""
Determine system file's media URL.
"""
return default_storage.url(path)
def get_upload_filename(upload_name, user):
# If CKEDITOR_RESTRICT_BY_USER is True upload file to user specific path.
if getattr(settings, 'CKEDITOR_RESTRICT_BY_USER', False):
user_path = user.username
else:
user_path = ''
# Generate date based path to put uploaded file.
date_path = datetime.now().strftime('%Y/%m/%d')
# Complete upload path (upload_path + date_path).
upload_path = os.path.join(settings.CKEDITOR_UPLOAD_PATH, user_path, \
date_path)
return get_available_name(os.path.join(upload_path, upload_name))
@csrf_exempt
def upload(request):
"""
Uploads a file and send back its URL to CKEditor.
TODO:
Validate uploads
"""
# Get the uploaded file from request.
upload = request.FILES['upload']
# Open output file in which to store upload.
upload_filename = get_upload_filename(upload.name, request.user)
mimetype, mimetype_secondary = get_mimetype(os.path.splitext(upload_filename)[1]).split('/')
image = default_storage.save(upload_filename, upload)
if mimetype == 'image':
create_thumbnail(image)
# Respond with Javascript sending ckeditor upload url.
url = get_media_url(image)
return HttpResponse("""
<script type='text/javascript'>
window.parent.CKEDITOR.tools.callFunction(%s, '%s');
</script>""" % (request.GET['CKEditorFuncNum'], url))
def get_image_files(user=None, path=''):
"""
Recursively walks all dirs under upload dir and generates a list of
full paths for each file found.
"""
# If a user is provided and CKEDITOR_RESTRICT_BY_USER is True,
# limit images to user specific path, but not for superusers.
STORAGE_DIRECTORIES = 0
STORAGE_FILES = 1
if user and not user.is_superuser and getattr(settings, \
'CKEDITOR_RESTRICT_BY_USER', False):
user_path = user.username
else:
user_path = ''
browse_path = os.path.join(settings.CKEDITOR_UPLOAD_PATH, user_path, path)
try:
storage_list = default_storage.listdir(browse_path)
except NotImplementedError:
return
except OSError:
return
for filename in storage_list[STORAGE_FILES]:
if os.path.splitext(filename)[0].endswith('_thumb'):
continue
filename = os.path.join(browse_path, filename)
yield filename
for directory in storage_list[STORAGE_DIRECTORIES]:
directory_path = os.path.join(path, directory)
for element in get_image_files(path=directory_path):
yield element
def get_image_browse_urls(user=None):
"""
Recursively walks all dirs under upload dir and generates a list of
    thumbnail and full image URLs for each file found.
"""
images = []
for filename in get_image_files(user=user):
images.append({
'thumb': get_media_url(get_thumb_filename(filename)),
'src': get_media_url(filename)
})
return images
def browse(request):
context = RequestContext(request, {
'images': get_image_browse_urls(request.user),
})
return render_to_response('browse.html', context)
|
danielquinn/spirithunter
|
refs/heads/master
|
src/aspects/arbiters/weather.py
|
1
|
from datetime import datetime
import requests
from django.conf import settings
from django.contrib.gis.geos import fromstr
from django.core.cache import cache
from citytemp.models import City, Temperature
from spirithunter.logging import LogMixin
from ..models.elements import Element
from ..models.weather import WeatherCounter
from ..exceptions import ApiLimitExceeded
from .base import Arbiter
class WeatherArbiter(LogMixin, Arbiter):
TEMPERATURE_THRESHOLD = 5
WIND_THRESHOLD = 20
WUNDERGROUND_KEY = "88882c941f645b5c"
CACHE_TIME = 14400 # 4 hours
CONDITION_SUNNY = "sun"
CONDITIONS = (
(CONDITION_SUNNY, "Sunny"),
)
WEIGHT_HOT = 7
WEIGHT_COLD = 7
WEIGHT_WIND = 15
def __init__(self, lat, lng):
"""
API call to get current weather conditions
"""
self.centre = fromstr('POINT(%s %s)' % (lng, lat))
self.city = City.objects.distance(self.centre).order_by("distance")[0]
self.current_temperature = None
self.wind = None
self.conditions = None
try:
self._get_weather()
except ApiLimitExceeded:
self.logger.error("API limit exceeded :-(")
# The default of None ensures that the weather tests are all False
pass
else:
self.now = datetime.now()
self.temperature = Temperature.objects.get(
city=self.city,
month=self.now.month,
day=self.now.day
)
def get_results(self):
r = {"elements": [], "facets": [], "nationalities": []}
if self._is_hot():
self.logger.debug("HOT!")
r["elements"].append((Element.ELEMENT_HOT, self.WEIGHT_HOT))
if self._is_cold():
self.logger.debug("COLD!")
r["elements"].append((Element.ELEMENT_COLD, self.WEIGHT_COLD))
if self._is_windy():
self.logger.debug("WINDY!")
r["elements"].append((Element.ELEMENT_WIND, self.WEIGHT_WIND))
return r
def _is_hot(self):
if self.current_temperature and self.temperature.mean:
compare = self.temperature.mean + self.TEMPERATURE_THRESHOLD
return self.current_temperature > compare
return False
def _is_cold(self):
if self.current_temperature and self.temperature.mean:
compare = self.temperature.mean - self.TEMPERATURE_THRESHOLD
return self.current_temperature < compare
return False
def _is_windy(self):
if self.wind:
return self.wind > self.WIND_THRESHOLD
return False
def _get_weather(self):
"""
API call to Wunderground for weather and temperature
"""
if not settings.ONLINE:
return {
"temperature": 0,
"wind": 0,
"conditions": "sun"
}
self.logger.debug("Acquiring weather data for {city}".format(
city=self.city.airport_code
))
current_weather = cache.get(self.city.airport_code)
if not current_weather:
WeatherCounter.count() # Might throw an ApiLimitExceeded exception
self.logger.debug("Querying Wunderground")
url = "http://{}/api/{}/conditions/q/{}/{}.json".format(
"api.wunderground.com",
self.WUNDERGROUND_KEY,
self.city.country.code.upper(),
self.city.airport_code.upper()
)
response = requests.get(url).json()
self.logger.debug(response)
wunderground = response.get("current_observation", {})
current_weather = {
"temperature": wunderground.get("temp_c"),
"wind": wunderground.get("wind_kph"),
"conditions": wunderground.get("icon")
}
cache.set(self.city.airport_code, current_weather, self.CACHE_TIME)
self.logger.debug(current_weather)
self.current_temperature = current_weather.get("temperature")
self.conditions = current_weather.get("conditions")
self.wind = current_weather.get("wind")
return current_weather
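# Illustrative usage (coordinates made up; assumes the citytemp City/Temperature
# data is loaded and, when settings.ONLINE is set, a reachable Wunderground API):
#
#     arbiter = WeatherArbiter(lat=55.95, lng=-3.19)
#     results = arbiter.get_results()
#     # e.g. {"elements": [(Element.ELEMENT_WIND, 15)], "facets": [], "nationalities": []}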
|
vitmod/dvbapp
|
refs/heads/master
|
lib/python/Plugins/SystemPlugins/SatelliteEquipmentControl/plugin.py
|
32
|
from Screens.Screen import Screen
from Screens.MessageBox import MessageBox
from Plugins.Plugin import PluginDescriptor
from Components.ConfigList import ConfigListScreen
from Components.ActionMap import ActionMap
from Components.config import config
from Components.NimManager import nimmanager as nimmgr
class SecParameterSetup(Screen, ConfigListScreen):
skin = """
<screen position="100,100" size="560,400" title="Satellite equipment setup" >
<widget name="config" position="10,10" size="540,390" />
</screen>"""
def __init__(self, session):
self.skin = SecParameterSetup.skin
self["actions"] = ActionMap(["SetupActions", "MenuActions"],
{
"ok": self.keySave,
"cancel": self.keyCancel,
"menu": self.closeRecursive,
}, -2)
Screen.__init__(self, session)
list = [
(_("Delay after diseqc reset command"), config.sec.delay_after_diseqc_reset_cmd),
(_("Delay after diseqc peripherial poweron command"), config.sec.delay_after_diseqc_peripherial_poweron_cmd),
(_("Delay after continuous tone disable before diseqc"), config.sec.delay_after_continuous_tone_disable_before_diseqc),
(_("Delay after final continuous tone change"), config.sec.delay_after_final_continuous_tone_change),
(_("Delay after last voltage change"), config.sec.delay_after_final_voltage_change),
(_("Delay between diseqc commands"), config.sec.delay_between_diseqc_repeats),
(_("Delay after last diseqc command"), config.sec.delay_after_last_diseqc_command),
(_("Delay after toneburst"), config.sec.delay_after_toneburst),
(_("Delay after change voltage before switch command"), config.sec.delay_after_change_voltage_before_switch_command),
(_("Delay after enable voltage before switch command"), config.sec.delay_after_enable_voltage_before_switch_command),
(_("Delay between switch and motor command"), config.sec.delay_between_switch_and_motor_command),
(_("Delay after set voltage before measure motor power"), config.sec.delay_after_voltage_change_before_measure_idle_inputpower),
(_("Delay after enable voltage before motor command"), config.sec.delay_after_enable_voltage_before_motor_command),
(_("Delay after motor stop command"), config.sec.delay_after_motor_stop_command),
(_("Delay after voltage change before motor command"), config.sec.delay_after_voltage_change_before_motor_command),
(_("Delay before sequence repeat"), config.sec.delay_before_sequence_repeat),
(_("Motor running timeout"), config.sec.motor_running_timeout),
(_("Motor command retries"), config.sec.motor_command_retries) ]
ConfigListScreen.__init__(self, list)
session = None
def confirmed(answer):
global session
if answer:
session.open(SecParameterSetup)
def SecSetupMain(Session, **kwargs):
global session
session = Session
session.openWithCallback(confirmed, MessageBox, _("Please do not change any values unless you know what you are doing!"), MessageBox.TYPE_INFO)
def SecSetupStart(menuid):
show = False
# other menu than "scan"?
if menuid != "scan":
return [ ]
# only show if DVB-S frontends are available
for slot in nimmgr.nim_slots:
if slot.isCompatible("DVB-S"):
return [(_("Satellite equipment setup"), SecSetupMain, "satellite_equipment_setup", None)]
return [ ]
def Plugins(**kwargs):
if (nimmgr.hasNimType("DVB-S")):
return PluginDescriptor(name=_("Satellite equipment setup"), description=_("Setup your satellite equipment"), where = PluginDescriptor.WHERE_MENU, needsRestart = False, fnc=SecSetupStart)
else:
return []
|
betoesquivel/fil2014
|
refs/heads/master
|
build/django/tests/i18n/urls.py
|
79
|
from __future__ import unicode_literals
from django.conf.urls.i18n import i18n_patterns
from django.http import HttpResponse, StreamingHttpResponse
from django.utils.translation import ugettext_lazy as _
urlpatterns = i18n_patterns('',
(r'^simple/$', lambda r: HttpResponse()),
(r'^streaming/$', lambda r: StreamingHttpResponse([_("Yes"), "/", _("No")])),
)
|
pasancario/telegraf
|
refs/heads/master
|
scripts/build.py
|
3
|
#!/usr/bin/python -u
import sys
import os
import subprocess
import time
import datetime
import shutil
import tempfile
import hashlib
import re
debug = False
################
#### Telegraf Variables
################
# Packaging variables
PACKAGE_NAME = "telegraf"
INSTALL_ROOT_DIR = "/usr/bin"
LOG_DIR = "/var/log/telegraf"
SCRIPT_DIR = "/usr/lib/telegraf/scripts"
CONFIG_DIR = "/etc/telegraf"
LOGROTATE_DIR = "/etc/logrotate.d"
INIT_SCRIPT = "scripts/init.sh"
SYSTEMD_SCRIPT = "scripts/telegraf.service"
LOGROTATE_SCRIPT = "etc/logrotate.d/telegraf"
DEFAULT_CONFIG = "etc/telegraf.conf"
DEFAULT_WINDOWS_CONFIG = "etc/telegraf_windows.conf"
POSTINST_SCRIPT = "scripts/post-install.sh"
PREINST_SCRIPT = "scripts/pre-install.sh"
POSTREMOVE_SCRIPT = "scripts/post-remove.sh"
PREREMOVE_SCRIPT = "scripts/pre-remove.sh"
# Default AWS S3 bucket for uploads
DEFAULT_BUCKET = "get.influxdb.org/telegraf"
CONFIGURATION_FILES = [
CONFIG_DIR + '/telegraf.conf',
LOGROTATE_DIR + '/telegraf',
]
# META-PACKAGE VARIABLES
PACKAGE_LICENSE = "MIT"
PACKAGE_URL = "https://github.com/influxdata/telegraf"
MAINTAINER = "[email protected]"
VENDOR = "InfluxData"
DESCRIPTION = "Plugin-driven server agent for reporting metrics into InfluxDB."
# SCRIPT START
prereqs = [ 'git', 'go' ]
go_vet_command = "go tool vet -composites=true ./"
optional_prereqs = [ 'gvm', 'fpm', 'rpmbuild' ]
fpm_common_args = "-f -s dir --log error \
--vendor {} \
--url {} \
--license {} \
--maintainer {} \
--config-files {} \
--config-files {} \
--after-install {} \
--before-install {} \
--after-remove {} \
--before-remove {} \
--description \"{}\"".format(
VENDOR,
PACKAGE_URL,
PACKAGE_LICENSE,
MAINTAINER,
CONFIG_DIR + '/telegraf.conf',
LOGROTATE_DIR + '/telegraf',
POSTINST_SCRIPT,
PREINST_SCRIPT,
POSTREMOVE_SCRIPT,
PREREMOVE_SCRIPT,
DESCRIPTION)
targets = {
'telegraf' : './cmd/telegraf',
}
supported_builds = {
"darwin": [ "amd64" ],
"windows": [ "amd64", "i386" ],
"linux": [ "amd64", "i386", "armhf", "armel", "arm64" ],
"freebsd": [ "amd64" ]
}
supported_packages = {
"darwin": [ "tar", "zip" ],
"linux": [ "deb", "rpm", "tar" ],
"windows": [ "zip" ],
"freebsd": [ "tar" ]
}
supported_tags = {
# "linux": {
# "amd64": ["sensors"]
# }
}
prereq_cmds = {
# "linux": "sudo apt-get install lm-sensors libsensors4-dev"
}
################
#### Telegraf Functions
################
def create_package_fs(build_root):
print("Creating a filesystem hierarchy from directory: {}".format(build_root))
# Using [1:] for the path names due to them being absolute
# (will overwrite previous paths, per 'os.path.join' documentation)
dirs = [ INSTALL_ROOT_DIR[1:], LOG_DIR[1:], SCRIPT_DIR[1:], CONFIG_DIR[1:], LOGROTATE_DIR[1:] ]
for d in dirs:
create_dir(os.path.join(build_root, d))
os.chmod(os.path.join(build_root, d), 0o755)
def package_scripts(build_root, windows=False):
print("Copying scripts and sample configuration to build directory")
if windows:
shutil.copyfile(DEFAULT_WINDOWS_CONFIG, os.path.join(build_root, "telegraf.conf"))
os.chmod(os.path.join(build_root, "telegraf.conf"), 0o644)
else:
shutil.copyfile(INIT_SCRIPT, os.path.join(build_root, SCRIPT_DIR[1:], INIT_SCRIPT.split('/')[1]))
os.chmod(os.path.join(build_root, SCRIPT_DIR[1:], INIT_SCRIPT.split('/')[1]), 0o644)
shutil.copyfile(SYSTEMD_SCRIPT, os.path.join(build_root, SCRIPT_DIR[1:], SYSTEMD_SCRIPT.split('/')[1]))
os.chmod(os.path.join(build_root, SCRIPT_DIR[1:], SYSTEMD_SCRIPT.split('/')[1]), 0o644)
shutil.copyfile(LOGROTATE_SCRIPT, os.path.join(build_root, LOGROTATE_DIR[1:], "telegraf"))
os.chmod(os.path.join(build_root, LOGROTATE_DIR[1:], "telegraf"), 0o644)
shutil.copyfile(DEFAULT_CONFIG, os.path.join(build_root, CONFIG_DIR[1:], "telegraf.conf"))
os.chmod(os.path.join(build_root, CONFIG_DIR[1:], "telegraf.conf"), 0o644)
def run_generate():
# NOOP for Telegraf
return True
def go_get(branch, update=False, no_stash=False):
if not check_path_for("gdm"):
print("Downloading `gdm`...")
get_command = "go get github.com/sparrc/gdm"
run(get_command)
print("Retrieving dependencies with `gdm`...")
run("{}/bin/gdm restore -f Godeps_windows".format(os.environ.get("GOPATH")))
run("{}/bin/gdm restore".format(os.environ.get("GOPATH")))
return True
def run_tests(race, parallel, timeout, no_vet):
# Currently a NOOP for Telegraf
return True
################
#### All Telegraf-specific content above this line
################
def run(command, allow_failure=False, shell=False):
out = None
if debug:
print("[DEBUG] {}".format(command))
try:
if shell:
out = subprocess.check_output(command, stderr=subprocess.STDOUT, shell=shell)
else:
out = subprocess.check_output(command.split(), stderr=subprocess.STDOUT)
out = out.decode("utf8")
if debug:
print("[DEBUG] command output: {}".format(out))
except subprocess.CalledProcessError as e:
print("")
print("")
print("Executed command failed!")
print("-- Command run was: {}".format(command))
print("-- Failure was: {}".format(e.output))
if allow_failure:
print("Continuing...")
return None
else:
print("")
print("Stopping.")
sys.exit(1)
except OSError as e:
print("")
print("")
print("Invalid command!")
print("-- Command run was: {}".format(command))
print("-- Failure was: {}".format(e))
if allow_failure:
print("Continuing...")
return out
else:
print("")
print("Stopping.")
sys.exit(1)
else:
return out
def create_temp_dir(prefix = None):
if prefix is None:
return tempfile.mkdtemp(prefix="{}-build.".format(PACKAGE_NAME))
else:
return tempfile.mkdtemp(prefix=prefix)
def get_current_version_tag():
version = run("git describe --always --tags --abbrev=0").strip()
return version
def get_current_version():
version_tag = get_current_version_tag()
if version_tag[0] == 'v':
# Remove leading 'v' and possible '-rc\d+'
version = re.sub(r'-rc\d+', '', version_tag[1:])
else:
version = re.sub(r'-rc\d+', '', version_tag)
return version
def get_current_rc():
rc = None
version_tag = get_current_version_tag()
matches = re.match(r'.*-rc(\d+)', version_tag)
if matches:
rc, = matches.groups(1)
return rc
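# Illustrative tag handling (example tag made up): for a repository tag
# 'v0.10.1-rc2', get_current_version() returns '0.10.1' and get_current_rc()
# returns '2'; for a plain 'v0.10.1' tag, get_current_rc() returns None.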
def get_current_commit(short=False):
command = None
if short:
command = "git log --pretty=format:'%h' -n 1"
else:
command = "git rev-parse HEAD"
out = run(command)
return out.strip('\'\n\r ')
def get_current_branch():
command = "git rev-parse --abbrev-ref HEAD"
out = run(command)
return out.strip()
def get_system_arch():
arch = os.uname()[4]
if arch == "x86_64":
arch = "amd64"
return arch
def get_system_platform():
if sys.platform.startswith("linux"):
return "linux"
else:
return sys.platform
def get_go_version():
out = run("go version")
matches = re.search('go version go(\S+)', out)
if matches is not None:
return matches.groups()[0].strip()
return None
def check_path_for(b):
def is_exe(fpath):
return os.path.isfile(fpath) and os.access(fpath, os.X_OK)
for path in os.environ["PATH"].split(os.pathsep):
path = path.strip('"')
full_path = os.path.join(path, b)
if os.path.isfile(full_path) and os.access(full_path, os.X_OK):
return full_path
def check_environ(build_dir=None):
print("")
print("Checking environment:")
for v in [ "GOPATH", "GOBIN", "GOROOT" ]:
print("- {} -> {}".format(v, os.environ.get(v)))
cwd = os.getcwd()
if build_dir is None and os.environ.get("GOPATH") and os.environ.get("GOPATH") not in cwd:
print("!! WARNING: Your current directory is not under your GOPATH. This may lead to build failures.")
def check_prereqs():
print("")
print("Checking for dependencies:")
for req in prereqs:
path = check_path_for(req)
if path:
print("- {} -> {}".format(req, path))
else:
print("- {} -> ?".format(req))
for req in optional_prereqs:
path = check_path_for(req)
if path:
print("- {} (optional) -> {}".format(req, path))
else:
print("- {} (optional) -> ?".format(req))
print("")
return True
def upload_packages(packages, bucket_name=None, nightly=False):
if debug:
print("[DEBUG] upload_packages: {}".format(packages))
try:
import boto
from boto.s3.key import Key
except ImportError:
print("!! Cannot upload packages without the 'boto' Python library.")
return 1
print("Connecting to S3...".format(bucket_name))
c = boto.connect_s3()
if bucket_name is None:
bucket_name = DEFAULT_BUCKET
bucket = c.get_bucket(bucket_name.split('/')[0])
print("Using bucket: {}".format(bucket_name))
for p in packages:
if '/' in bucket_name:
# Allow for nested paths within the bucket name (ex:
# bucket/folder). Assuming forward-slashes as path
# delimiter.
name = os.path.join('/'.join(bucket_name.split('/')[1:]),
os.path.basename(p))
else:
name = os.path.basename(p)
if bucket.get_key(name) is None or nightly:
print("Uploading {}...".format(name))
sys.stdout.flush()
k = Key(bucket)
k.key = name
if nightly:
n = k.set_contents_from_filename(p, replace=True)
else:
n = k.set_contents_from_filename(p, replace=False)
k.make_public()
else:
print("!! Not uploading package {}, as it already exists.".format(p))
print("")
return 0
def build(version=None,
branch=None,
commit=None,
platform=None,
arch=None,
nightly=False,
rc=None,
race=False,
clean=False,
outdir="."):
print("\n-------------------------\n")
print("Build Plan:")
print("- version: {}".format(version))
if rc:
print("- release candidate: {}".format(rc))
print("- commit: {}".format(get_current_commit(short=True)))
print("- branch: {}".format(get_current_branch()))
print("- platform: {}".format(platform))
print("- arch: {}".format(arch))
print("- nightly? {}".format(str(nightly).lower()))
print("- race enabled? {}".format(str(race).lower()))
print("")
if not os.path.exists(outdir):
os.makedirs(outdir)
elif clean and outdir != '/':
print("Cleaning build directory...")
shutil.rmtree(outdir)
os.makedirs(outdir)
if rc:
# If a release candidate, update the version information accordingly
version = "{}rc{}".format(version, rc)
print("Starting build...")
tmp_build_dir = create_temp_dir()
for b, c in targets.items():
print("Building '{}'...".format(os.path.join(outdir, b)))
build_command = ""
if "arm" in arch:
build_command += "GOOS={} GOARCH={} ".format(platform, "arm")
else:
if arch == 'i386':
arch = '386'
elif arch == 'x86_64':
arch = 'amd64'
build_command += "GOOS={} GOARCH={} ".format(platform, arch)
if "arm" in arch:
if arch == "armel":
build_command += "GOARM=5 "
elif arch == "armhf" or arch == "arm":
build_command += "GOARM=6 "
elif arch == "arm64":
build_command += "GOARM=7 "
else:
print("!! Invalid ARM architecture specifed: {}".format(arch))
print("Please specify either 'armel', 'armhf', or 'arm64'")
return 1
if platform == 'windows':
build_command += "go build -o {} ".format(os.path.join(outdir, b + '.exe'))
else:
build_command += "go build -o {} ".format(os.path.join(outdir, b))
if race:
build_command += "-race "
go_version = get_go_version()
if "1.4" in go_version:
build_command += "-ldflags=\"-X main.Version {} -X main.Branch {} -X main.Commit {}\" ".format(version,
get_current_branch(),
get_current_commit())
else:
# With Go 1.5, the linker flag arguments changed to 'name=value' from 'name value'
build_command += "-ldflags=\"-X main.Version={} -X main.Branch={} -X main.Commit={}\" ".format(version,
get_current_branch(),
get_current_commit())
build_command += c
run(build_command, shell=True)
return 0
def create_dir(path):
try:
os.makedirs(path)
except OSError as e:
print(e)
def rename_file(fr, to):
try:
os.rename(fr, to)
except OSError as e:
print(e)
# Return the original filename
return fr
else:
# Return the new filename
return to
def copy_file(fr, to):
try:
shutil.copy(fr, to)
except OSError as e:
print(e)
def generate_md5_from_file(path):
m = hashlib.md5()
with open(path, 'rb') as f:
while True:
data = f.read(4096)
if not data:
break
m.update(data)
return m.hexdigest()
def build_packages(build_output, version, nightly=False, rc=None, iteration=1):
outfiles = []
tmp_build_dir = create_temp_dir()
if debug:
print("[DEBUG] build_output = {}".format(build_output))
try:
print("-------------------------\n")
print("Packaging...")
for platform in build_output:
# Create top-level folder displaying which platform (linux, etc)
create_dir(os.path.join(tmp_build_dir, platform))
for arch in build_output[platform]:
# Create second-level directory displaying the architecture (amd64, etc)
current_location = build_output[platform][arch]
# Create directory tree to mimic file system of package
build_root = os.path.join(tmp_build_dir,
platform,
arch,
'{}-{}-{}'.format(PACKAGE_NAME, version, iteration))
create_dir(build_root)
# Copy packaging scripts to build directory
if platform == 'windows':
package_scripts(build_root, windows=True)
else:
create_package_fs(build_root)
package_scripts(build_root)
for binary in targets:
if platform == 'windows':
# For windows, we just want to copy the binary into the root directory
binary = binary + '.exe'
# Where the binary should go in the package filesystem
to = os.path.join(build_root, binary)
# Where the binary currently is located
fr = os.path.join(current_location, binary)
else:
# Where the binary currently is located
fr = os.path.join(current_location, binary)
# Where the binary should go in the package filesystem
to = os.path.join(build_root, INSTALL_ROOT_DIR[1:], binary)
if debug:
print("[{}][{}] - Moving from '{}' to '{}'".format(platform,
arch,
fr,
to))
copy_file(fr, to)
for package_type in supported_packages[platform]:
# Package the directory structure for each package type for the platform
print("Packaging directory '{}' as '{}'...".format(build_root, package_type))
name = PACKAGE_NAME
# Reset version, iteration, and current location on each run
# since they may be modified below.
package_version = version
package_iteration = iteration
package_build_root = build_root
current_location = build_output[platform][arch]
if rc is not None:
# Set iteration to 0 since it's a release candidate
package_iteration = "0.rc{}".format(rc)
if package_type in ['zip', 'tar']:
# For tars and zips, start the packaging one folder above
# the build root (to include the package name)
package_build_root = os.path.join('/', '/'.join(build_root.split('/')[:-1]))
if nightly:
name = '{}-nightly_{}_{}'.format(name,
platform,
arch)
else:
name = '{}-{}-{}_{}_{}'.format(name,
package_version,
package_iteration,
platform,
arch)
current_location = os.path.join(os.getcwd(), current_location)
if package_type == 'tar':
tar_command = "cd {} && tar -cvzf {}.tar.gz ./*".format(build_root, name)
run(tar_command, shell=True)
run("mv {}.tar.gz {}".format(os.path.join(build_root, name), current_location), shell=True)
outfile = os.path.join(current_location, name + ".tar.gz")
outfiles.append(outfile)
print("MD5({}) = {}".format(outfile, generate_md5_from_file(outfile)))
elif package_type == 'zip':
zip_command = "cd {} && zip -r {}.zip ./*".format(build_root, name)
run(zip_command, shell=True)
run("mv {}.zip {}".format(os.path.join(build_root, name), current_location), shell=True)
outfile = os.path.join(current_location, name + ".zip")
outfiles.append(outfile)
print("MD5({}) = {}".format(outfile, generate_md5_from_file(outfile)))
else:
fpm_command = "fpm {} --name {} -a {} -t {} --version {} --iteration {} -C {} -p {} ".format(fpm_common_args,
name,
arch,
package_type,
package_version,
package_iteration,
package_build_root,
current_location)
if debug:
fpm_command += "--verbose "
if package_type == "rpm":
fpm_command += "--depends coreutils "
out = run(fpm_command, shell=True)
matches = re.search(':path=>"(.*)"', out)
outfile = None
if matches is not None:
outfile = matches.groups()[0]
if outfile is None:
print("!! Could not determine output from packaging command.")
else:
# Strip nightly version (the unix epoch) from filename
if nightly:
outfile = rename_file(outfile, outfile.replace("{}-{}".format(version, iteration), "nightly"))
outfiles.append(os.path.join(os.getcwd(), outfile))
# Display MD5 hash for generated package
print("MD5({}) = {}".format(outfile, generate_md5_from_file(outfile)))
print("")
if debug:
print("[DEBUG] package outfiles: {}".format(outfiles))
return outfiles
finally:
# Cleanup
print("Cleaning up build dir: {}".format(tmp_build_dir))
shutil.rmtree(tmp_build_dir)
def print_usage():
print("Usage: ./build.py [options]")
print("")
print("Options:")
print("\t --outdir=<path> \n\t\t- Send build output to a specified path. Defaults to ./build.")
print("\t --arch=<arch> \n\t\t- Build for specified architecture. Acceptable values: x86_64|amd64, 386|i386, arm, or all")
print("\t --platform=<platform> \n\t\t- Build for specified platform. Acceptable values: linux, windows, darwin, or all")
print("\t --version=<version> \n\t\t- Version information to apply to build metadata. If not specified, will be pulled from repo tag.")
print("\t --commit=<commit> \n\t\t- Use specific commit for build (currently a NOOP).")
print("\t --branch=<branch> \n\t\t- Build from a specific branch (currently a NOOP).")
print("\t --rc=<rc number> \n\t\t- Whether or not the build is a release candidate (affects version information).")
print("\t --iteration=<iteration number> \n\t\t- The iteration to display on the package output (defaults to 0 for RC's, and 1 otherwise).")
print("\t --race \n\t\t- Whether the produced build should have race detection enabled.")
print("\t --package \n\t\t- Whether the produced builds should be packaged for the target platform(s).")
print("\t --nightly \n\t\t- Whether the produced build is a nightly (affects version information).")
print("\t --update \n\t\t- Whether dependencies should be updated prior to building.")
print("\t --test \n\t\t- Run Go tests. Will not produce a build.")
print("\t --parallel \n\t\t- Run Go tests in parallel up to the count specified.")
print("\t --generate \n\t\t- Run `go generate`.")
print("\t --timeout \n\t\t- Timeout for Go tests. Defaults to 480s.")
print("\t --clean \n\t\t- Clean the build output directory prior to creating build.")
print("\t --no-get \n\t\t- Do not run `go get` before building.")
print("\t --bucket=<S3 bucket>\n\t\t- Full path of the bucket to upload packages to (must also specify --upload).")
print("\t --debug \n\t\t- Displays debug output.")
print("")
def print_package_summary(packages):
print(packages)
def main():
global debug
# Command-line arguments
outdir = "build"
commit = None
target_platform = None
target_arch = None
nightly = False
race = False
branch = None
version = get_current_version()
rc = get_current_rc()
package = False
update = False
clean = False
upload = False
test = False
parallel = None
timeout = None
iteration = 1
no_vet = False
run_get = True
upload_bucket = None
generate = False
no_stash = False
for arg in sys.argv[1:]:
if '--outdir' in arg:
# Output directory. If none is specified, then builds will be placed in the same directory.
outdir = arg.split("=")[1]
if '--commit' in arg:
# Commit to build from. If none is specified, then it will build from the most recent commit.
commit = arg.split("=")[1]
if '--branch' in arg:
# Branch to build from. If none is specified, then it will build from the current branch.
branch = arg.split("=")[1]
elif '--arch' in arg:
# Target architecture. If none is specified, then it will build for the current arch.
target_arch = arg.split("=")[1]
elif '--platform' in arg:
# Target platform. If none is specified, then it will build for the current platform.
target_platform = arg.split("=")[1]
elif '--version' in arg:
# Version to assign to this build (0.9.5, etc)
version = arg.split("=")[1]
elif '--rc' in arg:
# Signifies that this is a release candidate build.
rc = arg.split("=")[1]
elif '--race' in arg:
# Signifies that race detection should be enabled.
race = True
elif '--package' in arg:
# Signifies that packages should be built.
package = True
# If packaging do not allow stashing of local changes
no_stash = True
elif '--nightly' in arg:
# Signifies that this is a nightly build.
nightly = True
elif '--update' in arg:
# Signifies that dependencies should be updated.
update = True
elif '--upload' in arg:
# Signifies that the resulting packages should be uploaded to S3
upload = True
elif '--test' in arg:
# Run tests and exit
test = True
elif '--parallel' in arg:
# Set parallel for tests.
parallel = int(arg.split("=")[1])
elif '--timeout' in arg:
# Set timeout for tests.
timeout = arg.split("=")[1]
elif '--clean' in arg:
# Signifies that the outdir should be deleted before building
clean = True
elif '--iteration' in arg:
iteration = arg.split("=")[1]
elif '--no-vet' in arg:
no_vet = True
elif '--no-get' in arg:
run_get = False
elif '--bucket' in arg:
# The bucket to upload the packages to, relies on boto
upload_bucket = arg.split("=")[1]
elif '--no-stash' in arg:
            # Do not stash uncommitted changes
            # Fail if uncommitted changes exist
no_stash = True
elif '--generate' in arg:
generate = True
elif '--debug' in arg:
print("[DEBUG] Using debug output")
debug = True
elif '--help' in arg:
print_usage()
return 0
else:
print("!! Unknown argument: {}".format(arg))
print_usage()
return 1
if nightly and rc:
print("!! Cannot be both nightly and a release candidate! Stopping.")
return 1
if nightly:
# In order to cleanly delineate nightly version, we are adding the epoch timestamp
# to the version so that version numbers are always greater than the previous nightly.
version = "{}~n{}".format(version, int(time.time()))
iteration = 0
elif rc:
iteration = 0
# Pre-build checks
check_environ()
if not check_prereqs():
return 1
if not commit:
commit = get_current_commit(short=True)
if not branch:
branch = get_current_branch()
if not target_arch:
system_arch = get_system_arch()
if 'arm' in system_arch:
# Prevent uname from reporting ARM arch (eg 'armv7l')
target_arch = "arm"
else:
target_arch = system_arch
if target_arch == '386':
target_arch = 'i386'
elif target_arch == 'x86_64':
target_arch = 'amd64'
if target_platform:
if target_platform not in supported_builds and target_platform != 'all':
print("! Invalid build platform: {}".format(target_platform))
return 1
else:
target_platform = get_system_platform()
build_output = {}
if generate:
if not run_generate():
return 1
if run_get:
if not go_get(branch, update=update, no_stash=no_stash):
return 1
if test:
if not run_tests(race, parallel, timeout, no_vet):
return 1
return 0
platforms = []
single_build = True
if target_platform == 'all':
platforms = supported_builds.keys()
single_build = False
else:
platforms = [target_platform]
for platform in platforms:
build_output.update( { platform : {} } )
archs = []
if target_arch == "all":
single_build = False
archs = supported_builds.get(platform)
else:
archs = [target_arch]
for arch in archs:
od = outdir
if not single_build:
od = os.path.join(outdir, platform, arch)
if build(version=version,
branch=branch,
commit=commit,
platform=platform,
arch=arch,
nightly=nightly,
rc=rc,
race=race,
clean=clean,
outdir=od):
return 1
build_output.get(platform).update( { arch : od } )
# Build packages
if package:
if not check_path_for("fpm"):
print("!! Cannot package without command 'fpm'.")
return 1
packages = build_packages(build_output, version, nightly=nightly, rc=rc, iteration=iteration)
if upload:
upload_packages(packages, bucket_name=upload_bucket, nightly=nightly)
print("Done!")
return 0
if __name__ == '__main__':
sys.exit(main())
|
andrew-pa/limbo-android
|
refs/heads/master
|
jni/qemu/scripts/tracetool/backend/dtrace.py
|
71
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
DTrace/SystemTAP backend.
"""
__author__ = "Lluís Vilanova <[email protected]>"
__copyright__ = "Copyright 2012, Lluís Vilanova <[email protected]>"
__license__ = "GPL version 2 or (at your option) any later version"
__maintainer__ = "Stefan Hajnoczi"
__email__ = "[email protected]"
from tracetool import out
PROBEPREFIX = None
def _probeprefix():
if PROBEPREFIX is None:
raise ValueError("you must set PROBEPREFIX")
return PROBEPREFIX
BINARY = None
def _binary():
if BINARY is None:
raise ValueError("you must set BINARY")
return BINARY
def c(events):
pass
def h(events):
out('#include "trace-dtrace.h"',
'')
for e in events:
out('static inline void trace_%(name)s(%(args)s) {',
' QEMU_%(uppername)s(%(argnames)s);',
'}',
name = e.name,
args = e.args,
uppername = e.name.upper(),
argnames = ", ".join(e.args.names()),
)
def d(events):
out('provider qemu {')
for e in events:
args = str(e.args)
# DTrace provider syntax expects foo() for empty
# params, not foo(void)
if args == 'void':
args = ''
# Define prototype for probe arguments
out('',
'probe %(name)s(%(args)s);',
name = e.name,
args = args,
)
out('',
'};')
def stap(events):
for e in events:
# Define prototype for probe arguments
out('probe %(probeprefix)s.%(name)s = process("%(binary)s").mark("%(name)s")',
'{',
probeprefix = _probeprefix(),
name = e.name,
binary = _binary(),
)
i = 1
if len(e.args) > 0:
for name in e.args.names():
# Append underscore to reserved keywords
if name in ('limit', 'in', 'next', 'self'):
name += '_'
out(' %s = $arg%d;' % (name, i))
i += 1
out('}')
out()
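# Illustrative output (probe prefix and binary are assumptions): for an event
# named 'foo' with a single argument 'x', stap() emits roughly:
#
#     probe qemu.system.x86_64.foo = process("qemu-system-x86_64").mark("foo")
#     {
#       x = $arg1;
#     }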
|
movmov/cc
|
refs/heads/master
|
vendor/Twisted-10.0.0/twisted/python/zipstream.py
|
60
|
# -*- test-case-name: twisted.python.test.test_zipstream -*-
# Copyright (c) 2001-2008 Twisted Matrix Laboratories.
# See LICENSE for details.
"""
An incremental approach to unzipping files. This allows you to unzip a little
bit of a file at a time, which means you can report progress as a file unzips.
"""
import warnings
import zipfile
import os.path
import zlib
import struct
_fileHeaderSize = struct.calcsize(zipfile.structFileHeader)
class ChunkingZipFile(zipfile.ZipFile):
"""
A ZipFile object which, with readfile(), also gives you access to a
filelike object for each entry.
"""
def readfile(self, name):
"""
Return file-like object for name.
"""
if self.mode not in ("r", "a"):
raise RuntimeError('read() requires mode "r" or "a"')
if not self.fp:
raise RuntimeError(
"Attempt to read ZIP archive that was already closed")
zinfo = self.getinfo(name)
self.fp.seek(zinfo.header_offset, 0)
fheader = self.fp.read(_fileHeaderSize)
if fheader[0:4] != zipfile.stringFileHeader:
raise zipfile.BadZipfile("Bad magic number for file header")
fheader = struct.unpack(zipfile.structFileHeader, fheader)
fname = self.fp.read(fheader[zipfile._FH_FILENAME_LENGTH])
if fheader[zipfile._FH_EXTRA_FIELD_LENGTH]:
self.fp.read(fheader[zipfile._FH_EXTRA_FIELD_LENGTH])
if fname != zinfo.orig_filename:
raise zipfile.BadZipfile(
'File name in directory "%s" and header "%s" differ.' % (
zinfo.orig_filename, fname))
if zinfo.compress_type == zipfile.ZIP_STORED:
return ZipFileEntry(self, zinfo.compress_size)
elif zinfo.compress_type == zipfile.ZIP_DEFLATED:
return DeflatedZipFileEntry(self, zinfo.compress_size)
else:
raise zipfile.BadZipfile(
"Unsupported compression method %d for file %s" %
(zinfo.compress_type, name))
class _FileEntry(object):
"""
Abstract superclass of both compressed and uncompressed variants of
file-like objects within a zip archive.
@ivar chunkingZipFile: a chunking zip file.
@type chunkingZipFile: L{ChunkingZipFile}
@ivar length: The number of bytes within the zip file that represent this
file. (This is the size on disk, not the number of decompressed bytes
which will result from reading it.)
@ivar fp: the underlying file object (that contains pkzip data). Do not
touch this, please. It will quite likely move or go away.
@ivar closed: File-like 'closed' attribute; True before this file has been
closed, False after.
@type closed: L{bool}
@ivar finished: An older, broken synonym for 'closed'. Do not touch this,
please.
@type finished: L{int}
"""
def __init__(self, chunkingZipFile, length):
"""
Create a L{_FileEntry} from a L{ChunkingZipFile}.
"""
self.chunkingZipFile = chunkingZipFile
self.fp = self.chunkingZipFile.fp
self.length = length
self.finished = 0
self.closed = False
def isatty(self):
"""
Returns false because zip files should not be ttys
"""
return False
def close(self):
"""
Close self (file-like object)
"""
self.closed = True
self.finished = 1
del self.fp
def readline(self):
"""
Read a line.
"""
bytes = ""
for byte in iter(lambda : self.read(1), ""):
bytes += byte
if byte == "\n":
break
return bytes
def next(self):
"""
Implement next as file does (like readline, except raises StopIteration
at EOF)
"""
nextline = self.readline()
if nextline:
return nextline
raise StopIteration()
def readlines(self):
"""
Returns a list of all the lines
"""
return list(self)
def xreadlines(self):
"""
Returns an iterator (so self)
"""
return self
def __iter__(self):
"""
Returns an iterator (so self)
"""
return self
class ZipFileEntry(_FileEntry):
"""
File-like object used to read an uncompressed entry in a ZipFile
"""
def __init__(self, chunkingZipFile, length):
_FileEntry.__init__(self, chunkingZipFile, length)
self.readBytes = 0
def tell(self):
return self.readBytes
def read(self, n=None):
if n is None:
n = self.length - self.readBytes
if n == 0 or self.finished:
return ''
data = self.chunkingZipFile.fp.read(
min(n, self.length - self.readBytes))
self.readBytes += len(data)
if self.readBytes == self.length or len(data) < n:
self.finished = 1
return data
class DeflatedZipFileEntry(_FileEntry):
"""
File-like object used to read a deflated entry in a ZipFile
"""
def __init__(self, chunkingZipFile, length):
_FileEntry.__init__(self, chunkingZipFile, length)
self.returnedBytes = 0
self.readBytes = 0
self.decomp = zlib.decompressobj(-15)
self.buffer = ""
def tell(self):
return self.returnedBytes
def read(self, n=None):
if self.finished:
return ""
if n is None:
result = [self.buffer,]
result.append(
self.decomp.decompress(
self.chunkingZipFile.fp.read(
self.length - self.readBytes)))
result.append(self.decomp.decompress("Z"))
result.append(self.decomp.flush())
self.buffer = ""
self.finished = 1
result = "".join(result)
self.returnedBytes += len(result)
return result
else:
while len(self.buffer) < n:
data = self.chunkingZipFile.fp.read(
min(n, 1024, self.length - self.readBytes))
self.readBytes += len(data)
if not data:
result = (self.buffer
+ self.decomp.decompress("Z")
+ self.decomp.flush())
self.finished = 1
self.buffer = ""
self.returnedBytes += len(result)
return result
else:
self.buffer += self.decomp.decompress(data)
result = self.buffer[:n]
self.buffer = self.buffer[n:]
self.returnedBytes += len(result)
return result
def unzip(filename, directory=".", overwrite=0):
"""
Unzip the file
@param filename: the name of the zip file
@param directory: the directory into which the files will be
extracted
@param overwrite: if on, overwrite files when they exist. You can
still get an error if you try to create a directory over a file
with the same name or vice-versa.
"""
for i in unzipIter(filename, directory, overwrite):
pass
DIR_BIT = 16
def unzipIter(filename, directory='.', overwrite=0):
"""
Return a generator for the zipfile. This implementation will yield
after every file.
The value it yields is the number of files left to unzip.
"""
zf = zipfile.ZipFile(filename, 'r')
names = zf.namelist()
if not os.path.exists(directory):
os.makedirs(directory)
remaining = len(zf.namelist())
for entry in names:
remaining -= 1
isdir = zf.getinfo(entry).external_attr & DIR_BIT
f = os.path.join(directory, entry)
if isdir:
# overwrite flag only applies to files
if not os.path.exists(f):
os.makedirs(f)
else:
# create the directory the file will be in first,
# since we can't guarantee it exists
fdir = os.path.split(f)[0]
if not os.path.exists(fdir):
os.makedirs(fdir)
if overwrite or not os.path.exists(f):
outfile = file(f, 'wb')
outfile.write(zf.read(entry))
outfile.close()
yield remaining
def countZipFileChunks(filename, chunksize):
"""
Predict the number of chunks that will be extracted from the entire
zipfile, given chunksize blocks.
"""
totalchunks = 0
zf = ChunkingZipFile(filename)
for info in zf.infolist():
totalchunks += countFileChunks(info, chunksize)
return totalchunks
def countFileChunks(zipinfo, chunksize):
"""
Count the number of chunks that will result from the given L{ZipInfo}.
@param zipinfo: a L{zipfile.ZipInfo} instance describing an entry in a zip
archive to be counted.
@return: the number of chunks present in the zip file. (Even an empty file
counts as one chunk.)
@rtype: L{int}
"""
count, extra = divmod(zipinfo.file_size, chunksize)
if extra > 0:
count += 1
return count or 1
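# For example, a 10000-byte entry with chunksize 4096 gives
# divmod(10000, 4096) == (2, 1808), so 3 chunks; a zero-byte entry still
# counts as one chunk.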
def countZipFileEntries(filename):
"""
Count the number of entries in a zip archive. (Don't use this function.)
@param filename: The filename of a zip archive.
@type filename: L{str}
"""
warnings.warn("countZipFileEntries is deprecated.",
DeprecationWarning, 2)
zf = zipfile.ZipFile(filename)
return len(zf.namelist())
def unzipIterChunky(filename, directory='.', overwrite=0,
chunksize=4096):
"""
Return a generator for the zipfile. This implementation will yield after
every chunksize uncompressed bytes, or at the end of a file, whichever
comes first.
The value it yields is the number of chunks left to unzip.
"""
czf = ChunkingZipFile(filename, 'r')
if not os.path.exists(directory):
os.makedirs(directory)
remaining = countZipFileChunks(filename, chunksize)
names = czf.namelist()
infos = czf.infolist()
for entry, info in zip(names, infos):
isdir = info.external_attr & DIR_BIT
f = os.path.join(directory, entry)
if isdir:
# overwrite flag only applies to files
if not os.path.exists(f):
os.makedirs(f)
remaining -= 1
yield remaining
else:
# create the directory the file will be in first,
# since we can't guarantee it exists
fdir = os.path.split(f)[0]
if not os.path.exists(fdir):
os.makedirs(fdir)
if overwrite or not os.path.exists(f):
outfile = file(f, 'wb')
fp = czf.readfile(entry)
if info.file_size == 0:
remaining -= 1
yield remaining
while fp.tell() < info.file_size:
hunk = fp.read(chunksize)
outfile.write(hunk)
remaining -= 1
yield remaining
outfile.close()
else:
remaining -= countFileChunks(info, chunksize)
yield remaining
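# Illustrative progress loop (archive and target paths are made up):
#
#     total = countZipFileChunks('data.zip', 4096)
#     for left in unzipIterChunky('data.zip', 'out', chunksize=4096):
#         print "%d/%d chunks done" % (total - left, total)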
|
zqfan/leetcode
|
refs/heads/master
|
algorithms/116. Populating Next Right Pointers in Each Node/solution.py
|
1
|
# Definition for binary tree with next pointer.
# class TreeLinkNode:
# def __init__(self, x):
# self.val = x
# self.left = None
# self.right = None
# self.next = None
class Solution:
# @param root, a tree link node
# @return nothing
def connect(self, root):
while root and root.left:
t = root.left
while root:
root.left.next = root.right
root.right.next = root.next and root.next.left
root = root.next
root = t
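# Illustrative walk-through on the perfect tree 1 -> (2, 3), 2 -> (4, 5),
# 3 -> (6, 7): after connect(), 2.next is 3, 4.next is 5, 5.next is 6 and
# 6.next is 7, while the rightmost node on every level keeps next == None.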
|
CaptainCN/QCEditor
|
refs/heads/master
|
cocos2d/plugin/tools/toolsForGame/main.py
|
265
|
import sys, string, os
from Tkinter import *
import steps
Plugins = sys.argv[1]
print Plugins
pluginList = Plugins.split(':')
maxStep = 2
curStep = 1
stepList = []
# functions
# show step on the num index
def showStep(num):
global stepList
stepNum = len(stepList)
if num >= stepNum or num <= 0 :
pass
i = 0
while i < stepNum:
if i == num:
stepList[i].stepFrame.pack(fill=BOTH, anchor='nw')
else:
stepList[i].stepFrame.pack_forget()
i += 1
# update the pre & next buttons status
def updateBtnState():
global curStep
global btnNextStep
global btnPreStep
if curStep == 1:
btnPreStep['state'] = DISABLED
btnNextStep['state'] = NORMAL
btnNextStep['text'] = 'Next'
elif curStep == maxStep:
btnPreStep['state'] = NORMAL
btnNextStep['state'] = NORMAL
btnNextStep['text'] = 'Finish'
else:
btnPreStep['state'] = NORMAL
btnNextStep['state'] = NORMAL
btnNextStep['text'] = 'Next'
# next button clicked
def nextStep():
if btnNextStep['text'] == 'close':
root.quit()
return
global curStep
nowStepObj = stepList[curStep - 1]
bRet = nowStepObj.checkStep()
if bRet != None:
stepError['text'] = bRet
return
else:
stepError['text'] = ''
if curStep < maxStep:
curStep += 1
showStep(curStep - 1)
updateBtnState()
elif curStep == maxStep:
        # disable buttons while processing
btnPreStep['state'] = DISABLED
btnNextStep['state'] = DISABLED
# get user input arguments
projPath = stepList[0].getPath()
plugins = stepList[1].getSelectedPlugins()
strPlugins = ''
i = 0
while i < len(plugins):
strPlugins += "plugins/"
strPlugins += plugins[i]
if i != (len(plugins) - 1):
strPlugins += ':'
i += 1
# process shell script to modify the game project
ret = os.system('bash ./toolsForGame/addPluginForGame.sh ' + projPath + ' ' + strPlugins)
if ret != 0:
            # enable buttons after processing
btnPreStep['state'] = NORMAL
btnNextStep['state'] = NORMAL
stepError['text'] = 'Error during process'
else:
# enable next button & change text to close
btnNextStep['state'] = NORMAL
btnNextStep['text'] = 'close'
stepError['text'] = 'Process Successful!'
# pre button clicked
def preStep():
global curStep
global stepError
stepError['text'] = ''
if curStep > 1:
curStep -= 1
showStep(curStep - 1)
updateBtnState()
# init root view
root = Tk()
root.title('Plugin-x Integration Guide')
root.geometry("600x400")
rootFrame = Frame(root)
rootFrame.pack(fill=BOTH)
# steps view
MyStep1 = steps.step1()
MyStep1.initStep(rootFrame)
MyStep2 = steps.step2()
MyStep2.initStep(rootFrame, pluginList)
stepList.append(MyStep1)
stepList.append(MyStep2)
MyStep1.stepFrame.pack(fill=BOTH, anchor='nw')
# add step error message
controlFrame = Frame(root)
controlFrame.pack(side=BOTTOM, fill=X, anchor='s')
stepError = Label(controlFrame)
stepError.pack(side=LEFT, padx=30)
# add step button
btnNextStep = Button(controlFrame, text='Next', command=nextStep)
btnPreStep = Button(controlFrame, text='Back', command=preStep, state=DISABLED)
btnNextStep.pack(side=RIGHT, padx=30)
btnPreStep.pack(side=RIGHT)
root.mainloop()
|
opencorato/represent-boundaries
|
refs/heads/master
|
boundaries/south_migrations/0004_add_extra_metadata.py
|
2
|
# encoding: utf-8
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding field 'BoundarySet.extra'
db.add_column('boundaries_boundaryset', 'extra', self.gf('jsonfield.fields.JSONField')(null=True, blank=True), keep_default=False)
def backwards(self, orm):
# Deleting field 'BoundarySet.extra'
db.delete_column('boundaries_boundaryset', 'extra')
models = {
'boundaries.boundary': {
'Meta': {'unique_together': "(('slug', 'set'),)", 'object_name': 'Boundary'},
'centroid': ('django.contrib.gis.db.models.fields.PointField', [], {'null': 'True'}),
'extent': ('jsonfield.fields.JSONField', [], {'null': 'True', 'blank': 'True'}),
'external_id': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'label_point': ('django.contrib.gis.db.models.fields.PointField', [], {'null': 'True', 'spatial_index': 'False', 'blank': 'True'}),
'metadata': ('jsonfield.fields.JSONField', [], {'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '192', 'db_index': 'True'}),
'set': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'boundaries'", 'to': "orm['boundaries.BoundarySet']"}),
'set_name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'shape': ('django.contrib.gis.db.models.fields.MultiPolygonField', [], {}),
'simple_shape': ('django.contrib.gis.db.models.fields.MultiPolygonField', [], {}),
'slug': ('django.db.models.fields.SlugField', [], {'max_length': '200', 'db_index': 'True'})
},
'boundaries.boundaryset': {
'Meta': {'ordering': "('name',)", 'object_name': 'BoundarySet'},
'authority': ('django.db.models.fields.CharField', [], {'max_length': '256'}),
'domain': ('django.db.models.fields.CharField', [], {'max_length': '256'}),
'extent': ('jsonfield.fields.JSONField', [], {'null': 'True', 'blank': 'True'}),
'extra': ('jsonfield.fields.JSONField', [], {'null': 'True', 'blank': 'True'}),
'last_updated': ('django.db.models.fields.DateField', [], {}),
'licence_url': ('django.db.models.fields.URLField', [], {'max_length': '200', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '100'}),
'notes': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'singular': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'slug': ('django.db.models.fields.SlugField', [], {'max_length': '200', 'primary_key': 'True', 'db_index': 'True'}),
'source_url': ('django.db.models.fields.URLField', [], {'max_length': '200', 'blank': 'True'})
}
}
complete_apps = ['boundaries']
|
bjss/BJSS_liveobs_automation
|
refs/heads/develop
|
liveobs_ui/page_object_models/mobile/modal_page.py
|
2
|
"""
Page Object Model for Modals
While modals are technically not 'pages', there are common interaction patterns
with modals that merit their own Page Object Model. This is that Page Object
Model.
"""
from selenium.webdriver.common.by import By
from selenium.webdriver.common.keys import Keys
from selenium.webdriver.support.select import Select
from liveobs_ui.page_object_models.mobile.mobile_common import BaseMobilePage
from liveobs_ui.selectors.mobile.modal import MODAL_DIALOG, MODAL_TITLE, \
MODAL_BUTTONS, MODAL_CONTENT
class ModalPage(BaseMobilePage):
"""
Class that handles interacting with Modals
"""
@staticmethod
def _get_selector_for_modal(modal):
"""
Given a modal return a By selector
:param modal: Modal to generate selector for
:return: By selector
"""
modal_id = modal.get_attribute('id')
return (By.ID, modal_id)
def close_modal(self, modal):
"""
Close supplied modal
:param modal: Modal object (.dialog) to close
"""
modal_selector = self._get_selector_for_modal(modal)
cover = self.get_cover_for_modal(modal)
self.click_and_verify_change(cover, modal_selector, hidden=True)
def get_open_modals(self):
"""
Get the currently open modals on the page
:return: list of modal objects
"""
self.wait_for_element(MODAL_DIALOG)
return self.driver.find_elements(*MODAL_DIALOG)
def get_cover_for_modal(self, modal):
"""
Get the cover that is associated with the modal
:param modal: Modal to find cover for
:return: cover object
"""
modal_id = modal.get_attribute('id')
cover_selector = (By.CSS_SELECTOR, '.cover[data-target={}]'.format(
modal_id))
return self.driver.find_element(*cover_selector)
@staticmethod
def get_modal_title(modal):
"""
Get the title text for the supplied modal
:param modal: Modal to get title for
:return: Text content of the title element
"""
title = modal.find_element(*MODAL_TITLE)
return title.text
@staticmethod
def get_modal_options(modal):
"""
Get the option buttons for the supplied modal
:param modal: Modal to find option buttons for
:return: List of button elements
"""
return modal.find_elements(*MODAL_BUTTONS)
@staticmethod
def get_modal_content(modal):
"""
Get the text content of the supplied modal
:param modal: Modal to get content of
:return: Text content of modal
"""
content = modal.find_element(*MODAL_CONTENT)
return content.text
def click_modal_option(self, modal, option_title):
"""
Locate the option in the modal options and click it
:param modal: Modal to find option in
:param option_title: Title of the button to click
"""
options = self.get_modal_options(modal)
button = None
for option in options:
if option.text == option_title:
button = option
if button:
modal_selector = self._get_selector_for_modal(modal)
self.click_and_verify_change(button, modal_selector, hidden=True)
@staticmethod
def select_reason_in_modal(modal, value_to_select):
"""
For modals with select boxes (cancel reason, partial reason) select the
supplied value
:param modal: Modal with select box
:param value_to_select: Value to select in select box
"""
select_field = modal.find_element_by_tag_name('select')
select_select = Select(select_field)
select_select.select_by_visible_text(value_to_select)
select_field.send_keys(Keys.TAB)
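# Illustrative usage sketch (assumes a configured Selenium driver and an open
# modal; ModalPage construction mirrors BaseMobilePage, which is assumed to
# accept the driver; the title and button text below are hypothetical):
#   page = ModalPage(driver)
#   modal = page.get_open_modals()[0]
#   if page.get_modal_title(modal) == 'Confirm Submission':
#       page.click_modal_option(modal, 'Submit')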
|
RO-ny9/python-for-android
|
refs/heads/master
|
python3-alpha/python3-src/Lib/lib2to3/fixes/fix_throw.py
|
203
|
"""Fixer for generator.throw(E, V, T).
g.throw(E) -> g.throw(E)
g.throw(E, V) -> g.throw(E(V))
g.throw(E, V, T) -> g.throw(E(V).with_traceback(T))
g.throw("foo"[, V[, T]]) will warn about string exceptions."""
# Author: Collin Winter
# Local imports
from .. import pytree
from ..pgen2 import token
from .. import fixer_base
from ..fixer_util import Name, Call, ArgList, Attr, is_tuple
class FixThrow(fixer_base.BaseFix):
BM_compatible = True
PATTERN = """
power< any trailer< '.' 'throw' >
trailer< '(' args=arglist< exc=any ',' val=any [',' tb=any] > ')' >
>
|
power< any trailer< '.' 'throw' > trailer< '(' exc=any ')' > >
"""
def transform(self, node, results):
syms = self.syms
exc = results["exc"].clone()
if exc.type is token.STRING:
self.cannot_convert(node, "Python 3 does not support string exceptions")
return
# Leave "g.throw(E)" alone
val = results.get("val")
if val is None:
return
val = val.clone()
if is_tuple(val):
args = [c.clone() for c in val.children[1:-1]]
else:
val.prefix = ""
args = [val]
throw_args = results["args"]
if "tb" in results:
tb = results["tb"].clone()
tb.prefix = ""
e = Call(exc, args)
with_tb = Attr(e, Name('with_traceback')) + [ArgList([tb])]
throw_args.replace(pytree.Node(syms.power, with_tb))
else:
throw_args.replace(Call(exc, args))
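# Concrete illustration (hypothetical names, not part of the fixer): running
# 2to3 with this fixer rewrites, for example,
#   gen.throw(ValueError, "bad value", tb)
# into
#   gen.throw(ValueError("bad value").with_traceback(tb))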
|
karansingh1559/coala
|
refs/heads/master
|
coalib/bearlib/languages/definitions/CSS.py
|
30
|
from coalib.bearlib.languages.Language import Language
@Language
class CSS:
extensions = '.css',
multiline_comment_delimiters = {'/*': '*/'}
string_delimiters = {'"': '"', "'": "'"}
multiline_string_delimiters = {}
indent_types = {'{': '}'}
encapsulators = {'(': ')', '[': ']'}
|
mitocw/edx-platform
|
refs/heads/master
|
openedx/core/djangoapps/content/block_structure/block_structure.py
|
4
|
"""
Module with family of classes for block structures.
BlockStructure - responsible for block existence and relations.
BlockStructureBlockData - responsible for block & transformer data.
BlockStructureModulestoreData - responsible for xBlock data.
The following internal data structures are implemented:
_BlockRelations - Data structure for a single block's relations.
_BlockData - Data structure for a single block's data.
"""
from copy import deepcopy
from functools import partial
from logging import getLogger
import six
from openedx.core.lib.graph_traversals import traverse_post_order, traverse_topologically
from .exceptions import TransformerException
logger = getLogger(__name__) # pylint: disable=invalid-name
# A dictionary key value for storing a transformer's version number.
TRANSFORMER_VERSION_KEY = '_version'
class _BlockRelations(object):
"""
Data structure to encapsulate relationships for a single block,
including its children and parents.
"""
def __init__(self):
# List of usage keys of this block's parents.
# list [UsageKey]
self.parents = []
# List of usage keys of this block's children.
# list [UsageKey]
self.children = []
class BlockStructure(object):
"""
Base class for a block structure. BlockStructures are constructed
using the BlockStructureFactory and then used as the currency across
Transformers.
This base class keeps track of the block structure's root_block_usage_key,
the existence of the blocks, and their parents and children
relationships (graph nodes and edges).
"""
def __init__(self, root_block_usage_key):
# The usage key of the root block for this structure.
# UsageKey
self.root_block_usage_key = root_block_usage_key
# Map of a block's usage key to its block relations. The
# existence of a block in the structure is determined by its
# presence in this map.
# dict {UsageKey: _BlockRelations}
self._block_relations = {}
# Add the root block.
self._add_block(self._block_relations, root_block_usage_key)
def __iter__(self):
"""
The default iterator for a block structure is get_block_keys()
since we need to filter blocks as a list.
A topological traversal can be used to support DAGs.
"""
return self.get_block_keys()
def __len__(self):
return len(self._block_relations)
#--- Block structure relation methods ---#
def get_parents(self, usage_key):
"""
Returns the parents of the block identified by the given
usage_key.
Arguments:
usage_key - The usage key of the block whose parents
are to be returned.
Returns:
[UsageKey] - A list of usage keys of the block's parents.
"""
return self._block_relations[usage_key].parents if usage_key in self else []
def get_children(self, usage_key):
"""
Returns the children of the block identified by the given
usage_key.
Arguments:
usage_key - The usage key of the block whose children
are to be returned.
Returns:
[UsageKey] - A list of usage keys of the block's children.
"""
return self._block_relations[usage_key].children if usage_key in self else []
def set_root_block(self, usage_key):
"""
Sets the given usage key as the new root of the block structure.
Note: This method does *not* prune the rest of the structure. For
performance reasons, it is left to the caller to decide when exactly
to prune.
Arguments:
usage_key - The usage key of the block that is to be set as the
new root of the block structure.
"""
self.root_block_usage_key = usage_key
self._block_relations[usage_key].parents = []
def __contains__(self, usage_key):
"""
Returns whether a block with the given usage_key is in this
block structure.
Arguments:
usage_key - The usage key of the block whose children
are to be returned.
Returns:
bool - Whether or not a block with the given usage_key
is present in this block structure.
"""
return usage_key in self._block_relations
def get_block_keys(self):
"""
Returns the block keys in the block structure.
Returns:
iterator(UsageKey) - An iterator of the usage
keys of all the blocks in the block structure.
"""
return six.iterkeys(self._block_relations)
#--- Block structure traversal methods ---#
def topological_traversal(
self,
filter_func=None,
yield_descendants_of_unyielded=False,
start_node=None,
):
"""
Performs a topological sort of the block structure and yields
the usage_key of each block as it is encountered.
Arguments:
See the description in
openedx.core.lib.graph_traversals.traverse_topologically.
Returns:
generator - A generator object created from the
traverse_topologically method.
"""
return traverse_topologically(
start_node=start_node or self.root_block_usage_key,
get_parents=self.get_parents,
get_children=self.get_children,
filter_func=filter_func,
yield_descendants_of_unyielded=yield_descendants_of_unyielded,
)
def post_order_traversal(
self,
filter_func=None,
start_node=None,
):
"""
Performs a post-order sort of the block structure and yields
the usage_key of each block as it is encountered.
Arguments:
See the description in
openedx.core.lib.graph_traversals.traverse_post_order.
Returns:
generator - A generator object created from the
traverse_post_order method.
"""
return traverse_post_order(
start_node=start_node or self.root_block_usage_key,
get_children=self.get_children,
filter_func=filter_func,
)
#--- Internal methods ---#
# To be used within the block_structure framework or by tests.
def _prune_unreachable(self):
"""
Mutates this block structure by removing any unreachable blocks.
"""
# Create a new block relations map to store only those blocks
# that are still linked
pruned_block_relations = {}
old_block_relations = self._block_relations
# Build the structure from the leaves up by doing a post-order
# traversal of the old structure, thereby encountering only
# reachable blocks.
for block_key in self.post_order_traversal():
# If the block is in the old structure,
if block_key in old_block_relations:
# Add it to the new pruned structure
self._add_block(pruned_block_relations, block_key)
# Add a relationship to only those old children that
# were also added to the new pruned structure.
for child in old_block_relations[block_key].children:
if child in pruned_block_relations:
self._add_to_relations(pruned_block_relations, block_key, child)
# Replace this structure's relations with the newly pruned one.
self._block_relations = pruned_block_relations
def _add_relation(self, parent_key, child_key):
"""
Adds a parent to child relationship in this block structure.
Arguments:
parent_key (UsageKey) - Usage key of the parent block.
child_key (UsageKey) - Usage key of the child block.
"""
self._add_to_relations(self._block_relations, parent_key, child_key)
@staticmethod
def _add_to_relations(block_relations, parent_key, child_key):
"""
Adds a parent to child relationship in the given block
relations map.
Arguments:
block_relations (dict({UsageKey: _BlockRelations})) -
Internal map of a block's usage key to its
parents/children relations.
parent_key (UsageKey) - Usage key of the parent block.
child_key (UsageKey) - Usage key of the child block.
"""
BlockStructure._add_block(block_relations, parent_key)
BlockStructure._add_block(block_relations, child_key)
block_relations[child_key].parents.append(parent_key)
block_relations[parent_key].children.append(child_key)
@staticmethod
def _add_block(block_relations, usage_key):
"""
Adds the given usage_key to the given block_relations map.
Arguments:
block_relations (dict({UsageKey: _BlockRelations})) -
Internal map of a block's usage key to its
parents/children relations.
usage_key (UsageKey) - Usage key of the block that is to
be added to the given block_relations.
"""
if usage_key not in block_relations:
block_relations[usage_key] = _BlockRelations()
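# Illustrative usage sketch (plain strings stand in for UsageKey objects here;
# real callers use opaque-key instances):
#   structure = BlockStructure('course')
#   structure._add_relation('course', 'chapter_1')
#   structure._add_relation('chapter_1', 'unit_1')
#   'unit_1' in structure                    # True
#   structure.get_children('chapter_1')      # ['unit_1']
#   list(structure.topological_traversal())  # ['course', 'chapter_1', 'unit_1']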
class FieldData(object):
"""
Data structure to encapsulate collected fields.
"""
def class_field_names(self):
"""
Returns list of names of fields that are defined directly
on the class. Can be overridden by subclasses. All other
fields are assumed to be stored in the self.fields dict.
"""
return ['fields']
def __init__(self):
# Map of field name to the field's value for this block.
# dict {string: any picklable type}
self.fields = {}
def __getattr__(self, field_name):
if self._is_own_field(field_name):
return super(FieldData, self).__getattr__(field_name)
try:
return self.fields[field_name]
except KeyError:
raise AttributeError(u"Field {0} does not exist".format(field_name))
def __setattr__(self, field_name, field_value):
if self._is_own_field(field_name):
return super(FieldData, self).__setattr__(field_name, field_value)
else:
self.fields[field_name] = field_value
def __delattr__(self, field_name):
if self._is_own_field(field_name):
return super(FieldData, self).__delattr__(field_name)
else:
del self.fields[field_name]
def _is_own_field(self, field_name):
"""
Returns whether the given field_name is the name of an
actual field of this class.
"""
return field_name in self.class_field_names()
class TransformerData(FieldData):
"""
Data structure to encapsulate collected data for a transformer.
"""
pass
class TransformerDataMap(dict):
"""
A map of Transformer name to its corresponding TransformerData.
The map can be accessed by the Transformer's name or the
Transformer's class type.
"""
def __getitem__(self, key):
key = self._translate_key(key)
return dict.__getitem__(self, key)
def __setitem__(self, key, value):
key = self._translate_key(key)
dict.__setitem__(self, key, value)
def __delitem__(self, key):
key = self._translate_key(key)
dict.__delitem__(self, key)
def get_or_create(self, key):
"""
Returns the TransformerData associated with the given
key. If not found, creates and returns a new TransformerData
and maps it to the given key.
"""
try:
return self[key]
except KeyError:
new_transformer_data = TransformerData()
self[key] = new_transformer_data
return new_transformer_data
def _translate_key(self, key):
"""
Allows the given key to be either the transformer's class or name,
always returning the transformer's name. This allows
TransformerDataMap to be accessed in either of the following ways:
map[TransformerClass] or
map['transformer_name']
"""
try:
return key.name()
except AttributeError:
return key
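# Illustrative usage sketch ('my_transformer' and the collected_count field are
# hypothetical; any transformer class exposing a name() method can also be used
# as the key):
#   data_map = TransformerDataMap()
#   entry = data_map.get_or_create('my_transformer')
#   entry.collected_count = 3                   # stored in entry.fields
#   data_map['my_transformer'].collected_count  # 3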
class BlockData(FieldData):
"""
Data structure to encapsulate collected data for a single block.
"""
def class_field_names(self):
return super(BlockData, self).class_field_names() + ['location', 'transformer_data']
def __init__(self, usage_key):
super(BlockData, self).__init__()
# Location (or usage key) of the block.
self.location = usage_key
# Map of transformer name to its block-specific data.
self.transformer_data = TransformerDataMap()
class BlockStructureBlockData(BlockStructure):
"""
Subclass of BlockStructure that is responsible for managing block
and transformer data.
"""
# The latest version of the data structure of this class. Incrementally
# update this value whenever the data structure changes. Dependent storage
# layers can then use this value when serializing/deserializing block
# structures, and invalidating any previously cached/stored data.
VERSION = 2
def __init__(self, root_block_usage_key):
super(BlockStructureBlockData, self).__init__(root_block_usage_key)
# Map of a block's usage key to its collected data, including
# its xBlock fields and block-specific transformer data.
# dict {UsageKey: BlockData}
self._block_data_map = {}
# Map of a transformer's name to its non-block-specific data.
self.transformer_data = TransformerDataMap()
def copy(self):
"""
Returns a new instance of BlockStructureBlockData with a
deep-copy of this instance's contents.
"""
from .factory import BlockStructureFactory
return BlockStructureFactory.create_new(
self.root_block_usage_key,
deepcopy(self._block_relations),
deepcopy(self.transformer_data),
deepcopy(self._block_data_map),
)
def iteritems(self):
"""
Returns iterator of (UsageKey, BlockData) pairs for all
blocks in the BlockStructure.
"""
return six.iteritems(self._block_data_map)
def itervalues(self):
"""
Returns iterator of BlockData for all blocks in the
BlockStructure.
"""
return six.itervalues(self._block_data_map)
def __getitem__(self, usage_key):
"""
Returns the BlockData associated with the given key.
"""
return self._block_data_map[usage_key]
def get_xblock_field(self, usage_key, field_name, default=None):
"""
Returns the collected value of the xBlock field for the
requested block for the requested field_name; returns default if
not found.
Arguments:
usage_key (UsageKey) - Usage key of the block whose xBlock
field is requested.
field_name (string) - The name of the field that is
requested.
default (any type) - The value to return if a field value is
not found.
"""
block_data = self._block_data_map.get(usage_key)
return getattr(block_data, field_name, default) if block_data else default
def override_xblock_field(self, usage_key, field_name, override_data):
"""
Set value of the XBlock field for the requested block for the requested field_name;
Arguments:
usage_key (UsageKey) - Usage key of the block whose xBlock
field is requested.
field_name (string) - The name of the field that is
requested.
override_data (object) - The data you want to set
"""
block_data = self._block_data_map.get(usage_key)
setattr(block_data, field_name, override_data)
def get_transformer_data(self, transformer, key, default=None):
"""
Returns the value associated with the given key from the given
transformer's data dictionary; returns default if not found.
Arguments:
transformer (BlockStructureTransformer) - The transformer
whose collected data is requested.
key (string) - A dictionary key to the transformer's data
that is requested.
"""
try:
return getattr(self.transformer_data[transformer], key, default)
except KeyError:
return default
def set_transformer_data(self, transformer, key, value):
"""
Updates the given transformer's data dictionary with the given
key and value.
Arguments:
transformer (BlockStructureTransformer) - The transformer
whose data is to be updated.
key (string) - A dictionary key to the transformer's data.
value (any picklable type) - The value to associate with the
given key for the given transformer's data.
"""
setattr(self.transformer_data.get_or_create(transformer), key, value)
def get_transformer_block_data(self, usage_key, transformer):
"""
Returns the TransformerData for the given
transformer for the block identified by the given usage_key.
Raises KeyError if not found.
Arguments:
usage_key (UsageKey) - Usage key of the block whose
transformer data is requested.
transformer (BlockStructureTransformer) - The transformer
whose dictionary data is requested.
"""
return self._block_data_map[usage_key].transformer_data[transformer]
def get_transformer_block_field(self, usage_key, transformer, key, default=None):
"""
Returns the value associated with the given key for the given
transformer for the block identified by the given usage_key;
returns default if not found.
Arguments:
usage_key (UsageKey) - Usage key of the block whose
transformer data is requested.
transformer (BlockStructureTransformer) - The transformer
whose dictionary data is requested.
key (string) - A dictionary key to the transformer's data
that is requested.
default (any type) - The value to return if a dictionary
entry is not found.
"""
try:
transformer_data = self.get_transformer_block_data(usage_key, transformer)
except KeyError:
return default
return getattr(transformer_data, key, default)
def set_transformer_block_field(self, usage_key, transformer, key, value):
"""
Updates the given transformer's data dictionary with the given
key and value for the block identified by the given usage_key.
Arguments:
usage_key (UsageKey) - Usage key of the block whose
transformer data is to be updated.
transformer (BlockStructureTransformer) - The transformer
whose data is to be updated.
key (string) - A dictionary key to the transformer's data.
value (any picklable type) - The value to associate with the
given key for the given transformer's data for the
requested block.
"""
setattr(
self._get_or_create_block(usage_key).transformer_data.get_or_create(transformer),
key,
value,
)
def remove_transformer_block_field(self, usage_key, transformer, key):
"""
Deletes the given transformer's entire data dict for the
block identified by the given usage_key.
Arguments:
usage_key (UsageKey) - Usage key of the block whose
transformer data is to be deleted.
transformer (BlockStructureTransformer) - The transformer
whose data entry is to be deleted.
"""
try:
transformer_block_data = self.get_transformer_block_data(usage_key, transformer)
delattr(transformer_block_data, key)
except (AttributeError, KeyError):
pass
def remove_block(self, usage_key, keep_descendants):
"""
Removes the block identified by the usage_key and all of its
related data from the block structure. If descendants of the
removed block are to be kept, the structure's relations are
updated to reconnect the block's parents with its children.
Note: While the immediate relations of the block are updated
(removed), all descendants of the block will remain in the
structure unless the _prune_unreachable method is called.
Arguments:
usage_key (UsageKey) - Usage key of the block that is to be
removed.
keep_descendants (bool) - If True, the block structure's
relations (graph edges) are updated such that the
removed block's children become children of the
removed block's parents.
"""
children = self._block_relations[usage_key].children
parents = self._block_relations[usage_key].parents
# Remove block from its children.
for child in children:
self._block_relations[child].parents.remove(usage_key)
# Remove block from its parents.
for parent in parents:
self._block_relations[parent].children.remove(usage_key)
# Remove block.
self._block_relations.pop(usage_key, None)
self._block_data_map.pop(usage_key, None)
# Recreate the graph connections if descendants are to be kept.
if keep_descendants:
for child in children:
for parent in parents:
self._add_relation(parent, child)
def create_universal_filter(self):
"""
Returns a filter function that always returns True for all blocks.
"""
return lambda block_key: True
def create_removal_filter(self, removal_condition, keep_descendants=False):
"""
Returns a filter function that automatically removes blocks that satisfy
the removal_condition.
Arguments:
removal_condition ((usage_key)->bool) - A function that
takes a block's usage key as input and returns whether
or not to remove that block from the block structure.
keep_descendants (bool) - See the description in
remove_block.
"""
return partial(
self.retain_or_remove,
removal_condition=removal_condition,
keep_descendants=keep_descendants,
)
def retain_or_remove(self, block_key, removal_condition, keep_descendants=False):
"""
Removes the given block if it satisfies the removal_condition.
Returns True if the block was retained, and False if the block
was removed.
Arguments:
block_key (usage_key) - Usage key of the block.
removal_condition ((usage_key)->bool) - A function that
takes a block's usage key as input and returns whether
or not to remove that block from the block structure.
keep_descendants (bool) - See the description in
remove_block.
"""
if removal_condition(block_key):
self.remove_block(block_key, keep_descendants)
return False
return True
def remove_block_traversal(self, removal_condition, keep_descendants=False):
"""
A higher-order function that traverses the block structure
using topological sort and removes all blocks satisfying the given
removal_condition.
Arguments:
removal_condition ((usage_key)->bool) - A function that
takes a block's usage key as input and returns whether
or not to remove that block from the block structure.
keep_descendants (bool) - See the description in
remove_block.
"""
self.filter_topological_traversal(
filter_func=self.create_removal_filter(
removal_condition, keep_descendants
)
)
def filter_topological_traversal(self, filter_func, **kwargs):
"""
A higher-order function that traverses the block structure
using topological sort and applies the given filter.
Arguments:
filter_func ((usage_key)->bool) - Function that returns
whether or not to yield the given block key.
If None, the True function is assumed.
kwargs (dict) - Optional keyword arguments to be forwarded
to topological_traversal.
"""
# Note: For optimization, we remove blocks using the filter
# function, since the graph traversal method can skip over
# descendants that are unyielded. However, note that the
# optimization is not currently present because of DAGs,
# but it will be as soon as we remove support for DAGs.
for _ in self.topological_traversal(filter_func=filter_func, **kwargs):
pass
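    # Illustrative usage sketch (block_structure is a populated
    # BlockStructureBlockData instance; the field name is an example of a
    # previously collected xBlock field):
    #   block_structure.remove_block_traversal(
    #       lambda usage_key: block_structure.get_xblock_field(
    #           usage_key, 'visible_to_staff_only', default=False))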
#--- Internal methods ---#
# To be used within the block_structure framework or by tests.
def _get_transformer_data_version(self, transformer):
"""
Returns the version number stored for the given transformer.
Arguments:
transformer (BlockStructureTransformer) - The transformer
whose stored version is requested.
"""
return self.get_transformer_data(transformer, TRANSFORMER_VERSION_KEY, 0)
def _add_transformer(self, transformer):
"""
Adds the given transformer to the block structure by recording
its current version number.
"""
if transformer.WRITE_VERSION == 0:
            raise TransformerException(u'Version attributes are not set on transformer {0}.'.format(transformer.name()))
self.set_transformer_data(transformer, TRANSFORMER_VERSION_KEY, transformer.WRITE_VERSION)
def _get_or_create_block(self, usage_key):
"""
Returns the BlockData associated with the given usage_key.
If not found, creates and returns a new BlockData and
maps it to the given key.
"""
try:
return self._block_data_map[usage_key]
except KeyError:
block_data = BlockData(usage_key)
self._block_data_map[usage_key] = block_data
return block_data
class BlockStructureModulestoreData(BlockStructureBlockData):
"""
Subclass of BlockStructureBlockData that is responsible for managing
xBlocks and corresponding functionality that should only be called
during the Collect phase.
Note: Although this class interface uses xBlock terminology, it is
designed and implemented generically so it can work with any
interface and implementation of an xBlock.
"""
def __init__(self, root_block_usage_key):
super(BlockStructureModulestoreData, self).__init__(root_block_usage_key)
# Map of a block's usage key to its instantiated xBlock.
# dict {UsageKey: XBlock}
self._xblock_map = {}
# Set of xBlock field names that have been requested for
# collection.
# set(string)
self._requested_xblock_fields = set()
def request_xblock_fields(self, *field_names):
"""
Records request for collecting data for the given xBlock fields.
A Transformer should call this method when it needs to collect
data for a common xBlock field that may also be used by other
transformers. This minimizes storage usage across transformers.
Contrast this with each transformer collecting the same xBlock
data within its own transformer data storage.
Arguments:
field_names (list(string)) - A list of names of common
xBlock fields whose values should be collected.
"""
self._requested_xblock_fields.update(set(field_names))
def get_xblock(self, usage_key):
"""
Returns the instantiated xBlock for the given usage key.
Arguments:
usage_key (UsageKey) - Usage key of the block whose
xBlock object is to be returned.
"""
return self._xblock_map[usage_key]
#--- Internal methods ---#
# To be used within the block_structure framework or by tests.
def _add_xblock(self, usage_key, xblock):
"""
Associates the given xBlock object with the given usage_key.
Arguments:
usage_key (UsageKey) - Usage key of the given xBlock. This
value is passed in separately as opposed to retrieving
it from the given xBlock since this interface is
agnostic to and decoupled from the xBlock interface.
xblock (XBlock) - An instantiated XBlock object that is
to be stored for later access.
"""
self._xblock_map[usage_key] = xblock
def _collect_requested_xblock_fields(self):
"""
Iterates through all instantiated xBlocks that were added and
collects all xBlock fields that were requested.
"""
for xblock_usage_key, xblock in six.iteritems(self._xblock_map):
block_data = self._get_or_create_block(xblock_usage_key)
for field_name in self._requested_xblock_fields:
self._set_xblock_field(block_data, xblock, field_name)
def _set_xblock_field(self, block_data, xblock, field_name):
"""
Updates the given block's xBlock fields data with the xBlock
value for the given field name.
Arguments:
block_data (BlockData) - A BlockStructure BlockData
object.
xblock (XBlock) - An instantiated XBlock object whose
field is being accessed and collected for later
retrieval.
field_name (string) - The name of the xBlock field that is
being collected and stored.
"""
if hasattr(xblock, field_name):
setattr(block_data, field_name, getattr(xblock, field_name))
|
mrquim/repository.mrquim
|
refs/heads/master
|
script.module.youtube.dl/lib/youtube_dl/extractor/bloomberg.py
|
63
|
# coding: utf-8
from __future__ import unicode_literals
import re
from .common import InfoExtractor
class BloombergIE(InfoExtractor):
_VALID_URL = r'https?://(?:www\.)?bloomberg\.com/(?:[^/]+/)*(?P<id>[^/?#]+)'
_TESTS = [{
'url': 'http://www.bloomberg.com/news/videos/b/aaeae121-5949-481e-a1ce-4562db6f5df2',
# The md5 checksum changes
'info_dict': {
'id': 'qurhIVlJSB6hzkVi229d8g',
'ext': 'flv',
'title': 'Shah\'s Presentation on Foreign-Exchange Strategies',
'description': 'md5:a8ba0302912d03d246979735c17d2761',
},
'params': {
'format': 'best[format_id^=hds]',
},
}, {
# video ID in BPlayer(...)
'url': 'http://www.bloomberg.com/features/2016-hello-world-new-zealand/',
'info_dict': {
'id': '938c7e72-3f25-4ddb-8b85-a9be731baa74',
'ext': 'flv',
'title': 'Meet the Real-Life Tech Wizards of Middle Earth',
'description': 'Hello World, Episode 1: New Zealand’s freaky AI babies, robot exoskeletons, and a virtual you.',
},
'params': {
'format': 'best[format_id^=hds]',
},
}, {
# data-bmmrid=
'url': 'https://www.bloomberg.com/politics/articles/2017-02-08/le-pen-aide-briefed-french-central-banker-on-plan-to-print-money',
'only_matching': True,
}, {
'url': 'http://www.bloomberg.com/news/articles/2015-11-12/five-strange-things-that-have-been-happening-in-financial-markets',
'only_matching': True,
}, {
'url': 'http://www.bloomberg.com/politics/videos/2015-11-25/karl-rove-on-jeb-bush-s-struggles-stopping-trump',
'only_matching': True,
}]
def _real_extract(self, url):
name = self._match_id(url)
webpage = self._download_webpage(url, name)
video_id = self._search_regex(
(r'["\']bmmrId["\']\s*:\s*(["\'])(?P<id>(?:(?!\1).)+)\1',
r'videoId\s*:\s*(["\'])(?P<id>(?:(?!\1).)+)\1',
r'data-bmmrid=(["\'])(?P<id>(?:(?!\1).)+)\1'),
webpage, 'id', group='id', default=None)
if not video_id:
bplayer_data = self._parse_json(self._search_regex(
r'BPlayer\(null,\s*({[^;]+})\);', webpage, 'id'), name)
video_id = bplayer_data['id']
title = re.sub(': Video$', '', self._og_search_title(webpage))
embed_info = self._download_json(
'http://www.bloomberg.com/api/embed?id=%s' % video_id, video_id)
formats = []
for stream in embed_info['streams']:
stream_url = stream.get('url')
if not stream_url:
continue
if stream['muxing_format'] == 'TS':
formats.extend(self._extract_m3u8_formats(
stream_url, video_id, 'mp4', m3u8_id='hls', fatal=False))
else:
formats.extend(self._extract_f4m_formats(
stream_url, video_id, f4m_id='hds', fatal=False))
self._sort_formats(formats)
return {
'id': video_id,
'title': title,
'formats': formats,
'description': self._og_search_description(webpage),
'thumbnail': self._og_search_thumbnail(webpage),
}
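# Illustrative usage sketch (runs the extractor through youtube-dl's public
# API; the URL is taken from the test cases above):
#   from youtube_dl import YoutubeDL
#   with YoutubeDL() as ydl:
#       info = ydl.extract_info(
#           'http://www.bloomberg.com/news/videos/b/aaeae121-5949-481e-a1ce-4562db6f5df2',
#           download=False)
#       print(info['title'])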
|
portableant/open-context-py
|
refs/heads/master
|
opencontext_py/apps/ocitems/versions/views.py
|
2
|
import json
from django.http import HttpResponse, Http404
from opencontext_py.libs.rootpath import RootPath
from opencontext_py.libs.requestnegotiation import RequestNegotiation
from opencontext_py.apps.ocitems.ocitem.models import OCitem
from opencontext_py.apps.ocitems.ocitem.templating import TemplateItem
from opencontext_py.apps.ocitems.subjects.supplement import SubjectSupplement
from django.template import RequestContext, loader
# A subject is a generic item that is the subject of observations
# A subject is the main type of record in Open Context for analytic data
# The main dependency for this app is on OCitems, which are used to generate
# every type of item in Open Context, including subjects
def index(request):
return HttpResponse("Hello, world. You're at the subjects index.")
def html_view(request, uuid):
ocitem = OCitem()
ocitem.get_item(uuid)
if(ocitem.manifest is not False):
# check to see if there's related data via API calls. Add if so.
subj_s = SubjectSupplement(ocitem.json_ld)
ocitem.json_ld = subj_s.get_catal_related()
rp = RootPath()
base_url = rp.get_baseurl()
temp_item = TemplateItem(request)
temp_item.read_jsonld_dict(ocitem.json_ld)
template = loader.get_template('subjects/view.html')
if temp_item.view_permitted:
req_neg = RequestNegotiation('text/html')
req_neg.supported_types = ['application/json',
'application/ld+json']
if 'HTTP_ACCEPT' in request.META:
req_neg.check_request_support(request.META['HTTP_ACCEPT'])
if req_neg.supported:
if 'json' in req_neg.use_response_type:
# content negotiation requested JSON or JSON-LD
return HttpResponse(json.dumps(ocitem.json_ld,
ensure_ascii=False, indent=4),
content_type=req_neg.use_response_type + "; charset=utf8")
else:
context = RequestContext(request,
{'item': temp_item,
'base_url': base_url})
return HttpResponse(template.render(context))
else:
# client wanted a mimetype we don't support
return HttpResponse(req_neg.error_message,
content_type=req_neg.use_response_type + "; charset=utf8",
status=415)
else:
template = loader.get_template('items/view401.html')
context = RequestContext(request,
{'item': temp_item})
return HttpResponse(template.render(context), status=401)
else:
raise Http404
def json_view(request, uuid):
""" returns a json representation """
ocitem = OCitem()
ocitem.get_item(uuid)
if(ocitem.manifest is not False):
req_neg = RequestNegotiation('application/json')
req_neg.supported_types = ['application/ld+json']
if 'HTTP_ACCEPT' in request.META:
req_neg.check_request_support(request.META['HTTP_ACCEPT'])
if req_neg.supported:
json_output = json.dumps(ocitem.json_ld,
indent=4,
ensure_ascii=False)
return HttpResponse(json_output,
content_type=req_neg.use_response_type + "; charset=utf8")
else:
# client wanted a mimetype we don't support
return HttpResponse(req_neg.error_message,
content_type=req_neg.use_response_type + "; charset=utf8",
status=415)
else:
raise Http404
|
bradh/samba
|
refs/heads/master
|
wintest/wintest.py
|
36
|
#!/usr/bin/env python
'''automated testing library for testing Samba against windows'''
import pexpect, subprocess
import optparse
import sys, os, time, re
class wintest():
'''testing of Samba against windows VMs'''
def __init__(self):
self.vars = {}
self.list_mode = False
self.vms = None
os.environ['PYTHONUNBUFFERED'] = '1'
self.parser = optparse.OptionParser("wintest")
def check_prerequesites(self):
self.info("Checking prerequesites")
self.setvar('HOSTNAME', self.cmd_output("hostname -s").strip())
if os.getuid() != 0:
raise Exception("You must run this script as root")
self.run_cmd('ifconfig ${INTERFACE} ${INTERFACE_NET} up')
if self.getvar('INTERFACE_IPV6'):
self.run_cmd('ifconfig ${INTERFACE} inet6 del ${INTERFACE_IPV6}/64', checkfail=False)
self.run_cmd('ifconfig ${INTERFACE} inet6 add ${INTERFACE_IPV6}/64 up')
self.run_cmd('ifconfig ${NAMED_INTERFACE} ${NAMED_INTERFACE_NET} up')
if self.getvar('NAMED_INTERFACE_IPV6'):
self.run_cmd('ifconfig ${NAMED_INTERFACE} inet6 del ${NAMED_INTERFACE_IPV6}/64', checkfail=False)
self.run_cmd('ifconfig ${NAMED_INTERFACE} inet6 add ${NAMED_INTERFACE_IPV6}/64 up')
def stop_vms(self):
'''Shut down any existing alive VMs, so they do not collide with what we are doing'''
self.info('Shutting down any of our VMs already running')
vms = self.get_vms()
for v in vms:
self.vm_poweroff(v, checkfail=False)
def setvar(self, varname, value):
'''set a substitution variable'''
self.vars[varname] = value
def getvar(self, varname):
'''return a substitution variable'''
if not varname in self.vars:
return None
return self.vars[varname]
def setwinvars(self, vm, prefix='WIN'):
'''setup WIN_XX vars based on a vm name'''
for v in ['VM', 'HOSTNAME', 'USER', 'PASS', 'SNAPSHOT', 'REALM', 'DOMAIN', 'IP']:
vname = '%s_%s' % (vm, v)
if vname in self.vars:
self.setvar("%s_%s" % (prefix,v), self.substitute("${%s}" % vname))
else:
self.vars.pop("%s_%s" % (prefix,v), None)
if self.getvar("WIN_REALM"):
self.setvar("WIN_REALM", self.getvar("WIN_REALM").upper())
self.setvar("WIN_LCREALM", self.getvar("WIN_REALM").lower())
dnsdomain = self.getvar("WIN_REALM")
self.setvar("WIN_BASEDN", "DC=" + dnsdomain.replace(".", ",DC="))
if self.getvar("WIN_USER") is None:
self.setvar("WIN_USER", "administrator")
def info(self, msg):
'''print some information'''
if not self.list_mode:
print(self.substitute(msg))
def load_config(self, fname):
'''load the config file'''
f = open(fname)
for line in f:
line = line.strip()
if len(line) == 0 or line[0] == '#':
continue
colon = line.find(':')
if colon == -1:
raise RuntimeError("Invalid config line '%s'" % line)
varname = line[0:colon].strip()
value = line[colon+1:].strip()
self.setvar(varname, value)
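    # Illustrative config snippet accepted by load_config() ('#' lines are
    # comments; variable names below are examples only):
    #   # test network interfaces
    #   INTERFACE     : eth1
    #   INTERFACE_NET : 10.0.0.1/24
    #   WIN2008R2A_VM : windows-2008r2-a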
def list_steps_mode(self):
'''put wintest in step listing mode'''
self.list_mode = True
def set_skip(self, skiplist):
'''set a list of tests to skip'''
self.skiplist = skiplist.split(',')
def set_vms(self, vms):
'''set a list of VMs to test'''
if vms is not None:
self.vms = []
for vm in vms.split(','):
vm = vm.upper()
self.vms.append(vm)
def skip(self, step):
'''return True if we should skip a step'''
if self.list_mode:
print("\t%s" % step)
return True
return step in self.skiplist
def substitute(self, text):
"""Substitute strings of the form ${NAME} in text, replacing
with substitutions from vars.
"""
if isinstance(text, list):
ret = text[:]
for i in range(len(ret)):
ret[i] = self.substitute(ret[i])
return ret
"""We may have objects such as pexpect.EOF that are not strings"""
if not isinstance(text, str):
return text
while True:
var_start = text.find("${")
if var_start == -1:
return text
var_end = text.find("}", var_start)
if var_end == -1:
return text
var_name = text[var_start+2:var_end]
if not var_name in self.vars:
raise RuntimeError("Unknown substitution variable ${%s}" % var_name)
text = text.replace("${%s}" % var_name, self.vars[var_name])
return text
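    # Illustrative example of substitute() (variable value is hypothetical):
    #   t = wintest(); t.setvar('HOSTNAME', 'dc1')
    #   t.substitute('ping ${HOSTNAME}')       # -> 'ping dc1'
    #   t.substitute(['echo', '${HOSTNAME}'])  # -> ['echo', 'dc1']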
def have_var(self, varname):
'''see if a variable has been set'''
return varname in self.vars
def have_vm(self, vmname):
'''see if a VM should be used'''
if not self.have_var(vmname + '_VM'):
return False
if self.vms is None:
return True
return vmname in self.vms
def putenv(self, key, value):
'''putenv with substitution'''
os.environ[key] = self.substitute(value)
def chdir(self, dir):
'''chdir with substitution'''
os.chdir(self.substitute(dir))
def del_files(self, dirs):
'''delete all files in the given directory'''
for d in dirs:
self.run_cmd("find %s -type f | xargs rm -f" % d)
def write_file(self, filename, text, mode='w'):
'''write to a file'''
f = open(self.substitute(filename), mode=mode)
f.write(self.substitute(text))
f.close()
def run_cmd(self, cmd, dir=".", show=None, output=False, checkfail=True):
'''run a command'''
cmd = self.substitute(cmd)
if isinstance(cmd, list):
self.info('$ ' + " ".join(cmd))
else:
self.info('$ ' + cmd)
if output:
return subprocess.Popen([cmd], shell=True, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, cwd=dir).communicate()[0]
if isinstance(cmd, list):
shell=False
else:
shell=True
if checkfail:
return subprocess.check_call(cmd, shell=shell, cwd=dir)
else:
return subprocess.call(cmd, shell=shell, cwd=dir)
def run_child(self, cmd, dir="."):
'''create a child and return the Popen handle to it'''
cwd = os.getcwd()
cmd = self.substitute(cmd)
if isinstance(cmd, list):
self.info('$ ' + " ".join(cmd))
else:
self.info('$ ' + cmd)
if isinstance(cmd, list):
shell=False
else:
shell=True
os.chdir(dir)
ret = subprocess.Popen(cmd, shell=shell, stderr=subprocess.STDOUT)
os.chdir(cwd)
return ret
def cmd_output(self, cmd):
        '''return output from a command'''
cmd = self.substitute(cmd)
return self.run_cmd(cmd, output=True)
def cmd_contains(self, cmd, contains, nomatch=False, ordered=False, regex=False,
casefold=True):
'''check that command output contains the listed strings'''
if isinstance(contains, str):
contains = [contains]
out = self.cmd_output(cmd)
self.info(out)
for c in self.substitute(contains):
if regex:
if casefold:
c = c.upper()
out = out.upper()
m = re.search(c, out)
if m is None:
start = -1
end = -1
else:
start = m.start()
end = m.end()
elif casefold:
start = out.upper().find(c.upper())
end = start + len(c)
else:
start = out.find(c)
end = start + len(c)
if nomatch:
if start != -1:
raise RuntimeError("Expected to not see %s in %s" % (c, cmd))
else:
if start == -1:
raise RuntimeError("Expected to see %s in %s" % (c, cmd))
if ordered and start != -1:
out = out[end:]
def retry_cmd(self, cmd, contains, retries=30, delay=2, wait_for_fail=False,
ordered=False, regex=False, casefold=True):
'''retry a command a number of times'''
while retries > 0:
try:
self.cmd_contains(cmd, contains, nomatch=wait_for_fail,
ordered=ordered, regex=regex, casefold=casefold)
return
except:
time.sleep(delay)
retries -= 1
self.info("retrying (retries=%u delay=%u)" % (retries, delay))
raise RuntimeError("Failed to find %s" % contains)
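    # Illustrative usage sketch (command and expected output are examples only):
    #   self.retry_cmd('host -t A ${WIN_HOSTNAME}.${WIN_REALM} ${NAMED_INTERFACE_IP}',
    #                  ['has address'], retries=10, delay=3)
    # keeps re-running the command until the output contains 'has address' or
    # the retries are exhausted.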
def pexpect_spawn(self, cmd, timeout=60, crlf=True, casefold=True):
'''wrapper around pexpect spawn'''
cmd = self.substitute(cmd)
self.info("$ " + cmd)
ret = pexpect.spawn(cmd, logfile=sys.stdout, timeout=timeout)
def sendline_sub(line):
line = self.substitute(line)
if crlf:
line = line.replace('\n', '\r\n') + '\r'
return ret.old_sendline(line)
def expect_sub(line, timeout=ret.timeout, casefold=casefold):
line = self.substitute(line)
if casefold:
if isinstance(line, list):
for i in range(len(line)):
if isinstance(line[i], str):
line[i] = '(?i)' + line[i]
elif isinstance(line, str):
line = '(?i)' + line
return ret.old_expect(line, timeout=timeout)
ret.old_sendline = ret.sendline
ret.sendline = sendline_sub
ret.old_expect = ret.expect
ret.expect = expect_sub
return ret
def get_nameserver(self):
'''Get the current nameserver from /etc/resolv.conf'''
child = self.pexpect_spawn('cat /etc/resolv.conf', crlf=False)
i = child.expect(['Generated by wintest', 'nameserver'])
if i == 0:
child.expect('your original resolv.conf')
child.expect('nameserver')
child.expect('\d+.\d+.\d+.\d+')
return child.after
def rndc_cmd(self, cmd, checkfail=True):
'''run a rndc command'''
self.run_cmd("${RNDC} -c ${PREFIX}/etc/rndc.conf %s" % cmd, checkfail=checkfail)
def named_supports_gssapi_keytab(self):
'''see if named supports tkey-gssapi-keytab'''
self.write_file("${PREFIX}/named.conf.test",
'options { tkey-gssapi-keytab "test"; };')
try:
self.run_cmd("${NAMED_CHECKCONF} ${PREFIX}/named.conf.test")
except subprocess.CalledProcessError:
return False
return True
def set_nameserver(self, nameserver):
'''set the nameserver in resolv.conf'''
self.write_file("/etc/resolv.conf.wintest", '''
# Generated by wintest, the Samba v Windows automated testing system
nameserver %s
# your original resolv.conf appears below:
''' % self.substitute(nameserver))
child = self.pexpect_spawn("cat /etc/resolv.conf", crlf=False)
i = child.expect(['your original resolv.conf appears below:', pexpect.EOF])
if i == 0:
child.expect(pexpect.EOF)
contents = child.before.lstrip().replace('\r', '')
self.write_file('/etc/resolv.conf.wintest', contents, mode='a')
self.write_file('/etc/resolv.conf.wintest-bak', contents)
self.run_cmd("mv -f /etc/resolv.conf.wintest /etc/resolv.conf")
        self.resolv_conf_backup = '/etc/resolv.conf.wintest-bak'
def configure_bind(self, kerberos_support=False, include=None):
self.chdir('${PREFIX}')
if self.getvar('NAMED_INTERFACE_IPV6'):
ipv6_listen = 'listen-on-v6 port 53 { ${NAMED_INTERFACE_IPV6}; };'
else:
ipv6_listen = ''
self.setvar('BIND_LISTEN_IPV6', ipv6_listen)
if not kerberos_support:
self.setvar("NAMED_TKEY_OPTION", "")
elif self.getvar('NAMESERVER_BACKEND') != 'SAMBA_INTERNAL':
if self.named_supports_gssapi_keytab():
self.setvar("NAMED_TKEY_OPTION",
'tkey-gssapi-keytab "${PREFIX}/private/dns.keytab";')
else:
self.info("LCREALM=${LCREALM}")
self.setvar("NAMED_TKEY_OPTION",
'''tkey-gssapi-credential "DNS/${LCREALM}";
tkey-domain "${LCREALM}";
''')
self.putenv('KEYTAB_FILE', '${PREFIX}/private/dns.keytab')
self.putenv('KRB5_KTNAME', '${PREFIX}/private/dns.keytab')
else:
self.setvar("NAMED_TKEY_OPTION", "")
if include and self.getvar('NAMESERVER_BACKEND') != 'SAMBA_INTERNAL':
self.setvar("NAMED_INCLUDE", 'include "%s";' % include)
else:
self.setvar("NAMED_INCLUDE", '')
self.run_cmd("mkdir -p ${PREFIX}/etc")
self.write_file("etc/named.conf", '''
options {
listen-on port 53 { ${NAMED_INTERFACE_IP}; };
${BIND_LISTEN_IPV6}
directory "${PREFIX}/var/named";
dump-file "${PREFIX}/var/named/data/cache_dump.db";
pid-file "${PREFIX}/var/named/named.pid";
statistics-file "${PREFIX}/var/named/data/named_stats.txt";
memstatistics-file "${PREFIX}/var/named/data/named_mem_stats.txt";
allow-query { any; };
recursion yes;
${NAMED_TKEY_OPTION}
max-cache-ttl 10;
max-ncache-ttl 10;
forward only;
forwarders {
${DNSSERVER};
};
};
key "rndc-key" {
algorithm hmac-md5;
secret "lA/cTrno03mt5Ju17ybEYw==";
};
controls {
inet ${NAMED_INTERFACE_IP} port 953
allow { any; } keys { "rndc-key"; };
};
${NAMED_INCLUDE}
''')
if self.getvar('NAMESERVER_BACKEND') == 'SAMBA_INTERNAL':
self.write_file('etc/named.conf',
'''
zone "%s" IN {
type forward;
forward only;
forwarders {
%s;
};
};
''' % (self.getvar('LCREALM'), self.getvar('INTERFACE_IP')),
mode='a')
# add forwarding for the windows domains
domains = self.get_domains()
for d in domains:
self.write_file('etc/named.conf',
'''
zone "%s" IN {
type forward;
forward only;
forwarders {
%s;
};
};
''' % (d, domains[d]),
mode='a')
self.write_file("etc/rndc.conf", '''
# Start of rndc.conf
key "rndc-key" {
algorithm hmac-md5;
secret "lA/cTrno03mt5Ju17ybEYw==";
};
options {
default-key "rndc-key";
default-server ${NAMED_INTERFACE_IP};
default-port 953;
};
''')
def stop_bind(self):
'''Stop our private BIND from listening and operating'''
self.rndc_cmd("stop", checkfail=False)
self.port_wait("${NAMED_INTERFACE_IP}", 53, wait_for_fail=True)
self.run_cmd("rm -rf var/named")
def start_bind(self):
'''restart the test environment version of bind'''
self.info("Restarting bind9")
self.chdir('${PREFIX}')
self.set_nameserver(self.getvar('NAMED_INTERFACE_IP'))
self.run_cmd("mkdir -p var/named/data")
self.run_cmd("chown -R ${BIND_USER} var/named")
self.bind_child = self.run_child("${BIND9} -u ${BIND_USER} -n 1 -c ${PREFIX}/etc/named.conf -g")
self.port_wait("${NAMED_INTERFACE_IP}", 53)
self.rndc_cmd("flush")
def restart_bind(self, kerberos_support=False, include=None):
self.configure_bind(kerberos_support=kerberos_support, include=include)
self.stop_bind()
self.start_bind()
def restore_resolv_conf(self):
'''restore the /etc/resolv.conf after testing is complete'''
if getattr(self, 'resolv_conf_backup', False):
self.info("restoring /etc/resolv.conf")
self.run_cmd("mv -f %s /etc/resolv.conf" % self.resolv_conf_backup)
def vm_poweroff(self, vmname, checkfail=True):
'''power off a VM'''
self.setvar('VMNAME', vmname)
self.run_cmd("${VM_POWEROFF}", checkfail=checkfail)
def vm_reset(self, vmname):
'''reset a VM'''
self.setvar('VMNAME', vmname)
self.run_cmd("${VM_RESET}")
def vm_restore(self, vmname, snapshot):
'''restore a VM'''
self.setvar('VMNAME', vmname)
self.setvar('SNAPSHOT', snapshot)
self.run_cmd("${VM_RESTORE}")
def ping_wait(self, hostname):
'''wait for a hostname to come up on the network'''
hostname = self.substitute(hostname)
loops=10
while loops > 0:
try:
self.run_cmd("ping -c 1 -w 10 %s" % hostname)
break
except:
loops = loops - 1
if loops == 0:
raise RuntimeError("Failed to ping %s" % hostname)
self.info("Host %s is up" % hostname)
def port_wait(self, hostname, port, retries=200, delay=3, wait_for_fail=False):
'''wait for a host to come up on the network'''
while retries > 0:
child = self.pexpect_spawn("nc -v -z -w 1 %s %u" % (hostname, port), crlf=False, timeout=1)
child.expect([pexpect.EOF, pexpect.TIMEOUT])
child.close()
i = child.exitstatus
if wait_for_fail:
#wait for timeout or fail
                if i is None or i > 0:
return
else:
if i == 0:
return
time.sleep(delay)
retries -= 1
self.info("retrying (retries=%u delay=%u)" % (retries, delay))
raise RuntimeError("gave up waiting for %s:%d" % (hostname, port))
def run_net_time(self, child):
'''run net time on windows'''
child.sendline("net time \\\\${HOSTNAME} /set")
child.expect("Do you want to set the local computer")
child.sendline("Y")
child.expect("The command completed successfully")
def run_date_time(self, child, time_tuple=None):
'''run date and time on windows'''
if time_tuple is None:
time_tuple = time.localtime()
child.sendline("date")
child.expect("Enter the new date:")
i = child.expect(["dd-mm-yy", "mm-dd-yy"])
if i == 0:
child.sendline(time.strftime("%d-%m-%y", time_tuple))
else:
child.sendline(time.strftime("%m-%d-%y", time_tuple))
child.expect("C:")
child.sendline("time")
child.expect("Enter the new time:")
child.sendline(time.strftime("%H:%M:%S", time_tuple))
child.expect("C:")
def get_ipconfig(self, child):
'''get the IP configuration of the child'''
child.sendline("ipconfig /all")
child.expect('Ethernet adapter ')
child.expect("[\w\s]+")
self.setvar("WIN_NIC", child.after)
child.expect(['IPv4 Address', 'IP Address'])
child.expect('\d+.\d+.\d+.\d+')
self.setvar('WIN_IPV4_ADDRESS', child.after)
child.expect('Subnet Mask')
child.expect('\d+.\d+.\d+.\d+')
self.setvar('WIN_SUBNET_MASK', child.after)
child.expect('Default Gateway')
i = child.expect(['\d+.\d+.\d+.\d+', "C:"])
if i == 0:
self.setvar('WIN_DEFAULT_GATEWAY', child.after)
child.expect("C:")
def get_is_dc(self, child):
'''check if a windows machine is a domain controller'''
child.sendline("dcdiag")
i = child.expect(["is not a [Directory Server|DC]",
"is not recognized as an internal or external command",
"Home Server = ",
"passed test Replications"])
if i == 0:
return False
if i == 1 or i == 3:
child.expect("C:")
child.sendline("net config Workstation")
child.expect("Workstation domain")
child.expect('[\S]+')
domain = child.after
i = child.expect(["Workstation Domain DNS Name", "Logon domain"])
        # If we get the Logon domain first, we are not in an AD domain
if i == 1:
return False
if domain.upper() == self.getvar("WIN_DOMAIN").upper():
return True
child.expect('[\S]+')
hostname = child.after
if hostname.upper() == self.getvar("WIN_HOSTNAME").upper():
return True
def set_noexpire(self, child, username):
"""Ensure this user's password does not expire"""
child.sendline('wmic useraccount where name="%s" set PasswordExpires=FALSE' % username)
child.expect("update successful")
child.expect("C:")
def run_tlntadmn(self, child):
'''remove the annoying telnet restrictions'''
child.sendline('tlntadmn config maxconn=1024')
child.expect(["The settings were successfully updated", "Access is denied"])
child.expect("C:")
def disable_firewall(self, child):
'''remove the annoying firewall'''
child.sendline('netsh advfirewall set allprofiles state off')
i = child.expect(["Ok", "The following command was not found: advfirewall set allprofiles state off", "The requested operation requires elevation", "Access is denied"])
child.expect("C:")
if i == 1:
child.sendline('netsh firewall set opmode mode = DISABLE profile = ALL')
i = child.expect(["Ok", "The following command was not found", "Access is denied"])
if i != 0:
self.info("Firewall disable failed - ignoring")
child.expect("C:")
def set_dns(self, child):
child.sendline('netsh interface ip set dns "${WIN_NIC}" static ${NAMED_INTERFACE_IP} primary')
i = child.expect(['C:', pexpect.EOF, pexpect.TIMEOUT], timeout=5)
if i > 0:
return True
else:
return False
def set_ip(self, child):
"""fix the IP address to the same value it had when we
connected, but don't use DHCP, and force the DNS server to our
DNS server. This allows DNS updates to run"""
self.get_ipconfig(child)
if self.getvar("WIN_IPV4_ADDRESS") != self.getvar("WIN_IP"):
raise RuntimeError("ipconfig address %s != nmblookup address %s" % (self.getvar("WIN_IPV4_ADDRESS"),
self.getvar("WIN_IP")))
child.sendline('netsh')
child.expect('netsh>')
child.sendline('offline')
child.expect('netsh>')
child.sendline('routing ip add persistentroute dest=0.0.0.0 mask=0.0.0.0 name="${WIN_NIC}" nhop=${WIN_DEFAULT_GATEWAY}')
child.expect('netsh>')
child.sendline('interface ip set address "${WIN_NIC}" static ${WIN_IPV4_ADDRESS} ${WIN_SUBNET_MASK} ${WIN_DEFAULT_GATEWAY} 1 store=persistent')
i = child.expect(['The syntax supplied for this command is not valid. Check help for the correct syntax', 'netsh>', pexpect.EOF, pexpect.TIMEOUT], timeout=5)
if i == 0:
child.sendline('interface ip set address "${WIN_NIC}" static ${WIN_IPV4_ADDRESS} ${WIN_SUBNET_MASK} ${WIN_DEFAULT_GATEWAY} 1')
child.expect('netsh>')
child.sendline('commit')
child.sendline('online')
child.sendline('exit')
child.expect([pexpect.EOF, pexpect.TIMEOUT], timeout=5)
return True
def resolve_ip(self, hostname, retries=60, delay=5):
'''resolve an IP given a hostname, assuming NBT'''
while retries > 0:
child = self.pexpect_spawn("bin/nmblookup %s" % hostname)
i = 0
while i == 0:
i = child.expect(["querying", '\d+.\d+.\d+.\d+', hostname, "Lookup failed"])
if i == 0:
child.expect("\r")
if i == 1:
return child.after
retries -= 1
time.sleep(delay)
self.info("retrying (retries=%u delay=%u)" % (retries, delay))
raise RuntimeError("Failed to resolve IP of %s" % hostname)
def open_telnet(self, hostname, username, password, retries=60, delay=5, set_time=False, set_ip=False,
disable_firewall=True, run_tlntadmn=True, set_noexpire=False):
'''open a telnet connection to a windows server, return the pexpect child'''
set_route = False
set_dns = False
set_telnetclients = True
start_telnet = True
if self.getvar('WIN_IP'):
ip = self.getvar('WIN_IP')
else:
ip = self.resolve_ip(hostname)
self.setvar('WIN_IP', ip)
while retries > 0:
child = self.pexpect_spawn("telnet " + ip + " -l '" + username + "'")
i = child.expect(["Welcome to Microsoft Telnet Service",
"Denying new connections due to the limit on number of connections",
"No more connections are allowed to telnet server",
"Unable to connect to remote host",
"No route to host",
"Connection refused",
pexpect.EOF])
if i != 0:
child.close()
time.sleep(delay)
retries -= 1
self.info("retrying (retries=%u delay=%u)" % (retries, delay))
continue
child.expect("password:")
child.sendline(password)
i = child.expect(["C:",
"TelnetClients",
"Denying new connections due to the limit on number of connections",
"No more connections are allowed to telnet server",
"Unable to connect to remote host",
"No route to host",
"Connection refused",
pexpect.EOF])
if i == 1:
if set_telnetclients:
self.run_cmd('bin/net rpc group add TelnetClients -S $WIN_IP -U$WIN_USER%$WIN_PASS')
self.run_cmd('bin/net rpc group addmem TelnetClients "authenticated users" -S $WIN_IP -U$WIN_USER%$WIN_PASS')
child.close()
retries -= 1
set_telnetclients = False
self.info("retrying (retries=%u delay=%u)" % (retries, delay))
continue
else:
raise RuntimeError("Failed to connect with telnet due to missing TelnetClients membership")
if i == 6:
# This only works if it is installed and enabled, but not started. Not entirely likely, but possible
self.run_cmd('bin/net rpc service start TlntSvr -S $WIN_IP -U$WIN_USER%$WIN_PASS')
child.close()
start_telnet = False
retries -= 1
self.info("retrying (retries=%u delay=%u)" % (retries, delay))
continue
if i != 0:
child.close()
time.sleep(delay)
retries -= 1
self.info("retrying (retries=%u delay=%u)" % (retries, delay))
continue
if set_dns:
set_dns = False
if self.set_dns(child):
                    continue
if set_route:
child.sendline('route add 0.0.0.0 mask 0.0.0.0 ${WIN_DEFAULT_GATEWAY}')
child.expect("C:")
set_route = False
if set_time:
self.run_date_time(child, None)
set_time = False
if run_tlntadmn:
self.run_tlntadmn(child)
run_tlntadmn = False
if set_noexpire:
self.set_noexpire(child, username)
set_noexpire = False
if disable_firewall:
self.disable_firewall(child)
disable_firewall = False
if set_ip:
set_ip = False
if self.set_ip(child):
set_route = True
set_dns = True
continue
return child
raise RuntimeError("Failed to connect with telnet")
def kinit(self, username, password):
'''use kinit to setup a credentials cache'''
self.run_cmd("kdestroy")
self.putenv('KRB5CCNAME', "${PREFIX}/ccache.test")
username = self.substitute(username)
s = username.split('@')
        # only upper-case the realm part if the principal actually contains one
        if len(s) > 1:
            s[1] = s[1].upper()
username = '@'.join(s)
child = self.pexpect_spawn('kinit ' + username)
child.expect("Password")
child.sendline(password)
child.expect(pexpect.EOF)
child.close()
if child.exitstatus != 0:
raise RuntimeError("kinit failed with status %d" % child.exitstatus)
def get_domains(self):
'''return a dictionary of DNS domains and IPs for named.conf'''
ret = {}
for v in self.vars:
if v[-6:] == "_REALM":
base = v[:-6]
if base + '_IP' in self.vars:
ret[self.vars[base + '_REALM']] = self.vars[base + '_IP']
return ret
def wait_reboot(self, retries=3):
'''wait for a VM to reboot'''
# first wait for it to shutdown
self.port_wait("${WIN_IP}", 139, wait_for_fail=True, delay=6)
# now wait for it to come back. If it fails to come back
# then try resetting it
while retries > 0:
try:
self.port_wait("${WIN_IP}", 139)
return
except:
retries -= 1
self.vm_reset("${WIN_VM}")
self.info("retrying reboot (retries=%u)" % retries)
raise RuntimeError(self.substitute("VM ${WIN_VM} failed to reboot"))
def get_vms(self):
'''return a dictionary of all the configured VM names'''
ret = []
for v in self.vars:
if v[-3:] == "_VM":
ret.append(self.vars[v])
return ret
def run_dcpromo_as_first_dc(self, vm, func_level=None):
self.setwinvars(vm)
self.info("Configuring a windows VM ${WIN_VM} at the first DC in the domain using dcpromo")
child = self.open_telnet("${WIN_HOSTNAME}", "administrator", "${WIN_PASS}", set_time=True)
if self.get_is_dc(child):
return
if func_level == '2008r2':
self.setvar("FUNCTION_LEVEL_INT", str(4))
elif func_level == '2003':
self.setvar("FUNCTION_LEVEL_INT", str(1))
else:
self.setvar("FUNCTION_LEVEL_INT", str(0))
child = self.open_telnet("${WIN_HOSTNAME}", "administrator", "${WIN_PASS}", set_ip=True, set_noexpire=True)
"""This server must therefore not yet be a directory server, so we must promote it"""
child.sendline("copy /Y con answers.txt")
child.sendline('''
[DCInstall]
; New forest promotion
ReplicaOrNewDomain=Domain
NewDomain=Forest
NewDomainDNSName=${WIN_REALM}
ForestLevel=${FUNCTION_LEVEL_INT}
DomainNetbiosName=${WIN_DOMAIN}
DomainLevel=${FUNCTION_LEVEL_INT}
InstallDNS=Yes
ConfirmGc=Yes
CreateDNSDelegation=No
DatabasePath="C:\Windows\NTDS"
LogPath="C:\Windows\NTDS"
SYSVOLPath="C:\Windows\SYSVOL"
; Set SafeModeAdminPassword to the correct value prior to using the unattend file
SafeModeAdminPassword=${WIN_PASS}
; Run-time flags (optional)
RebootOnCompletion=No
''')
child.expect("copied.")
child.expect("C:")
child.expect("C:")
child.sendline("dcpromo /answer:answers.txt")
i = child.expect(["You must restart this computer", "failed", "Active Directory Domain Services was not installed", "C:", pexpect.TIMEOUT], timeout=240)
if i == 1 or i == 2:
raise Exception("dcpromo failed")
if i == 4: # timeout
child = self.open_telnet("${WIN_HOSTNAME}", "administrator", "${WIN_PASS}")
child.sendline("shutdown -r -t 0")
self.port_wait("${WIN_IP}", 139, wait_for_fail=True)
self.port_wait("${WIN_IP}", 139)
child = self.open_telnet("${WIN_HOSTNAME}", "administrator", "${WIN_PASS}")
# Check if we became a DC by now
if not self.get_is_dc(child):
raise Exception("dcpromo failed (and wasn't a DC even after rebooting)")
# Give DNS registration a kick
child.sendline("ipconfig /registerdns")
self.retry_cmd("host -t SRV _ldap._tcp.${WIN_REALM} ${WIN_IP}", ['has SRV record'], retries=60, delay=5 )
def start_winvm(self, vm):
'''start a Windows VM'''
self.setwinvars(vm)
self.info("Joining a windows box to the domain")
self.vm_poweroff("${WIN_VM}", checkfail=False)
self.vm_restore("${WIN_VM}", "${WIN_SNAPSHOT}")
def run_winjoin(self, vm, domain, username="administrator", password="${PASSWORD1}"):
'''join a windows box to a domain'''
child = self.open_telnet("${WIN_HOSTNAME}", "${WIN_USER}", "${WIN_PASS}", set_time=True, set_ip=True, set_noexpire=True)
retries = 5
while retries > 0:
child.sendline("ipconfig /flushdns")
child.expect("C:")
child.sendline("netdom join ${WIN_HOSTNAME} /Domain:%s /UserD:%s /PasswordD:%s" % (domain, username, password))
i = child.expect(["The command completed successfully",
"The specified domain either does not exist or could not be contacted."], timeout=120)
if i == 0:
break
time.sleep(10)
retries -= 1
child.expect("C:")
child.sendline("shutdown /r -t 0")
self.wait_reboot()
child = self.open_telnet("${WIN_HOSTNAME}", "${WIN_USER}", "${WIN_PASS}", set_time=True, set_ip=True)
child.sendline("ipconfig /registerdns")
child.expect("Registration of the DNS resource records for all adapters of this computer has been initiated. Any errors will be reported in the Event Viewer")
child.expect("C:")
def test_remote_smbclient(self, vm, username="${WIN_USER}", password="${WIN_PASS}", args=""):
'''test smbclient against remote server'''
self.setwinvars(vm)
self.info('Testing smbclient')
self.chdir('${PREFIX}')
smbclient = self.getvar("smbclient")
self.cmd_contains("%s --version" % (smbclient), ["${SAMBA_VERSION}"])
self.retry_cmd('%s -L ${WIN_HOSTNAME} -U%s%%%s %s' % (smbclient, username, password, args), ["IPC"], retries=60, delay=5)
def test_net_use(self, vm, realm, domain, username, password):
self.setwinvars(vm)
self.info('Testing net use against Samba3 member')
child = self.open_telnet("${WIN_HOSTNAME}", "%s\\%s" % (domain, username), password)
child.sendline("net use t: \\\\${HOSTNAME}.%s\\test" % realm)
child.expect("The command completed successfully")
def setup(self, testname, subdir):
'''setup for main tests, parsing command line'''
self.parser.add_option("--conf", type='string', default='', help='config file')
self.parser.add_option("--skip", type='string', default='', help='list of steps to skip (comma separated)')
self.parser.add_option("--vms", type='string', default=None, help='list of VMs to use (comma separated)')
self.parser.add_option("--list", action='store_true', default=False, help='list the available steps')
self.parser.add_option("--rebase", action='store_true', default=False, help='do a git pull --rebase')
self.parser.add_option("--clean", action='store_true', default=False, help='clean the tree')
self.parser.add_option("--prefix", type='string', default=None, help='override install prefix')
self.parser.add_option("--sourcetree", type='string', default=None, help='override sourcetree location')
self.parser.add_option("--nocleanup", action='store_true', default=False, help='disable cleanup code')
self.parser.add_option("--use-ntvfs", action='store_true', default=False, help='use NTVFS for the fileserver')
self.parser.add_option("--dns-backend", type="choice",
choices=["SAMBA_INTERNAL", "BIND9_FLATFILE", "BIND9_DLZ", "NONE"],
help="The DNS server backend. SAMBA_INTERNAL is the builtin name server (default), " \
"BIND9_FLATFILE uses bind9 text database to store zone information, " \
"BIND9_DLZ uses samba4 AD to store zone information, " \
"NONE skips the DNS setup entirely (not recommended)",
default="SAMBA_INTERNAL")
self.opts, self.args = self.parser.parse_args()
if not self.opts.conf:
print("Please specify a config file with --conf")
sys.exit(1)
# we don't need fsync safety in these tests
self.putenv('TDB_NO_FSYNC', '1')
self.load_config(self.opts.conf)
nameserver = self.get_nameserver()
if nameserver == self.getvar('NAMED_INTERFACE_IP'):
raise RuntimeError("old /etc/resolv.conf must not contain %s as a nameserver, this will create loops with the generated dns configuration" % nameserver)
self.setvar('DNSSERVER', nameserver)
self.set_skip(self.opts.skip)
self.set_vms(self.opts.vms)
if self.opts.list:
self.list_steps_mode()
if self.opts.prefix:
self.setvar('PREFIX', self.opts.prefix)
if self.opts.sourcetree:
self.setvar('SOURCETREE', self.opts.sourcetree)
if self.opts.rebase:
self.info('rebasing')
self.chdir('${SOURCETREE}')
self.run_cmd('git pull --rebase')
if self.opts.clean:
self.info('cleaning')
self.chdir('${SOURCETREE}/' + subdir)
self.run_cmd('make clean')
if self.opts.use_ntvfs:
self.setvar('USE_NTVFS', "--use-ntvfs")
else:
self.setvar('USE_NTVFS', "")
self.setvar('NAMESERVER_BACKEND', self.opts.dns_backend)
self.setvar('DNS_FORWARDER', "--option=dns forwarder=%s" % nameserver)
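# --- Editor's note: a minimal standalone sketch, not part of the original test
# driver. It shows the realm handling that kinit() above relies on: the part of
# the principal after '@' is upper-cased before calling kinit, because Kerberos
# realm names are case-sensitive and conventionally upper case. The principal
# below is a hypothetical example value.
if __name__ == '__main__':
    principal = 'administrator@samba.example.com'
    parts = principal.split('@')
    if len(parts) > 1:
        parts[1] = parts[1].upper()
    print('@'.join(parts))  # administrator@SAMBA.EXAMPLE.COM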
|
ncloudioj/splice
|
refs/heads/master
|
splice/commands.py
|
6
|
import os
import multiprocessing
import logging
import sys
import json
import calendar
from datetime import datetime
from operator import itemgetter
from flask.ext.script import Command, Option, Manager
from flask.ext.script.commands import InvalidCommand
from gunicorn.app.base import Application as GunicornApplication
from gunicorn.config import Config as GunicornConfig
command_logger_set = False
def setup_command_logger(loglevel=None):
global command_logger_set
if not command_logger_set:
loglevel = loglevel or logging.INFO
handler = logging.StreamHandler(sys.stdout)
handler.setLevel(loglevel)
try:
from colorlog import ColoredFormatter
fmt = ColoredFormatter("%(log_color)s%(message)s",
log_colors={
"DEBUG": "cyan",
"INFO": "green",
"WARNING": "yellow",
"ERROR": "red",
"CRITICAL": "bold_red"})
except ImportError:
# fall back to non-colored output
fmt = logging.Formatter("%(message)s")
handler.setFormatter(fmt)
logger = logging.getLogger("command")
logger.addHandler(handler)
logger.setLevel(loglevel)
command_logger_set = True
else:
logger = logging.getLogger("command")
return logger
class GunicornServerCommand(Command):
"""
Run the splice Server using gunicorn
"""
def __init__(self, host='127.0.0.1', port=5000, workers=1,
access_logfile='-', max_requests=0, debug=True):
self.options = {
"host": host,
"port": port,
"workers": workers,
"access_logfile": access_logfile,
"max_requests": max_requests,
"debug": debug,
}
def get_options(self):
options = (
Option('-H', '--host',
dest='host',
type=str,
default=self.options['host'],
help="hostname to bind server to"),
Option('-p', '--port',
dest='port',
type=int,
default=self.options['port'],
help="port to bind server to"),
Option('-w', '--workers',
dest='workers',
type=int,
default=self.options['workers'],
help="set the number of workers"),
Option('--access-logfile',
dest='access_logfile',
type=str,
default=self.options['access_logfile'],
help="set the access log output location"),
Option('--max-requests',
dest='max_requests',
type=int,
default=self.options['max_requests'],
help="set the maximum number of requests " +
"to serve before reloading"),
Option('--no-debug',
dest='debug',
action='store_false',
default=self.options['debug'],
help="turn off debug mode"),
)
return options
def run(self, **kwargs):
self.options.update(kwargs)
if not kwargs.get('debug'):
self.options['workers'] = multiprocessing.cpu_count() * 2 + 1
options = self.options
class GunicornServer(GunicornApplication):
def init(self, **kwargs):
config = {
'bind': '{0}:{1}'.format(
options['host'],
options['port']
),
'workers': options['workers'],
'worker_class': 'gevent',
'accesslog': options['access_logfile'],
'max_requests': options['max_requests'],
}
return config
def load(self):
# Step needed to get around flask's import time side-effects
from splice.environment import Environment
env = Environment.instance()
return env.application
def load_config(self):
# Overriding to prevent Gunicorn from reading
# the command-line arguments
self.cfg = GunicornConfig(self.usage, prog=self.prog)
cfg = self.init()
                if cfg:
for k, v in cfg.items():
self.cfg.set(k.lower(), v)
GunicornServer().run()
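# --- Editor's note: illustrative sketch only (kept as a comment so importing
# this module stays side-effect free). With --no-debug, the command above sizes
# the pool with the usual gunicorn rule of thumb, 2 * CPU cores + 1 (e.g. 17
# workers on an 8-core host), and init() turns the options into a config dict
# along these lines; the bind/accesslog/max_requests values mirror the defaults
# set in __init__:
#     {'bind': '127.0.0.1:5000', 'workers': 17,
#      'worker_class': 'gevent', 'accesslog': '-', 'max_requests': 0}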
DataCommand = Manager(usage="database import/export utility")
ListCommand = Manager(usage="list http endpoints enabled")
@ListCommand.command
def urls():
"""
Return available endpoints
"""
logger = setup_command_logger()
endpoints = []
from flask import current_app
for rule in current_app.url_map.iter_rules():
try:
endpoints.append((
rule.rule,
sorted(list(rule.methods)),
))
except Exception, e:
logger.error(e)
endpoints = sorted(endpoints, key=itemgetter(0))
for url, methods in endpoints:
logger.info("{0} {1}".format(
url,
json.dumps(methods),
))
@DataCommand.option("-v", "--verbose", action="store_true", dest="verbose", help="turns on verbose mode", default=False, required=False)
@DataCommand.option("-p", "--preserve-format", action="store_true", dest="old_format", help="To keep data in the non-country aware format", required=False)
@DataCommand.option("-c", "--console", action="store_true", dest="console_out", help="Enable console output", required=False)
@DataCommand.option("-o", "--out_path", type=str, help="To dump to a file, provide a path/filename", required=False)
@DataCommand.option("in_file", type=str, help="Path to directoryLinks.json file")
@DataCommand.option("country_code", type=str, help="ISO3166 country code for the file")
@DataCommand.option("channel_id", type=int, help="Channel ID to ingest for")
def load_links(in_file, country_code, channel_id, out_path, console_out, verbose, old_format, *args, **kwargs):
"""
Load a set of links in the data warehouse
"""
if verbose:
logger = setup_command_logger(logging.DEBUG)
else:
logger = setup_command_logger(logging.INFO)
rawdata = None
with open(in_file, 'r') as f:
rawdata = json.load(f)
from splice.ingest import ingest_links, IngestError
try:
locale = rawdata.keys()[0]
country_locale_str = "/".join([country_code, locale])
new_data = ingest_links({country_locale_str: rawdata[locale]}, channel_id)
if old_format:
new_data = new_data[new_data.keys()[0]]
if console_out:
print json.dumps(new_data, sort_keys=True, indent=2)
if out_path:
directory, _ = os.path.split(out_path)
if not os.path.exists(directory):
os.makedirs(directory)
with open(out_path, "w") as f:
json.dump(new_data, f, sort_keys=True, indent=2)
logger.info("wrote {0}".format(out_path))
except IngestError, e:
raise InvalidCommand(e.message)
except:
import traceback
traceback.print_exc()
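# --- Editor's note: a small worked example (hypothetical data) of the key
# transformation load_links() performs before calling ingest_links(): the
# single locale keyed in the input file is re-keyed as "<country_code>/<locale>",
# and --preserve-format later collapses the result back to its first key.
#     rawdata = {"en-US": [...]}                      # from directoryLinks.json
#     country_locale_str = "/".join(["US", "en-US"])  # -> "US/en-US"
#     ingest_links({"US/en-US": rawdata["en-US"]}, channel_id)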
@DataCommand.option("-v", "--verbose", action="store_true", dest="verbose", help="turns on verbose mode", default=False, required=False)
@DataCommand.option("-d", "--deploy", action="store_true", dest="deploy_flag", help="Deploy to S3", required=False)
@DataCommand.option("-c", "--console", action="store_true", dest="console_out", help="Enable console output", required=False)
@DataCommand.option("-o", "--out_path", type=str, help="To dump to a file, provide a path/filename", required=False)
@DataCommand.option("in_file", type=str, help="Path to tiles.json file")
@DataCommand.option("channel_id", type=int, help="Channel ID to ingest for")
def ingest_tiles(in_file, channel_id, out_path, console_out, deploy_flag, verbose, *args, **kwargs):
"""
Load a set of links for all country/locale combinations into data warehouse and optionally deploy
"""
if verbose:
logger = setup_command_logger(logging.DEBUG)
else:
logger = setup_command_logger(logging.INFO)
rawdata = None
with open(in_file, 'r') as f:
rawdata = json.load(f)
from splice.ingest import ingest_links, distribute, IngestError
try:
new_data = ingest_links(rawdata, channel_id)
if console_out:
print json.dumps(new_data, sort_keys=True, indent=2)
if out_path:
directory, _ = os.path.split(out_path)
if not os.path.exists(directory):
os.makedirs(directory)
with open(out_path, "w") as f:
json.dump(new_data, f, sort_keys=True, indent=2)
logger.info("wrote {0}".format(out_path))
if deploy_flag:
logger.info("Distributing AND Deploying data")
else:
logger.info("Distributing data (NO deploy)")
distribute(new_data, channel_id, deploy_flag)
except IngestError, e:
raise InvalidCommand(e.message)
except:
import traceback
traceback.print_exc()
@DataCommand.option("-v", "--verbose", action="store_true", dest="verbose", help="turns on verbose mode", default=False, required=False)
@DataCommand.option("-q", "--quiet", action="store_true", dest="quiet", help="turns on quiet mode", default=False, required=False)
@DataCommand.option("-l", "--leniency-minutes", type=int, dest="leniency", help="Leniency period in minutes for scheduling", default=15)
@DataCommand.option("-d", "--deploy", action="store_true", dest="deploy_flag", help="Deploy to S3", required=False)
@DataCommand.option("-c", "--console", action="store_true", dest="console_out", help="Enable console output", required=False)
def deploy_scheduled(console_out, deploy_flag, leniency, verbose, quiet, *args, **kwargs):
"""
Find scheduled distributions and deploy
"""
if verbose:
logger = setup_command_logger(logging.DEBUG)
elif quiet:
logger = setup_command_logger(logging.ERROR)
else:
logger = setup_command_logger(logging.INFO)
from splice.queries import get_scheduled_distributions, unschedule_distribution
import requests
dt = datetime.utcnow()
distributions = get_scheduled_distributions(leniency, dt)
logger.info("{0} - found {1} distributions".format(dt, len(distributions)))
dist_data = []
for dist in distributions:
logger.info("fetching {0}".format(dist.url))
r = requests.get(dist.url)
if r.status_code == 200:
dist_data.append((r.json(), dist.channel_id, dist.id))
else:
logger.error("FETCH_ERROR status_code:{0} url:{1}".format(r.status_code, dist.url))
from splice.ingest import ingest_links, distribute, IngestError
if deploy_flag:
for rawdata, channel_id, dist_id in dist_data:
try:
new_data = ingest_links(rawdata, channel_id)
if console_out:
print json.dumps(new_data, sort_keys=True, indent=2)
distribute(new_data, channel_id, deploy_flag)
unschedule_distribution(dist_id)
except IngestError, e:
raise InvalidCommand(e.message)
except:
import traceback
traceback.print_exc()
else:
logger.info("DRY_RUN_MODE. To deploy, use the -d option")
RedshiftCommand = Manager(usage="Redshift utility commands")
@RedshiftCommand.option("out_path", type=str, help="Path to output new migration file")
def new_migration(out_path, *args, **kwargs):
"""
Create an empty migration file
"""
logger = setup_command_logger(logging.INFO)
utc_seconds = calendar.timegm(datetime.utcnow().timetuple())
file_path = os.path.join(out_path, "{0}.sql".format(utc_seconds))
open(file_path, "a").close()
logger.info("wrote {0}".format(file_path))
|
tareq89/foodbank
|
refs/heads/master
|
foodbank/Insert/functions/distanceCalculate.py
|
2
|
from math import sin, cos, sqrt, atan2, radians
import decimal
import geopy
import geopy.distance
def DistanceClaculate(lat1,lon1,lat2,lon2):
pt1 = geopy.Point(lat1, lon1)
pt2 = geopy.Point(lat2, lon2)
# distance.distance() is the VincentyDistance by default.
dist = geopy.distance.distance(pt1, pt2).km
return round(dist,2)
def DistanceClaculate2():
R = 6373.0
# lat1 = radians(23.812678)
# lon1 = radians(90.403876)
# lat2 = radians(23.778908)
# lon2 = radians(90.398211)
lat1 = radians(23.810332)
lon1 = radians(90.4125181)
lat2 = radians(23.7417656252)
lon2 = radians(90.4087260576)
dlon = lon2 - lon1
dlat = lat2 - lat1
a = (sin(dlat/2))**2 + cos(lat1) * cos(lat2) * (sin(dlon/2))**2
c = 2 * atan2(sqrt(a), sqrt(1-a))
distance = R * c
print "Result", distance
# print "Should be", 278.546
#print "Result", distance46
return distance
# DistanceClaculate2()
# Result 437.002911868
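# --- Editor's note: illustrative comparison sketch, not part of the original
# module. Running this file directly feeds the same hard-coded coordinates used
# in DistanceClaculate2() through the geopy-based DistanceClaculate() so the
# geodesic and haversine results can be compared side by side.
if __name__ == '__main__':
    geopy_km = DistanceClaculate(23.810332, 90.4125181, 23.7417656252, 90.4087260576)
    haversine_km = DistanceClaculate2()
    print "geopy:", geopy_km, "km; haversine:", haversine_km, "km"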
|
shrkey/ardupilot
|
refs/heads/dark
|
Tools/mavproxy_modules/lib/geodesic_grid.py
|
108
|
# Copyright (C) 2016 Intel Corporation. All rights reserved.
#
# This file is free software: you can redistribute it and/or modify it
# under the terms of the GNU General Public License as published by the
# Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This file is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
# See the GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program. If not, see <http://www.gnu.org/licenses/>.
'''
This module takes libraries/AP_Math/AP_GeodesicGrid.h reference for defining
the geodesic sections.
'''
import math
from scipy.constants import golden as g
_first_half = (
((-g, 1, 0), (-1, 0,-g), (-g,-1, 0)),
((-1, 0,-g), (-g,-1, 0), ( 0,-g,-1)),
((-g,-1, 0), ( 0,-g,-1), ( 0,-g, 1)),
((-1, 0,-g), ( 0,-g,-1), ( 1, 0,-g)),
(( 0,-g,-1), ( 0,-g, 1), ( g,-1, 0)),
(( 0,-g,-1), ( 1, 0,-g), ( g,-1, 0)),
(( g,-1, 0), ( 1, 0,-g), ( g, 1, 0)),
(( 1, 0,-g), ( g, 1, 0), ( 0, g,-1)),
(( 1, 0,-g), ( 0, g,-1), (-1, 0,-g)),
(( 0, g,-1), (-g, 1, 0), (-1, 0,-g)),
)
_second_half = tuple(
((-xa, -ya, -za), (-xb, -yb, -zb), (-xc, -yc, -zc))
for (xa, ya, za), (xb, yb, zb), (xc, yc, zc) in _first_half
)
triangles = _first_half + _second_half
def _midpoint_projection(a, b):
xa, ya, za = a
xb, yb, zb = b
s = _midpoint_projection.scale
return s * (xa + xb), s * (ya + yb), s * (za + zb)
radius = math.sqrt(1 + g**2)
# radius / |a + b| for two adjacent icosahedron vertices (|a + b| == 2 * g),
# so the projected midpoint of an edge lands back on the sphere of this radius
_midpoint_projection.scale = radius / (2 * g)
sections_triangles = ()
for a, b, c in triangles:
ma = _midpoint_projection(a, b)
mb = _midpoint_projection(b, c)
mc = _midpoint_projection(c, a)
sections_triangles += (
(ma, mb, mc),
( a, ma, mc),
(ma, b, mb),
(mc, mb, c),
)
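# --- Editor's note: quick self-check sketch, not part of the original module.
# For two adjacent icosahedron vertices a and b, |a + b| == 2 * g, so scaling
# their sum by radius / (2 * g) puts the projected midpoint back on the sphere.
# Running this file directly verifies that every vertex produced above has norm
# equal to `radius`.
if __name__ == '__main__':
    for _triangle in sections_triangles:
        for _x, _y, _z in _triangle:
            assert abs(math.sqrt(_x * _x + _y * _y + _z * _z) - radius) < 1e-9
    print('all %d section triangles lie on the sphere' % len(sections_triangles))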
|
veekun/pokedex
|
refs/heads/master
|
scripts/disambiguate-location-identifiers.py
|
5
|
# Encoding: UTF-8
"""Automatically disambiguate location identifiers
This is an unmaintained one-shot script, only included in the repo for reference.
Disambiguates identifiers that aren't unique: Routes and Sea Routes, and
generic names like 'villa' or 'game corner' that could appear again in
future generations.
Does this by prepending the region name, and if that isn't enough, appends
numbers.
"""
import sys
import re
from collections import defaultdict
from pokedex.db import connect, tables
ambiguous_re = re.compile(r'^(sea-)?route-\d+$')
ambiguous_set = set('foreign-building game-corner global-terminal lighthouse '
'restaurant flower-shop cycle-shop cafe shopping-mall villa'.split())
def main(*argv):
session = connect()
location_dict = defaultdict(list)
for location in session.query(tables.Location).order_by(tables.Location.id):
location_dict[location.identifier].append(location)
changes = False
for identifier, locations in sorted(location_dict.items()):
disambiguate = any((
len(locations) > 1,
ambiguous_re.match(identifier),
identifier in ambiguous_set,
))
print len(locations), ' *'[disambiguate], identifier,
if disambiguate:
changes = True
print u'→'.encode('utf-8'),
by_region = defaultdict(list)
for location in locations:
if location.region:
by_region[location.region.identifier].append(location)
else:
by_region[None].append(location)
for region_identifier, region_locations in by_region.items():
if region_identifier:
new_identifier = '%s-%s' % (region_identifier, identifier)
else:
# No change
new_identifier = identifier
if len(region_locations) == 1:
location = region_locations[0]
# The region was enough
print new_identifier,
location.identifier = new_identifier
else:
# Need to number the locations :(
for i, location in enumerate(region_locations, start=1):
numbered_identifier = '%s-%s' % (new_identifier, i)
print numbered_identifier,
location.identifier = numbered_identifier
print
if changes:
if argv and argv[0] == '--commit':
session.commit()
print 'Committed'
else:
print 'Run with --commit to commit changes'
else:
print 'No changes needed'
if __name__ == '__main__':
main(*sys.argv[1:])
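# --- Editor's note: worked example (hypothetical identifiers) of the renaming
# scheme implemented above. Ambiguous identifiers are first prefixed with their
# region; a number is only appended when one region still holds duplicates:
#     new_identifier = '%s-%s' % (region_identifier, identifier)
#         'route-1' in kanto and johto  -> 'kanto-route-1', 'johto-route-1'
#     numbered_identifier = '%s-%s' % (new_identifier, i)
#         two 'game-corner' rows in kanto -> 'kanto-game-corner-1', 'kanto-game-corner-2'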
|
wujuguang/sentry
|
refs/heads/master
|
src/sentry/api/exceptions.py
|
34
|
from __future__ import absolute_import
from rest_framework.exceptions import APIException
class ResourceDoesNotExist(APIException):
status_code = 404
|
isht3/zulip
|
refs/heads/master
|
zerver/management/commands/turn_off_digests.py
|
10
|
from __future__ import absolute_import
from __future__ import print_function
from typing import Any
from optparse import make_option
from django.core.management.base import BaseCommand, CommandParser
from zerver.lib.actions import do_change_enable_digest_emails
from zerver.models import Realm, UserProfile, get_realm, get_user_profile_by_email
class Command(BaseCommand):
help = """Turn off digests for a subdomain/string_id or specified set of email addresses."""
def add_arguments(self, parser):
# type: (CommandParser) -> None
parser.add_argument('-r', '--realm',
dest='string_id',
type=str,
help='Turn off digests for all users in this domain.')
parser.add_argument('-u', '--users',
dest='users',
type=str,
help='Turn off digests for this comma-separated '
'list of email addresses.')
def handle(self, **options):
# type: (**str) -> None
if options["string_id"] is None and options["users"] is None:
self.print_help("./manage.py", "turn_off_digests")
exit(1)
if options["string_id"]:
realm = get_realm(options["string_id"])
user_profiles = UserProfile.objects.filter(realm=realm)
else:
emails = set([email.strip() for email in options["users"].split(",")])
user_profiles = []
for email in emails:
user_profiles.append(get_user_profile_by_email(email))
print("Turned off digest emails for:")
for user_profile in user_profiles:
already_disabled_prefix = ""
if user_profile.enable_digest_emails:
do_change_enable_digest_emails(user_profile, False)
else:
already_disabled_prefix = "(already off) "
print("%s%s <%s>" % (already_disabled_prefix, user_profile.full_name,
user_profile.email))
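# --- Editor's note: illustrative invocations (realm and addresses are
# hypothetical), matching the --realm/--users options defined above:
#     ./manage.py turn_off_digests --realm=example
#     ./manage.py turn_off_digests --users=alice@example.com,bob@example.com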
|
hojel/youtube-dl
|
refs/heads/master
|
youtube_dl/extractor/noco.py
|
5
|
# encoding: utf-8
from __future__ import unicode_literals
import re
import time
import hashlib
from .common import InfoExtractor
from ..compat import (
compat_str,
compat_urllib_parse,
)
from ..utils import (
clean_html,
ExtractorError,
int_or_none,
float_or_none,
parse_iso8601,
sanitized_Request,
)
class NocoIE(InfoExtractor):
_VALID_URL = r'http://(?:(?:www\.)?noco\.tv/emission/|player\.noco\.tv/\?idvideo=)(?P<id>\d+)'
_LOGIN_URL = 'http://noco.tv/do.php'
_API_URL_TEMPLATE = 'https://api.noco.tv/1.1/%s?ts=%s&tk=%s'
_SUB_LANG_TEMPLATE = '&sub_lang=%s'
_NETRC_MACHINE = 'noco'
_TESTS = [
{
'url': 'http://noco.tv/emission/11538/nolife/ami-ami-idol-hello-france/',
'md5': '0a993f0058ddbcd902630b2047ef710e',
'info_dict': {
'id': '11538',
'ext': 'mp4',
'title': 'Ami Ami Idol - Hello! France',
'description': 'md5:4eaab46ab68fa4197a317a88a53d3b86',
'upload_date': '20140412',
'uploader': 'Nolife',
'uploader_id': 'NOL',
'duration': 2851.2,
},
'skip': 'Requires noco account',
},
{
'url': 'http://noco.tv/emission/12610/lbl42/the-guild/s01e01-wake-up-call',
'md5': 'c190f1f48e313c55838f1f412225934d',
'info_dict': {
'id': '12610',
'ext': 'mp4',
'title': 'The Guild #1 - Wake-Up Call',
'timestamp': 1403863200,
'upload_date': '20140627',
'uploader': 'LBL42',
'uploader_id': 'LBL',
'duration': 233.023,
},
'skip': 'Requires noco account',
}
]
def _real_initialize(self):
self._login()
def _login(self):
(username, password) = self._get_login_info()
if username is None:
return
login_form = {
'a': 'login',
'cookie': '1',
'username': username,
'password': password,
}
request = sanitized_Request(self._LOGIN_URL, compat_urllib_parse.urlencode(login_form))
request.add_header('Content-Type', 'application/x-www-form-urlencoded; charset=UTF-8')
login = self._download_json(request, None, 'Logging in as %s' % username)
if 'erreur' in login:
raise ExtractorError('Unable to login: %s' % clean_html(login['erreur']), expected=True)
def _call_api(self, path, video_id, note, sub_lang=None):
ts = compat_str(int(time.time() * 1000))
tk = hashlib.md5((hashlib.md5(ts.encode('ascii')).hexdigest() + '#8S?uCraTedap6a').encode('ascii')).hexdigest()
url = self._API_URL_TEMPLATE % (path, ts, tk)
if sub_lang:
url += self._SUB_LANG_TEMPLATE % sub_lang
resp = self._download_json(url, video_id, note)
if isinstance(resp, dict) and resp.get('error'):
self._raise_error(resp['error'], resp['description'])
return resp
def _raise_error(self, error, description):
raise ExtractorError(
'%s returned error: %s - %s' % (self.IE_NAME, error, description),
expected=True)
def _real_extract(self, url):
mobj = re.match(self._VALID_URL, url)
video_id = mobj.group('id')
medias = self._call_api(
'shows/%s/medias' % video_id,
video_id, 'Downloading video JSON')
show = self._call_api(
'shows/by_id/%s' % video_id,
video_id, 'Downloading show JSON')[0]
options = self._call_api(
'users/init', video_id,
'Downloading user options JSON')['options']
audio_lang_pref = options.get('audio_language') or options.get('language', 'fr')
if audio_lang_pref == 'original':
audio_lang_pref = show['original_lang']
if len(medias) == 1:
audio_lang_pref = list(medias.keys())[0]
elif audio_lang_pref not in medias:
audio_lang_pref = 'fr'
qualities = self._call_api(
'qualities',
video_id, 'Downloading qualities JSON')
formats = []
for audio_lang, audio_lang_dict in medias.items():
preference = 1 if audio_lang == audio_lang_pref else 0
for sub_lang, lang_dict in audio_lang_dict['video_list'].items():
for format_id, fmt in lang_dict['quality_list'].items():
format_id_extended = 'audio-%s_sub-%s_%s' % (audio_lang, sub_lang, format_id)
video = self._call_api(
'shows/%s/video/%s/%s' % (video_id, format_id.lower(), audio_lang),
video_id, 'Downloading %s video JSON' % format_id_extended,
sub_lang if sub_lang != 'none' else None)
file_url = video['file']
if not file_url:
continue
if file_url in ['forbidden', 'not found']:
popmessage = video['popmessage']
self._raise_error(popmessage['title'], popmessage['message'])
formats.append({
'url': file_url,
'format_id': format_id_extended,
'width': int_or_none(fmt.get('res_width')),
'height': int_or_none(fmt.get('res_lines')),
'abr': int_or_none(fmt.get('audiobitrate')),
'vbr': int_or_none(fmt.get('videobitrate')),
'filesize': int_or_none(fmt.get('filesize')),
'format_note': qualities[format_id].get('quality_name'),
'quality': qualities[format_id].get('priority'),
'preference': preference,
})
self._sort_formats(formats)
timestamp = parse_iso8601(show.get('online_date_start_utc'), ' ')
if timestamp is not None and timestamp < 0:
timestamp = None
uploader = show.get('partner_name')
uploader_id = show.get('partner_key')
duration = float_or_none(show.get('duration_ms'), 1000)
thumbnails = []
for thumbnail_key, thumbnail_url in show.items():
m = re.search(r'^screenshot_(?P<width>\d+)x(?P<height>\d+)$', thumbnail_key)
if not m:
continue
thumbnails.append({
'url': thumbnail_url,
'width': int(m.group('width')),
'height': int(m.group('height')),
})
episode = show.get('show_TT') or show.get('show_OT')
family = show.get('family_TT') or show.get('family_OT')
episode_number = show.get('episode_number')
title = ''
if family:
title += family
if episode_number:
title += ' #' + compat_str(episode_number)
if episode:
title += ' - ' + compat_str(episode)
description = show.get('show_resume') or show.get('family_resume')
return {
'id': video_id,
'title': title,
'description': description,
'thumbnails': thumbnails,
'timestamp': timestamp,
'uploader': uploader,
'uploader_id': uploader_id,
'duration': duration,
'formats': formats,
}
|
DrXyzzy/cocalc
|
refs/heads/master
|
src/scripts/test_install.py
|
4
|
#!/usr/bin/env python
###############################################################################
#
# CoCalc: Collaborative Calculation in the Cloud
#
# Copyright (C) 2016, Sagemath Inc.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
###############################################################################
"""
This script runs tests to verify that a given SMC machine has all claimed software installed, and that it maybe
even works a little bit.
"""
import math, os, sys, time
from subprocess import Popen, PIPE
def test_atlas():
for f in ['libatlas.so', 'libcblas.so', 'libf77blas.so']:
if not os.path.exists('/usr/lib/%s' % f):
return "/usr/lib/%s doesn't exists" % f
def test_sage_packages():
imports = """
h5py
clawpack
tornado
virtualenv
pandas
statsmodels
numexpr
tables
sklearn # this is for scikit-learn
theano
scikits-image
shapely # for the Shapely package
simpy
xlrd xlwt
pyproj
bitarray
h5py
netCDF4
patsy
lxml
munkres
oct2py
psutil
plotly
mahotas
snappy
scimath
rpy2
neuron
mpl_toolkits.basemap
Bio
brian
Gnuplot
guppy
nose
nzmath
pybtex
CryptoPlus
pyx
zmq
"""
imports = sum([x.split('#')[0].split() for x in imports.splitlines()], [])
p = Popen(["sage"], shell=True, stdin=PIPE, stdout=PIPE, close_fds=True)
(child_stdout, child_stdin) = (p.stdout, p.stdin)
child_stdin.write('\n'.join('import %s' % m for m in imports))
child_stdin.close()
bad = [
out.split()[-1] for out in child_stdout.readlines()
if 'No module' in out
]
return ','.join(bad)
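# --- Editor's note: illustrative sketch, not part of the original script.
# main() below runs every module-level callable whose name starts with
# "test_", so a new check only needs to follow the same convention: return a
# non-empty string to flag a failure, or nothing when all is well. The path
# checked here is only an example.
def test_python_binary_present():
    if not os.path.exists('/usr/bin/python'):
        return "/usr/bin/python doesn't exist"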
def main():
g = globals()
for k, t in sorted(g.items()):
if k.startswith("test_"):
print k, "...",
sys.stdout.flush()
t0 = time.time()
a = t()
sys.stdout.write(" (%s seconds)" % (int(time.time() - t0)))
if a:
print "FAIL!: %s" % a
else:
print
if __name__ == "__main__":
main()
|
aviralchandra/Sandhi
|
refs/heads/master
|
build/PMC/python/PMC/PMCFloats.py
|
1
|
# This file was automatically generated by SWIG (http://www.swig.org).
# Version 2.0.4
#
# Do not make changes to this file unless you know what you are doing--modify
# the SWIG interface file instead.
from sys import version_info
if version_info >= (2,6,0):
def swig_import_helper():
from os.path import dirname
import imp
fp = None
try:
fp, pathname, description = imp.find_module('_PMCFloats', [dirname(__file__)])
except ImportError:
import _PMCFloats
return _PMCFloats
if fp is not None:
try:
_mod = imp.load_module('_PMCFloats', fp, pathname, description)
finally:
fp.close()
return _mod
_PMCFloats = swig_import_helper()
del swig_import_helper
else:
import _PMCFloats
del version_info
try:
_swig_property = property
except NameError:
pass # Python < 2.2 doesn't have 'property'.
def _swig_setattr_nondynamic(self,class_type,name,value,static=1):
if (name == "thisown"): return self.this.own(value)
if (name == "this"):
if type(value).__name__ == 'SwigPyObject':
self.__dict__[name] = value
return
method = class_type.__swig_setmethods__.get(name,None)
if method: return method(self,value)
if (not static):
self.__dict__[name] = value
else:
raise AttributeError("You cannot add attributes to %s" % self)
def _swig_setattr(self,class_type,name,value):
return _swig_setattr_nondynamic(self,class_type,name,value,0)
def _swig_getattr(self,class_type,name):
if (name == "thisown"): return self.this.own()
method = class_type.__swig_getmethods__.get(name,None)
if method: return method(self)
raise AttributeError(name)
def _swig_repr(self):
try: strthis = "proxy of " + self.this.__repr__()
except: strthis = ""
return "<%s.%s; %s >" % (self.__class__.__module__, self.__class__.__name__, strthis,)
try:
_object = object
_newclass = 1
except AttributeError:
class _object : pass
_newclass = 0
from PMC import *
def pmc_is_float64(*args):
return _PMCFloats.pmc_is_float64(*args)
pmc_is_float64 = _PMCFloats.pmc_is_float64
def pmc_to_float64(*args):
return _PMCFloats.pmc_to_float64(*args)
pmc_to_float64 = _PMCFloats.pmc_to_float64
def float64_to_pmc(*args):
return _PMCFloats.float64_to_pmc(*args)
float64_to_pmc = _PMCFloats.float64_to_pmc
def pmc_is_float32(*args):
return _PMCFloats.pmc_is_float32(*args)
pmc_is_float32 = _PMCFloats.pmc_is_float32
def pmc_to_float32(*args):
return _PMCFloats.pmc_to_float32(*args)
pmc_to_float32 = _PMCFloats.pmc_to_float32
def float32_to_pmc(*args):
return _PMCFloats.float32_to_pmc(*args)
float32_to_pmc = _PMCFloats.float32_to_pmc
########################################################################
## python float is a float64
########################################################################
RegisterPy2PMC(
is_py = lambda x: type(x) is float,
py2pmc = float64_to_pmc,
)
RegisterPMC2Py(
is_pmc = pmc_is_float64,
pmc2py = lambda x: float(pmc_to_float64(x)),
)
########################################################################
## use other fixed types for float32
########################################################################
try:
import numpy
RegisterPy2PMC(
is_py = lambda x: type(x) is numpy.float32,
py2pmc = lambda x: float32_to_pmc(float(x)),
)
RegisterPMC2Py(
is_pmc = pmc_is_float32,
pmc2py = lambda x: numpy.float32(pmc_to_float32(x)),
)
RegisterPy2PMC(
is_py = lambda x: type(x) is numpy.float64,
py2pmc = lambda x: float64_to_pmc(float(x)),
)
RegisterPMC2Py(
is_pmc = pmc_is_float64,
pmc2py = lambda x: numpy.float64(pmc_to_float64(x)),
)
except ImportError: pass
import ctypes
RegisterPy2PMC(
is_py = lambda x: type(x) is ctypes.c_float,
py2pmc = lambda x: float32_to_pmc(x.value),
)
RegisterPMC2Py(
is_pmc = pmc_is_float32,
pmc2py = lambda x: ctypes.c_float(pmc_to_float32(x)),
)
RegisterPy2PMC(
is_py = lambda x: type(x) is ctypes.c_double,
py2pmc = lambda x: float64_to_pmc(x.value),
)
RegisterPMC2Py(
is_pmc = pmc_is_float64,
pmc2py = lambda x: ctypes.c_double(pmc_to_float64(x)),
)
# This file is compatible with both classic and new-style classes.
|
Shine-/xbmc
|
refs/heads/Krypton_alwaysontop
|
lib/libUPnP/Platinum/Build/Tools/Scripts/MakeAllVs.py
|
262
|
#! /usr/bin/env python
import os
import sys
import getopt
import subprocess
configs = ['Debug', 'Release']
solutions = ['../../../Build/Targets/x86-microsoft-win32-vs2008/Platinum.sln']
try:
opts, args = getopt.getopt(sys.argv[1:], "b:rc")
except getopt.GetoptError, (msg, opt):
    print 'No build_config, defaulting to build all'
    opts = []
for opt, arg in opts:
if opt == '-b':
config = arg
def CallVsMake(sln, cfg):
cmd = 'python VsMake.py -s %s -b %s' % (sln, cfg)
print cmd
retVal = subprocess.call(cmd.split())
if retVal != 0:
sys.exit(retVal)
for sln in solutions:
if 'config' not in locals() and 'config' not in globals():
print '************ Building all configurations **************'
for cfg in configs:
CallVsMake(sln, cfg)
else:
print '************ Building configuration=' + config + ' ****************'
CallVsMake(sln, config)
|
pkats15/hdt_analyzer
|
refs/heads/master
|
django_test/django_venv/Lib/site-packages/wheel/pep425tags.py
|
220
|
"""Generate and work with PEP 425 Compatibility Tags."""
import sys
try:
import sysconfig
except ImportError: # pragma nocover
# Python < 2.7
import distutils.sysconfig as sysconfig
import distutils.util
def get_abbr_impl():
"""Return abbreviated implementation name."""
if hasattr(sys, 'pypy_version_info'):
pyimpl = 'pp'
elif sys.platform.startswith('java'):
pyimpl = 'jy'
elif sys.platform == 'cli':
pyimpl = 'ip'
else:
pyimpl = 'cp'
return pyimpl
def get_impl_ver():
"""Return implementation version."""
impl_ver = sysconfig.get_config_var("py_version_nodot")
if not impl_ver:
impl_ver = ''.join(map(str, sys.version_info[:2]))
return impl_ver
def get_platform():
"""Return our platform name 'win32', 'linux_x86_64'"""
# XXX remove distutils dependency
return distutils.util.get_platform().replace('.', '_').replace('-', '_')
def get_supported(versions=None):
"""Return a list of supported tags for each version specified in
`versions`.
:param versions: a list of string versions, of the form ["33", "32"],
or None. The first version will be assumed to support our ABI.
"""
supported = []
# Versions must be given with respect to the preference
if versions is None:
versions = []
major = sys.version_info[0]
# Support all previous minor Python versions.
for minor in range(sys.version_info[1], -1, -1):
versions.append(''.join(map(str, (major, minor))))
impl = get_abbr_impl()
abis = []
soabi = sysconfig.get_config_var('SOABI')
if soabi and soabi.startswith('cpython-'):
abis[0:0] = ['cp' + soabi.split('-', 1)[-1]]
abi3s = set()
import imp
for suffix in imp.get_suffixes():
if suffix[0].startswith('.abi'):
abi3s.add(suffix[0].split('.', 2)[1])
abis.extend(sorted(list(abi3s)))
abis.append('none')
arch = get_platform()
# Current version, current API (built specifically for our Python):
for abi in abis:
supported.append(('%s%s' % (impl, versions[0]), abi, arch))
# No abi / arch, but requires our implementation:
for i, version in enumerate(versions):
supported.append(('%s%s' % (impl, version), 'none', 'any'))
if i == 0:
# Tagged specifically as being cross-version compatible
# (with just the major version specified)
supported.append(('%s%s' % (impl, versions[0][0]), 'none', 'any'))
# No abi / arch, generic Python
for i, version in enumerate(versions):
supported.append(('py%s' % (version,), 'none', 'any'))
if i == 0:
supported.append(('py%s' % (version[0]), 'none', 'any'))
return supported
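# --- Editor's note: minimal usage sketch, not part of the original module.
# Running this file directly prints the tag triples the current interpreter
# would accept, most specific first, e.g. ('cp27', 'none', 'linux_x86_64') on a
# CPython 2.7 Linux build (exact values depend on the interpreter and platform).
if __name__ == '__main__':
    import pprint
    pprint.pprint(get_supported())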
|
GustavoHennig/ansible
|
refs/heads/devel
|
lib/ansible/modules/cloud/amazon/ec2_lc.py
|
6
|
#!/usr/bin/python
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
ANSIBLE_METADATA = {'metadata_version': '1.0',
'status': ['stableinterface'],
'supported_by': 'curated'}
DOCUMENTATION = """
---
module: ec2_lc
short_description: Create or delete AWS Autoscaling Launch Configurations
description:
- Can create or delete AWS Autoscaling Configurations
- Works with the ec2_asg module to manage Autoscaling Groups
notes:
- "Amazon ASG Autoscaling Launch Configurations are immutable once created, so modifying the configuration
after it is changed will not modify the launch configuration on AWS. You must create a new config and assign
it to the ASG instead."
version_added: "1.6"
author: "Gareth Rushgrove (@garethr)"
options:
state:
description:
      - create or delete the launch configuration
required: true
choices: ['present', 'absent']
name:
description:
- Unique name for configuration
required: true
instance_type:
description:
- instance type to use for the instance
required: true
default: null
aliases: []
image_id:
description:
- The AMI unique identifier to be used for the group
required: false
key_name:
description:
- The SSH key name to be used for access to managed instances
required: false
security_groups:
description:
- A list of security groups to apply to the instances. For VPC instances, specify security group IDs. For EC2-Classic, specify either security group names or IDs.
required: false
volumes:
description:
- a list of volume dicts, each containing device name and optionally ephemeral id or snapshot id. Size and type (and number of iops for io device type) must be specified for a new volume or a root volume, and may be passed for a snapshot volume. For any volume, a volume size less than 1 will be interpreted as a request not to create the volume.
required: false
user_data:
description:
- opaque blob of data which is made available to the ec2 instance. Mutually exclusive with I(user_data_path).
required: false
user_data_path:
description:
- Path to the file that contains userdata for the ec2 instances. Mutually exclusive with I(user_data).
required: false
version_added: "2.3"
kernel_id:
description:
- Kernel id for the EC2 instance
required: false
spot_price:
description:
- The spot price you are bidding. Only applies for an autoscaling group with spot instances.
required: false
instance_monitoring:
description:
- whether instances in group are launched with detailed monitoring.
default: false
assign_public_ip:
description:
      - Used for Auto Scaling groups that launch instances into an Amazon Virtual Private Cloud. Specifies whether to assign a public IP address to each instance launched in an Amazon VPC.
required: false
version_added: "1.8"
ramdisk_id:
description:
- A RAM disk id for the instances.
required: false
version_added: "1.8"
instance_profile_name:
description:
- The name or the Amazon Resource Name (ARN) of the instance profile associated with the IAM role for the instances.
required: false
version_added: "1.8"
ebs_optimized:
description:
- Specifies whether the instance is optimized for EBS I/O (true) or not (false).
required: false
default: false
version_added: "1.8"
classic_link_vpc_id:
description:
- Id of ClassicLink enabled VPC
required: false
version_added: "2.0"
classic_link_vpc_security_groups:
description:
      - A list of security group IDs with which to associate the ClassicLink VPC instances.
required: false
version_added: "2.0"
extends_documentation_fragment:
- aws
- ec2
requirements:
- "boto >= 2.39.0"
"""
EXAMPLES = '''
- ec2_lc:
name: special
image_id: ami-XXX
key_name: default
security_groups: ['group', 'group2' ]
instance_type: t1.micro
volumes:
- device_name: /dev/sda1
volume_size: 100
device_type: io1
iops: 3000
delete_on_termination: true
- device_name: /dev/sdb
ephemeral: ephemeral0
'''
import traceback
from ansible.module_utils.basic import *
from ansible.module_utils.ec2 import *
try:
from boto.ec2.blockdevicemapping import BlockDeviceType, BlockDeviceMapping
import boto.ec2.autoscale
from boto.ec2.autoscale import LaunchConfiguration
from boto.exception import BotoServerError
HAS_BOTO = True
except ImportError:
HAS_BOTO = False
def create_block_device(module, volume):
    # Not aware of a way to determine this programmatically
# http://aws.amazon.com/about-aws/whats-new/2013/10/09/ebs-provisioned-iops-maximum-iops-gb-ratio-increased-to-30-1/
MAX_IOPS_TO_SIZE_RATIO = 30
if 'snapshot' not in volume and 'ephemeral' not in volume:
if 'volume_size' not in volume:
module.fail_json(msg='Size must be specified when creating a new volume or modifying the root volume')
if 'snapshot' in volume:
if 'device_type' in volume and volume.get('device_type') == 'io1' and 'iops' not in volume:
module.fail_json(msg='io1 volumes must have an iops value set')
if 'ephemeral' in volume:
if 'snapshot' in volume:
module.fail_json(msg='Cannot set both ephemeral and snapshot')
return BlockDeviceType(snapshot_id=volume.get('snapshot'),
ephemeral_name=volume.get('ephemeral'),
size=volume.get('volume_size'),
volume_type=volume.get('device_type'),
delete_on_termination=volume.get('delete_on_termination', False),
iops=volume.get('iops'))
def create_launch_config(connection, module):
name = module.params.get('name')
image_id = module.params.get('image_id')
key_name = module.params.get('key_name')
security_groups = module.params['security_groups']
user_data = module.params.get('user_data')
user_data_path = module.params.get('user_data_path')
volumes = module.params['volumes']
instance_type = module.params.get('instance_type')
spot_price = module.params.get('spot_price')
instance_monitoring = module.params.get('instance_monitoring')
assign_public_ip = module.params.get('assign_public_ip')
kernel_id = module.params.get('kernel_id')
ramdisk_id = module.params.get('ramdisk_id')
instance_profile_name = module.params.get('instance_profile_name')
ebs_optimized = module.params.get('ebs_optimized')
classic_link_vpc_id = module.params.get('classic_link_vpc_id')
classic_link_vpc_security_groups = module.params.get('classic_link_vpc_security_groups')
bdm = BlockDeviceMapping()
if user_data_path:
try:
with open(user_data_path, 'r') as user_data_file:
user_data = user_data_file.read()
except IOError as e:
module.fail_json(msg=str(e), exception=traceback.format_exc())
if volumes:
for volume in volumes:
if 'device_name' not in volume:
module.fail_json(msg='Device name must be set for volume')
# Minimum volume size is 1GB. We'll use volume size explicitly set to 0
# to be a signal not to create this volume
if 'volume_size' not in volume or int(volume['volume_size']) > 0:
bdm[volume['device_name']] = create_block_device(module, volume)
lc = LaunchConfiguration(
name=name,
image_id=image_id,
key_name=key_name,
security_groups=security_groups,
user_data=user_data,
block_device_mappings=[bdm],
instance_type=instance_type,
kernel_id=kernel_id,
spot_price=spot_price,
instance_monitoring=instance_monitoring,
associate_public_ip_address=assign_public_ip,
ramdisk_id=ramdisk_id,
instance_profile_name=instance_profile_name,
ebs_optimized=ebs_optimized,
classic_link_vpc_security_groups=classic_link_vpc_security_groups,
classic_link_vpc_id=classic_link_vpc_id,
)
launch_configs = connection.get_all_launch_configurations(names=[name])
changed = False
if not launch_configs:
try:
connection.create_launch_configuration(lc)
launch_configs = connection.get_all_launch_configurations(names=[name])
changed = True
except BotoServerError as e:
module.fail_json(msg=str(e))
result = dict(
((a[0], a[1]) for a in vars(launch_configs[0]).items()
if a[0] not in ('connection', 'created_time', 'instance_monitoring', 'block_device_mappings'))
)
result['created_time'] = str(launch_configs[0].created_time)
# Looking at boto's launchconfig.py, it looks like this could be a boolean
# value or an object with an enabled attribute. The enabled attribute
# could be a boolean or a string representation of a boolean. Since
# I can't test all permutations myself to see if my reading of the code is
# correct, have to code this *very* defensively
if launch_configs[0].instance_monitoring is True:
result['instance_monitoring'] = True
else:
try:
result['instance_monitoring'] = module.boolean(launch_configs[0].instance_monitoring.enabled)
except AttributeError:
result['instance_monitoring'] = False
if launch_configs[0].block_device_mappings is not None:
result['block_device_mappings'] = []
for bdm in launch_configs[0].block_device_mappings:
result['block_device_mappings'].append(dict(device_name=bdm.device_name, virtual_name=bdm.virtual_name))
if bdm.ebs is not None:
result['block_device_mappings'][-1]['ebs'] = dict(snapshot_id=bdm.ebs.snapshot_id, volume_size=bdm.ebs.volume_size)
if user_data_path:
result['user_data'] = "hidden" # Otherwise, we dump binary to the user's terminal
module.exit_json(changed=changed, name=result['name'], created_time=result['created_time'],
image_id=result['image_id'], arn=result['launch_configuration_arn'],
security_groups=result['security_groups'],
instance_type=result['instance_type'],
result=result)
def delete_launch_config(connection, module):
name = module.params.get('name')
launch_configs = connection.get_all_launch_configurations(names=[name])
if launch_configs:
launch_configs[0].delete()
module.exit_json(changed=True)
else:
module.exit_json(changed=False)
def main():
argument_spec = ec2_argument_spec()
argument_spec.update(
dict(
name=dict(required=True, type='str'),
image_id=dict(type='str'),
key_name=dict(type='str'),
security_groups=dict(type='list'),
user_data=dict(type='str'),
user_data_path=dict(type='path'),
kernel_id=dict(type='str'),
volumes=dict(type='list'),
instance_type=dict(type='str'),
state=dict(default='present', choices=['present', 'absent']),
spot_price=dict(type='float'),
ramdisk_id=dict(type='str'),
instance_profile_name=dict(type='str'),
ebs_optimized=dict(default=False, type='bool'),
associate_public_ip_address=dict(type='bool'),
instance_monitoring=dict(default=False, type='bool'),
assign_public_ip=dict(type='bool'),
classic_link_vpc_security_groups=dict(type='list'),
classic_link_vpc_id=dict(type='str')
)
)
module = AnsibleModule(
argument_spec=argument_spec,
mutually_exclusive = [['user_data', 'user_data_path']]
)
if not HAS_BOTO:
module.fail_json(msg='boto required for this module')
region, ec2_url, aws_connect_params = get_aws_connection_info(module)
try:
connection = connect_to_aws(boto.ec2.autoscale, region, **aws_connect_params)
except (boto.exception.NoAuthHandlerFound, AnsibleAWSError) as e:
module.fail_json(msg=str(e))
state = module.params.get('state')
if state == 'present':
create_launch_config(connection, module)
elif state == 'absent':
delete_launch_config(connection, module)
if __name__ == '__main__':
main()
|
fredRos/pypmc
|
refs/heads/master
|
doc/plot_directive.py
|
2
|
"""
A directive for including a matplotlib plot in a Sphinx document.
By default, in HTML output, `plot` will include a .png file with a
link to a high-res .png and .pdf. In LaTeX output, it will include a
.pdf.
The source code for the plot may be included in one of three ways:
1. **A path to a source file** as the argument to the directive::
.. plot:: path/to/plot.py
When a path to a source file is given, the content of the
directive may optionally contain a caption for the plot::
.. plot:: path/to/plot.py
This is the caption for the plot
   Additionally, one may specify the name of a function to call (with
no arguments) immediately after importing the module::
.. plot:: path/to/plot.py plot_function1
2. Included as **inline content** to the directive::
.. plot::
import matplotlib.pyplot as plt
import matplotlib.image as mpimg
import numpy as np
img = mpimg.imread('_static/stinkbug.png')
imgplot = plt.imshow(img)
3. Using **doctest** syntax::
.. plot::
A plotting example:
>>> import matplotlib.pyplot as plt
>>> plt.plot([1,2,3], [4,5,6])
Options
-------
The ``plot`` directive supports the following options:
format : {'python', 'doctest'}
Specify the format of the input
include-source : bool
Whether to display the source code. The default can be changed
using the `plot_include_source` variable in conf.py
encoding : str
If this source file is in a non-UTF8 or non-ASCII encoding,
the encoding must be specified using the `:encoding:` option.
The encoding will not be inferred using the ``-*- coding -*-``
metacomment.
context : bool or str
If provided, the code will be run in the context of all
previous plot directives for which the `:context:` option was
specified. This only applies to inline code plot directives,
not those run from files. If ``:context: reset`` is specified,
the context is reset for this and future plots.
nofigs : bool
If specified, the code block will be run, but no figures will
be inserted. This is usually useful with the ``:context:``
option.
Additionally, this directive supports all of the options of the
`image` directive, except for `target` (since plot will add its own
target). These include `alt`, `height`, `width`, `scale`, `align` and
`class`.
Configuration options
---------------------
The plot directive has the following configuration options:
plot_include_source
Default value for the include-source option
plot_html_show_source_link
Whether to show a link to the source in HTML.
plot_pre_code
Code that should be executed before each plot.
plot_basedir
Base directory to which ``plot::`` file names are relative.
(If None or empty, file names are relative to the
directory where the file containing the directive is.)
plot_formats
File formats to generate. List of tuples or strings::
[(suffix, dpi), suffix, ...]
that determine the file format and the DPI. For entries whose
DPI was omitted, sensible defaults are chosen.
plot_html_show_formats
Whether to show links to the files in HTML.
plot_rcparams
A dictionary containing any non-standard rcParams that should
be applied before each plot.
plot_apply_rcparams
By default, rcParams are applied when the `context` option is not used
in a plot directive. This configuration option overrides that behavior
and applies rcParams before each plot.
plot_working_directory
By default, the working directory will be changed to the directory of
the example, so the code can get at its data files, if any. Also its
path will be added to `sys.path` so it can import any helper modules
sitting beside it. This configuration option can be used to specify
a central directory (also added to `sys.path`) where data files and
helper modules for all code are located.
plot_template
Provide a customized template for preparing restructured text.
"""
from __future__ import (absolute_import, division, print_function,
unicode_literals)
import six
from six.moves import xrange
import sys, os, shutil, io, re, textwrap
from os.path import relpath
import traceback
if not six.PY3:
import cStringIO
from docutils.parsers.rst import directives
from docutils.parsers.rst.directives.images import Image
align = Image.align
import sphinx
sphinx_version = sphinx.__version__.split(".")
# The split is necessary for sphinx beta versions where the string is
# '6b1'
sphinx_version = tuple([int(re.split('[^0-9]', x)[0])
for x in sphinx_version[:2]])
try:
# Sphinx depends on either Jinja or Jinja2
import jinja2
def format_template(template, **kw):
return jinja2.Template(template).render(**kw)
except ImportError:
import jinja
def format_template(template, **kw):
return jinja.from_string(template, **kw)
import matplotlib
import matplotlib.cbook as cbook
matplotlib.use('Agg')
import matplotlib.pyplot as plt
from matplotlib import _pylab_helpers
__version__ = 2
#------------------------------------------------------------------------------
# Registration hook
#------------------------------------------------------------------------------
def plot_directive(name, arguments, options, content, lineno,
content_offset, block_text, state, state_machine):
return run(arguments, content, options, state_machine, state, lineno)
plot_directive.__doc__ = __doc__
def _option_boolean(arg):
if not arg or not arg.strip():
# no argument given, assume used as a flag
return True
elif arg.strip().lower() in ('no', '0', 'false'):
return False
elif arg.strip().lower() in ('yes', '1', 'true'):
return True
else:
raise ValueError('"%s" unknown boolean' % arg)
def _option_context(arg):
if arg in [None, 'reset']:
return arg
else:
raise ValueError("argument should be None or 'reset'")
def _option_format(arg):
return directives.choice(arg, ('python', 'doctest'))
def _option_align(arg):
return directives.choice(arg, ("top", "middle", "bottom", "left", "center",
"right"))
def mark_plot_labels(app, document):
"""
To make plots referenceable, we need to move the reference from
the "htmlonly" (or "latexonly") node to the actual figure node
itself.
"""
for name, explicit in six.iteritems(document.nametypes):
if not explicit:
continue
labelid = document.nameids[name]
if labelid is None:
continue
node = document.ids[labelid]
if node.tagname in ('html_only', 'latex_only'):
for n in node:
if n.tagname == 'figure':
sectname = name
for c in n:
if c.tagname == 'caption':
sectname = c.astext()
break
node['ids'].remove(labelid)
node['names'].remove(name)
n['ids'].append(labelid)
n['names'].append(name)
document.settings.env.labels[name] = \
document.settings.env.docname, labelid, sectname
break
def setup(app):
setup.app = app
setup.config = app.config
setup.confdir = app.confdir
options = {'alt': directives.unchanged,
'height': directives.length_or_unitless,
'width': directives.length_or_percentage_or_unitless,
'scale': directives.nonnegative_int,
'align': _option_align,
'class': directives.class_option,
'include-source': _option_boolean,
'format': _option_format,
'context': _option_context,
'nofigs': directives.flag,
'encoding': directives.encoding
}
app.add_directive('plot', plot_directive, True, (0, 2, False), **options)
app.add_config_value('plot_pre_code', None, True)
app.add_config_value('plot_include_source', False, True)
app.add_config_value('plot_html_show_source_link', True, True)
app.add_config_value('plot_formats', ['png', 'hires.png', 'pdf'], True)
app.add_config_value('plot_basedir', None, True)
app.add_config_value('plot_html_show_formats', True, True)
app.add_config_value('plot_rcparams', {}, True)
app.add_config_value('plot_apply_rcparams', False, True)
app.add_config_value('plot_working_directory', None, True)
app.add_config_value('plot_template', None, True)
app.connect(str('doctree-read'), mark_plot_labels)
#------------------------------------------------------------------------------
# Doctest handling
#------------------------------------------------------------------------------
def contains_doctest(text):
try:
# check if it's valid Python as-is
compile(text, '<string>', 'exec')
return False
except SyntaxError:
pass
r = re.compile(r'^\s*>>>', re.M)
m = r.search(text)
return bool(m)
def unescape_doctest(text):
"""
Extract code from a piece of text, which contains either Python code
or doctests.
"""
if not contains_doctest(text):
return text
code = ""
for line in text.split("\n"):
m = re.match(r'^\s*(>>>|\.\.\.) (.*)$', line)
if m:
code += m.group(2) + "\n"
elif line.strip():
code += "# " + line.strip() + "\n"
else:
code += "\n"
return code
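# Hedged illustration (not part of the original module): a small, never-called
# helper showing what unescape_doctest() does to doctest-style input. The
# sample text is hypothetical.
def _unescape_doctest_example():
    sample = ">>> x = 1 + 1\n>>> print(x)\n2"
    # Prompts are stripped and non-code output lines become comments, so the
    # result is "x = 1 + 1\nprint(x)\n# 2\n".
    return unescape_doctest(sample)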
def split_code_at_show(text):
"""
Split code at plt.show()
"""
parts = []
is_doctest = contains_doctest(text)
part = []
for line in text.split("\n"):
if (not is_doctest and line.strip() == 'plt.show()') or \
(is_doctest and line.strip() == '>>> plt.show()'):
part.append(line)
parts.append("\n".join(part))
part = []
else:
part.append(line)
if "\n".join(part).strip():
parts.append("\n".join(part))
return parts
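# Hedged illustration (not part of the original module): split_code_at_show()
# turns one script into one piece per figure. The script below is hypothetical
# and the helper is never called here.
def _split_code_at_show_example():
    script = "plt.plot([1, 2])\nplt.show()\nplt.plot([3, 4])"
    # Expected: two pieces, the first one ending with the plt.show() line.
    return split_code_at_show(script)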
def remove_coding(text):
"""
Remove the coding comment, which six.exec_ doesn't like.
"""
return re.sub(
"^#\s*-\*-\s*coding:\s*.*-\*-$", "", text, flags=re.MULTILINE)
#------------------------------------------------------------------------------
# Template
#------------------------------------------------------------------------------
TEMPLATE = """
{{ source_code }}
{{ only_html }}
{% if source_link or (html_show_formats and not multi_image) %}
(
{%- if source_link -%}
`Source code <{{ source_link }}>`__
{%- endif -%}
{%- if html_show_formats and not multi_image -%}
{%- for img in images -%}
{%- for fmt in img.formats -%}
{%- if source_link or not loop.first -%}, {% endif -%}
`{{ fmt }} <{{ dest_dir }}/{{ img.basename }}.{{ fmt }}>`__
{%- endfor -%}
{%- endfor -%}
{%- endif -%}
)
{% endif %}
{% for img in images %}
.. figure:: {{ build_dir }}/{{ img.basename }}.png
{% for option in options -%}
{{ option }}
{% endfor %}
{% if html_show_formats and multi_image -%}
(
{%- for fmt in img.formats -%}
{%- if not loop.first -%}, {% endif -%}
`{{ fmt }} <{{ dest_dir }}/{{ img.basename }}.{{ fmt }}>`__
{%- endfor -%}
)
{%- endif -%}
{{ caption }}
{% endfor %}
{{ only_latex }}
{% for img in images %}
.. image:: {{ build_dir }}/{{ img.basename }}.pdf
{% endfor %}
{{ only_texinfo }}
{% for img in images %}
.. image:: {{ build_dir }}/{{ img.basename }}.png
{% for option in options -%}
{{ option }}
{% endfor %}
{% endfor %}
"""
exception_template = """
.. htmlonly::
[`source code <%(linkdir)s/%(basename)s.py>`__]
Exception occurred rendering plot.
"""
# the context of the plot for all directives specified with the
# :context: option
plot_context = dict()
class ImageFile(object):
def __init__(self, basename, dirname):
self.basename = basename
self.dirname = dirname
self.formats = []
def filename(self, format):
return os.path.join(self.dirname, "%s.%s" % (self.basename, format))
def filenames(self):
return [self.filename(fmt) for fmt in self.formats]
def out_of_date(original, derived):
"""
    Returns True if *derived* is out-of-date with respect to *original*,
both of which are full file paths.
"""
return (not os.path.exists(derived) or
(os.path.exists(original) and
os.stat(derived).st_mtime < os.stat(original).st_mtime))
class PlotError(RuntimeError):
pass
def run_code(code, code_path, ns=None, function_name=None):
"""
    Execute the given code string in a fresh or provided namespace, and run
    the function given by name, if function_name is not None.
"""
# Change the working directory to the directory of the example, so
# it can get at its data files, if any. Add its path to sys.path
# so it can import any helper modules sitting beside it.
pwd = os.getcwd()
old_sys_path = list(sys.path)
if setup.config.plot_working_directory is not None:
try:
os.chdir(setup.config.plot_working_directory)
except OSError as err:
raise OSError(str(err) + '\n`plot_working_directory` option in'
'Sphinx configuration file must be a valid '
'directory path')
except TypeError as err:
raise TypeError(str(err) + '\n`plot_working_directory` option in '
'Sphinx configuration file must be a string or '
'None')
sys.path.insert(0, setup.config.plot_working_directory)
elif code_path is not None:
dirname = os.path.abspath(os.path.dirname(code_path))
os.chdir(dirname)
sys.path.insert(0, dirname)
# Reset sys.argv
old_sys_argv = sys.argv
sys.argv = [code_path]
# Redirect stdout
stdout = sys.stdout
if six.PY3:
sys.stdout = io.StringIO()
else:
sys.stdout = cStringIO.StringIO()
# Assign a do-nothing print function to the namespace. There
# doesn't seem to be any other way to provide a way to (not) print
# that works correctly across Python 2 and 3.
def _dummy_print(*arg, **kwarg):
pass
try:
try:
code = unescape_doctest(code)
if ns is None:
ns = {}
if not ns:
if setup.config.plot_pre_code is None:
six.exec_(six.text_type("import numpy as np\n" +
"from matplotlib import pyplot as plt\n"), ns)
else:
six.exec_(six.text_type(setup.config.plot_pre_code), ns)
ns['print'] = _dummy_print
if "__main__" in code:
six.exec_("__name__ = '__main__'", ns)
code = remove_coding(code)
six.exec_(code, ns)
if function_name is not None:
six.exec_(function_name + "()", ns)
except (Exception, SystemExit) as err:
raise PlotError(traceback.format_exc())
finally:
os.chdir(pwd)
sys.argv = old_sys_argv
sys.path[:] = old_sys_path
sys.stdout = stdout
return ns
def clear_state(plot_rcparams, close=True):
if close:
plt.close('all')
matplotlib.rc_file_defaults()
matplotlib.rcParams.update(plot_rcparams)
def render_figures(code, code_path, output_dir, output_base, context,
function_name, config, context_reset=False):
"""
Run a pyplot script and save the low and high res PNGs and a PDF
in *output_dir*.
Save the images under *output_dir* with file names derived from
*output_base*
"""
# -- Parse format list
default_dpi = {'png': 80, 'hires.png': 200, 'pdf': 200}
formats = []
plot_formats = config.plot_formats
if isinstance(plot_formats, six.string_types):
plot_formats = eval(plot_formats)
for fmt in plot_formats:
if isinstance(fmt, six.string_types):
formats.append((fmt, default_dpi.get(fmt, 80)))
elif type(fmt) in (tuple, list) and len(fmt)==2:
formats.append((str(fmt[0]), int(fmt[1])))
else:
raise PlotError('invalid image format "%r" in plot_formats' % fmt)
# -- Try to determine if all images already exist
code_pieces = split_code_at_show(code)
# Look for single-figure output files first
all_exists = True
img = ImageFile(output_base, output_dir)
for format, dpi in formats:
if out_of_date(code_path, img.filename(format)):
all_exists = False
break
img.formats.append(format)
if all_exists:
return [(code, [img])]
# Then look for multi-figure output files
results = []
all_exists = True
for i, code_piece in enumerate(code_pieces):
images = []
for j in xrange(1000):
if len(code_pieces) > 1:
img = ImageFile('%s_%02d_%02d' % (output_base, i, j), output_dir)
else:
img = ImageFile('%s_%02d' % (output_base, j), output_dir)
for format, dpi in formats:
if out_of_date(code_path, img.filename(format)):
all_exists = False
break
img.formats.append(format)
# assume that if we have one, we have them all
if not all_exists:
all_exists = (j > 0)
break
images.append(img)
if not all_exists:
break
results.append((code_piece, images))
if all_exists:
return results
# We didn't find the files, so build them
results = []
if context:
ns = plot_context
else:
ns = {}
if context_reset:
clear_state(config.plot_rcparams)
for i, code_piece in enumerate(code_pieces):
if not context or config.plot_apply_rcparams:
clear_state(config.plot_rcparams, close=not context)
run_code(code_piece, code_path, ns, function_name)
images = []
fig_managers = _pylab_helpers.Gcf.get_all_fig_managers()
for j, figman in enumerate(fig_managers):
if len(fig_managers) == 1 and len(code_pieces) == 1:
img = ImageFile(output_base, output_dir)
elif len(code_pieces) == 1:
img = ImageFile("%s_%02d" % (output_base, j), output_dir)
else:
img = ImageFile("%s_%02d_%02d" % (output_base, i, j),
output_dir)
images.append(img)
for format, dpi in formats:
try:
figman.canvas.figure.savefig(img.filename(format), dpi=dpi)
except Exception as err:
raise PlotError(traceback.format_exc())
img.formats.append(format)
results.append((code_piece, images))
if not context or config.plot_apply_rcparams:
clear_state(config.plot_rcparams, close=not context)
return results
def run(arguments, content, options, state_machine, state, lineno):
# The user may provide a filename *or* Python code content, but not both
if arguments and content:
raise RuntimeError("plot:: directive can't have both args and content")
document = state_machine.document
config = document.settings.env.config
nofigs = 'nofigs' in options
options.setdefault('include-source', config.plot_include_source)
context = 'context' in options
context_reset = True if (context and options['context'] == 'reset') else False
rst_file = document.attributes['source']
rst_dir = os.path.dirname(rst_file)
if len(arguments):
if not config.plot_basedir:
source_file_name = os.path.join(setup.app.builder.srcdir,
directives.uri(arguments[0]))
else:
source_file_name = os.path.join(setup.confdir, config.plot_basedir,
directives.uri(arguments[0]))
# If there is content, it will be passed as a caption.
caption = '\n'.join(content)
# If the optional function name is provided, use it
if len(arguments) == 2:
function_name = arguments[1]
else:
function_name = None
with io.open(source_file_name, 'r', encoding='utf-8') as fd:
code = fd.read()
output_base = os.path.basename(source_file_name)
else:
source_file_name = rst_file
code = textwrap.dedent("\n".join(map(str, content)))
counter = document.attributes.get('_plot_counter', 0) + 1
document.attributes['_plot_counter'] = counter
base, ext = os.path.splitext(os.path.basename(source_file_name))
output_base = '%s-%d.py' % (base, counter)
function_name = None
caption = ''
base, source_ext = os.path.splitext(output_base)
if source_ext in ('.py', '.rst', '.txt'):
output_base = base
else:
source_ext = ''
# ensure that LaTeX includegraphics doesn't choke in foo.bar.pdf filenames
output_base = output_base.replace('.', '-')
# is it in doctest format?
is_doctest = contains_doctest(code)
if 'format' in options:
if options['format'] == 'python':
is_doctest = False
else:
is_doctest = True
# determine output directory name fragment
source_rel_name = relpath(source_file_name, setup.confdir)
source_rel_dir = os.path.dirname(source_rel_name)
while source_rel_dir.startswith(os.path.sep):
source_rel_dir = source_rel_dir[1:]
# build_dir: where to place output files (temporarily)
build_dir = os.path.join(os.path.dirname(setup.app.doctreedir),
'plot_directive',
source_rel_dir)
# get rid of .. in paths, also changes pathsep
# see note in Python docs for warning about symbolic links on Windows.
# need to compare source and dest paths at end
build_dir = os.path.normpath(build_dir)
if not os.path.exists(build_dir):
os.makedirs(build_dir)
# output_dir: final location in the builder's directory
dest_dir = os.path.abspath(os.path.join(setup.app.builder.outdir,
source_rel_dir))
if not os.path.exists(dest_dir):
os.makedirs(dest_dir) # no problem here for me, but just use built-ins
# how to link to files from the RST file
dest_dir_link = os.path.join(relpath(setup.confdir, rst_dir),
source_rel_dir).replace(os.path.sep, '/')
build_dir_link = relpath(build_dir, rst_dir).replace(os.path.sep, '/')
source_link = dest_dir_link + '/' + output_base + source_ext
# make figures
try:
results = render_figures(code, source_file_name, build_dir, output_base,
context, function_name, config,
context_reset=context_reset)
errors = []
except PlotError as err:
reporter = state.memo.reporter
sm = reporter.system_message(
2, "Exception occurred in plotting %s\n from %s:\n%s" % (output_base,
source_file_name, err),
line=lineno)
results = [(code, [])]
errors = [sm]
# Properly indent the caption
caption = '\n'.join(' ' + line.strip()
for line in caption.split('\n'))
# generate output restructuredtext
total_lines = []
for j, (code_piece, images) in enumerate(results):
if options['include-source']:
if is_doctest:
lines = ['']
lines += [row.rstrip() for row in code_piece.split('\n')]
else:
lines = ['.. code-block:: python', '']
lines += [' %s' % row.rstrip()
for row in code_piece.split('\n')]
source_code = "\n".join(lines)
else:
source_code = ""
if nofigs:
images = []
opts = [':%s: %s' % (key, val) for key, val in six.iteritems(options)
if key in ('alt', 'height', 'width', 'scale', 'align', 'class')]
only_html = ".. only:: html"
only_latex = ".. only:: latex"
only_texinfo = ".. only:: texinfo"
# Not-None src_link signals the need for a source link in the generated
# html
if j == 0 and config.plot_html_show_source_link:
src_link = source_link
else:
src_link = None
result = format_template(
config.plot_template or TEMPLATE,
dest_dir=dest_dir_link,
build_dir=build_dir_link,
source_link=src_link,
multi_image=len(images) > 1,
only_html=only_html,
only_latex=only_latex,
only_texinfo=only_texinfo,
options=opts,
images=images,
source_code=source_code,
html_show_formats=config.plot_html_show_formats and not nofigs,
caption=caption)
total_lines.extend(result.split("\n"))
total_lines.extend("\n")
if total_lines:
state_machine.insert_input(total_lines, source=source_file_name)
# copy image files to builder's output directory, if necessary
if not os.path.exists(dest_dir):
cbook.mkdirs(dest_dir)
for code_piece, images in results:
for img in images:
for fn in img.filenames():
destimg = os.path.join(dest_dir, os.path.basename(fn))
if fn != destimg:
shutil.copyfile(fn, destimg)
# copy script (if necessary)
target_name = os.path.join(dest_dir, output_base + source_ext)
with io.open(target_name, 'w', encoding="utf-8") as f:
if source_file_name == rst_file:
code_escaped = unescape_doctest(code)
else:
code_escaped = code
f.write(code_escaped)
return errors
|
ryfeus/lambda-packs
|
refs/heads/master
|
Pdf_docx_pptx_xlsx_epub_png/source/pip/utils/__init__.py
|
61
|
from __future__ import absolute_import
import contextlib
import locale
import logging
import re
import os
import posixpath
import shutil
import stat
import subprocess
import sys
import tarfile
import zipfile
from pip.exceptions import InstallationError, BadCommand
from pip.compat import console_to_str, stdlib_pkgs
from pip.locations import (
site_packages, user_site, running_under_virtualenv, virtualenv_no_global,
write_delete_marker_file,
)
from pip._vendor import pkg_resources, six
from pip._vendor.six.moves import input
from pip._vendor.six.moves import cStringIO
from pip._vendor.six import PY2
from pip._vendor.retrying import retry
if PY2:
from io import BytesIO as StringIO
else:
from io import StringIO
__all__ = ['rmtree', 'display_path', 'backup_dir',
'find_command', 'ask', 'Inf',
'normalize_name', 'splitext',
'format_size', 'is_installable_dir',
'is_svn_page', 'file_contents',
'split_leading_dir', 'has_leading_dir',
'make_path_relative', 'normalize_path',
'renames', 'get_terminal_size', 'get_prog',
'unzip_file', 'untar_file', 'unpack_file', 'call_subprocess',
'captured_stdout', 'remove_tracebacks']
logger = logging.getLogger(__name__)
def get_prog():
try:
if os.path.basename(sys.argv[0]) in ('__main__.py', '-c'):
return "%s -m pip" % sys.executable
except (AttributeError, TypeError, IndexError):
pass
return 'pip'
# Retry every half second for up to 3 seconds
@retry(stop_max_delay=3000, wait_fixed=500)
def rmtree(dir, ignore_errors=False):
shutil.rmtree(dir, ignore_errors=ignore_errors,
onerror=rmtree_errorhandler)
def rmtree_errorhandler(func, path, exc_info):
"""On Windows, the files in .svn are read-only, so when rmtree() tries to
remove them, an exception is thrown. We catch that here, remove the
read-only attribute, and hopefully continue without problems."""
# if file type currently read only
if os.stat(path).st_mode & stat.S_IREAD:
# convert to read/write
os.chmod(path, stat.S_IWRITE)
# use the original function to repeat the operation
func(path)
return
else:
raise
def display_path(path):
"""Gives the display value for a given path, making it relative to cwd
if possible."""
path = os.path.normcase(os.path.abspath(path))
if sys.version_info[0] == 2:
path = path.decode(sys.getfilesystemencoding(), 'replace')
path = path.encode(sys.getdefaultencoding(), 'replace')
if path.startswith(os.getcwd() + os.path.sep):
path = '.' + path[len(os.getcwd()):]
return path
def backup_dir(dir, ext='.bak'):
"""Figure out the name of a directory to back up the given dir to
(adding .bak, .bak2, etc)"""
n = 1
extension = ext
while os.path.exists(dir + extension):
n += 1
extension = ext + str(n)
return dir + extension
def find_command(cmd, paths=None, pathext=None):
"""Searches the PATH for the given command and returns its path"""
if paths is None:
paths = os.environ.get('PATH', '').split(os.pathsep)
if isinstance(paths, six.string_types):
paths = [paths]
# check if there are funny path extensions for executables, e.g. Windows
if pathext is None:
pathext = get_pathext()
pathext = [ext for ext in pathext.lower().split(os.pathsep) if len(ext)]
# don't use extensions if the command ends with one of them
if os.path.splitext(cmd)[1].lower() in pathext:
pathext = ['']
# check if we find the command on PATH
for path in paths:
# try without extension first
cmd_path = os.path.join(path, cmd)
for ext in pathext:
# then including the extension
cmd_path_ext = cmd_path + ext
if os.path.isfile(cmd_path_ext):
return cmd_path_ext
if os.path.isfile(cmd_path):
return cmd_path
raise BadCommand('Cannot find command %r' % cmd)
def get_pathext(default_pathext=None):
"""Returns the path extensions from environment or a default"""
if default_pathext is None:
default_pathext = os.pathsep.join(['.COM', '.EXE', '.BAT', '.CMD'])
pathext = os.environ.get('PATHEXT', default_pathext)
return pathext
def ask_path_exists(message, options):
for action in os.environ.get('PIP_EXISTS_ACTION', '').split():
if action in options:
return action
return ask(message, options)
def ask(message, options):
"""Ask the message interactively, with the given possible responses"""
while 1:
if os.environ.get('PIP_NO_INPUT'):
raise Exception(
'No input was expected ($PIP_NO_INPUT set); question: %s' %
message
)
response = input(message)
response = response.strip().lower()
if response not in options:
print(
'Your response (%r) was not one of the expected responses: '
'%s' % (response, ', '.join(options))
)
else:
return response
class _Inf(object):
"""I am bigger than everything!"""
def __eq__(self, other):
if self is other:
return True
else:
return False
def __ne__(self, other):
return not self.__eq__(other)
def __lt__(self, other):
return False
def __le__(self, other):
return False
def __gt__(self, other):
return True
def __ge__(self, other):
return True
def __repr__(self):
return 'Inf'
Inf = _Inf() # this object is not currently used as a sortable in our code
del _Inf
_normalize_re = re.compile(r'[^a-z]', re.I)
def normalize_name(name):
return _normalize_re.sub('-', name.lower())
def format_size(bytes):
if bytes > 1000 * 1000:
return '%.1fMB' % (bytes / 1000.0 / 1000)
elif bytes > 10 * 1000:
return '%ikB' % (bytes / 1000)
elif bytes > 1000:
return '%.1fkB' % (bytes / 1000.0)
else:
return '%ibytes' % bytes
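# Hedged illustration (not part of pip): expected format_size() output for a
# few sample byte counts; the helper is never called here.
def _format_size_example():
    # 2500000 -> '2.5MB', 20000 -> '20kB', 1500 -> '1.5kB', 500 -> '500bytes'
    return [format_size(n) for n in (2500000, 20000, 1500, 500)]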
def is_installable_dir(path):
"""Return True if `path` is a directory containing a setup.py file."""
if not os.path.isdir(path):
return False
setup_py = os.path.join(path, 'setup.py')
if os.path.isfile(setup_py):
return True
return False
def is_svn_page(html):
"""
Returns true if the page appears to be the index page of an svn repository
"""
return (re.search(r'<title>[^<]*Revision \d+:', html)
and re.search(r'Powered by (?:<a[^>]*?>)?Subversion', html, re.I))
def file_contents(filename):
with open(filename, 'rb') as fp:
return fp.read().decode('utf-8')
def split_leading_dir(path):
path = str(path)
path = path.lstrip('/').lstrip('\\')
if '/' in path and (('\\' in path and path.find('/') < path.find('\\'))
or '\\' not in path):
return path.split('/', 1)
elif '\\' in path:
return path.split('\\', 1)
else:
return path, ''
def has_leading_dir(paths):
"""Returns true if all the paths have the same leading path name
(i.e., everything is in one subdirectory in an archive)"""
common_prefix = None
for path in paths:
prefix, rest = split_leading_dir(path)
if not prefix:
return False
elif common_prefix is None:
common_prefix = prefix
elif prefix != common_prefix:
return False
return True
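# Hedged illustration (not part of pip): archives whose members all share one
# top-level directory report True, mixed roots report False. The member paths
# below are hypothetical.
def _has_leading_dir_example():
    assert has_leading_dir(['pkg/setup.py', 'pkg/src/mod.py'])
    assert not has_leading_dir(['pkg/setup.py', 'other/mod.py'])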
def make_path_relative(path, rel_to):
"""
    Make a filename relative, where the filename is `path`, and it is
    relative to `rel_to`.
>>> make_path_relative('/usr/share/something/a-file.pth',
... '/usr/share/another-place/src/Directory')
'../../../something/a-file.pth'
>>> make_path_relative('/usr/share/something/a-file.pth',
... '/home/user/src/Directory')
'../../../usr/share/something/a-file.pth'
>>> make_path_relative('/usr/share/a-file.pth', '/usr/share/')
'a-file.pth'
"""
path_filename = os.path.basename(path)
path = os.path.dirname(path)
path = os.path.normpath(os.path.abspath(path))
rel_to = os.path.normpath(os.path.abspath(rel_to))
path_parts = path.strip(os.path.sep).split(os.path.sep)
rel_to_parts = rel_to.strip(os.path.sep).split(os.path.sep)
while path_parts and rel_to_parts and path_parts[0] == rel_to_parts[0]:
path_parts.pop(0)
rel_to_parts.pop(0)
full_parts = ['..'] * len(rel_to_parts) + path_parts + [path_filename]
if full_parts == ['']:
return '.' + os.path.sep
return os.path.sep.join(full_parts)
def normalize_path(path):
"""
Convert a path to its canonical, case-normalized, absolute version.
"""
return os.path.normcase(os.path.realpath(os.path.expanduser(path)))
def splitext(path):
"""Like os.path.splitext, but take off .tar too"""
base, ext = posixpath.splitext(path)
if base.lower().endswith('.tar'):
ext = base[-4:] + ext
base = base[:-4]
return base, ext
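# Hedged illustration (not part of pip): unlike os.path.splitext, the '.tar'
# part stays attached to the extension. The filename is hypothetical.
def _splitext_example():
    # Returns ('pkg-1.0', '.tar.gz') rather than ('pkg-1.0.tar', '.gz').
    return splitext('pkg-1.0.tar.gz')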
def renames(old, new):
"""Like os.renames(), but handles renaming across devices."""
# Implementation borrowed from os.renames().
head, tail = os.path.split(new)
if head and tail and not os.path.exists(head):
os.makedirs(head)
shutil.move(old, new)
head, tail = os.path.split(old)
if head and tail:
try:
os.removedirs(head)
except OSError:
pass
def is_local(path):
"""
Return True if path is within sys.prefix, if we're running in a virtualenv.
If we're not in a virtualenv, all paths are considered "local."
"""
if not running_under_virtualenv():
return True
return normalize_path(path).startswith(normalize_path(sys.prefix))
def dist_is_local(dist):
"""
Return True if given Distribution object is installed locally
(i.e. within current virtualenv).
Always True if we're not in a virtualenv.
"""
return is_local(dist_location(dist))
def dist_in_usersite(dist):
"""
Return True if given Distribution is installed in user site.
"""
norm_path = normalize_path(dist_location(dist))
return norm_path.startswith(normalize_path(user_site))
def dist_in_site_packages(dist):
"""
Return True if given Distribution is installed in
distutils.sysconfig.get_python_lib().
"""
return normalize_path(
dist_location(dist)
).startswith(normalize_path(site_packages))
def dist_is_editable(dist):
"""Is distribution an editable install?"""
# TODO: factor out determining editableness out of FrozenRequirement
from pip import FrozenRequirement
req = FrozenRequirement.from_dist(dist, [])
return req.editable
def get_installed_distributions(local_only=True,
skip=stdlib_pkgs,
include_editables=True,
editables_only=False,
user_only=False):
"""
Return a list of installed Distribution objects.
If ``local_only`` is True (default), only return installations
local to the current virtualenv, if in a virtualenv.
``skip`` argument is an iterable of lower-case project names to
ignore; defaults to stdlib_pkgs
    If ``include_editables`` is False, don't report editables.
    If ``editables_only`` is True, only report editables.
    If ``user_only`` is True, only report installations in the user
    site directory.
"""
if local_only:
local_test = dist_is_local
else:
local_test = lambda d: True
if include_editables:
editable_test = lambda d: True
else:
editable_test = lambda d: not dist_is_editable(d)
if editables_only:
editables_only_test = lambda d: dist_is_editable(d)
else:
editables_only_test = lambda d: True
if user_only:
user_test = dist_in_usersite
else:
user_test = lambda d: True
return [d for d in pkg_resources.working_set
if local_test(d)
and d.key not in skip
and editable_test(d)
and editables_only_test(d)
and user_test(d)
]
def egg_link_path(dist):
"""
Return the path for the .egg-link file if it exists, otherwise, None.
    There are 3 scenarios:
1) not in a virtualenv
try to find in site.USER_SITE, then site_packages
2) in a no-global virtualenv
try to find in site_packages
3) in a yes-global virtualenv
try to find in site_packages, then site.USER_SITE
(don't look in global location)
For #1 and #3, there could be odd cases, where there's an egg-link in 2
locations.
This method will just return the first one found.
"""
sites = []
if running_under_virtualenv():
if virtualenv_no_global():
sites.append(site_packages)
else:
sites.append(site_packages)
if user_site:
sites.append(user_site)
else:
if user_site:
sites.append(user_site)
sites.append(site_packages)
for site in sites:
egglink = os.path.join(site, dist.project_name) + '.egg-link'
if os.path.isfile(egglink):
return egglink
def dist_location(dist):
"""
Get the site-packages location of this distribution. Generally
this is dist.location, except in the case of develop-installed
packages, where dist.location is the source code location, and we
want to know where the egg-link file is.
"""
egg_link = egg_link_path(dist)
if egg_link:
return egg_link
return dist.location
def get_terminal_size():
"""Returns a tuple (x, y) representing the width(x) and the height(x)
in characters of the terminal window."""
def ioctl_GWINSZ(fd):
try:
import fcntl
import termios
import struct
cr = struct.unpack(
'hh',
fcntl.ioctl(fd, termios.TIOCGWINSZ, '1234')
)
except:
return None
if cr == (0, 0):
return None
return cr
cr = ioctl_GWINSZ(0) or ioctl_GWINSZ(1) or ioctl_GWINSZ(2)
if not cr:
try:
fd = os.open(os.ctermid(), os.O_RDONLY)
cr = ioctl_GWINSZ(fd)
os.close(fd)
except:
pass
if not cr:
cr = (os.environ.get('LINES', 25), os.environ.get('COLUMNS', 80))
return int(cr[1]), int(cr[0])
def current_umask():
"""Get the current umask which involves having to set it temporarily."""
mask = os.umask(0)
os.umask(mask)
return mask
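# Hedged illustration (not part of pip): how the umask feeds the permission
# arithmetic used below by unzip_file()/untar_file(). With a typical umask of
# 0o022, (0o777 - 0o022) | 0o111 == 0o755 (rwxr-xr-x).
def _executable_mode_example():
    return (0o777 - current_umask()) | 0o111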
def unzip_file(filename, location, flatten=True):
"""
Unzip the file (with path `filename`) to the destination `location`. All
files are written based on system defaults and umask (i.e. permissions are
not preserved), except that regular file members with any execute
permissions (user, group, or world) have "chmod +x" applied after being
written. Note that for windows, any execute changes using os.chmod are
no-ops per the python docs.
"""
if not os.path.exists(location):
os.makedirs(location)
zipfp = open(filename, 'rb')
try:
zip = zipfile.ZipFile(zipfp, allowZip64=True)
leading = has_leading_dir(zip.namelist()) and flatten
for info in zip.infolist():
name = info.filename
data = zip.read(name)
fn = name
if leading:
fn = split_leading_dir(name)[1]
fn = os.path.join(location, fn)
dir = os.path.dirname(fn)
if not os.path.exists(dir):
os.makedirs(dir)
if fn.endswith('/') or fn.endswith('\\'):
# A directory
if not os.path.exists(fn):
os.makedirs(fn)
else:
fp = open(fn, 'wb')
try:
fp.write(data)
finally:
fp.close()
mode = info.external_attr >> 16
# if mode and regular file and any execute permissions for
# user/group/world?
if mode and stat.S_ISREG(mode) and mode & 0o111:
# make dest file have execute for user/group/world
# (chmod +x) no-op on windows per python docs
os.chmod(fn, (0o777 - current_umask() | 0o111))
finally:
zipfp.close()
def untar_file(filename, location):
"""
Untar the file (with path `filename`) to the destination `location`.
All files are written based on system defaults and umask (i.e. permissions
are not preserved), except that regular file members with any execute
permissions (user, group, or world) have "chmod +x" applied after being
written. Note that for windows, any execute changes using os.chmod are
no-ops per the python docs.
"""
if not os.path.exists(location):
os.makedirs(location)
if filename.lower().endswith('.gz') or filename.lower().endswith('.tgz'):
mode = 'r:gz'
elif (filename.lower().endswith('.bz2')
or filename.lower().endswith('.tbz')):
mode = 'r:bz2'
elif filename.lower().endswith('.tar'):
mode = 'r'
else:
logger.warning(
'Cannot determine compression type for file %s', filename,
)
mode = 'r:*'
tar = tarfile.open(filename, mode)
try:
# note: python<=2.5 doesn't seem to know about pax headers, filter them
leading = has_leading_dir([
member.name for member in tar.getmembers()
if member.name != 'pax_global_header'
])
for member in tar.getmembers():
fn = member.name
if fn == 'pax_global_header':
continue
if leading:
fn = split_leading_dir(fn)[1]
path = os.path.join(location, fn)
if member.isdir():
if not os.path.exists(path):
os.makedirs(path)
elif member.issym():
try:
tar._extract_member(member, path)
except Exception as exc:
# Some corrupt tar files seem to produce this
# (specifically bad symlinks)
logger.warning(
'In the tar file %s the member %s is invalid: %s',
filename, member.name, exc,
)
continue
else:
try:
fp = tar.extractfile(member)
except (KeyError, AttributeError) as exc:
# Some corrupt tar files seem to produce this
# (specifically bad symlinks)
logger.warning(
'In the tar file %s the member %s is invalid: %s',
filename, member.name, exc,
)
continue
if not os.path.exists(os.path.dirname(path)):
os.makedirs(os.path.dirname(path))
destfp = open(path, 'wb')
try:
shutil.copyfileobj(fp, destfp)
finally:
destfp.close()
fp.close()
                # does the member have any execute permissions for user/group/world?
if member.mode & 0o111:
# make dest file have execute for user/group/world
# no-op on windows per python docs
os.chmod(path, (0o777 - current_umask() | 0o111))
finally:
tar.close()
def unpack_file(filename, location, content_type, link):
filename = os.path.realpath(filename)
if (content_type == 'application/zip'
or filename.endswith('.zip')
or filename.endswith('.whl')
or zipfile.is_zipfile(filename)):
unzip_file(
filename,
location,
flatten=not filename.endswith('.whl')
)
elif (content_type == 'application/x-gzip'
or tarfile.is_tarfile(filename)
or splitext(filename)[1].lower() in (
'.tar', '.tar.gz', '.tar.bz2', '.tgz', '.tbz')):
untar_file(filename, location)
elif (content_type and content_type.startswith('text/html')
and is_svn_page(file_contents(filename))):
# We don't really care about this
from pip.vcs.subversion import Subversion
Subversion('svn+' + link.url).unpack(location)
else:
# FIXME: handle?
# FIXME: magic signatures?
logger.critical(
'Cannot unpack file %s (downloaded from %s, content-type: %s); '
'cannot detect archive format',
filename, location, content_type,
)
raise InstallationError(
'Cannot determine archive format of %s' % location
)
def remove_tracebacks(output):
pattern = (r'(?:\W+File "(?:.*)", line (?:.*)\W+(?:.*)\W+\^\W+)?'
r'Syntax(?:Error|Warning): (?:.*)')
output = re.sub(pattern, '', output)
if PY2:
return output
# compileall.compile_dir() prints different messages to stdout
# in Python 3
return re.sub(r"\*\*\* Error compiling (?:.*)", '', output)
def call_subprocess(cmd, show_stdout=True,
filter_stdout=None, cwd=None,
raise_on_returncode=True,
command_level=logging.DEBUG, command_desc=None,
extra_environ=None):
if command_desc is None:
cmd_parts = []
for part in cmd:
if ' ' in part or '\n' in part or '"' in part or "'" in part:
part = '"%s"' % part.replace('"', '\\"')
cmd_parts.append(part)
command_desc = ' '.join(cmd_parts)
if show_stdout:
stdout = None
else:
stdout = subprocess.PIPE
logger.log(command_level, "Running command %s", command_desc)
env = os.environ.copy()
if extra_environ:
env.update(extra_environ)
try:
proc = subprocess.Popen(
cmd, stderr=subprocess.STDOUT, stdin=None, stdout=stdout,
cwd=cwd, env=env)
except Exception as exc:
logger.critical(
"Error %s while executing command %s", exc, command_desc,
)
raise
all_output = []
if stdout is not None:
stdout = remove_tracebacks(console_to_str(proc.stdout.read()))
stdout = cStringIO(stdout)
while 1:
line = stdout.readline()
if not line:
break
line = line.rstrip()
all_output.append(line + '\n')
if filter_stdout:
level = filter_stdout(line)
if isinstance(level, tuple):
level, line = level
logger.log(level, line)
# if not logger.stdout_level_matches(level) and False:
# # TODO(dstufft): Handle progress bar.
# logger.show_progress()
else:
logger.debug(line)
else:
returned_stdout, returned_stderr = proc.communicate()
all_output = [returned_stdout or '']
proc.wait()
if proc.returncode:
if raise_on_returncode:
if all_output:
logger.info(
'Complete output from command %s:', command_desc,
)
logger.info(
'\n'.join(all_output) +
'\n----------------------------------------'
)
raise InstallationError(
'Command "%s" failed with error code %s in %s'
% (command_desc, proc.returncode, cwd))
else:
logger.warning(
'Command "%s" had error code %s in %s',
command_desc, proc.returncode, cwd,
)
if stdout is not None:
return remove_tracebacks(''.join(all_output))
def read_text_file(filename):
"""Return the contents of *filename*.
Try to decode the file contents with utf-8, the preferred system encoding
(e.g., cp1252 on some Windows machines), and latin1, in that order.
Decoding a byte string with latin1 will never raise an error. In the worst
case, the returned string will contain some garbage characters.
"""
with open(filename, 'rb') as fp:
data = fp.read()
encodings = ['utf-8', locale.getpreferredencoding(False), 'latin1']
for enc in encodings:
try:
data = data.decode(enc)
except UnicodeDecodeError:
continue
break
assert type(data) != bytes # Latin1 should have worked.
return data
def _make_build_dir(build_dir):
os.makedirs(build_dir)
write_delete_marker_file(build_dir)
class FakeFile(object):
"""Wrap a list of lines in an object with readline() to make
ConfigParser happy."""
def __init__(self, lines):
self._gen = (l for l in lines)
def readline(self):
try:
try:
return next(self._gen)
except NameError:
return self._gen.next()
except StopIteration:
return ''
def __iter__(self):
return self._gen
class StreamWrapper(StringIO):
@classmethod
def from_stream(cls, orig_stream):
cls.orig_stream = orig_stream
return cls()
# compileall.compile_dir() needs stdout.encoding to print to stdout
@property
def encoding(self):
return self.orig_stream.encoding
@contextlib.contextmanager
def captured_output(stream_name):
"""Return a context manager used by captured_stdout/stdin/stderr
that temporarily replaces the sys stream *stream_name* with a StringIO.
Taken from Lib/support/__init__.py in the CPython repo.
"""
orig_stdout = getattr(sys, stream_name)
setattr(sys, stream_name, StreamWrapper.from_stream(orig_stdout))
try:
yield getattr(sys, stream_name)
finally:
setattr(sys, stream_name, orig_stdout)
def captured_stdout():
"""Capture the output of sys.stdout:
with captured_stdout() as stdout:
print('hello')
self.assertEqual(stdout.getvalue(), 'hello\n')
Taken from Lib/support/__init__.py in the CPython repo.
"""
return captured_output('stdout')
class cached_property(object):
"""A property that is only computed once per instance and then replaces
itself with an ordinary attribute. Deleting the attribute resets the
property.
Source: https://github.com/bottlepy/bottle/blob/0.11.5/bottle.py#L175
"""
def __init__(self, func):
self.__doc__ = getattr(func, '__doc__')
self.func = func
def __get__(self, obj, cls):
if obj is None:
# We're being accessed from the class itself, not from an object
return self
value = obj.__dict__[self.func.__name__] = self.func(obj)
return value
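# Hedged illustration (not part of pip): a hypothetical use of cached_property.
# The expensive value is computed on first access and then stored on the
# instance as an ordinary attribute.
class _CachedPropertyExample(object):
    def __init__(self, radius):
        self.radius = radius
    @cached_property
    def area(self):
        # Runs once per instance; later reads hit the cached attribute.
        return 3.14159 * self.radius ** 2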
|
deepakantony/sms-tools
|
refs/heads/master
|
lectures/05-Sinusoidal-model/plots-code/spectral-peaks.py
|
22
|
import numpy as np
import matplotlib.pyplot as plt
from scipy.signal import hamming, triang, blackmanharris
from scipy.fftpack import fft, ifft
import math
import sys, os, functools, time
sys.path.append(os.path.join(os.path.dirname(os.path.realpath(__file__)), '../../../software/models/'))
import dftModel as DFT
import utilFunctions as UF
(fs, x) = UF.wavread('../../../sounds/oboe-A4.wav')
N = 512
M = 511
t = -60
w = np.hamming(M)
start = int(.8 * fs)
hN = N/2
hM = (M+1)/2
x1 = x[start:start+M]
mX, pX = DFT.dftAnal(x1, w, N)
ploc = UF.peakDetection(mX, t)
pmag = mX[ploc]
freqaxis = fs*np.arange(mX.size)/float(N)
plt.figure(1, figsize=(9.5, 5.5))
plt.subplot (2,1,1)
plt.plot(freqaxis, mX, 'r', lw=1.5)
plt.axis([300,2500,-70,max(mX)])
plt.plot(fs * ploc / N, pmag, marker='x', color='b', linestyle='', markeredgewidth=1.5)
plt.title('mX + spectral peaks (oboe-A4.wav)')
plt.subplot (2,1,2)
plt.plot(freqaxis,pX,'c', lw=1.5)
plt.axis([300,2500,6,14])
plt.plot(fs * ploc / N, pX[ploc], marker='x', color='b', linestyle='', markeredgewidth=1.5)
plt.title('pX + spectral peaks')
plt.tight_layout()
plt.savefig('spectral-peaks.png')
plt.show()
|
Soya93/Extract-Refactoring
|
refs/heads/master
|
python/testData/refactoring/move/moveNamespacePackage2/before/src/b.py
|
237
|
import nspkg.nssubpkg.a
print(nspkg.nssubpkg.a.VAR)
|
haitdai/qtbase5-inprocess-surface-compositor-remote-display
|
refs/heads/master
|
myqt/qtbase/src/3rdparty/protobuf/gtest/test/gtest_test_utils.py
|
1100
|
#!/usr/bin/env python
#
# Copyright 2006, Google Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Unit test utilities for Google C++ Testing Framework."""
__author__ = '[email protected] (Zhanyong Wan)'
import atexit
import os
import shutil
import sys
import tempfile
import unittest
_test_module = unittest
# Suppresses the 'Import not at the top of the file' lint complaint.
# pylint: disable-msg=C6204
try:
import subprocess
_SUBPROCESS_MODULE_AVAILABLE = True
except:
import popen2
_SUBPROCESS_MODULE_AVAILABLE = False
# pylint: enable-msg=C6204
GTEST_OUTPUT_VAR_NAME = 'GTEST_OUTPUT'
IS_WINDOWS = os.name == 'nt'
IS_CYGWIN = os.name == 'posix' and 'CYGWIN' in os.uname()[0]
# The environment variable for specifying the path to the premature-exit file.
PREMATURE_EXIT_FILE_ENV_VAR = 'TEST_PREMATURE_EXIT_FILE'
environ = os.environ.copy()
def SetEnvVar(env_var, value):
"""Sets/unsets an environment variable to a given value."""
if value is not None:
environ[env_var] = value
elif env_var in environ:
del environ[env_var]
# Here we expose a class from a particular module, depending on the
# environment. The comment suppresses the 'Invalid variable name' lint
# complaint.
TestCase = _test_module.TestCase # pylint: disable-msg=C6409
# Initially maps a flag to its default value. After
# _ParseAndStripGTestFlags() is called, maps a flag to its actual value.
_flag_map = {'source_dir': os.path.dirname(sys.argv[0]),
'build_dir': os.path.dirname(sys.argv[0])}
_gtest_flags_are_parsed = False
def _ParseAndStripGTestFlags(argv):
"""Parses and strips Google Test flags from argv. This is idempotent."""
# Suppresses the lint complaint about a global variable since we need it
# here to maintain module-wide state.
global _gtest_flags_are_parsed # pylint: disable-msg=W0603
if _gtest_flags_are_parsed:
return
_gtest_flags_are_parsed = True
for flag in _flag_map:
# The environment variable overrides the default value.
if flag.upper() in os.environ:
_flag_map[flag] = os.environ[flag.upper()]
# The command line flag overrides the environment variable.
i = 1 # Skips the program name.
while i < len(argv):
prefix = '--' + flag + '='
if argv[i].startswith(prefix):
_flag_map[flag] = argv[i][len(prefix):]
del argv[i]
break
else:
# We don't increment i in case we just found a --gtest_* flag
# and removed it from argv.
i += 1
def GetFlag(flag):
"""Returns the value of the given flag."""
# In case GetFlag() is called before Main(), we always call
# _ParseAndStripGTestFlags() here to make sure the --gtest_* flags
# are parsed.
_ParseAndStripGTestFlags(sys.argv)
return _flag_map[flag]
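# Hedged illustration (not part of gtest): how the flag precedence described
# above plays out. The path below is hypothetical and the helper is never
# called here.
def _FlagPrecedenceExample():
  # With argv = ['test.py', '--build_dir=/tmp/out'], GetFlag('build_dir')
  # returns '/tmp/out' and the flag is stripped from argv. A BUILD_DIR
  # environment variable would be applied first, so the command-line flag
  # still wins.
  return GetFlag('build_dir')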
def GetSourceDir():
"""Returns the absolute path of the directory where the .py files are."""
return os.path.abspath(GetFlag('source_dir'))
def GetBuildDir():
"""Returns the absolute path of the directory where the test binaries are."""
return os.path.abspath(GetFlag('build_dir'))
_temp_dir = None
def _RemoveTempDir():
if _temp_dir:
shutil.rmtree(_temp_dir, ignore_errors=True)
atexit.register(_RemoveTempDir)
def GetTempDir():
"""Returns a directory for temporary files."""
global _temp_dir
if not _temp_dir:
_temp_dir = tempfile.mkdtemp()
return _temp_dir
def GetTestExecutablePath(executable_name, build_dir=None):
"""Returns the absolute path of the test binary given its name.
The function will print a message and abort the program if the resulting file
doesn't exist.
Args:
executable_name: name of the test binary that the test script runs.
build_dir: directory where to look for executables, by default
the result of GetBuildDir().
Returns:
The absolute path of the test binary.
"""
path = os.path.abspath(os.path.join(build_dir or GetBuildDir(),
executable_name))
if (IS_WINDOWS or IS_CYGWIN) and not path.endswith('.exe'):
path += '.exe'
if not os.path.exists(path):
message = (
'Unable to find the test binary. Please make sure to provide path\n'
'to the binary via the --build_dir flag or the BUILD_DIR\n'
'environment variable.')
print >> sys.stderr, message
sys.exit(1)
return path
def GetExitStatus(exit_code):
"""Returns the argument to exit(), or -1 if exit() wasn't called.
Args:
exit_code: the result value of os.system(command).
"""
if os.name == 'nt':
# On Windows, os.WEXITSTATUS() doesn't work and os.system() returns
# the argument to exit() directly.
return exit_code
else:
# On Unix, os.WEXITSTATUS() must be used to extract the exit status
# from the result of os.system().
if os.WIFEXITED(exit_code):
return os.WEXITSTATUS(exit_code)
else:
return -1
class Subprocess:
def __init__(self, command, working_dir=None, capture_stderr=True, env=None):
"""Changes into a specified directory, if provided, and executes a command.
Restores the old directory afterwards.
Args:
command: The command to run, in the form of sys.argv.
working_dir: The directory to change into.
capture_stderr: Determines whether to capture stderr in the output member
or to discard it.
env: Dictionary with environment to pass to the subprocess.
Returns:
An object that represents outcome of the executed process. It has the
following attributes:
terminated_by_signal True iff the child process has been terminated
by a signal.
      signal                 Signal that terminated the child process.
exited True iff the child process exited normally.
exit_code The code with which the child process exited.
output Child process's stdout and stderr output
combined in a string.
"""
    # The subprocess module is the preferable way of running programs
# since it is available and behaves consistently on all platforms,
# including Windows. But it is only available starting in python 2.4.
# In earlier python versions, we revert to the popen2 module, which is
# available in python 2.0 and later but doesn't provide required
# functionality (Popen4) under Windows. This allows us to support Mac
# OS X 10.4 Tiger, which has python 2.3 installed.
if _SUBPROCESS_MODULE_AVAILABLE:
if capture_stderr:
stderr = subprocess.STDOUT
else:
stderr = subprocess.PIPE
p = subprocess.Popen(command,
stdout=subprocess.PIPE, stderr=stderr,
cwd=working_dir, universal_newlines=True, env=env)
      # communicate returns a tuple with the file object for the child's
# output.
self.output = p.communicate()[0]
self._return_code = p.returncode
else:
old_dir = os.getcwd()
def _ReplaceEnvDict(dest, src):
# Changes made by os.environ.clear are not inheritable by child
# processes until Python 2.6. To produce inheritable changes we have
# to delete environment items with the del statement.
for key in dest.keys():
del dest[key]
dest.update(src)
# When 'env' is not None, backup the environment variables and replace
# them with the passed 'env'. When 'env' is None, we simply use the
# current 'os.environ' for compatibility with the subprocess.Popen
# semantics used above.
if env is not None:
old_environ = os.environ.copy()
_ReplaceEnvDict(os.environ, env)
try:
if working_dir is not None:
os.chdir(working_dir)
if capture_stderr:
p = popen2.Popen4(command)
else:
p = popen2.Popen3(command)
p.tochild.close()
self.output = p.fromchild.read()
ret_code = p.wait()
finally:
os.chdir(old_dir)
# Restore the old environment variables
# if they were replaced.
if env is not None:
_ReplaceEnvDict(os.environ, old_environ)
# Converts ret_code to match the semantics of
# subprocess.Popen.returncode.
if os.WIFSIGNALED(ret_code):
self._return_code = -os.WTERMSIG(ret_code)
else: # os.WIFEXITED(ret_code) should return True here.
self._return_code = os.WEXITSTATUS(ret_code)
if self._return_code < 0:
self.terminated_by_signal = True
self.exited = False
self.signal = -self._return_code
else:
self.terminated_by_signal = False
self.exited = True
self.exit_code = self._return_code
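# Hedged illustration (not part of gtest): a typical use of Subprocess. The
# command is hypothetical and the helper is never called here.
def _SubprocessExample():
  p = Subprocess(['python', '--version'], capture_stderr=True)
  # p.exited is True for a normal exit, p.exit_code holds the status, and
  # p.output contains the combined stdout/stderr text.
  return p.exited, p.exit_code, p.output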
def Main():
"""Runs the unit test."""
# We must call _ParseAndStripGTestFlags() before calling
# unittest.main(). Otherwise the latter will be confused by the
# --gtest_* flags.
_ParseAndStripGTestFlags(sys.argv)
# The tested binaries should not be writing XML output files unless the
# script explicitly instructs them to.
# TODO([email protected]): Move this into Subprocess when we implement
# passing environment into it as a parameter.
if GTEST_OUTPUT_VAR_NAME in os.environ:
del os.environ[GTEST_OUTPUT_VAR_NAME]
_test_module.main()
|
Softmotions/edx-platform
|
refs/heads/master
|
lms/djangoapps/django_comment_client/base/urls.py
|
102
|
from django.conf.urls.defaults import url, patterns
urlpatterns = patterns(
'django_comment_client.base.views',
url(r'upload$', 'upload', name='upload'),
url(r'threads/(?P<thread_id>[\w\-]+)/update$', 'update_thread', name='update_thread'),
url(r'threads/(?P<thread_id>[\w\-]+)/reply$', 'create_comment', name='create_comment'),
url(r'threads/(?P<thread_id>[\w\-]+)/delete', 'delete_thread', name='delete_thread'),
url(r'threads/(?P<thread_id>[\w\-]+)/upvote$', 'vote_for_thread', {'value': 'up'}, name='upvote_thread'),
url(r'threads/(?P<thread_id>[\w\-]+)/downvote$', 'vote_for_thread', {'value': 'down'}, name='downvote_thread'),
url(r'threads/(?P<thread_id>[\w\-]+)/flagAbuse$', 'flag_abuse_for_thread', name='flag_abuse_for_thread'),
url(r'threads/(?P<thread_id>[\w\-]+)/unFlagAbuse$', 'un_flag_abuse_for_thread', name='un_flag_abuse_for_thread'),
url(r'threads/(?P<thread_id>[\w\-]+)/unvote$', 'undo_vote_for_thread', name='undo_vote_for_thread'),
url(r'threads/(?P<thread_id>[\w\-]+)/pin$', 'pin_thread', name='pin_thread'),
url(r'threads/(?P<thread_id>[\w\-]+)/unpin$', 'un_pin_thread', name='un_pin_thread'),
url(r'threads/(?P<thread_id>[\w\-]+)/follow$', 'follow_thread', name='follow_thread'),
url(r'threads/(?P<thread_id>[\w\-]+)/unfollow$', 'unfollow_thread', name='unfollow_thread'),
url(r'threads/(?P<thread_id>[\w\-]+)/close$', 'openclose_thread', name='openclose_thread'),
url(r'comments/(?P<comment_id>[\w\-]+)/update$', 'update_comment', name='update_comment'),
url(r'comments/(?P<comment_id>[\w\-]+)/endorse$', 'endorse_comment', name='endorse_comment'),
url(r'comments/(?P<comment_id>[\w\-]+)/reply$', 'create_sub_comment', name='create_sub_comment'),
url(r'comments/(?P<comment_id>[\w\-]+)/delete$', 'delete_comment', name='delete_comment'),
url(r'comments/(?P<comment_id>[\w\-]+)/upvote$', 'vote_for_comment', {'value': 'up'}, name='upvote_comment'),
url(r'comments/(?P<comment_id>[\w\-]+)/downvote$', 'vote_for_comment', {'value': 'down'}, name='downvote_comment'),
url(r'comments/(?P<comment_id>[\w\-]+)/unvote$', 'undo_vote_for_comment', name='undo_vote_for_comment'),
url(r'comments/(?P<comment_id>[\w\-]+)/flagAbuse$', 'flag_abuse_for_comment', name='flag_abuse_for_comment'),
url(r'comments/(?P<comment_id>[\w\-]+)/unFlagAbuse$', 'un_flag_abuse_for_comment', name='un_flag_abuse_for_comment'),
url(r'^(?P<commentable_id>[\w\-.]+)/threads/create$', 'create_thread', name='create_thread'),
url(r'^(?P<commentable_id>[\w\-.]+)/follow$', 'follow_commentable', name='follow_commentable'),
url(r'^(?P<commentable_id>[\w\-.]+)/unfollow$', 'unfollow_commentable', name='unfollow_commentable'),
url(r'users$', 'users', name='users'),
)
|
lsqtongxin/django
|
refs/heads/master
|
tests/admin_scripts/custom_templates/project_template/additional_dir/extra.py
|
701
|
# this file uses the {{ extra }} variable
|
zoincoin/zoin
|
refs/heads/master
|
contrib/testgen/gen_base58_test_vectors.py
|
9
|
#!/usr/bin/env python
'''
Generate valid and invalid base58 address and private key test vectors.
Usage:
gen_base58_test_vectors.py valid 50 > ../../src/test/data/base58_keys_valid.json
gen_base58_test_vectors.py invalid 50 > ../../src/test/data/base58_keys_invalid.json
'''
# 2012 Wladimir J. van der Laan
# Released under MIT License
import os
from itertools import islice
from base58 import b58encode, b58decode, b58encode_chk, b58decode_chk, b58chars
import random
from binascii import b2a_hex
# key types
PUBKEY_ADDRESS = 50
SCRIPT_ADDRESS = 7
PUBKEY_ADDRESS_TEST = 65
SCRIPT_ADDRESS_TEST = 178
PRIVKEY = 178
PRIVKEY_TEST = 193
metadata_keys = ['isPrivkey', 'isTestnet', 'addrType', 'isCompressed']
# templates for valid sequences
templates = [
# prefix, payload_size, suffix, metadata
# None = N/A
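# each valid vector below is base58check(prefix bytes + payload_size random bytes + suffix bytes)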
((PUBKEY_ADDRESS,), 20, (), (False, False, 'pubkey', None)),
((SCRIPT_ADDRESS,), 20, (), (False, False, 'script', None)),
((PUBKEY_ADDRESS_TEST,), 20, (), (False, True, 'pubkey', None)),
((SCRIPT_ADDRESS_TEST,), 20, (), (False, True, 'script', None)),
((PRIVKEY,), 32, (), (True, False, None, False)),
((PRIVKEY,), 32, (1,), (True, False, None, True)),
((PRIVKEY_TEST,), 32, (), (True, True, None, False)),
((PRIVKEY_TEST,), 32, (1,), (True, True, None, True))
]
def is_valid(v):
'''Check vector v for validity'''
result = b58decode_chk(v)
if result is None:
return False
for template in templates:
prefix = str(bytearray(template[0]))
suffix = str(bytearray(template[2]))
if result.startswith(prefix) and result.endswith(suffix):
if (len(result) - len(prefix) - len(suffix)) == template[1]:
return True
return False
def gen_valid_vectors():
'''Generate valid test vectors'''
while True:
for template in templates:
prefix = str(bytearray(template[0]))
payload = os.urandom(template[1])
suffix = str(bytearray(template[2]))
rv = b58encode_chk(prefix + payload + suffix)
assert is_valid(rv)
metadata = dict([(x,y) for (x,y) in zip(metadata_keys,template[3]) if y is not None])
yield (rv, b2a_hex(payload), metadata)
def gen_invalid_vector(template, corrupt_prefix, randomize_payload_size, corrupt_suffix):
'''Generate possibly invalid vector'''
if corrupt_prefix:
prefix = os.urandom(1)
else:
prefix = str(bytearray(template[0]))
if randomize_payload_size:
payload = os.urandom(max(int(random.expovariate(0.5)), 50))
else:
payload = os.urandom(template[1])
if corrupt_suffix:
suffix = os.urandom(len(template[2]))
else:
suffix = str(bytearray(template[2]))
return b58encode_chk(prefix + payload + suffix)
def randbool(p = 0.5):
'''Return True with P(p)'''
return random.random() < p
def gen_invalid_vectors():
'''Generate invalid test vectors'''
# start with some manual edge-cases
yield "",
yield "x",
while True:
# kinds of invalid vectors:
# invalid prefix
# invalid payload length
# invalid (randomized) suffix (add random data)
# corrupt checksum
for template in templates:
val = gen_invalid_vector(template, randbool(0.2), randbool(0.2), randbool(0.2))
if random.randint(0,10)<1: # line corruption
if randbool(): # add random character to end
val += random.choice(b58chars)
else: # replace random character in the middle
n = random.randint(0, len(val) - 1)
val = val[0:n] + random.choice(b58chars) + val[n+1:]
if not is_valid(val):
yield val,
if __name__ == '__main__':
import sys, json
iters = {'valid':gen_valid_vectors, 'invalid':gen_invalid_vectors}
try:
uiter = iters[sys.argv[1]]
except IndexError:
uiter = gen_valid_vectors
try:
count = int(sys.argv[2])
except IndexError:
count = 0
data = list(islice(uiter(), count))
json.dump(data, sys.stdout, sort_keys=True, indent=4)
sys.stdout.write('\n')
|
Hasimir/pyjs
|
refs/heads/master
|
pyjswidgets/pyjamas/ui/PopupPanel.mshtml.py
|
7
|
#class PopupPanel:
#
# # PopupImpl.onShow
# def onShowImpl(self, popup):
# frame = doc().createElement('iframe')
# frame.scrolling = 'no'
# frame.frameBorder = 0
# frame.style.position = 'absolute'
#
# popup.__frame = frame
# frame.__popup = popup
# frame.style.setExpression('left', 'this.__popup.offsetLeft')
# frame.style.setExpression('top', 'this.__popup.offsetTop')
# frame.style.setExpression('width', 'this.__popup.offsetWidth')
# frame.style.setExpression('height', 'this.__popup.offsetHeight')
# popup.parentElement.insertBefore(frame, popup)
#
# # PopupImpl.onHide
# def onHideImpl(self, popup):
# var frame = popup.__frame
# frame.parentElement.removeChild(frame)
# popup.__frame = None
# frame.__popup = None
|
jrising/open-estimate
|
refs/heads/master
|
openest/generate/weathertools.py
|
1
|
import os, csv
import numpy as np
county_dir = "/home/dmr/county_text/access1-3/rcp45/tas"
def date_to_datestr(date):
return '%04d%02d%02d' % (date.year, date.month, date.day)  # YYYYMMDD
def get_crop_calendar(cropfile):
cropcals = {}
with open(cropfile, 'rU') as fipsfp:
reader = csv.reader(fipsfp, delimiter=',')
for row in reader:
if row[1] == "None":
continue
plantday = int(row[1])
harvestday = int(row[2])
cropcals[row[0]] = (plantday, harvestday)
return cropcals
# assumes temppath has rows YYYYMMDD,#### and yields (year, temp)
# allows negative plantday
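# (a negative plantday means the growing season starts in the previous calendar year, so the tail of the previous year's temperatures is used)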
def growing_seasons_mean_reader(reader, plantday, harvestday):
prevtemps = None
row = reader.next()
more_rows = True
while more_rows:
year = row[0][0:4]
temps = [float(row[1]) if row[1] != '-99.99' else float('NaN')]
more_rows = False
for row in reader:
if row[0][0:4] != year:
more_rows = True
break
temps.append(float(row[1]) if row[1] != '-99.99' else float('NaN'))
if plantday < 0:
if prevtemps is not None:
temp = np.mean(prevtemps[plantday:] + temps[0:harvestday])
yield (int(year), temp)
prevtemps = temps
else:
temp = np.mean(temps[plantday:harvestday])
yield (int(year), temp)
# allows negative plantday
def growing_seasons_mean_ncdf(yyyyddd, weather, plantday, harvestday):
if plantday < 0:
year0 = yyyyddd[0] // 1000
seasons = np.array_split(weather, range(plantday - 1, len(yyyyddd), 365))
else:
year0 = yyyyddd[0] // 1000 + 1
seasons = np.array_split(weather, range(plantday - 1 + 365, len(yyyyddd), 365))
year1 = yyyyddd[-1] // 1000
for chunk in zip(range(year0, year1 + 1), seasons):
yield (chunk[0], np.mean(chunk[1][0:harvestday-plantday+1]))
# Version 1
#ii = 0
#while ii < len(yyyyddd):
# year = yyyyddd[ii] // 1000
# if ii + plantday - 1 >= 0 and ii + harvestday <= len(yyyyddd):
# mean = np.mean(weather[ii:ii+365][plantday-1:harvestday])
# ii += 365
# yield (year, mean)
# else:
# ii += 365
# allows negative plantday
def growing_seasons_daily_ncdf(yyyyddd, weather, plantday, harvestday):
if plantday < 0:
year0 = yyyyddd[0] // 1000
index0 = plantday - 1
else:
year0 = yyyyddd[0] // 1000 + 1
index0 = plantday - 1 + 365
year1 = yyyyddd[-1] // 1000
if isinstance(weather, list):
seasons = np.array_split(weather, range(index0, len(yyyyddd), 365))
for chunk in zip(range(year0, year1 + 1), seasons):
yield (chunk[0], chunk[1][0:harvestday-plantday+1])
else:
seasons = {}
for variable in weather:
seasons[variable] = np.array_split(weather[variable], range(index0, len(yyyyddd), 365))
for year in range(year0, year1 + 1):
yield (year, {variable: seasons[variable][year - year0][0:harvestday-plantday+1] for variable in seasons})
# Version 1
#ii = 0
#while ii < len(yyyyddd):
# year = yyyyddd[ii] // 1000
# if ii + plantday - 1 >= 0 and ii + harvestday <= len(yyyyddd):
# if isinstance(weather, list):
# yield (year, weather[ii:ii+365][plantday-1:harvestday])
# else:
# season = {}
# for variable in weather:
# season[variable] = weather[variable][ii:ii+365][plantday-1:harvestday]
# yield (year, season)
# ii += 365
# else:
# ii += 365
def yearly_daily_ncdf(yyyyddd, weather):
year0 = int(yyyyddd[0]) // 1000
year1 = int(yyyyddd[-1]) // 1000
chunks = zip(range(year0, year1+1), np.array_split(weather, range(365, len(yyyyddd), 365)))
for chunk in chunks:
yield chunk
# Version 2
#for ii in xrange(0, len(yyyyddd), 365):
# yield (yyyyddd[ii] // 1000, weather[ii:ii+365])
# Version 1
#ii = 0
#while ii < len(yyyyddd):
# year = yyyyddd[ii] // 1000
# if ii + 365 <= len(yyyyddd):
# yield (year, weather[ii:ii+365])
# ii += 365
# else:
# ii += 365
def xmap_apply_model(xmap, model, pval):
data = {}
total = 0
for (key, val) in xmap:
total += 1
if total % 100 == 0:
print total
result = model.eval_pval(val, pval, 1e-2)
if not np.isnan(result):
yield (key, result)
# effects and scales need to be lists of same length, containing iterators (key, val) with same keys
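# i.e. for each key, compute the scale-weighted average of the corresponding effects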
def combo_effects(effect_dicts, scale_gens):
numers = {}
denoms = {}
for ii in range(len(effect_dicts)):
for (key, scale) in scale_gens[ii]:
if scale == 0 or key not in effect_dicts[ii]:
continue
if key not in numers:
numers[key] = 0
denoms[key] = 0
numers[key] += effect_dicts[ii][key] * scale
denoms[key] += scale
return {key: numers[key] / denoms[key] for key in numers}
def read_scale_file(filepath, factor):
with open(filepath, "r") as fp:
reader = csv.reader(fp, delimiter=',')
for row in reader:
if row[1] == 'NA':
continue
fips = row[0]
if len(fips) == 4:
fips = '0' + fips
yield (fips, float(row[1]) * factor)
|
ChrisGoedhart/Uforia
|
refs/heads/master
|
source/django/db/backends/mysql/introspection.py
|
82
|
from django.db.backends import BaseDatabaseIntrospection
from MySQLdb import ProgrammingError, OperationalError
from MySQLdb.constants import FIELD_TYPE
import re
foreign_key_re = re.compile(r"\sCONSTRAINT `[^`]*` FOREIGN KEY \(`([^`]*)`\) REFERENCES `([^`]*)` \(`([^`]*)`\)")
class DatabaseIntrospection(BaseDatabaseIntrospection):
data_types_reverse = {
FIELD_TYPE.BLOB: 'TextField',
FIELD_TYPE.CHAR: 'CharField',
FIELD_TYPE.DECIMAL: 'DecimalField',
FIELD_TYPE.NEWDECIMAL: 'DecimalField',
FIELD_TYPE.DATE: 'DateField',
FIELD_TYPE.DATETIME: 'DateTimeField',
FIELD_TYPE.DOUBLE: 'FloatField',
FIELD_TYPE.FLOAT: 'FloatField',
FIELD_TYPE.INT24: 'IntegerField',
FIELD_TYPE.LONG: 'IntegerField',
FIELD_TYPE.LONGLONG: 'BigIntegerField',
FIELD_TYPE.SHORT: 'IntegerField',
FIELD_TYPE.STRING: 'CharField',
FIELD_TYPE.TIMESTAMP: 'DateTimeField',
FIELD_TYPE.TINY: 'IntegerField',
FIELD_TYPE.TINY_BLOB: 'TextField',
FIELD_TYPE.MEDIUM_BLOB: 'TextField',
FIELD_TYPE.LONG_BLOB: 'TextField',
FIELD_TYPE.VAR_STRING: 'CharField',
}
def get_table_list(self, cursor):
"Returns a list of table names in the current database."
cursor.execute("SHOW TABLES")
return [row[0] for row in cursor.fetchall()]
def get_table_description(self, cursor, table_name):
"Returns a description of the table, with the DB-API cursor.description interface."
cursor.execute("SELECT * FROM %s LIMIT 1" % self.connection.ops.quote_name(table_name))
return cursor.description
def _name_to_index(self, cursor, table_name):
"""
Returns a dictionary of {field_name: field_index} for the given table.
Indexes are 0-based.
"""
return dict([(d[0], i) for i, d in enumerate(self.get_table_description(cursor, table_name))])
def get_relations(self, cursor, table_name):
"""
Returns a dictionary of {field_index: (field_index_other_table, other_table)}
representing all relationships to the given table. Indexes are 0-based.
"""
my_field_dict = self._name_to_index(cursor, table_name)
constraints = self.get_key_columns(cursor, table_name)
relations = {}
for my_fieldname, other_table, other_field in constraints:
other_field_index = self._name_to_index(cursor, other_table)[other_field]
my_field_index = my_field_dict[my_fieldname]
relations[my_field_index] = (other_field_index, other_table)
return relations
def get_key_columns(self, cursor, table_name):
"""
Returns a list of (column_name, referenced_table_name, referenced_column_name) for all
key columns in given table.
"""
key_columns = []
try:
cursor.execute("""
SELECT column_name, referenced_table_name, referenced_column_name
FROM information_schema.key_column_usage
WHERE table_name = %s
AND table_schema = DATABASE()
AND referenced_table_name IS NOT NULL
AND referenced_column_name IS NOT NULL""", [table_name])
key_columns.extend(cursor.fetchall())
except (ProgrammingError, OperationalError):
# Fall back to "SHOW CREATE TABLE", for previous MySQL versions.
# Go through all constraints and save the equal matches.
cursor.execute("SHOW CREATE TABLE %s" % self.connection.ops.quote_name(table_name))
for row in cursor.fetchall():
pos = 0
while True:
match = foreign_key_re.search(row[1], pos)
if match is None:
break
pos = match.end()
key_columns.append(match.groups())
return key_columns
def get_primary_key_column(self, cursor, table_name):
"""
Returns the name of the primary key column for the given table
"""
for column in self.get_indexes(cursor, table_name).iteritems():
if column[1]['primary_key']:
return column[0]
return None
def get_indexes(self, cursor, table_name):
"""
Returns a dictionary of fieldname -> infodict for the given table,
where each infodict is in the format:
{'primary_key': boolean representing whether it's the primary key,
'unique': boolean representing whether it's a unique index}
"""
cursor.execute("SHOW INDEX FROM %s" % self.connection.ops.quote_name(table_name))
indexes = {}
for row in cursor.fetchall():
indexes[row[4]] = {'primary_key': (row[2] == 'PRIMARY'), 'unique': not bool(row[1])}
return indexes
|
ff94315/hiwifi-openwrt-HC5661-HC5761
|
refs/heads/master
|
staging_dir/host/lib/python2.7/ctypes/test/test_find.py
|
82
|
import unittest
import sys
from ctypes import *
from ctypes.util import find_library
from ctypes.test import is_resource_enabled
if sys.platform == "win32":
lib_gl = find_library("OpenGL32")
lib_glu = find_library("Glu32")
lib_gle = None
elif sys.platform == "darwin":
lib_gl = lib_glu = find_library("OpenGL")
lib_gle = None
else:
lib_gl = find_library("GL")
lib_glu = find_library("GLU")
lib_gle = find_library("gle")
## print, for debugging
if is_resource_enabled("printing"):
if lib_gl or lib_glu or lib_gle:
print "OpenGL libraries:"
for item in (("GL", lib_gl),
("GLU", lib_glu),
("gle", lib_gle)):
print "\t", item
# On some systems, loading the OpenGL libraries needs the RTLD_GLOBAL mode.
class Test_OpenGL_libs(unittest.TestCase):
def setUp(self):
self.gl = self.glu = self.gle = None
if lib_gl:
self.gl = CDLL(lib_gl, mode=RTLD_GLOBAL)
if lib_glu:
self.glu = CDLL(lib_glu, RTLD_GLOBAL)
if lib_gle:
try:
self.gle = CDLL(lib_gle)
except OSError:
pass
if lib_gl:
def test_gl(self):
if self.gl:
self.gl.glClearIndex
if lib_glu:
def test_glu(self):
if self.glu:
self.glu.gluBeginCurve
if lib_gle:
def test_gle(self):
if self.gle:
self.gle.gleGetJoinStyle
##if os.name == "posix" and sys.platform != "darwin":
## # On platforms where the default shared library suffix is '.so',
## # at least some libraries can be loaded as attributes of the cdll
## # object, since ctypes now tries loading the lib again
## # with '.so' appended if the first try fails.
## #
## # Won't work for libc, unfortunately. OTOH, it isn't
## # needed for libc since this is already mapped into the current
## # process (?)
## #
## # On MAC OSX, it won't work either, because dlopen() needs a full path,
## # and the default suffix is either none or '.dylib'.
## class LoadLibs(unittest.TestCase):
## def test_libm(self):
## import math
## libm = cdll.libm
## sqrt = libm.sqrt
## sqrt.argtypes = (c_double,)
## sqrt.restype = c_double
## self.assertEqual(sqrt(2), math.sqrt(2))
if __name__ == "__main__":
unittest.main()
|
mtconley/turntable
|
refs/heads/master
|
test/lib/python2.7/site-packages/pip/index.py
|
62
|
"""Routines related to PyPI, indexes"""
from __future__ import absolute_import
import logging
import cgi
import sys
import os
import re
import mimetypes
import posixpath
import warnings
from pip._vendor.six.moves.urllib import parse as urllib_parse
from pip._vendor.six.moves.urllib import request as urllib_request
from pip.compat import ipaddress
from pip.utils import Inf, cached_property, normalize_name, splitext
from pip.utils.deprecation import RemovedInPip7Warning, RemovedInPip8Warning
from pip.utils.logging import indent_log
from pip.exceptions import (
DistributionNotFound, BestVersionAlreadyInstalled, InvalidWheelFilename,
UnsupportedWheel,
)
from pip.download import url_to_path, path_to_url
from pip.models import PyPI
from pip.wheel import Wheel, wheel_ext
from pip.pep425tags import supported_tags, supported_tags_noarch, get_platform
from pip.req.req_requirement import InstallationCandidate
from pip._vendor import html5lib, requests, pkg_resources, six
from pip._vendor.packaging.version import parse as parse_version
from pip._vendor.requests.exceptions import SSLError
__all__ = ['PackageFinder']
# Taken from Chrome's list of secure origins (See: http://bit.ly/1qrySKC)
SECURE_ORIGINS = [
# protocol, hostname, port
("https", "*", "*"),
("*", "localhost", "*"),
("*", "127.0.0.0/8", "*"),
("*", "::1/128", "*"),
("file", "*", None),
]
logger = logging.getLogger(__name__)
class PackageFinder(object):
"""This finds packages.
This is meant to match easy_install's technique for looking for
packages, by reading pages and looking for appropriate links
"""
def __init__(self, find_links, index_urls,
use_wheel=True, allow_external=(), allow_unverified=(),
allow_all_external=False, allow_all_prereleases=False,
trusted_hosts=None, process_dependency_links=False,
session=None):
if session is None:
raise TypeError(
"PackageFinder() missing 1 required keyword argument: "
"'session'"
)
self.find_links = find_links
self.index_urls = index_urls
self.dependency_links = []
# These are boring links that have already been logged somehow:
self.logged_links = set()
self.use_wheel = use_wheel
# Do we allow (safe and verifiable) externally hosted files?
self.allow_external = set(normalize_name(n) for n in allow_external)
# Which names are allowed to install insecure and unverifiable files?
self.allow_unverified = set(
normalize_name(n) for n in allow_unverified
)
# Anything that is allowed unverified is also allowed external
self.allow_external |= self.allow_unverified
# Do we allow all (safe and verifiable) externally hosted files?
self.allow_all_external = allow_all_external
# Domains that we won't emit warnings for when not using HTTPS
self.secure_origins = [
("*", host, "*")
for host in (trusted_hosts if trusted_hosts else [])
]
# Stores if we ignored any external links so that we can instruct
# end users how to install them if no distributions are available
self.need_warn_external = False
# Stores if we ignored any unsafe links so that we can instruct
# end users how to install them if no distributions are available
self.need_warn_unverified = False
# Do we want to allow _all_ pre-releases?
self.allow_all_prereleases = allow_all_prereleases
# Do we process dependency links?
self.process_dependency_links = process_dependency_links
# The Session we'll use to make requests
self.session = session
def add_dependency_links(self, links):
# # FIXME: this shouldn't be global list this, it should only
# # apply to requirements of the package that specifies the
# # dependency_links value
# # FIXME: also, we should track comes_from (i.e., use Link)
if self.process_dependency_links:
warnings.warn(
"Dependency Links processing has been deprecated and will be "
"removed in a future release.",
RemovedInPip7Warning,
)
self.dependency_links.extend(links)
def _sort_locations(self, locations):
"""
Sort locations into "files" (archives) and "urls", and return
a pair of lists (files,urls)
"""
files = []
urls = []
# puts the url for the given file path into the appropriate list
def sort_path(path):
url = path_to_url(path)
if mimetypes.guess_type(url, strict=False)[0] == 'text/html':
urls.append(url)
else:
files.append(url)
for url in locations:
is_local_path = os.path.exists(url)
is_file_url = url.startswith('file:')
is_find_link = url in self.find_links
if is_local_path or is_file_url:
if is_local_path:
path = url
else:
path = url_to_path(url)
if is_find_link and os.path.isdir(path):
path = os.path.realpath(path)
for item in os.listdir(path):
sort_path(os.path.join(path, item))
elif is_file_url and os.path.isdir(path):
urls.append(url)
elif os.path.isfile(path):
sort_path(path)
else:
urls.append(url)
return files, urls
def _candidate_sort_key(self, candidate):
"""
Function used to generate link sort key for link tuples.
The greater the return value, the more preferred it is.
If not finding wheels, then sorted by version only.
If finding wheels, then the sort order is by version, then:
1. existing installs
2. wheels ordered via Wheel.support_index_min()
3. source archives
Note: it was considered to embed this logic into the Link
comparison operators, but then different sdist links
with the same version, would have to be considered equal
"""
if self.use_wheel:
support_num = len(supported_tags)
if candidate.location == INSTALLED_VERSION:
pri = 1
elif candidate.location.ext == wheel_ext:
# can raise InvalidWheelFilename
wheel = Wheel(candidate.location.filename)
if not wheel.supported():
raise UnsupportedWheel(
"%s is not a supported wheel for this platform. It "
"can't be sorted." % wheel.filename
)
pri = -(wheel.support_index_min())
else: # sdist
pri = -(support_num)
return (candidate.version, pri)
else:
return candidate.version
def _sort_versions(self, applicable_versions):
"""
Bring the latest version (and wheels) to the front, but maintain the
existing ordering as secondary. See the docstring for `_candidate_sort_key`
for details. This function is isolated for easier unit testing.
"""
return sorted(
applicable_versions,
key=self._candidate_sort_key,
reverse=True
)
def _validate_secure_origin(self, logger, location):
# Determine if this url used a secure transport mechanism
parsed = urllib_parse.urlparse(str(location))
origin = (parsed.scheme, parsed.hostname, parsed.port)
# Determine if our origin is a secure origin by looking through our
# hardcoded list of secure origins, as well as any additional ones
# configured on this PackageFinder instance.
for secure_origin in (SECURE_ORIGINS + self.secure_origins):
# Check to see if the protocol matches
if origin[0] != secure_origin[0] and secure_origin[0] != "*":
continue
try:
# We need to do this decode dance to ensure that we have a
# unicode object, even on Python 2.x.
addr = ipaddress.ip_address(
origin[1]
if (
isinstance(origin[1], six.text_type)
or origin[1] is None
)
else origin[1].decode("utf8")
)
network = ipaddress.ip_network(
secure_origin[1]
if isinstance(secure_origin[1], six.text_type)
else secure_origin[1].decode("utf8")
)
except ValueError:
# We don't have both a valid address or a valid network, so
# we'll check this origin against hostnames.
if origin[1] != secure_origin[1] and secure_origin[1] != "*":
continue
else:
# We have a valid address and network, so see if the address
# is contained within the network.
if addr not in network:
continue
# Check to see if the port matches
if (origin[2] != secure_origin[2]
and secure_origin[2] != "*"
and secure_origin[2] is not None):
continue
# If we've gotten here, then this origin matches the current
# secure origin and we should break out of the loop and continue
# on.
break
else:
# If the loop successfully completed without a break, that means
# that the origin we are testing is not a secure origin.
logger.warning(
"This repository located at %s is not a trusted host, if "
"this repository is available via HTTPS it is recommend to "
"use HTTPS instead, otherwise you may silence this warning "
"with '--trusted-host %s'.",
parsed.hostname,
parsed.hostname,
)
warnings.warn(
"Implicitly allowing locations which are not hosted at a "
"secure origin is deprecated and will require the use of "
"--trusted-host in the future.",
RemovedInPip7Warning,
)
def find_requirement(self, req, upgrade):
def mkurl_pypi_url(url):
loc = posixpath.join(url, url_name)
# For maximum compatibility with easy_install, ensure the path
# ends in a trailing slash. Although this isn't in the spec
# (and PyPI can handle it without the slash) some other index
# implementations might break if they relied on easy_install's
# behavior.
if not loc.endswith('/'):
loc = loc + '/'
return loc
url_name = req.url_name
# Only check main index if index URL is given:
main_index_url = None
if self.index_urls:
# Check that we have the url_name correctly spelled:
main_index_url = Link(
mkurl_pypi_url(self.index_urls[0]),
trusted=True,
)
page = self._get_page(main_index_url, req)
if page is None and PyPI.netloc not in str(main_index_url):
warnings.warn(
"Failed to find %r at %s. It is suggested to upgrade "
"your index to support normalized names as the name in "
"/simple/{name}." % (req.name, main_index_url),
RemovedInPip8Warning,
)
url_name = self._find_url_name(
Link(self.index_urls[0], trusted=True),
url_name, req
) or req.url_name
if url_name is not None:
locations = [
mkurl_pypi_url(url)
for url in self.index_urls] + self.find_links
else:
locations = list(self.find_links)
file_locations, url_locations = self._sort_locations(locations)
_flocations, _ulocations = self._sort_locations(self.dependency_links)
file_locations.extend(_flocations)
# We trust every url that the user has given us whether it was given
# via --index-url or --find-links
locations = [Link(url, trusted=True) for url in url_locations]
# We explicitly do not trust links that came from dependency_links
locations.extend([Link(url) for url in _ulocations])
logger.debug('URLs to search for versions for %s:', req)
for location in locations:
logger.debug('* %s', location)
self._validate_secure_origin(logger, location)
found_versions = []
found_versions.extend(
self._package_versions(
# We trust every directly linked archive in find_links
[Link(url, '-f', trusted=True) for url in self.find_links],
req.name.lower()
)
)
page_versions = []
for page in self._get_pages(locations, req):
logger.debug('Analyzing links from page %s', page.url)
with indent_log():
page_versions.extend(
self._package_versions(page.links, req.name.lower())
)
dependency_versions = list(self._package_versions(
[Link(url) for url in self.dependency_links], req.name.lower()))
if dependency_versions:
logger.debug(
'dependency_links found: %s',
', '.join([
version.location.url for version in dependency_versions
])
)
file_versions = list(
self._package_versions(
[Link(url) for url in file_locations],
req.name.lower()
)
)
if (not found_versions
and not page_versions
and not dependency_versions
and not file_versions):
logger.critical(
'Could not find any downloads that satisfy the requirement %s',
req,
)
if self.need_warn_external:
logger.warning(
"Some externally hosted files were ignored as access to "
"them may be unreliable (use --allow-external %s to "
"allow).",
req.name,
)
if self.need_warn_unverified:
logger.warning(
"Some insecure and unverifiable files were ignored"
" (use --allow-unverified %s to allow).",
req.name,
)
raise DistributionNotFound(
'No distributions at all found for %s' % req
)
installed_version = []
if req.satisfied_by is not None:
installed_version = [
InstallationCandidate(
req.name,
req.satisfied_by.version,
INSTALLED_VERSION,
),
]
if file_versions:
file_versions.sort(reverse=True)
logger.debug(
'Local files found: %s',
', '.join([
url_to_path(candidate.location.url)
for candidate in file_versions
])
)
# This is an intentional priority ordering
all_versions = (
file_versions + found_versions + page_versions
+ dependency_versions
)
# Filter out anything which doesn't match our specifier
_versions = set(
req.specifier.filter(
[x.version for x in all_versions],
prereleases=(
self.allow_all_prereleases
if self.allow_all_prereleases else None
),
)
)
applicable_versions = [
x for x in all_versions if x.version in _versions
]
# Finally add our existing versions to the front of our versions.
applicable_versions = installed_version + applicable_versions
applicable_versions = self._sort_versions(applicable_versions)
existing_applicable = any(
i.location is INSTALLED_VERSION
for i in applicable_versions
)
if not upgrade and existing_applicable:
if applicable_versions[0].location is INSTALLED_VERSION:
logger.debug(
'Existing installed version (%s) is most up-to-date and '
'satisfies requirement',
req.satisfied_by.version,
)
else:
logger.debug(
'Existing installed version (%s) satisfies requirement '
'(most up-to-date version is %s)',
req.satisfied_by.version,
applicable_versions[0].version,
)
return None
if not applicable_versions:
logger.critical(
'Could not find a version that satisfies the requirement %s '
'(from versions: %s)',
req,
', '.join(
sorted(
set(str(i.version) for i in all_versions),
key=parse_version,
)
)
)
if self.need_warn_external:
logger.warning(
"Some externally hosted files were ignored as access to "
"them may be unreliable (use --allow-external to allow)."
)
if self.need_warn_unverified:
logger.warning(
"Some insecure and unverifiable files were ignored"
" (use --allow-unverified %s to allow).",
req.name,
)
raise DistributionNotFound(
'No distributions matching the version for %s' % req
)
if applicable_versions[0].location is INSTALLED_VERSION:
# We have an existing version, and it's the best version
logger.debug(
'Installed version (%s) is most up-to-date (past versions: '
'%s)',
req.satisfied_by.version,
', '.join(str(i.version) for i in applicable_versions[1:])
or "none",
)
raise BestVersionAlreadyInstalled
if len(applicable_versions) > 1:
logger.debug(
'Using version %s (newest of versions: %s)',
applicable_versions[0].version,
', '.join(str(i.version) for i in applicable_versions)
)
selected_version = applicable_versions[0].location
if (selected_version.verifiable is not None
and not selected_version.verifiable):
logger.warning(
"%s is potentially insecure and unverifiable.", req.name,
)
if selected_version._deprecated_regex:
warnings.warn(
"%s discovered using a deprecated method of parsing, in the "
"future it will no longer be discovered." % req.name,
RemovedInPip7Warning,
)
return selected_version
def _find_url_name(self, index_url, url_name, req):
"""
Finds the true URL name of a package, when the given name isn't quite
correct.
This is usually used to implement case-insensitivity.
"""
if not index_url.url.endswith('/'):
# Vaguely part of the PyPI API... weird but true.
# FIXME: bad to modify this?
index_url.url += '/'
page = self._get_page(index_url, req)
if page is None:
logger.critical('Cannot fetch index base URL %s', index_url)
return
norm_name = normalize_name(req.url_name)
for link in page.links:
base = posixpath.basename(link.path.rstrip('/'))
if norm_name == normalize_name(base):
logger.debug(
'Real name of requirement %s is %s', url_name, base,
)
return base
return None
def _get_pages(self, locations, req):
"""
Yields (page, page_url) from the given locations, skipping
locations that have errors, and adding download/homepage links
"""
all_locations = list(locations)
seen = set()
while all_locations:
location = all_locations.pop(0)
if location in seen:
continue
seen.add(location)
page = self._get_page(location, req)
if page is None:
continue
yield page
for link in page.rel_links():
normalized = normalize_name(req.name).lower()
if (normalized not in self.allow_external
and not self.allow_all_external):
self.need_warn_external = True
logger.debug(
"Not searching %s for files because external "
"urls are disallowed.",
link,
)
continue
if (link.trusted is not None
and not link.trusted
and normalized not in self.allow_unverified):
logger.debug(
"Not searching %s for urls, it is an "
"untrusted link and cannot produce safe or "
"verifiable files.",
link,
)
self.need_warn_unverified = True
continue
all_locations.append(link)
_egg_fragment_re = re.compile(r'#egg=([^&]*)')
_egg_info_re = re.compile(r'([a-z0-9_.]+)-([a-z0-9_.!+-]+)', re.I)
_py_version_re = re.compile(r'-py([123]\.?[0-9]?)$')
def _sort_links(self, links):
"""
Returns elements of links in order, non-egg links first, egg links
second, while eliminating duplicates
"""
eggs, no_eggs = [], []
seen = set()
for link in links:
if link not in seen:
seen.add(link)
if link.egg_fragment:
eggs.append(link)
else:
no_eggs.append(link)
return no_eggs + eggs
def _package_versions(self, links, search_name):
for link in self._sort_links(links):
v = self._link_package_versions(link, search_name)
if v is not None:
yield v
def _known_extensions(self):
extensions = ('.tar.gz', '.tar.bz2', '.tar', '.tgz', '.zip')
if self.use_wheel:
return extensions + (wheel_ext,)
return extensions
def _link_package_versions(self, link, search_name):
"""
Return an InstallationCandidate for the given link, or None if no
acceptable version can be extracted from it.
Meant to be overridden by subclasses, not called by clients.
"""
platform = get_platform()
version = None
if link.egg_fragment:
egg_info = link.egg_fragment
else:
egg_info, ext = link.splitext()
if not ext:
if link not in self.logged_links:
logger.debug('Skipping link %s; not a file', link)
self.logged_links.add(link)
return
if egg_info.endswith('.tar'):
# Special double-extension case:
egg_info = egg_info[:-4]
ext = '.tar' + ext
if ext not in self._known_extensions():
if link not in self.logged_links:
logger.debug(
'Skipping link %s; unknown archive format: %s',
link,
ext,
)
self.logged_links.add(link)
return
if "macosx10" in link.path and ext == '.zip':
if link not in self.logged_links:
logger.debug('Skipping link %s; macosx10 one', link)
self.logged_links.add(link)
return
if ext == wheel_ext:
try:
wheel = Wheel(link.filename)
except InvalidWheelFilename:
logger.debug(
'Skipping %s because the wheel filename is invalid',
link
)
return
if (pkg_resources.safe_name(wheel.name).lower()
!= pkg_resources.safe_name(search_name).lower()):
logger.debug(
'Skipping link %s; wrong project name (not %s)',
link,
search_name,
)
return
if not wheel.supported():
logger.debug(
'Skipping %s because it is not compatible with this '
'Python',
link,
)
return
# This is a dirty hack to prevent installing Binary Wheels from
# PyPI unless it is a Windows or Mac Binary Wheel. This is
# paired with a change to PyPI disabling uploads for the
# same. Once we have a mechanism for enabling support for
# binary wheels on linux that deals with the inherent problems
# of binary distribution this can be removed.
comes_from = getattr(link, "comes_from", None)
if (
(
not platform.startswith('win')
and not platform.startswith('macosx')
and not platform == 'cli'
)
and comes_from is not None
and urllib_parse.urlparse(
comes_from.url
).netloc.endswith(PyPI.netloc)):
if not wheel.supported(tags=supported_tags_noarch):
logger.debug(
"Skipping %s because it is a pypi-hosted binary "
"Wheel on an unsupported platform",
link,
)
return
version = wheel.version
if not version:
version = self._egg_info_matches(egg_info, search_name, link)
if version is None:
logger.debug(
'Skipping link %s; wrong project name (not %s)',
link,
search_name,
)
return
if (link.internal is not None
and not link.internal
and not normalize_name(search_name).lower()
in self.allow_external
and not self.allow_all_external):
# We have a link that we are sure is external, so we should skip
# it unless we are allowing externals
logger.debug("Skipping %s because it is externally hosted.", link)
self.need_warn_external = True
return
if (link.verifiable is not None
and not link.verifiable
and not (normalize_name(search_name).lower()
in self.allow_unverified)):
# We have a link that we are sure we cannot verify its integrity,
# so we should skip it unless we are allowing unsafe installs
# for this requirement.
logger.debug(
"Skipping %s because it is an insecure and unverifiable file.",
link,
)
self.need_warn_unverified = True
return
match = self._py_version_re.search(version)
if match:
version = version[:match.start()]
py_version = match.group(1)
if py_version != sys.version[:3]:
logger.debug(
'Skipping %s because Python version is incorrect', link
)
return
logger.debug('Found link %s, version: %s', link, version)
return InstallationCandidate(search_name, version, link)
def _egg_info_matches(self, egg_info, search_name, link):
match = self._egg_info_re.search(egg_info)
if not match:
logger.debug('Could not parse version from link: %s', link)
return None
name = match.group(0).lower()
# To match the "safe" name that pkg_resources creates:
name = name.replace('_', '-')
# project name and version must be separated by a dash
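# e.g. egg_info 'Foo_Bar-1.0' with search_name 'foo-bar' yields '1.0'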
look_for = search_name.lower() + "-"
if name.startswith(look_for):
return match.group(0)[len(look_for):]
else:
return None
def _get_page(self, link, req):
return HTMLPage.get_page(link, req, session=self.session)
class HTMLPage(object):
"""Represents one page, along with its URL"""
# FIXME: these regexes are horrible hacks:
_homepage_re = re.compile(b'<th>\\s*home\\s*page', re.I)
_download_re = re.compile(b'<th>\\s*download\\s+url', re.I)
_href_re = re.compile(
b'href=(?:"([^"]*)"|\'([^\']*)\'|([^>\\s\\n]*))',
re.I | re.S
)
def __init__(self, content, url, headers=None, trusted=None):
# Determine if we have any encoding information in our headers
encoding = None
if headers and "Content-Type" in headers:
content_type, params = cgi.parse_header(headers["Content-Type"])
if "charset" in params:
encoding = params['charset']
self.content = content
self.parsed = html5lib.parse(
self.content,
encoding=encoding,
namespaceHTMLElements=False,
)
self.url = url
self.headers = headers
self.trusted = trusted
def __str__(self):
return self.url
@classmethod
def get_page(cls, link, req, skip_archives=True, session=None):
if session is None:
raise TypeError(
"get_page() missing 1 required keyword argument: 'session'"
)
url = link.url
url = url.split('#', 1)[0]
# Check for VCS schemes that do not support lookup as web pages.
from pip.vcs import VcsSupport
for scheme in VcsSupport.schemes:
if url.lower().startswith(scheme) and url[len(scheme)] in '+:':
logger.debug('Cannot look at %s URL %s', scheme, link)
return None
try:
if skip_archives:
filename = link.filename
for bad_ext in ['.tar', '.tar.gz', '.tar.bz2', '.tgz', '.zip']:
if filename.endswith(bad_ext):
content_type = cls._get_content_type(
url, session=session,
)
if content_type.lower().startswith('text/html'):
break
else:
logger.debug(
'Skipping page %s because of Content-Type: %s',
link,
content_type,
)
return
logger.debug('Getting page %s', url)
# Tack index.html onto file:// URLs that point to directories
(scheme, netloc, path, params, query, fragment) = \
urllib_parse.urlparse(url)
if (scheme == 'file'
and os.path.isdir(urllib_request.url2pathname(path))):
# add trailing slash if not present so urljoin doesn't trim
# final segment
if not url.endswith('/'):
url += '/'
url = urllib_parse.urljoin(url, 'index.html')
logger.debug(' file: URL is directory, getting %s', url)
resp = session.get(
url,
headers={
"Accept": "text/html",
"Cache-Control": "max-age=600",
},
)
resp.raise_for_status()
# The check for archives above only works if the url ends with
# something that looks like an archive. However that is not a
# requirement of an url. Unless we issue a HEAD request on every
# url we cannot know ahead of time for sure if something is HTML
# or not. However we can check after we've downloaded it.
content_type = resp.headers.get('Content-Type', 'unknown')
if not content_type.lower().startswith("text/html"):
logger.debug(
'Skipping page %s because of Content-Type: %s',
link,
content_type,
)
return
inst = cls(
resp.content, resp.url, resp.headers,
trusted=link.trusted,
)
except requests.HTTPError as exc:
level = 2 if exc.response.status_code == 404 else 1
cls._handle_fail(req, link, exc, url, level=level)
except requests.ConnectionError as exc:
cls._handle_fail(
req, link, "connection error: %s" % exc, url,
)
except requests.Timeout:
cls._handle_fail(req, link, "timed out", url)
except SSLError as exc:
reason = ("There was a problem confirming the ssl certificate: "
"%s" % exc)
cls._handle_fail(
req, link, reason, url,
level=2,
meth=logger.info,
)
else:
return inst
@staticmethod
def _handle_fail(req, link, reason, url, level=1, meth=None):
if meth is None:
meth = logger.debug
meth("Could not fetch URL %s: %s", link, reason)
meth("Will skip URL %s when looking for download links for %s" %
(link.url, req))
@staticmethod
def _get_content_type(url, session):
"""Get the Content-Type of the given url, using a HEAD request"""
scheme, netloc, path, query, fragment = urllib_parse.urlsplit(url)
if scheme not in ('http', 'https'):
# FIXME: some warning or something?
# assertion error?
return ''
resp = session.head(url, allow_redirects=True)
resp.raise_for_status()
return resp.headers.get("Content-Type", "")
@cached_property
def api_version(self):
metas = [
x for x in self.parsed.findall(".//meta")
if x.get("name", "").lower() == "api-version"
]
if metas:
try:
return int(metas[0].get("value", None))
except (TypeError, ValueError):
pass
return None
@cached_property
def base_url(self):
bases = [
x for x in self.parsed.findall(".//base")
if x.get("href") is not None
]
if bases and bases[0].get("href"):
return bases[0].get("href")
else:
return self.url
@property
def links(self):
"""Yields all links in the page"""
for anchor in self.parsed.findall(".//a"):
if anchor.get("href"):
href = anchor.get("href")
url = self.clean_link(
urllib_parse.urljoin(self.base_url, href)
)
# Determine if this link is internal. If that distinction
# doesn't make sense in this context, then we don't make
# any distinction.
internal = None
if self.api_version and self.api_version >= 2:
# Only api_versions >= 2 have a distinction between
# external and internal links
internal = bool(
anchor.get("rel")
and "internal" in anchor.get("rel").split()
)
yield Link(url, self, internal=internal)
def rel_links(self):
for url in self.explicit_rel_links():
yield url
for url in self.scraped_rel_links():
yield url
def explicit_rel_links(self, rels=('homepage', 'download')):
"""Yields all links with the given relations"""
rels = set(rels)
for anchor in self.parsed.findall(".//a"):
if anchor.get("rel") and anchor.get("href"):
found_rels = set(anchor.get("rel").split())
# Determine the intersection between what rels were found and
# what rels were being looked for
if found_rels & rels:
href = anchor.get("href")
url = self.clean_link(
urllib_parse.urljoin(self.base_url, href)
)
yield Link(url, self, trusted=False)
def scraped_rel_links(self):
# Can we get rid of this horrible horrible method?
for regex in (self._homepage_re, self._download_re):
match = regex.search(self.content)
if not match:
continue
href_match = self._href_re.search(self.content, pos=match.end())
if not href_match:
continue
url = (
href_match.group(1)
or href_match.group(2)
or href_match.group(3)
)
if not url:
continue
try:
url = url.decode("ascii")
except UnicodeDecodeError:
continue
url = self.clean_link(urllib_parse.urljoin(self.base_url, url))
yield Link(url, self, trusted=False, _deprecated_regex=True)
_clean_re = re.compile(r'[^a-z0-9$&+,/:;=?@.#%_\\|-]', re.I)
def clean_link(self, url):
"""Makes sure a link is fully encoded. That is, if a ' ' shows up in
the link, it will be rewritten to %20 (while not over-quoting
% or other characters)."""
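# e.g. 'http://host/a b' becomes 'http://host/a%20b'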
return self._clean_re.sub(
lambda match: '%%%02x' % ord(match.group(0)), url)
class Link(object):
def __init__(self, url, comes_from=None, internal=None, trusted=None,
_deprecated_regex=False):
# url can be a UNC windows share
if url != Inf and url.startswith('\\\\'):
url = path_to_url(url)
self.url = url
self.comes_from = comes_from
self.internal = internal
self.trusted = trusted
self._deprecated_regex = _deprecated_regex
def __str__(self):
if self.comes_from:
return '%s (from %s)' % (self.url, self.comes_from)
else:
return str(self.url)
def __repr__(self):
return '<Link %s>' % self
def __eq__(self, other):
if not isinstance(other, Link):
return NotImplemented
return self.url == other.url
def __ne__(self, other):
if not isinstance(other, Link):
return NotImplemented
return self.url != other.url
def __lt__(self, other):
if not isinstance(other, Link):
return NotImplemented
return self.url < other.url
def __le__(self, other):
if not isinstance(other, Link):
return NotImplemented
return self.url <= other.url
def __gt__(self, other):
if not isinstance(other, Link):
return NotImplemented
return self.url > other.url
def __ge__(self, other):
if not isinstance(other, Link):
return NotImplemented
return self.url >= other.url
def __hash__(self):
return hash(self.url)
@property
def filename(self):
_, netloc, path, _, _ = urllib_parse.urlsplit(self.url)
name = posixpath.basename(path.rstrip('/')) or netloc
name = urllib_parse.unquote(name)
assert name, ('URL %r produced no filename' % self.url)
return name
@property
def scheme(self):
return urllib_parse.urlsplit(self.url)[0]
@property
def netloc(self):
return urllib_parse.urlsplit(self.url)[1]
@property
def path(self):
return urllib_parse.unquote(urllib_parse.urlsplit(self.url)[2])
def splitext(self):
return splitext(posixpath.basename(self.path.rstrip('/')))
@property
def ext(self):
return self.splitext()[1]
@property
def url_without_fragment(self):
scheme, netloc, path, query, fragment = urllib_parse.urlsplit(self.url)
return urllib_parse.urlunsplit((scheme, netloc, path, query, None))
_egg_fragment_re = re.compile(r'#egg=([^&]*)')
@property
def egg_fragment(self):
match = self._egg_fragment_re.search(self.url)
if not match:
return None
return match.group(1)
_hash_re = re.compile(
r'(sha1|sha224|sha384|sha256|sha512|md5)=([a-f0-9]+)'
)
@property
def hash(self):
match = self._hash_re.search(self.url)
if match:
return match.group(2)
return None
@property
def hash_name(self):
match = self._hash_re.search(self.url)
if match:
return match.group(1)
return None
@property
def show_url(self):
return posixpath.basename(self.url.split('#', 1)[0].split('?', 1)[0])
@property
def verifiable(self):
"""
Returns True if this link can be verified after download, False if it
cannot, and None if we cannot determine.
"""
trusted = self.trusted or getattr(self.comes_from, "trusted", None)
if trusted is not None and trusted:
# This link came from a trusted source. It *may* be verifiable but
# first we need to see if this page is operating under the new
# API version.
try:
api_version = getattr(self.comes_from, "api_version", None)
api_version = int(api_version)
except (ValueError, TypeError):
api_version = None
if api_version is None or api_version <= 1:
# This link is either trusted, or it came from a trusted source;
# however it is not operating under the API version 2 so
# we can't make any claims about if it's safe or not
return
if self.hash:
# This link came from a trusted source and it has a hash, so we
# can consider it safe.
return True
else:
# This link came from a trusted source, using the new API
# version, and it does not have a hash. It is NOT verifiable
return False
elif trusted is not None:
# This link came from an untrusted source and we cannot trust it
return False
# An object to represent the "link" for the installed version of a requirement.
# Using Inf as the url makes it sort higher.
INSTALLED_VERSION = Link(Inf)
|
mozilla/inventory
|
refs/heads/master
|
bulk_action/import_utils.py
|
2
|
from django.db.models.fields.related import ForeignKey
from systems.models import System
from core.registration.static.models import StaticReg
from core.hwadapter.models import HWAdapter
from mozdns.cname.models import CNAME
from mozdns.utils import ensure_label_domain
import decimal
import datetime
import simplejson as json
# Objects are created/updated during different phases. The need for this
# comes from the necessity of certain objects existing before other objects
# exist. For example, a KeyValue pair needs its object to exist before it can
# be saved. Also, SREG objects need system objects before they are saved --
# likewise HWAdapter objects need SREG objects.
# As functions are built up to save a JSON blob they are paired with a PHASE
# number to ensure an order that will allow things to work.
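# In practice PHASE_1 saves Systems, PHASE_2 saves Static Registrations,
# PHASE_3 saves Hardware Adapters and CNAMEs, and PHASE_4 saves KeyValue pairs.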
PHASE_1 = 1
PHASE_2 = 2
PHASE_3 = 3
PHASE_4 = 4
class BadImportData(Exception):
def __init__(self, bad_blob=None, msg=''):
self.bad_blob = bad_blob
self.msg = msg
return super(BadImportData, self).__init__()
class BadUpdateCreate(BadImportData):
pass
class BAEncoder(json.JSONEncoder):
def default(self, o):
if isinstance(o, decimal.Decimal):
return str(o)
elif isinstance(o, datetime.datetime):
return o.strftime("%Y-%m-%d %H:%M")
elif isinstance(o, datetime.date):
return o.isoformat()
super(BAEncoder, self).default(o)
class BADecoder(json.JSONDecoder):
pass
def dumps(j):
return json.dumps(j, cls=BAEncoder)
def loads(j):
return json.loads(j, cls=BADecoder)
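# make_save returns a closure that saves obj and records its primary key back into the blob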
def make_save(obj, blob):
def save():
obj.save()
if 'pk' in blob:
assert blob['pk'] == obj.pk
else:
blob['pk'] = obj.pk
return save
def recurse_confirm_no_pk(blob):
for attr, value in blob.iteritems():
if isinstance(value, dict) and attr != 'views':
# the views attr should never be touched.
for sub_blob in value.values():
recurse_confirm_no_pk(sub_blob)
elif attr == 'pk':
raise BadImportData(
bad_blob=blob,
msg='This object is new (it has no pk) but other objects tied '
'to it have pk values. This is not allowed.'
)
def system_import(blob):
if 'pk' in blob:
try:
system = System.objects.get(pk=blob['pk'])
return system_update(system, blob)
except System.DoesNotExist:
raise BadImportData(
bad_blob=blob,
msg='Could not find the System with primary key '
'{0}.'.format(blob['pk'])
)
else:
recurse_confirm_no_pk(blob)
system = System()
return system_update(system, blob)
def sreg_import(system, blob):
if 'pk' in blob:
try:
sreg = StaticReg.objects.get(pk=blob['pk'])
return sreg_update(sreg, blob)
except StaticReg.DoesNotExist:
raise BadImportData(
bad_blob=blob,
msg='Could not find the Static Registration with primary key '
'{0}.'.format(blob['pk'])
)
else:
recurse_confirm_no_pk(blob)
sreg = StaticReg(system=system)
return sreg_update(sreg, blob)
def hw_import(sreg, blob):
if 'pk' in blob:
try:
hw = HWAdapter.objects.get(pk=blob['pk'])
return hw_update(hw, blob)
except StaticReg.DoesNotExist:
raise BadImportData(
bad_blob=blob,
msg='Could not find the Hardware Adapter with primary key '
'{0}.'.format(blob['pk'])
)
else:
recurse_confirm_no_pk(blob)
hw = HWAdapter(sreg=sreg)
return hw_update(hw, blob)
def import_kv(obj, blobs):
save_functions = []
Klass = obj.keyvalue_set.model
for blob in blobs.values():
if 'pk' in blob:
try:
kv = Klass.objects.get(pk=blob['pk'])
except Klass.DoesNotExist:
raise BadImportData(
bad_blob=blob,
msg='Could not find the Key Value pair with primary key '
'{0}.'.format(blob['pk'])
)
save_functions += update_kv(kv, blob)
else:
if obj.pk:
try:
kv = Klass.objects.get(obj=obj, key=blob.get('key', None))
except Klass.DoesNotExist:
kv = Klass(obj=obj)
else:
kv = Klass(obj=obj)
save_functions += update_kv(kv, blob)
return save_functions
def import_cname(sreg, blobs):
if not isinstance(blobs, list):
raise BadImportData(
bad_blob=blobs,
msg='The cname attribute should be a list of CNAME blobs'
)
save_functions = []
for blob in blobs:
if 'pk' in blob:
try:
cname = CNAME.objects.get(pk=blob['pk'])
save_functions += cname_update(cname, blob)
except CNAME.DoesNotExist:
raise BadImportData(
bad_blob=blob,
msg='Could not find the CNAME with primary key '
'{0}.'.format(blob['pk'])
)
else:
recurse_confirm_no_pk(blob)
save_functions += cname_update(CNAME(), blob)
return save_functions
def cname_update(cname, blob):
save_functions = []
for attr, value in blob.iteritems():
if attr == 'views':
continue # We handle views in save since new objects need a pk
else:
setattr(cname, attr, value)
def save():
# This code runs in a transaction that is rolled back if an exception
# is raised.
cname.label, cname.domain = ensure_label_domain(cname.fqdn)
make_save(cname, blob)()
# Now save the views
for view in blob.get('views', []):
cname.views.add(view)
return [(PHASE_3, save)] + save_functions
def update_kv(kv, blob):
try:
kv.key, kv.value = blob['key'], blob['value']
except KeyError:
raise BadImportData(
bad_blob=blob,
msg="Either the 'key' or 'value' attribute is missing from this "
"blob. Both are required for KeyValue pairs."
)
def save():
# Refresh the cache with an actual object
kv.obj = kv.obj.__class__.objects.get(pk=kv.obj.pk)
try:
kv.clean()
except Exception, e:
raise type(e)(
"Failed to clean() <{0}>. Error was '{1}'".format(kv, e)
)
make_save(kv, blob)()
return [(PHASE_4, save)]
def hw_update(hw, blob):
save_functions = []
for attr, value in blob.iteritems():
if attr == 'keyvalue_set':
save_functions += import_kv(hw, value)
else:
setattr(hw, attr, value)
def save():
# Refresh the cache with an actual object
hw.sreg = StaticReg.objects.get(pk=hw.sreg.pk)
make_save(hw, blob)()
return [(PHASE_3, save)] + save_functions
def sreg_update(sreg, blob):
save_functions = []
for attr, value in blob.iteritems():
if attr == 'hwadapter_set':
if not isinstance(value, dict):
raise BadImportData(
bad_blob=blob,
msg='The Static Registration attribute hwadapter_set must '
'be a dict of Hardware Adapter JSON blobs'
)
for hw_blob in value.values():
save_functions += hw_import(sreg, hw_blob)
elif attr == 'keyvalue_set':
save_functions += import_kv(sreg, value)
elif attr == 'cname':
save_functions += import_cname(sreg, value)
elif attr == 'views':
continue # We handle views in save since new objects need a pk
else:
setattr(sreg, attr, value)
def save():
# This code runs in a transaction that is rolled back if an exception
# is raised.
sreg.label, sreg.domain = ensure_label_domain(sreg.fqdn)
# So this is strange, but probably reasonable due to the fact that we are
# using so many closures and aliasing objects left and right. We need to
# set the system again or else sreg.system ends up being None.
sreg.system = System.objects.get(pk=sreg.system.pk)
make_save(sreg, blob)()
# Now save the views
for view in blob.get('views', []):
sreg.views.add(view)
return [(PHASE_2, save)] + save_functions
def clone_system_extras(system, other_hostname):
# XXX if ever SystemChangeLog is swapped out this function will need to be
# changed
"""
Copy all SystemChangeLog over from the system named "other_hostname" to
system's SystemChangeLog store.
This function is called after the system's save() is called, so if the
object is new we will need to refresh the object before using it.
"""
other_system = System.objects.get(hostname=other_hostname)
def _clone_system_extras():
s = System.objects.get(pk=system.pk)
for cl in other_system.systemchangelog_set.all():
cl.pk = None
cl.system = s
cl.save()
# if the source system didn't have a created_on date, the new system's was
# set to now()-ish. Set the new system's created_on back to None as well
if not other_system.created_on:
s.created_on = None
s.save()
return _clone_system_extras
def system_update(system, blob):
"""
If there is a key 'clone' with a "truthy" value we must look for that and
possibly copy history from an existing object. The history will be taken
from the system whose "hostname" is the value of the "clone" key. If no
system exists for cloning a BadImportData exception will be raised.
"""
save_functions = []
mother_hostname = blob.get('clone', None)
if mother_hostname and isinstance(mother_hostname, str):
try:
save_functions += [
(PHASE_2, clone_system_extras(system, mother_hostname))
]
except System.DoesNotExist:
raise BadImportData(
bad_blob=blob,
msg="Tried to clone the host {0} but a host with that "
"hostname didn't exist".format(mother_hostname)
)
for attr, value in blob.iteritems():
if attr == 'staticreg_set':
if not isinstance(value, dict):
raise BadImportData(
bad_blob=blob,
msg='The system attribute staticreg_set must be a dict of '
'Static Registration JSON blobs'
)
for sreg_blob in value.values():
save_functions += sreg_import(system, sreg_blob)
elif attr == 'keyvalue_set':
save_functions += import_kv(system, value)
else:
set_field(system, attr, value)
return [(PHASE_1, make_save(system, blob))] + save_functions
def set_field(obj, attr, value):
# yay side effects
if attr == 'pk': # Don't ever set a primary key
return
if hasattr(obj.__class__, attr):
m_attr = getattr(obj.__class__, attr)
if isinstance(m_attr.field, ForeignKey):
if value is None:
m_value = value
else:
try:
m_value = m_attr.field.rel.to.objects.get(pk=value)
except (ValueError, m_attr.field.rel.to.DoesNotExist), e:
raise BadImportData(
"Using the data '{0}' to look up '{1}' and "
"received the error '{2}'".format(value, attr, str(e))
)
else:
raise Exception("Really bad error")
else:
m_value = value
if attr == 'rack_order':
# The serialization here makes this a float which cannot be converted
# to a decimal. Casting to a string makes this acceptable to the django
# field validators.
m_value = str(m_value)
setattr(obj, attr, m_value)
|
luotao1/Paddle
|
refs/heads/develop
|
python/paddle/fluid/framework.py
|
1
|
# Copyright (c) 2018 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import print_function
import collections
from collections import defaultdict
from collections import Iterable
import contextlib
from .wrapped_decorator import signature_safe_contextmanager, wrap_decorator
import os
import re
import traceback
import six
import copy
from types import MethodType, FunctionType
import numpy as np
import subprocess
import multiprocessing
import sys
import logging
from .. import compat as cpt
from .proto import framework_pb2
from . import core
from . import unique_name
import paddle.version as fluid_version
import warnings
import functools
__all__ = [
'Program',
'default_startup_program',
'default_main_program',
'program_guard',
'name_scope',
'cuda_places',
'cpu_places',
'xpu_places',
'cuda_pinned_places',
'in_dygraph_mode',
'is_compiled_with_cuda',
'is_compiled_with_xpu',
'Variable',
'require_version',
'device_guard',
'set_flags',
'get_flags',
]
EMPTY_VAR_NAME = core.kEmptyVarName()
TEMP_VAR_NAME = core.kTempVarName()
GRAD_VAR_SUFFIX = core.kGradVarSuffix()
ZERO_VAR_SUFFIX = core.kZeroVarSuffix()
CONTROL_DEP_VAR_PREFIX = core.kControlDepVarName()
_dygraph_tracer_ = None
_global_expected_place_ = None
_current_device = None
global_prog_seed = 0
def require_version(min_version, max_version=None):
"""
    Check if the installed version of PaddlePaddle is in [min_version, max_version].
    If the installed version is lower than ``min_version`` or higher than ``max_version``,
    an exception will be thrown; nothing is returned if the installed version satisfies the requirement.
Args:
min_version (str): the minimum version required (like '1.4.0').
max_version (str, optional): the max version required (like '1.6.0'), default is None,
meaning any version equal or higher than ``min_version`` is acceptable.
Returns:
None.
Raises:
TypeError: if the type of ``min_version`` is not str.
TypeError: if the type of ``max_version`` is not str or type(None).
ValueError: if the value of ``min_version`` is not in version format.
ValueError: if the value of ``max_version`` is not in version format or None.
Exception: if the installed version is lower than ``min_version`` or higher than ``max_version``.
Examples:
.. code-block:: python
import paddle.fluid as fluid
# any version >= 0.1.0 is acceptable.
fluid.require_version('0.1.0')
# if 0.1.0 <= version <= 10.0.0, it is acceptable.
fluid.require_version(min_version='0.1.0', max_version='10.0.0')
"""
if not isinstance(min_version, str):
raise TypeError(
"The type of 'min_version' in require_version must be str, but received %s."
% (type(min_version)))
if not isinstance(max_version, (str, type(None))):
raise TypeError(
"The type of 'max_version' in require_version must be str or type(None), but received %s."
% (type(max_version)))
check_format = re.match(r'\d+(\.\d+){0,3}', min_version)
if check_format is None or check_format.group() != min_version:
raise ValueError(
"The value of 'min_version' in require_version must be in format '\\d+(\\.\\d+){0,3}', "
"like '1.5.2.0', but received %s" % min_version)
if max_version is not None:
check_format = re.match(r'\d+(\.\d+){0,3}', max_version)
if check_format is None or check_format.group() != max_version:
raise ValueError(
"The value of 'max_version' in require_version must be in format '\\d+(\\.\\d+){0,3}', "
"like '1.5.2.0', but received %s" % max_version)
version_installed = [
fluid_version.major, fluid_version.minor, fluid_version.patch,
fluid_version.rc
]
zero_version = ['0', '0', '0', '0']
def version_cmp(ver_a, ver_b):
for i in six.moves.range(len(ver_a)):
if int(ver_a[i]) > int(ver_b[i]):
return 1
elif int(ver_a[i]) < int(ver_b[i]):
return -1
return 0
if version_cmp(version_installed, zero_version) == 0:
if max_version is not None:
warnings.warn(
"PaddlePaddle version in [%s, %s] required, but %s installed. "
"Maybe you are using a develop version, "
"please make sure the version is good with your code." %
(min_version, max_version, fluid_version.full_version))
else:
warnings.warn(
"PaddlePaddle version %s or higher is required, but %s installed, "
"Maybe you are using a develop version, "
"please make sure the version is good with your code." %
(min_version, fluid_version.full_version))
return
min_version_split = min_version.split('.')
min_version_to_check = min_version_split + zero_version[len(
min_version_split):]
if max_version is not None:
max_version_split = max_version.split('.')
max_version_to_check = max_version_split + zero_version[len(
max_version_split):]
if version_cmp(version_installed,
max_version_to_check) > 0 or version_cmp(
version_installed, min_version_to_check) < 0:
raise Exception(
"VersionError: PaddlePaddle version in [%s, %s] required, but %s installed."
% (min_version, max_version, fluid_version.full_version))
else:
if version_cmp(version_installed, min_version_to_check) < 0:
raise Exception(
"VersionError: PaddlePaddle version %s or higher is required, but %s installed, "
"please upgrade your PaddlePaddle to %s or other higher version."
% (min_version, fluid_version.full_version, min_version))
def in_dygraph_mode():
"""
.. note::
        Dynamic graph mode is turned ON by default since paddle 2.0.0
This API checks whether paddle runs in dynamic graph mode.
You can turn ON static graph mode by `enable_static <../dygraph/base/disable_dygraph_en.html>`_ ,
and turn OFF static graph mode by `disable_static <../dygraph/base/enable_dygraph_en.html>`_ .
Returns:
bool: Whether paddle runs in dynamic graph mode.
Examples:
.. code-block:: python
import paddle
print(paddle.in_dynamic_mode()) # True, dynamic mode is turn ON by default since paddle 2.0.0
paddle.enable_static()
print(paddle.in_dynamic_mode()) # False, Now we are in static mode
paddle.disable_static()
print(paddle.in_dynamic_mode()) # True, Now we are in dynamic mode
"""
return _dygraph_tracer_ is not None
def _dygraph_not_support_(func):
def __impl__(*args, **kwargs):
assert not in_dygraph_mode(
), "We don't support %s in imperative mode" % func.__name__
return func(*args, **kwargs)
return __impl__
def _dygraph_only_(func):
def __impl__(*args, **kwargs):
assert in_dygraph_mode(
), "We only support '%s()' in dynamic graph mode, please call 'paddle.disable_static()' to enter dynamic graph mode." % func.__name__
return func(*args, **kwargs)
return __impl__
def _static_only_(func):
def __impl__(*args, **kwargs):
assert not in_dygraph_mode(
), "In PaddlePaddle 2.x, we turn on dynamic graph mode by default, and '%s()' is only supported in static graph mode. So if you want to use this api, please call 'paddle.enable_static()' before this api to enter static graph mode." % func.__name__
return func(*args, **kwargs)
return __impl__
# NOTE(zhiqiu): This decorator is used for the APIs of Variable which is only
# used to make Variable and VarBase have the same interfaces, like numpy. Since VarBase is not exposed in our
# official documents, we want to keep VarBase and Variable logically consistent. While, in practice,
# some APIs are not supported, like numpy, because Variable contains the desc.
# So, those APIs are listed under class Variable to generate docs only.
# TODO(zhiqiu): We should make VarBase consistent with Variable in the future, for example, by inheriting
# same base class.
def _fake_interface_only_(func):
def __impl__(*args, **kwargs):
raise AssertionError(
"'%s' should be called by imperative Varible in imperative mode, please run it in dygraph "
"mode. You can turn off paddle.enable_static() if you are in static mode, or turn off "
"ProgramTranslator if you are using @paddle.jit.to_static. If you have to run ProgramTranslator, "
"please use other API to replace '%s'" % (func.__name__,
func.__name__))
return __impl__
# NOTE(chenweihang): There is argument name typo (stat_dict, correct name is state_dict)
# in fluid api Layer.set_dict, Optimizer.load, in order to correct the argument without
# introducing compatibility issues, add this decorator
# NOTE(chenweihang): not using `wrap_decorator` here is because `wrap_decorator` will
# move kwargs to args, which doesn't work in this decorate case
def deprecate_stat_dict(func):
@functools.wraps(func)
def wrapper(*args, **kwargs):
if 'stat_dict' in kwargs:
warnings.warn(
"The argument `stat_dict` has deprecated, please change it to `state_dict`.",
DeprecationWarning)
kwargs['state_dict'] = kwargs['stat_dict']
kwargs.pop('stat_dict')
return func(*args, **kwargs)
return wrapper
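# Minimal usage sketch (illustrative only, not part of the original module):
# wrapping a hypothetical setter so callers passing the misspelled keyword keep
# working while seeing a DeprecationWarning.
#
#   @deprecate_stat_dict
#   def set_dict(self, state_dict):        # hypothetical method
#       ...
#
#   layer.set_dict(stat_dict=weights)      # forwarded as state_dict with a warning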
dygraph_not_support = wrap_decorator(_dygraph_not_support_)
dygraph_only = wrap_decorator(_dygraph_only_)
static_only = wrap_decorator(_static_only_)
fake_interface_only = wrap_decorator(_fake_interface_only_)
def _dygraph_tracer():
return _dygraph_tracer_
def _current_expected_place():
global _global_expected_place_
if _global_expected_place_ is None:
if core.is_compiled_with_cuda():
try:
device_count = core.get_cuda_device_count()
except Exception as e:
device_count = 0
if device_count > 0:
_global_expected_place_ = core.CUDAPlace(0)
else:
warnings.warn(
"You are using GPU version Paddle, but your CUDA device is not set properly. CPU device will be used by default."
)
_global_expected_place_ = core.CPUPlace()
else:
_global_expected_place_ = core.CPUPlace()
return _global_expected_place_
def _set_dygraph_tracer_expected_place(place):
global _dygraph_tracer_
if _dygraph_tracer_ is not None:
_dygraph_tracer_._expected_place = place
def _set_expected_place(place):
global _global_expected_place_
_global_expected_place_ = place
_set_dygraph_tracer_expected_place(place)
# TODO(zhiqiu): remove this function.
def _var_base_to_np(var_base):
"""
    Convert VarBase to numpy
Args:
var_base(VarBase) : the VarBase to convert
    Returns (np.ndarray): the np.ndarray containing the value of VarBase
"""
warnings.warn(
"paddle.fluid.framework._var_base_to_np is deprecated, please use var_base.numpy() instead of _var_base_to_np(var_base)."
)
return var_base.numpy()
def _cpu_num():
if "CPU_NUM" not in os.environ.keys():
if multiprocessing.cpu_count() > 1:
sys.stderr.write(
'!!! The CPU_NUM is not specified, you should set CPU_NUM in the environment variable list.\n'
                'CPU_NUM indicates how many CPUPlaces are used in the current task.\n'
                'If this parameter is set to N (equal to the number of physical CPU cores), the program may run faster.\n\n'
'export CPU_NUM={} # for example, set CPU_NUM as number of physical CPU core which is {}.\n\n'
'!!! The default number of CPU_NUM=1.\n'.format(
multiprocessing.cpu_count(), multiprocessing.cpu_count()))
os.environ['CPU_NUM'] = str(1)
cpu_num = os.environ.get('CPU_NUM')
return int(cpu_num)
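# Illustrative note (assumption): these helpers only read environment variables,
# so a typical way to pin devices before importing paddle would be, for example:
#
#   export CPU_NUM=8                 # used by _cpu_num() / cpu_places()
#   export FLAGS_selected_gpus=0,1   # used by _cuda_ids() / cuda_places()
#   export FLAGS_selected_xpus=0     # used by _xpu_ids() / xpu_places()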
def _cuda_ids():
gpus_env = os.getenv("FLAGS_selected_gpus")
if gpus_env:
device_ids = [int(s) for s in gpus_env.split(",")]
else:
device_ids = six.moves.range(core.get_cuda_device_count())
return device_ids
def _xpu_ids():
xpus_env = os.getenv("FLAGS_selected_xpus")
if xpus_env:
device_ids = [int(s) for s in xpus_env.split(",")]
else:
device_ids = six.moves.range(core.get_xpu_device_count())
return device_ids
def is_compiled_with_xpu():
"""
Whether this whl package can be used to run the model on XPU.
Returns (bool): support xpu or not.
Examples:
.. code-block:: python
import paddle.fluid as fluid
support_xpu = fluid.is_compiled_with_xpu()
"""
return core.is_compiled_with_xpu()
def is_compiled_with_cuda():
"""
Whether this whl package can be used to run the model on GPU.
Returns (bool): `True` if CUDA is currently available, otherwise `False`.
Examples:
.. code-block:: python
import paddle
support_gpu = paddle.is_compiled_with_cuda()
"""
return core.is_compiled_with_cuda()
def cuda_places(device_ids=None):
"""
**Note**:
For multi-card tasks, please use `FLAGS_selected_gpus` environment variable to set the visible GPU device.
The next version will fix the problem with `CUDA_VISIBLE_DEVICES` environment variable.
This function creates a list of :code:`paddle.CUDAPlace` objects.
If :code:`device_ids` is None, environment variable of
:code:`FLAGS_selected_gpus` would be checked first. For example, if
:code:`FLAGS_selected_gpus=0,1,2`, the returned list would
be [paddle.CUDAPlace(0), paddle.CUDAPlace(1), paddle.CUDAPlace(2)].
If :code:`FLAGS_selected_gpus` is not set, all visible
gpu places would be returned according to the :code:`CUDA_VISIBLE_DEVICES` environment variable.
If :code:`device_ids` is not None, it should be the device
ids of GPUs. For example, if :code:`device_ids=[0,1,2]`,
the returned list would be
[paddle.CUDAPlace(0), paddle.CUDAPlace(1), paddle.CUDAPlace(2)].
Parameters:
device_ids (list or tuple of int, optional): list of GPU device ids.
Returns:
list of paddle.CUDAPlace: Created GPU place list.
Examples:
.. code-block:: python
import paddle
import paddle.static as static
paddle.enable_static()
cuda_places = static.cuda_places()
"""
assert core.is_compiled_with_cuda(), \
"Not compiled with CUDA"
if device_ids is None:
device_ids = _cuda_ids()
elif not isinstance(device_ids, (list, tuple)):
device_ids = [device_ids]
return [core.CUDAPlace(dev_id) for dev_id in device_ids]
def xpu_places(device_ids=None):
"""
**Note**:
For multi-card tasks, please use `FLAGS_selected_xpus` environment variable to set the visible XPU device.
This function creates a list of :code:`paddle.XPUPlace` objects.
If :code:`device_ids` is None, environment variable of
:code:`FLAGS_selected_xpus` would be checked first. For example, if
:code:`FLAGS_selected_xpus=0,1,2`, the returned list would
be [paddle.XPUPlace(0), paddle.XPUPlace(1), paddle.XPUPlace(2)].
If :code:`FLAGS_selected_xpus` is not set, all visible
xpu places would be returned.
If :code:`device_ids` is not None, it should be the device
ids of XPUs. For example, if :code:`device_ids=[0,1,2]`,
the returned list would be
[paddle.XPUPlace(0), paddle.XPUPlace(1), paddle.XPUPlace(2)].
Parameters:
device_ids (list or tuple of int, optional): list of XPU device ids.
Returns:
list of paddle.XPUPlace: Created XPU place list.
Examples:
.. code-block:: python
import paddle
import paddle.static as static
paddle.enable_static()
xpu_places = static.xpu_places()
"""
assert core.is_compiled_with_xpu(), \
"Not compiled with XPU"
if device_ids is None:
device_ids = _xpu_ids()
elif not isinstance(device_ids, (list, tuple)):
device_ids = [device_ids]
return [core.XPUPlace(dev_id) for dev_id in device_ids]
def cpu_places(device_count=None):
"""
This function creates a list of :code:`paddle.CPUPlace` objects, and returns the created list.
If :code:`device_count` is None, the device count would
be determined by environment variable :code:`CPU_NUM`.
If :code:`CPU_NUM` is not set, the default value is 1,
i.e. CPU_NUM=1.
:code:`CPU_NUM` indicates the number of devices used in the current task.
The running of the program can be accelerated if :code:`CPU_NUM` is the same as the number of physical cores.
Parameters:
device_count (int, optional): device number. Default: None.
Returns:
list of paddle.CPUPlace: Created list of CPU places.
Examples:
.. code-block:: python
import paddle
import paddle.static as static
paddle.enable_static()
cpu_places = static.cpu_places()
"""
if device_count is None:
device_count = _cpu_num()
return [core.CPUPlace()] * device_count
def cuda_pinned_places(device_count=None):
"""
This function creates a list of :code:`fluid.CUDAPinnedPlace` objects.
If :code:`device_count` is None, the device count would
be determined by environment variable :code:`CPU_NUM`.
If :code:`CPU_NUM` is not set, the default value is 1,
i.e. CPU_NUM=1.
:code:`CPU_NUM` indicates the number of devices used in the current task.
The running of the program can be accelerated if :code:`CPU_NUM` is the same as the number of physical cores.
Parameters:
device_count (int, optional): device number. Default: None.
Returns:
list of fluid.CUDAPinnedPlace: Created list of CUDA pinned places.
Examples:
.. code-block:: python
import paddle.fluid as fluid
cuda_pinned_places_cpu_num = fluid.cuda_pinned_places()
# or
cuda_pinned_places = fluid.cuda_pinned_places(1)
"""
assert core.is_compiled_with_cuda(), \
"Not compiled with CUDA"
if device_count is None:
device_count = len(_cuda_ids())
return [core.CUDAPinnedPlace()] * device_count
class NameScope(object):
def __init__(self, name="", parent=None):
self._children = dict()
self._name = name
self._parent = parent
def child(self, prefix):
if prefix not in self._children:
new_child = NameScope(prefix, self)
self._children[prefix] = [new_child]
else:
new_child = NameScope(prefix + "_%d" % len(self._children[prefix]),
self)
self._children[prefix].append(new_child)
return new_child
def parent(self):
return self._parent
def name(self):
return self._name
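# Illustrative sketch (not part of the original class): repeated children with the
# same prefix get numeric suffixes, which is what produces scopes like '/s1/' and
# '/s1_1/' in the name_scope() example further below.
#
#   root = NameScope()
#   a = root.child("s1")   # named "s1"
#   b = root.child("s1")   # named "s1_1"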
_name_scope = NameScope()
@signature_safe_contextmanager
def name_scope(prefix=None):
"""
:api_attr: Static Graph
Generate hierarchical name prefix for the operators in Static Graph.
Note:
        This should only be used for debugging and visualization purposes.
        Don't use it for serious analysis such as graph/program transformations.
        Don't use it in dygraph, since it will cause a memory leak.
Args:
prefix(str, optional): prefix. Default is none.
Examples:
.. code-block:: python
import paddle
paddle.enable_static()
with paddle.static.name_scope("s1"):
a = paddle.static.data(name='data', shape=[None, 1], dtype='int32')
b = a + 1
with paddle.static.name_scope("s2"):
c = b * 1
with paddle.static.name_scope("s3"):
d = c / 1
with paddle.static.name_scope("s1"):
f = paddle.tensor.pow(d, 2.0)
with paddle.static.name_scope("s4"):
g = f - 1
# Op are created in the default main program.
for op in paddle.static.default_main_program().block(0).ops:
# elementwise_add is created in /s1/
if op.type == 'elementwise_add':
assert op.desc.attr("op_namescope") == '/s1/'
# elementwise_mul is created in '/s1/s2'
elif op.type == 'elementwise_mul':
assert op.desc.attr("op_namescope") == '/s1/s2/'
# elementwise_div is created in '/s1/s3'
elif op.type == 'elementwise_div':
assert op.desc.attr("op_namescope") == '/s1/s3/'
# elementwise_sum is created in '/s4'
elif op.type == 'elementwise_sub':
assert op.desc.attr("op_namescope") == '/s4/'
# pow is created in /s1_1/
elif op.type == 'pow':
assert op.desc.attr("op_namescope") == '/s1_1/'
"""
# TODO(panyx0718): Only [0-9a-z].
# in dygraph we don't need namescope since it will cause mem leak
if in_dygraph_mode():
yield
else:
assert prefix, "namescope prefix can not be empty."
global _name_scope
_name_scope = _name_scope.child(prefix)
try:
yield
finally:
_name_scope = _name_scope.parent()
def _full_name_scope():
global _name_scope
scope = _name_scope
name = ""
while scope:
name = scope.name() + "/" + name
scope = scope.parent()
return name
def generate_control_dev_var_name():
import random
return CONTROL_DEP_VAR_PREFIX + "@" + str(random.random())
def grad_var_name(var_name):
"""
Returns:
str: gradient name for a certain var name
"""
return var_name + GRAD_VAR_SUFFIX
def convert_np_dtype_to_dtype_(np_dtype):
"""
Convert the data type in numpy to the data type in Paddle
Args:
np_dtype(np.dtype): the data type in numpy.
Returns:
core.VarDesc.VarType: the data type in Paddle.
"""
dtype = np.dtype(np_dtype)
if dtype == np.float32:
return core.VarDesc.VarType.FP32
elif dtype == np.float64:
return core.VarDesc.VarType.FP64
elif dtype == np.float16:
return core.VarDesc.VarType.FP16
elif dtype == np.int32:
return core.VarDesc.VarType.INT32
elif dtype == np.int16:
return core.VarDesc.VarType.INT16
elif dtype == np.int64:
return core.VarDesc.VarType.INT64
elif dtype == np.bool:
return core.VarDesc.VarType.BOOL
elif dtype == np.uint16:
# since there is still no support for bfloat16 in NumPy,
# uint16 is used for casting bfloat16
return core.VarDesc.VarType.BF16
elif dtype == np.uint8:
return core.VarDesc.VarType.UINT8
elif dtype == np.int8:
return core.VarDesc.VarType.INT8
elif dtype == np.complex64:
return core.VarDesc.VarType.COMPLEX64
elif dtype == np.complex128:
return core.VarDesc.VarType.COMPLEX128
else:
raise ValueError("Not supported numpy dtype %s" % dtype)
def dtype_is_floating(dtype):
"""
Check the data type is floating or not.
Args:
dtype(np.dtype|core.VarDesc.VarType): data type.
Could be numpy format or Paddle format
Returns(bool): True if data type is a float value
"""
if not isinstance(dtype, core.VarDesc.VarType):
dtype = convert_np_dtype_to_dtype_(dtype)
return dtype in [
core.VarDesc.VarType.FP16, core.VarDesc.VarType.FP32,
core.VarDesc.VarType.FP64
]
def _debug_string_(proto, throw_on_error=True):
"""
    Get the debug string of a protobuf message. The message may not be
    initialized.
Args:
proto(google.protobuf.message.Message): The protobuf message
throw_on_error(bool): True if raise an error when the protobuf message
is not initialized.
Returns(str): The debug string of the protobuf message
"""
error_fields = list()
if not proto.IsInitialized(error_fields) and throw_on_error:
raise ValueError("{0} are not initialized.\nThe message is {1}:\n".
format(error_fields, proto))
return proto.__str__()
def _varbase_creator(type=core.VarDesc.VarType.LOD_TENSOR,
name=None,
shape=None,
dtype=None,
persistable=None,
**kwargs):
if dtype is not None:
if not isinstance(dtype, core.VarDesc.VarType):
dtype = convert_np_dtype_to_dtype_(dtype)
return core.VarBase(dtype if dtype else core.VarDesc.VarType.FP32,
list(shape) if shape else [], name, type
if type else core.VarDesc.VarType.LOD_TENSOR, True
if persistable else False)
class VariableMetaClass(type):
@classmethod
def __instancecheck__(cls, instance):
t = type(instance)
if in_dygraph_mode():
return issubclass(t, core.VarBase)
else:
return issubclass(t, Variable)
class ParameterMetaClass(VariableMetaClass):
@classmethod
def __instancecheck__(cls, instance):
t = type(instance)
if in_dygraph_mode():
return issubclass(t, ParamBase)
else:
return issubclass(t, Parameter)
def _getitem_impl_(var, item):
"""
Slice the variable.
Args:
item(int/slice/tuple) : the index.
Returns:
Sliced variable
"""
if not isinstance(item, tuple):
item = [item]
decrease_axis = []
slice_axis = []
slice_start = []
slice_end = []
slice_step = []
use_strided_slice = False
reverse_axis = []
target_block = default_main_program().current_block()
def fill_constant(shape, value, force_cpu=False, out=None):
var.block.append_op(
type='fill_constant',
inputs={},
outputs={'Out': [out]},
attrs={
'shape': shape,
'dtype': out.dtype,
'value': float(value),
'force_cpu': force_cpu
})
out.stop_gradient = True
return out
for dim, slice_item in enumerate(item):
if isinstance(slice_item, slice):
start = slice_item.start
end = slice_item.stop
step = slice_item.step
if start is None and end is None and step is None:
continue
if step is None:
step = 1
if start is None and end is None:
assert (step == -1)
reverse_axis.append(dim)
continue
if start is None:
start = 0
if end is None:
end = 10000000
if step != 1:
use_strided_slice = True
slice_axis.append(dim)
slice_start.append(start)
slice_end.append(end)
slice_step.append(step)
else:
decrease_axis.append(dim)
slice_axis.append(dim)
slice_start.append(slice_item)
slice_step.append(1)
if isinstance(slice_item, Variable):
temp_1 = var.block.create_var(dtype=slice_item.dtype)
fill_constant([1], 1, force_cpu=True, out=temp_1)
temp_end = target_block.create_var(dtype=slice_item.dtype)
target_block.append_op(
type='elementwise_add',
inputs={'X': slice_item,
'Y': temp_1},
outputs={'Out': temp_end},
attrs={'axis': -1})
slice_end.append(temp_end)
else:
slice_end.append(slice_item + 1
if slice_item != -1 else 10000000)
def contain_var(one_list):
for ele in one_list:
if isinstance(ele, Variable):
return True
return False
def get_new_list_tensor(old_list):
new_list_tensor = []
for dim in old_list:
if isinstance(dim, Variable):
dim.stop_gradient = True
new_list_tensor.append(dim)
else:
assert (isinstance(dim, int))
temp_out = var.block.create_var(dtype='int64')
fill_constant([1], dim, force_cpu=True, out=temp_out)
new_list_tensor.append(temp_out)
return new_list_tensor
inputs = {'Input': [var]}
attrs = {
'axes': slice_axis,
'starts': [],
'ends': [],
'decrease_axis': decrease_axis
}
if (use_strided_slice == True):
attrs['strides'] = []
infer_flags = list(1 for i in range(len(slice_axis)))
# starts
if contain_var(slice_start):
inputs['StartsTensorList'] = get_new_list_tensor(slice_start)
for i, dim in enumerate(slice_start):
if isinstance(dim, Variable):
attrs['starts'].append(-1)
infer_flags[i] = -1
else:
attrs['starts'].append(dim)
else:
attrs['starts'] = slice_start
# ends
if contain_var(slice_end):
inputs['EndsTensorList'] = get_new_list_tensor(slice_end)
for i, dim in enumerate(slice_end):
if isinstance(dim, Variable):
attrs['ends'].append(-1)
infer_flags[i] = -1
else:
attrs['ends'].append(dim)
else:
attrs['ends'] = slice_end
# strides
if use_strided_slice == True:
if contain_var(slice_step):
inputs['StridesTensorList'] = get_new_list_tensor(slice_step)
for i, dim in enumerate(slice_step):
if isinstance(dim, Variable):
attrs['strides'].append(-1)
infer_flags[i] = -1
else:
attrs['strides'].append(dim)
else:
attrs['strides'] = slice_step
# infer_flags
attrs['infer_flags'] = infer_flags
out = var
if use_strided_slice == False and len(slice_axis) > 0:
# append slice_op here
slice_out_var = target_block.create_var(
name=unique_name.generate_with_ignorable_key(var.name + "_slice"),
dtype=var.dtype)
target_block.append_op(
type="slice",
inputs=inputs,
outputs={'Out': [slice_out_var]},
attrs=attrs)
out = slice_out_var
elif use_strided_slice == True and len(slice_axis) > 0:
strided_slice_out_var = target_block.create_var(
name=unique_name.generate_with_ignorable_key(var.name +
"_strided_slice"),
dtype=var.dtype)
target_block.append_op(
type="strided_slice",
inputs=inputs,
outputs={'Out': [strided_slice_out_var]},
attrs=attrs)
out = strided_slice_out_var
if len(reverse_axis) > 0:
reverse_out_var = target_block.create_var(
name=unique_name.generate_with_ignorable_key(var.name +
"_slice_reverse"),
dtype=var.dtype)
target_block.append_op(
type="reverse",
inputs={'X': out},
outputs={'Out': [reverse_out_var]},
attrs={'axis': reverse_axis})
out = reverse_out_var
return out
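# Illustrative sketch (assumption about typical usage, hypothetical block/variable):
# in static graph mode, indexing a Variable goes through _getitem_impl_ and appends
# ops to the current block, roughly:
#
#   x = block.create_var(name='x', shape=[4, 5], dtype='float32')
#   y = x[1:3]       # appends a 'slice' op
#   z = x[0:4:2]     # step != 1, appends a 'strided_slice' op
#   r = x[::-1]      # start/end None with step -1, appends a 'reverse' op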
@six.add_metaclass(VariableMetaClass)
class Variable(object):
"""
**Notes**:
**The constructor of Variable should not be invoked directly.**
**In Static Graph Mode: Please use** `Block.create_var` **to create a Static variable which has no data until being feed.**
**In Dygraph Mode: Please use** :ref:`api_fluid_dygraph_to_variable` **to create a dygraph variable with real data**
In Fluid, every input and output of an OP is a variable. In most
cases, variables are used for holding different kinds of data or training
    labels. A variable belongs to a :ref:`api_guide_Block_en` . Every variable has its own name, and
    two variables in different :ref:`api_guide_Block_en` could have the same name.
There are many kinds of variables. Each kind of them has its own attributes
and usages. Please refer to the `framework.proto <https://github.com/PaddlePaddle/Paddle/blob/develop/paddle/fluid/framework/framework.proto>`_ for details.
    Most of a Variable's member variables can be set to None. It means
    it is not available or will be specified later.
Examples:
In Static Graph Mode:
.. code-block:: python
import paddle.fluid as fluid
cur_program = fluid.Program()
cur_block = cur_program.current_block()
new_variable = cur_block.create_var(name="X",
shape=[-1, 23, 48],
dtype='float32')
In `Dygraph <../../user_guides/howto/dygraph/DyGraph.html>`_ Mode:
.. code-block:: python
import paddle.fluid as fluid
import numpy as np
with fluid.dygraph.guard():
new_variable = fluid.dygraph.to_variable(np.arange(10))
"""
def __init__(self,
block,
type=core.VarDesc.VarType.LOD_TENSOR,
name=None,
shape=None,
dtype=None,
lod_level=None,
capacity=None,
persistable=None,
error_clip=None,
stop_gradient=False,
is_data=False,
need_check_feed=False,
belong_to_optimizer=False,
**kwargs):
self.block = block
if name is None:
name = unique_name.generate('_generated_var')
if dtype is not None:
if not isinstance(dtype, core.VarDesc.VarType):
dtype = convert_np_dtype_to_dtype_(dtype)
self.belong_to_optimizer = belong_to_optimizer
self.error_clip = error_clip
is_new_var = False
name = cpt.to_text(name)
self.desc = self.block.desc.find_var(cpt.to_bytes(name))
if self.desc is None:
self.desc = self.block.desc.var(cpt.to_bytes(name))
is_new_var = True
if is_new_var:
self.desc.set_type(type)
elif self.desc.type() != type:
raise ValueError("Variable '{0}' has been created before. The "
"previous type is {1}, the new type is {2}. They"
" are not matched".format(self.name,
self.desc.type(), type))
if shape is not None:
if is_new_var:
self.desc.set_shape(shape)
else:
old_shape = self.shape
shape = tuple(shape)
if shape != old_shape:
raise ValueError(
"Variable '{0}' has been created before. The previous "
"shape is {1}, the new shape is {2}. They are not "
"matched.".format(self.name, old_shape, shape))
if dtype is not None:
if is_new_var:
self.desc.set_dtype(dtype)
else:
old_dtype = self.dtype
if dtype != old_dtype:
raise ValueError("Variable '{0}' has been created before. "
"The previous data type is {1}, the new "
"data type is {2}. They are not "
"matched.".format(self.name, old_dtype,
dtype))
if lod_level is not None:
if is_new_var:
self.desc.set_lod_level(lod_level)
else:
if lod_level != self.lod_level:
raise ValueError("Variable '{0}' has been created before. "
"The previous lod_level is {1}, the new "
"lod_level is {2}. They are not "
"matched".format(self.name, self.lod_level,
lod_level))
if persistable is not None:
if is_new_var:
self.desc.set_persistable(persistable)
else:
if persistable != self.persistable:
raise ValueError(
"Variable '{0}' has been created before."
"The previous persistable is {1}, the new "
"persistable is {2}. They are not matched".format(
self.name, self.persistable, persistable))
if need_check_feed and is_new_var:
self.desc.set_need_check_feed(need_check_feed)
if capacity is not None:
if is_new_var:
self.desc.set_capacity(capacity)
else:
# TODO(abhinavarora) : Compare with set capacity once,
# get_capacity is implemented
pass
self.block.vars[name] = self
self.op = None
self._stop_gradient = stop_gradient
self.is_data = is_data
@fake_interface_only
def detach(self):
"""
**Notes**:
**This API is ONLY available in Dygraph mode**
Returns a new Variable, detached from the current graph.
Returns:
( :ref:`api_guide_Variable_en` | dtype is same as current Variable): The detached Variable.
Examples:
.. code-block:: python
import paddle.fluid as fluid
from paddle.fluid.dygraph.base import to_variable
from paddle.fluid.dygraph import Linear
import numpy as np
data = np.random.uniform(-1, 1, [30, 10, 32]).astype('float32')
with fluid.dygraph.guard():
linear = Linear(32, 64)
data = to_variable(data)
x = linear(data)
y = x.detach()
"""
pass
@fake_interface_only
def numpy(self):
"""
**Notes**:
**This API is ONLY available in Dygraph mode**
        Returns a numpy array that shows the value of the current :ref:`api_guide_Variable_en`
Returns:
ndarray: The numpy value of current Variable.
Returns type:
ndarray: dtype is same as current Variable
Examples:
.. code-block:: python
import paddle.fluid as fluid
from paddle.fluid.dygraph.base import to_variable
from paddle.fluid.dygraph import Linear
import numpy as np
data = np.random.uniform(-1, 1, [30, 10, 32]).astype('float32')
with fluid.dygraph.guard():
linear = Linear(32, 64)
data = to_variable(data)
x = linear(data)
print(x.numpy())
"""
pass
@fake_interface_only
def backward(self, retain_graph=False):
"""
**Notes**:
**This API is ONLY available in Dygraph mode**
Run backward of current Graph which starts from current Tensor.
Args:
retain_graph(bool, optional): If False, the graph used to compute grads will be freed. If you would
like to add more ops to the built graph after calling this method( :code:`backward` ), set the parameter
                :code:`retain_graph` to True, then the grads will be retained. Thus, setting it to False is much more memory-efficient.
Defaults to False.
Returns:
NoneType: None
Examples:
.. code-block:: python
import numpy as np
import paddle
paddle.disable_static()
x = np.ones([2, 2], np.float32)
inputs = []
for _ in range(10):
tmp = paddle.to_tensor(x)
                    # if we don't set tmp's stop_gradient to False, then no path to the loss will have a gradient, since
                    # nothing on the path requires a gradient.
tmp.stop_gradient=False
inputs.append(tmp)
ret = paddle.add_n(inputs)
loss = paddle.sum(ret)
loss.backward()
"""
pass
@fake_interface_only
def gradient(self):
"""
**Notes**:
**This API is ONLY available in Dygraph mode**
Get the Gradient of Current Variable
Returns:
ndarray or tuple of ndarray: if Variable's type is LoDTensor, return numpy value of the gradient of current Variable, if Variable's type is SelectedRows, return tuple of ndarray, first element of tuple is numpy value of the gradient of current Variable, second element of tuple is numpy value of the rows of current Variable.
Examples:
.. code-block:: python
import paddle.fluid as fluid
import numpy as np
# example1: return ndarray
x = np.ones([2, 2], np.float32)
with fluid.dygraph.guard():
inputs2 = []
for _ in range(10):
tmp = fluid.dygraph.base.to_variable(x)
tmp.stop_gradient=False
inputs2.append(tmp)
ret2 = fluid.layers.sums(inputs2)
loss2 = fluid.layers.reduce_sum(ret2)
loss2.backward()
print(loss2.gradient())
# example2: return tuple of ndarray
with fluid.dygraph.guard():
embedding = fluid.dygraph.Embedding(
size=[20, 32],
param_attr='emb.w',
is_sparse=True)
x_data = np.arange(12).reshape(4, 3).astype('int64')
x_data = x_data.reshape((-1, 3, 1))
x = fluid.dygraph.base.to_variable(x_data)
out = embedding(x)
out.backward()
print(embedding.weight.gradient())
"""
pass
@fake_interface_only
def clear_gradient(self):
"""
**Notes**:
**1. This API is ONLY available in Dygraph mode**
        **2. Use it only when the Variable has a gradient; normally we use this for Parameters, since other temporary Variables will be deleted by Python's GC**
Clear (set to ``0`` ) the Gradient of Current Variable
Returns: None
Examples:
.. code-block:: python
import paddle.fluid as fluid
import numpy as np
x = np.ones([2, 2], np.float32)
with fluid.dygraph.guard():
inputs2 = []
for _ in range(10):
tmp = fluid.dygraph.base.to_variable(x)
tmp.stop_gradient=False
inputs2.append(tmp)
ret2 = fluid.layers.sums(inputs2)
loss2 = fluid.layers.reduce_sum(ret2)
loss2.backward()
print(loss2.gradient())
loss2.clear_gradient()
print("After clear {}".format(loss2.gradient()))
"""
pass
def __str__(self):
return self._to_readable_code()
def _to_readable_code(self):
"""
Get readable debug string of Variable.
.. note::
If you want to get the debug string in protobuf format,
please use :code:`to_string` method.
Returns:
string: The formatted Variable string.
Examples:
.. code-block:: python
import paddle
import paddle.static as static
paddle.enable_static()
cur_program = static.Program()
cur_block = cur_program.current_block()
new_variable = cur_block.create_var(name="X",
shape=[-1, 23, 48],
dtype='float32')
print(new_variable._to_readable_code())
"""
# VarType.LOD_TENSOR -> LOD_TENSOR
type_str = str(self.type).split('.')[1]
if self.type == core.VarDesc.VarType.SELECTED_ROWS or self.type == core.VarDesc.VarType.LOD_TENSOR:
dtype_str = str(self.dtype).split('.')[1]
var_str = "{name} : {type}.shape{shape}.dtype({dtype}).stop_gradient({stop_gradient})".\
format(name=self.name, type=type_str, shape=self.shape,
dtype=dtype_str, stop_gradient=self.stop_gradient)
else:
var_str = "{name} : {type})".\
format(name=self.name, type=type_str)
if type(self) == Parameter:
if self.trainable:
var_str = "trainable param " + var_str
else:
var_str = "param " + var_str
else:
var_str = "var " + var_str
if self.persistable:
var_str = "persist " + var_str
return var_str
def to_string(self, throw_on_error, with_details=False):
"""
Get debug string.
Args:
throw_on_error (bool): True if raise an exception when self is not initialized.
with_details (bool): more details about variables and parameters (e.g. trainable, optimize_attr, ...) will be printed when with_details is True. Default value is False;
Returns:
str: The debug string.
Examples:
.. code-block:: python
import paddle.fluid as fluid
import paddle
paddle.enable_static()
cur_program = fluid.Program()
cur_block = cur_program.current_block()
new_variable = cur_block.create_var(name="X",
shape=[-1, 23, 48],
dtype='float32')
print(new_variable.to_string(True))
print("=============with detail===============")
print(new_variable.to_string(True, True))
"""
assert isinstance(throw_on_error, bool) and isinstance(with_details,
bool)
protostr = self.desc.serialize_to_string()
proto = framework_pb2.VarDesc.FromString(six.binary_type(protostr))
res_str = _debug_string_(proto, throw_on_error)
if with_details:
additional_attr = ("error_clip", "stop_gradient")
for attr_name in additional_attr:
res_str += "%s: %s\n" % (attr_name,
cpt.to_text(getattr(self, attr_name)))
return res_str
__repr__ = __str__
@property
def stop_gradient(self):
"""
Indicating if we stop gradient from current Variable
        **Notes: This Property has a default value of** ``True`` **in** `Dygraph <../../user_guides/howto/dygraph/DyGraph.html>`_ **mode, while Parameter's default value is False. However, in Static Graph Mode every Variable's default stop_gradient value is** ``False``
Examples:
.. code-block:: python
import paddle.fluid as fluid
import numpy as np
with fluid.dygraph.guard():
value0 = np.arange(26).reshape(2, 13).astype("float32")
value1 = np.arange(6).reshape(2, 3).astype("float32")
value2 = np.arange(10).reshape(2, 5).astype("float32")
linear = fluid.Linear(13, 5, dtype="float32")
linear2 = fluid.Linear(3, 3, dtype="float32")
a = fluid.dygraph.to_variable(value0)
b = fluid.dygraph.to_variable(value1)
c = fluid.dygraph.to_variable(value2)
out1 = linear(a)
out2 = linear2(b)
out1.stop_gradient = True
out = fluid.layers.concat(input=[out1, out2, c], axis=1)
out.backward()
assert linear.weight.gradient() is None
assert (out1.gradient() == 0).all()
"""
return self._stop_gradient
@stop_gradient.setter
def stop_gradient(self, s):
self._stop_gradient = s
@property
def persistable(self):
"""
        Indicating if the current Variable should be long-term alive
**Notes: This Property will be deprecated and this API is just to help user understand concept**
**1. All Variable's persistable is** ``False`` **except Parameters.**
**2. In** `Dygraph <../../user_guides/howto/dygraph/DyGraph.html>`_ **mode, this property should not be changed**
Examples:
.. code-block:: python
import paddle.fluid as fluid
cur_program = fluid.Program()
cur_block = cur_program.current_block()
new_variable = cur_block.create_var(name="X",
shape=[-1, 23, 48],
dtype='float32')
print("persistable of current Var is: {}".format(new_variable.persistable))
"""
return self.desc.persistable()
@persistable.setter
def persistable(self, p):
self.desc.set_persistable(p)
@property
def name(self):
"""
Indicating name of current Variable
        **Notes: If two or more Variables share the same name in the same** :ref:`api_guide_Block_en` **, it means these Variables will share content in non-** `Dygraph <../../user_guides/howto/dygraph/DyGraph.html>`_ **mode. This is how we achieve Parameter sharing**
Examples:
.. code-block:: python
import paddle.fluid as fluid
cur_program = fluid.Program()
cur_block = cur_program.current_block()
new_variable = cur_block.create_var(name="X",
shape=[-1, 23, 48],
dtype='float32')
print("name of current Var is: {}".format(new_variable.name))
"""
return cpt.to_text(self.desc.name())
@property
def grad_name(self):
"""
Indicating name of the gradient Variable of current Variable.
**Notes: This is a read-only property. It simply returns name of
gradient Variable from a naming convention but doesn't guarantee
the gradient exists.**
Examples:
.. code-block:: python
import paddle.fluid as fluid
x = fluid.data(name="x", shape=[-1, 23, 48], dtype='float32')
print(x.grad_name) # output is "x@GRAD"
"""
return self.name + "@GRAD"
@name.setter
def name(self, new_name):
self.desc.set_name(new_name)
@property
def shape(self):
"""
Indicating shape of current Variable
**Notes: This is a read-only property**
Examples:
.. code-block:: python
import paddle.fluid as fluid
cur_program = fluid.Program()
cur_block = cur_program.current_block()
new_variable = cur_block.create_var(name="X",
shape=[-1, 23, 48],
dtype='float32')
print("shape of current Var is: {}".format(new_variable.shape))
"""
# convert to tuple, make it as same as numpy API.
return tuple(self.desc.shape())
@property
def dtype(self):
"""
Indicating data type of current Variable
**Notes: This is a read-only property**
Examples:
.. code-block:: python
import paddle.fluid as fluid
cur_program = fluid.Program()
cur_block = cur_program.current_block()
new_variable = cur_block.create_var(name="X",
shape=[-1, 23, 48],
dtype='float32')
print("Dtype of current Var is: {}".format(new_variable.dtype))
"""
return self.desc.dtype()
@property
def lod_level(self):
"""
Indicating ``LoD`` info of current Variable, please refer to :ref:`api_fluid_LoDTensor_en` to check the meaning
of ``LoD``
**Notes**:
**1. This is a read-only property**
**2. Don't support this property in** `Dygraph <../../user_guides/howto/dygraph/DyGraph.html>`_ **mode, it's value should be** ``0(int)``
Examples:
.. code-block:: python
import paddle.fluid as fluid
cur_program = fluid.Program()
cur_block = cur_program.current_block()
new_variable = cur_block.create_var(name="X",
shape=[-1, 23, 48],
dtype='float32')
print("LoD Level of current Var is: {}".format(new_variable.lod_level))
"""
if self.type == core.VarDesc.VarType.SELECTED_ROWS:
raise Exception("SelectedRows DO NOT supprt lod")
return self.desc.lod_level()
@property
def type(self):
"""
Indicating Type of current Variable
**Notes: This is a read-only property**
Examples:
.. code-block:: python
import paddle.fluid as fluid
cur_program = fluid.Program()
cur_block = cur_program.current_block()
new_variable = cur_block.create_var(name="X",
shape=[-1, 23, 48],
dtype='float32')
print("Type of current Var is: {}".format(new_variable.type))
"""
return self.desc.type()
def clone(self):
"""
Returns a new static Variable, which is the clone of the original static
Variable. It remains in the current graph, that is, the cloned Variable
        provides gradient propagation. Calling ``out = tensor.clone()`` is the same
        as ``out = assign(tensor)`` .
Returns:
Variable: The cloned Variable.
Examples:
.. code-block:: python
import paddle
paddle.enable_static()
# create a static Variable
x = paddle.static.data(name='x', shape=[3, 2, 1])
# create a cloned Variable
y = x.clone()
"""
output = self.block.create_var(
name=unique_name.generate_with_ignorable_key(self.name + "_clone"),
dtype=self.dtype,
type=self.type,
persistable=self.persistable,
stop_gradient=self.stop_gradient)
self.block.append_op(
type='assign', inputs={'X': [self]}, outputs={'Out': [output]})
return output
def _set_error_clip(self, error_clip):
"""
Set the error_clip.
Args:
error_clip(BaseErrorClipAttr) : The new error_clip.
Returns:
None
"""
self.error_clip = error_clip
def _set_info(self, key, value):
"""
Set key-value information for this variable.
Args:
key(str): Key for this information.
value(object): The value associated to the key.
Returns:
None
"""
if not hasattr(self, "_info"):
self._info = {}
self._info[key] = value
def _get_info(self, key):
"""
Get the information of this variable corresponding to key.
Args:
key(str): Key for this information.
Returns:
object
"""
if hasattr(self, "_info") and key in self._info:
return self._info[key]
return None
def _slice_indices(self, slice, length):
"""
Reference implementation for the slice.indices method.
"""
# Compute step and length as integers.
step = 1 if slice.step is None else slice.step
# Raise ValueError for negative length or zero step.
if length < 0:
raise ValueError("length should not be negative")
if step == 0:
raise ValueError("slice step can not be zero")
# Find lower and upper bounds for start and stop.
lower = -1 if step < 0 else 0
upper = length - 1 if step < 0 else length
# Compute start.
if slice.start is None:
start = upper if step < 0 else lower
else:
start = slice.start
start = max(start + length, lower) if start < 0 else min(start,
upper)
# Compute stop.
if slice.stop is None:
stop = lower if step < 0 else upper
else:
stop = slice.stop
stop = max(stop + length, lower) if stop < 0 else min(stop, upper)
return start, stop, step
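    # Informal example (illustrative only): _slice_indices normalizes a Python slice
    # against a known length, mirroring slice.indices; e.g. for length 5:
    #
    #   slice(None, None, None) -> (0, 5, 1)
    #   slice(-2, None, None)   -> (3, 5, 1)
    #   slice(None, None, -1)   -> (4, -1, -1)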
def _detectEllipsis(self, item):
has_ellipsis = False
start = 0
end = len(self.shape)
for index, o in enumerate(item):
if o is Ellipsis:
if has_ellipsis:
raise ValueError("Index can have one ellipsis only.")
has_ellipsis = True
start = index
else:
if has_ellipsis:
end = index
return has_ellipsis, start, end
def _reconstructSliceinfo(self, item):
has_ellipsis, start, end = self._detectEllipsis(item)
if has_ellipsis:
newitem = []
for i in range(start):
newitem.append(item[i])
for i in range(start, end):
newitem.append(slice(None, None, None))
for i in range(end, len(item)):
newitem.append(item[i])
return newitem
else:
return None
def _detectContinuesSlice(self, item):
starts = []
ends = []
for index, o in enumerate(item):
if isinstance(o, int):
start = int(o)
                if (start > 0 and start >= self.shape[index]) \
                        or (start < 0 and (start + self.shape[index]) < 0):
raise IndexError("invalid index")
start = max(start + self.shape[index], 0) if start < 0 else min(
start, self.shape[index])
starts.append(start)
ends.append(start + 1)
elif isinstance(o, slice):
start, stop, step = self._slice_indices(o, self.shape[index])
if step == 1 or step == -1:
starts.append(start)
ends.append(stop)
else:
return False, None
else:
raise IndexError("Valid index accept int or slice or ellipsis")
return True, [starts, ends]
def _cloneVar(self, copy=False):
if not copy:
return self.block.create_var(
name=unique_name.generate_with_ignorable_key(self.name),
dtype=self.dtype)
else:
return self
def _sliceVar(self, axes, starts, ends):
new_var = self._cloneVar()
self.block.append_op(
type="slice",
inputs={'Input': [self]},
outputs={'Out': [new_var]},
attrs={'axes': axes,
'starts': starts,
'ends': ends})
return new_var
def _concatVar(self, inputs, axis):
new_var = self._cloneVar()
self.block.append_op(
type="concat",
inputs={'X': inputs},
outputs={'Out': [new_var]},
attrs={'axis': axis, })
return new_var
def _sliceAndConcatVar(self, item, axis):
if isinstance(item, slice):
if self.shape[axis] < 0:
return self._cloneVar(True)
start, stop, step = self._slice_indices(item, self.shape[axis])
if step == 1:
return self._sliceVar([axis], [start], [stop])
else:
vars = []
if step > 0:
while start < stop:
vars.append(
self._sliceVar([axis], [start], [start + 1]))
start += step
else:
while start > stop:
vars.append(
self._sliceVar([axis], [start], [start + 1]))
start += step
return self._concatVar(vars, axis)
elif isinstance(item, int):
if self.shape[axis] < 0:
return self._cloneVar(True)
index = int(item)
if (index > 0 and index >= self.shape[axis]) \
or (index < 0 and (index + self.shape[axis]) < 0):
raise IndexError("invalid index")
return self._sliceVar([axis], [index], [index + 1])
else:
raise IndexError("Valid index accept int or slice or tuple")
def __getitem__(self, item):
return _getitem_impl_(self, item)
def __setitem__(self, item, value):
inputs = {'Input': self}
# 1. Parse item
if not isinstance(item, tuple):
item = [item]
decrease_axes = []
axes = []
starts = []
ends = []
steps = []
max_integer = sys.maxsize
def replace_ellipsis(item):
# Use slice(None) to replace Ellipsis.
# For var, var.shape = [3,4,5,6]
#
# var[..., 1:2] -> var[:, :, :, 1:2]
# var[0, ...] -> var[0]
# var[0, ..., 1:2] -> var[0, :, :, 1:2]
item = list(item)
# Remove Variable to skip bug when counting Ellipsis
item_remove_var = [
ele for ele in item if not isinstance(ele, Variable)
]
ell_count = item_remove_var.count(Ellipsis)
if ell_count == 0:
return item
elif ell_count > 1:
raise IndexError(
"An index can only have a single ellipsis ('...')")
ell_idx = item.index(Ellipsis)
if ell_idx == len(item) - 1:
return item[:-1]
else:
item[ell_idx:ell_idx + 1] = [slice(None)] * (
len(self.shape) - len(item) + 1)
return item
item = replace_ellipsis(item)
for dim, slice_item in enumerate(item):
if isinstance(slice_item, slice):
start = slice_item.start
end = slice_item.stop
step = slice_item.step
if start is None and end is None and step is None:
continue
step = 1 if step is None else step
# TODO: support cases when step < 1
if not isinstance(step, Variable) and step == 0:
raise ValueError(
"When assign a value to a paddle.Tensor, step can not be 0, "
"but received step is {}.".format(step))
if isinstance(step, Variable) and (start is None or
end is None):
raise ValueError(
"When assign a value to a paddle.Tensor, it's not supported that "
"the start or end is None when the type of step is paddle.Tensor."
)
if start is None:
start = 0 if step > 0 else max_integer
if end is None:
end = max_integer if step > 0 else (0 - max_integer)
else:
decrease_axes.append(dim)
start = slice_item
end = slice_item + 1 if slice_item != -1 else max_integer
step = 1
axes.append(dim)
starts.append(start)
ends.append(end)
steps.append(step)
attrs = {
'axes': axes,
'starts': starts,
'ends': ends,
'steps': steps,
'decrease_axes': decrease_axes
}
from .layers import utils
if utils._contain_var(starts):
inputs['StartsTensorList'] = utils._convert_to_tensor_list(starts)
del attrs['starts']
if utils._contain_var(ends):
inputs['EndsTensorList'] = utils._convert_to_tensor_list(ends)
del attrs['ends']
if utils._contain_var(steps):
inputs['StepsTensorList'] = utils._convert_to_tensor_list(steps)
del attrs['steps']
# 2. Parse value
dtype = self.dtype
attrs['dtype'] = dtype
from .data_feeder import convert_dtype
        # 2.1 value is an integer or float
if isinstance(value, (int, float)):
value = np.array([value]).astype(convert_dtype(dtype))
# 2.2 value is a np.ndarray
if isinstance(value, np.ndarray):
shape = list(value.shape)
if dtype == core.VarDesc.VarType.BOOL:
value_name = "bool_values"
values = [bool(v) for v in value.flat]
elif dtype == core.VarDesc.VarType.FP32:
value_name = "fp32_values"
values = [float(v) for v in value.flat]
elif dtype == core.VarDesc.VarType.FP64:
value_name = "fp64_values"
values = [float(v) for v in value.flat]
elif dtype == core.VarDesc.VarType.INT32:
value_name = "int32_values"
values = [int(v) for v in value.flat]
elif dtype == core.VarDesc.VarType.INT64:
value_name = "int64_values"
values = [int(v) for v in value.flat]
else:
raise TypeError(
"When assign a numpy.ndarray, integer or float to a paddle.Tensor, "
"the data type of the paddle.Tensor must be bool, float32, int32 or int64, but "
"received %s." % convert_dtype(dtype))
attrs[value_name] = values
attrs["shape"] = shape
elif isinstance(value, Variable):
inputs["ValueTensor"] = value
else:
raise TypeError(
"Only support to assign an integer, float, numpy.ndarray or "
"paddle.Tensor to a paddle.Tensor, but received {}".format(
type(value)))
cur_block = default_main_program().current_block()
cur_block.append_op(
type="set_value", inputs=inputs, outputs={'Out': self}, attrs=attrs)
return self
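    # Illustrative sketch (assumption about typical usage): in static graph mode,
    # assigning into a Variable goes through __setitem__ and appends a 'set_value'
    # op to the current block, e.g. with a hypothetical variable x of shape [3, 4]:
    #
    #   x[0:2, 1] = 1.0   # scalar is packed into the op's fp32_values attribute
    #   x[..., 0] = 0     # Ellipsis is expanded to full slices by replace_ellipsis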
def get_value(self, scope=None):
"""
Get the value of variable in given scope.
Args:
scope(Scope, optional) : If `scope` is None, it will be set to global scope
obtained through 'paddle.static.global_scope()'. Otherwise, use `scope`.
Default: None
Returns:
Tensor: the value in given scope.
Examples:
.. code-block:: python
import paddle
import paddle.static as static
import numpy as np
paddle.enable_static()
x = static.data(name="x", shape=[10, 10], dtype='float32')
y = static.nn.fc(x, 10, name='fc')
place = paddle.CPUPlace()
exe = static.Executor(place)
prog = paddle.static.default_main_program()
exe.run(static.default_startup_program())
inputs = np.ones((10, 10), dtype='float32')
exe.run(prog, feed={'x': inputs}, fetch_list=[y, ])
path = 'temp/tensor_'
for var in prog.list_vars():
if var.persistable:
t = var.get_value()
paddle.save(t, path+var.name+'.pdtensor')
for var in prog.list_vars():
if var.persistable:
t_load = paddle.load(path+var.name+'.pdtensor')
var.set_value(t_load)
"""
# The 'framework' is a low-level module, and 'executor'
        # cannot be imported at the beginning of this file.
# Therefore, the above two modules are dynamically imported.
from .executor import global_scope
if scope is not None and not isinstance(scope, core._Scope):
raise TypeError(
"`scope` should be None or `paddle.static.Scope` type, but received {}.".
format(type(scope)))
if scope is None:
scope = global_scope()
var_temp = scope.find_var(self.name)
if var_temp is None:
raise ValueError("Can not find Variable '{}' in the Scope.".format(
self.name))
t = var_temp.get_tensor()
return t
def set_value(self, value, scope=None):
'''
Set the value to the tensor in given scope.
Args:
value(Tensor/ndarray) : The value to be set.
scope(Scope, optional) : If `scope` is None, it will be set to global scope
obtained through 'paddle.static.global_scope()'. Otherwise, use `scope`.
Default: None
Returns:
None
Examples:
.. code-block:: python
import paddle
import paddle.static as static
import numpy as np
paddle.enable_static()
x = static.data(name="x", shape=[10, 10], dtype='float32')
y = static.nn.fc(x, 10, name='fc')
place = paddle.CPUPlace()
exe = static.Executor(place)
prog = paddle.static.default_main_program()
exe.run(static.default_startup_program())
inputs = np.ones((10, 10), dtype='float32')
exe.run(prog, feed={'x': inputs}, fetch_list=[y, ])
path = 'temp/tensor_'
for var in prog.list_vars():
if var.persistable:
t = var.get_value()
paddle.save(t, path+var.name+'.pdtensor')
for var in prog.list_vars():
if var.persistable:
t_load = paddle.load(path+var.name+'.pdtensor')
var.set_value(t_load)
'''
# The 'framework' is a low-level module, and 'executor'
        # cannot be imported at the beginning of this file.
# Therefore, the above two modules are dynamically imported.
from .executor import global_scope
if not (isinstance(value, np.ndarray) or hasattr(value, '__array__')):
raise TypeError(
"`value` should be `numpy.ndarray` or `LoDTensor`, but received {}.".
format(type(value)))
if scope is not None and not isinstance(scope, core._Scope):
raise TypeError(
"`scope` should be None or `paddle.static.Scope` type, but received {}.".
format(type(scope)))
if scope is None:
scope = global_scope()
var_temp = scope.find_var(self.name)
if var_temp is None:
raise ValueError("Can not find Variable '{}' in the Scope.".format(
self.name))
t = var_temp.get_tensor()
if hasattr(value, 'shape'):
if isinstance(value.shape, (MethodType, FunctionType)):
value_shape = value.shape()
else:
value_shape = value.shape
if list(t.shape()) != list(value_shape):
raise ValueError(
"{} expected a shape {}, but the received shape is {}.".
format(self.name, list(t.shape()), list(value_shape)))
p = t._place()
if p.is_cpu_place():
place = core.CPUPlace()
elif p.is_cuda_pinned_place():
place = core.CUDAPinnedPlace()
elif p.is_xpu_place():
p = core.Place()
p.set_place(t._place())
place = core.XPUPlace(p.xpu_device_id())
else:
p = core.Place()
p.set_place(t._place())
place = core.CUDAPlace(p.gpu_device_id())
t.set(value, place)
def get_all_op_protos():
"""
Get all registered op proto from PaddlePaddle C++ end.
Returns:
list: list of OpProto.
"""
protostrs = core.get_all_op_protos()
ret_values = []
for pbstr in protostrs:
op_proto = framework_pb2.OpProto.FromString(six.binary_type(pbstr))
ret_values.append(op_proto)
return ret_values
class OpProtoHolder(object):
"""
A global variable to hold all OpProtos from C++ as a map
"""
@classmethod
def instance(cls):
if not hasattr(cls, '_instance'):
cls._instance = cls()
return cls._instance
def __init__(self):
assert not hasattr(
self.__class__,
'_instance'), 'Please use `instance()` to get OpProtoHolder object!'
op_protos = get_all_op_protos()
self.op_proto_map = {}
for proto in op_protos:
self.op_proto_map[proto.type] = proto
def get_op_proto(self, type):
"""
Get OpProto by a type string.
Args:
type(str): The type that operator registered in C++ side.
Returns(framework_pb2.OpProto): The OpProto
"""
if type not in self.op_proto_map:
raise ValueError("Operator \"%s\" has not been registered." % type)
return self.op_proto_map[type]
def update_op_proto(self):
op_protos = get_all_op_protos()
custom_op_names = []
for proto in op_protos:
if proto.type not in self.op_proto_map:
self.op_proto_map[proto.type] = proto
custom_op_names.append(proto.type)
return custom_op_names
@staticmethod
def generated_op_attr_names():
return {
core.op_proto_and_checker_maker.kOpRoleAttrName(),
core.op_proto_and_checker_maker.kOpRoleVarAttrName(),
core.op_proto_and_checker_maker.kOpNameScopeAttrName(),
core.op_proto_and_checker_maker.kOpCreationCallstackAttrName(),
core.op_proto_and_checker_maker.kOpDeviceAttrName()
}
class Operator(object):
"""
    In Fluid, all operations are represented by Operators, and an Operator
    is regarded as a built-in instruction of a Block. Users can use these
    built-in instructions to describe their neural networks.
Args:
block(Block): The block has the current operator.
desc(core.OpDesc): The protobuf description of Operator.
type(str): The type of operator. Default None.
        inputs(dict): The input of this Operator. It is a dictionary, for every
            element, key is the input parameter name, and value is a list of
            variables. Default None.
        outputs(dict): The output of this Operator. It is a dictionary, for
            every element, key is the output parameter name, and value is a list
            of variables. Default None.
        attrs(dict): The attributes of this Operator. It is a dictionary, for
            every element, key is the attribute name, and value is the attribute value.
            The attribute type should be the same as the type registered in the C++ side.
            Default None.
Returns:
Operator: The initialized Operator.
Raises:
        ValueError: If the passed input, output and attrs don't match those of the
            initializing Operator registered in the C++ side.
Notes:
The constructor of operator should not be invoked directly. Use
Block.append_op or Block._prepend_op instead.
Examples:
.. code-block:: python
import paddle.fluid as fluid
cur_program = fluid.Program()
cur_block = cur_program.current_block()
# var1 += var2 + var3
cur_block.append_op(type="sum",
inputs={"X": [var1, var2, var3]},
outputs={"Out": [var1]})
"""
OP_WITHOUT_KERNEL_SET = {
'feed', 'fetch', 'recurrent', 'go', 'rnn_memory_helper_grad',
'conditional_block', 'while', 'send', 'recv', 'listen_and_serv',
'fl_listen_and_serv', 'ncclInit', 'select', 'checkpoint_notify',
'gen_bkcl_id', 'c_gen_bkcl_id', 'gen_nccl_id', 'c_gen_nccl_id',
'c_comm_init', 'c_sync_calc_stream', 'c_sync_comm_stream',
'queue_generator', 'dequeue', 'enqueue', 'heter_listen_and_serv',
'c_wait_comm', 'c_wait_compute'
}
def __init__(self,
block,
desc,
type=None,
inputs=None,
outputs=None,
attrs=None):
if in_dygraph_mode():
if type is None:
raise ValueError(
"`type` to initialized an Operator can not be None.")
self._type = type
self.attrs = attrs if attrs else {}
else:
self.block = block
self.desc = desc
# note: not add self.attrs here:
# https://github.com/PaddlePaddle/Paddle/pull/12583#pullrequestreview-145093173
op_attrs = attrs
if op_attrs is None:
op_attrs = dict()
del attrs
op_maker = core.op_proto_and_checker_maker
if op_maker.kOpRoleAttrName() not in op_attrs:
op_attrs[op_maker.kOpRoleAttrName(
)] = self.block.program._op_role
role_var_name = op_maker.kOpRoleVarAttrName()
if len(self.block.program.
_op_role_var) != 0 and role_var_name not in op_attrs:
op_attrs[role_var_name] = self.block.program._op_role_var
if role_var_name in op_attrs and len(op_attrs[role_var_name]) == 0:
del op_attrs[role_var_name]
if len(self.desc.type()) != 0:
return
if type is None:
raise ValueError(
"`type` to initialized an Operator can not be None.")
else:
callstack_var_name = op_maker.kOpCreationCallstackAttrName()
op_attrs[callstack_var_name] = []
for frame in traceback.extract_stack():
op_attrs[callstack_var_name].append(
' File "{}", line {}, in {}'.format(frame[0], frame[1],
frame[2]))
op_attrs[callstack_var_name].append(' {}'.format(frame[
3]))
self.desc.set_type(type)
proto = OpProtoHolder.instance().get_op_proto(type)
namescope_var_name = op_maker.kOpNameScopeAttrName()
op_attrs[namescope_var_name] = _full_name_scope()
# set device for op with kernels, give warning for op without kernels
# when force_cpu and device_guard are used at the same time, a warning will be given.
# TODO(zhangting2020): when force_cpu is removed, clear warning below.
if _current_device is not None:
if self._has_kernel(type):
op_device = op_maker.kOpDeviceAttrName()
op_attrs[op_device] = _current_device
else:
warnings.warn("The Op(%s) is not support to set device." %
type)
if 'force_cpu' in op_attrs:
                if (type == 'less_than' and op_attrs['force_cpu'] is not None
                    ) or op_attrs['force_cpu'] != False:
warnings.warn(
"The Attr(force_cpu) of Op(%s) will be deprecated in the future, "
"please use 'device_guard' instead. 'device_guard' has higher priority when they are "
"used at the same time." % type)
def find_name(var_list, name):
for var_name in var_list:
if var_list[var_name] is not None and var_name == name:
return True
return False
if inputs is not None:
for in_proto in proto.inputs:
found = find_name(inputs, in_proto.name)
assert found or in_proto.dispensable, "Input {} not found".format(
in_proto.name)
if found:
in_args = inputs[in_proto.name]
if not isinstance(in_args, (list, tuple)):
in_args = [in_args]
if not in_proto.duplicable and len(in_args) > 1:
raise ValueError(
"Input %s expects only one input, but %d are given."
% (in_proto.name, len(in_args)))
in_arg_names = []
for index, arg in enumerate(in_args):
if isinstance(arg, six.string_types):
in_arg_names.append(arg)
elif isinstance(arg, six.binary_type):
in_arg_names.append(arg.decode())
elif isinstance(arg, (Variable, core.VarBase)):
in_arg_names.append(cpt.to_text(arg.name))
else:
raise TypeError(
"The type of '%s' in operator %s should be "
"one of [basestring(), str, Varibale] in python2, "
"or one of [str, bytes, Variable] in python3."
"but received : %s" %
(in_proto.name, type, arg))
self.desc.set_input(in_proto.name, in_arg_names)
else:
self.desc.set_input(in_proto.name, [])
if outputs is not None:
for m in proto.outputs:
if (m.name not in outputs) and m.dispensable:
continue
if not ((m.name in outputs) or m.dispensable):
raise ValueError(("Incorrect setting for output(s) of "
"operator \"%s\", should set: [%s].")
% (type, m.name))
for out_proto in proto.outputs:
if out_proto.name not in outputs:
continue
out_args = outputs[out_proto.name]
if not isinstance(out_args, list):
out_args = [out_args]
if not out_proto.duplicable and len(out_args) > 1:
raise ValueError(
"Output %s expects only one output, but %d are given."
% (out_proto.name, len(out_args)))
out_arg_names = []
for arg in out_args:
if isinstance(arg, six.string_types):
out_arg_names.append(arg)
else:
out_arg_names.append(cpt.to_text(arg.name))
# TODO(minqiyang): could we remove variable's op in static mode?
if not in_dygraph_mode():
if isinstance(arg, six.string_types):
block.var(arg).op = self
else:
arg.op = self
self.desc.set_output(out_proto.name, out_arg_names)
if op_attrs is not None:
if not isinstance(op_attrs, dict):
raise TypeError("'attrs' should be a dict.")
for attr in proto.attrs:
attr_name = attr.name
if (attr_name not in op_attrs) or (
op_attrs[attr_name] is None):
continue
attr_val = op_attrs[attr_name]
self._update_desc_attr(attr_name, attr_val)
self.desc.check_attrs()
if self._has_kernel(type):
self.desc.infer_var_type(self.block.desc)
self.desc.infer_shape(self.block.desc)
def _has_kernel(self, op_type):
return op_type not in self.OP_WITHOUT_KERNEL_SET
def to_string(self, throw_on_error):
"""
Get debug string.
Args:
throw_on_error(bool): Whether to raise exception if self is not
initialized.
Returns:
str: The debug string.
"""
protostr = self.desc.serialize_to_string()
proto = framework_pb2.OpDesc.FromString(six.binary_type(protostr))
return _debug_string_(proto, throw_on_error)
def _to_readable_code(self, skip_op_callstack=True):
"""
Get readable debug string of Operator.
.. note::
If you want to get the debug string in protobuf format,
please use :code:`to_string` method.
Args:
skip_op_callstack(bool): whether to skip parsing Operator's attribute
op_callstack, default value is True
Returns:
string: The formatted Operator string.
Examples:
.. code-block:: python
import paddle.fluid as fluid
cur_program = fluid.Program()
cur_block = cur_program.current_block()
var = cur_block.create_var(name="X",
shape=[-1, 23, 48],
dtype='float32')
new_op = cur_block.append_op(type="abs",
inputs={"X": [var]},
outputs={"Out": [var]})
print(new_op._to_readable_code())
"""
        assert isinstance(
            skip_op_callstack, bool
        ), "skip_op_callstack parameter's type is wrong, expected bool, received {}.".format(
            type(skip_op_callstack))
outputs_str = "{"
for i in range(0, len(self.output_names)):
outputs_str += "{name}=".format(name=self.output_names[i])
o = self.output(self.output_names[i])
outputs_str += "{value}".format(value=o)
if i != len(self.output_names) - 1:
outputs_str += ", "
outputs_str += "}"
inputs_str = "{"
for i in range(0, len(self.input_names)):
inputs_str += "{name}=".format(name=self.input_names[i])
o = self.input(self.input_names[i])
inputs_str += "{value}".format(value=o)
if i != len(self.input_names) - 1:
inputs_str += ", "
inputs_str += "}"
attr_names = sorted(self.attr_names)
attrs_str = ""
for i in range(0, len(attr_names)):
name = attr_names[i]
if skip_op_callstack and name == "op_callstack":
continue
attr_type = self.desc.attr_type(name)
if attr_type == core.AttrType.BLOCK:
a = "{name} = block[{value}]".format(
name=name, type=attr_type, value=self._block_attr_id(name))
attrs_str += a
if i != len(attr_names) - 1:
attrs_str += ", "
continue
if attr_type == core.AttrType.BLOCKS:
a = "{name} = blocks{value}".format(
name=name,
type=attr_type,
value=self._blocks_attr_ids(name))
attrs_str += a
if i != len(attr_names) - 1:
attrs_str += ", "
continue
a = "{name} = {value}".format(
name=name, type=attr_type, value=self.desc.attr(name))
attrs_str += a
if i != len(attr_names) - 1:
attrs_str += ", "
if outputs_str != "{}":
op_str = "{outputs} = {op_type}(inputs={inputs}, {attrs})".\
format(outputs=outputs_str, op_type=self.type,
inputs=inputs_str, attrs=attrs_str)
else:
op_str = "{op_type}(inputs={inputs}, {attrs})".\
format(op_type=self.type, inputs=inputs_str, attrs=attrs_str)
return op_str
def __str__(self):
return self._to_readable_code()
__repr__ = __str__
@property
def type(self):
return self.desc.type()
def input(self, name):
r"""
Get the input arguments according to the input parameter name.
Args:
name(str): The input parameter name.
Returns:
            list: return the list of argument names that are associated with \
the specific parameter name.
"""
return self.desc.input(name)
def _rename_input(self, old_name, new_name):
"""
Rename the `old_name` to `new_name`.
Args:
old_name(str): The old name of the Operator's input.
new_name(str): The new name of the Operator's input.
Returns:
None
"""
self.desc._rename_input(old_name, new_name)
def _rename_output(self, old_name, new_name):
"""
Rename the `old_name` to `new_name`.
Args:
old_name(str): The old name of the Operator's output.
new_name(str): The new name of the Operator's output.
Returns:
None
"""
self.desc._rename_output(old_name, new_name)
@property
def input_names(self):
return self.desc.input_names()
@property
def input_arg_names(self):
return self.desc.input_arg_names()
@property
def output_arg_names(self):
return self.desc.output_arg_names()
def output(self, name):
r"""
Get output arguments by the output parameter name.
Args:
name(str): The output parameter name.
Returns:
list: return the list of argument names associated with \
the specific parameter name.
"""
return self.desc.output(name)
@property
def output_names(self):
return self.desc.output_names()
@property
def idx(self):
for i, op in enumerate(self.block.ops):
if op == self:
return i
raise ValueError(
"Can't find op itself in it's block. It could be a bug of Paddle.")
def has_attr(self, name):
"""
Whether this Operator has the attribute with name or not.
Args:
name(str): the attribute name.
Returns:
bool: True if has this attribute.
"""
return self.desc.has_attr(name)
def attr_type(self, name):
"""
Get the type of attribute by attribute's name.
Args:
name(str): the attribute name.
Returns:
core.AttrType: the attribute type.
"""
return self.desc.attr_type(name)
def _set_attr(self, name, val):
"""
Set the value of attribute by attribute's name.
Args:
name(str): the attribute name.
val(bool|int|str|float|list): the value of the attribute.
Raises:
ValueError: If the type of value doesn't match with desc.attr_type(name).
"""
self._update_desc_attr(name, val)
def _remove_attr(self, name):
self.desc.remove_attr(name)
def _update_desc_attr(self, name, val):
"""
Update the value of desc's attribute by attribute's name.
Args:
name(str): the attribute name.
val(bool|int|str|float|list): the value of the attribute.
Raises:
ValueError: If the type of value doesn't match with desc.attr_type(name).
"""
if isinstance(val, Block):
self.desc.set_block_attr(name, val.desc)
elif isinstance(val, list) and val and all(
isinstance(v, Block) for v in val):
self.desc.set_blocks_attr(name, [v.desc for v in val])
elif isinstance(val, core.BlockDesc) or \
isinstance(val, core.ProgramDesc):
self.desc.set_serialized_attr(name, val.serialize_to_string())
else:
self.desc._set_attr(name, val)
@property
def attr_names(self):
return self.desc.attr_names()
def attr(self, name):
"""
Get the attribute by name.
Args:
name(str): the attribute name.
Returns:
bool|int|str|float|list: The attribute value. The return value
can be any valid attribute type.
"""
return self.desc.attr(name)
def _block_attr_id(self, name):
"""
Get the block attribute's id by name.
Args:
name(str): the attribute name.
Returns:
int: the block index.
"""
return self.desc._block_attr_id(name)
def _block_attr(self, name):
"""
Get the block attribute by name.
Args:
name(str): the attribute name.
Returns:
block: the block attribute.
"""
id = self._block_attr_id(name)
assert (id >= 0 and id < len(self.block.program.blocks))
return self.block.program.blocks[id]
def _blocks_attr(self, name):
"""
Get the blocks attribute by name.
Args:
name(str): the attribute name.
Returns:
list: list of the blocks attribute.
"""
attrs = []
for i in self._blocks_attr_ids(name):
assert (i >= 0 and i < len(self.block.program.blocks))
attrs.append(self.block.program.blocks[i])
return attrs
def _blocks_attr_ids(self, name):
"""
Get the blocks attribute's ids by name.
Args:
name(str): the attribute name.
Returns:
list: list of the blocks ids.
"""
return self.desc._blocks_attr_ids(name)
def all_attrs(self):
"""
Get the attribute dict.
Returns:
dict: The Operator's attribute dict, name->attr.
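        Examples:
            A minimal sketch (assumes ``op`` is an existing Operator instance):

            .. code-block:: python

                for attr_name, attr_value in op.all_attrs().items():
                    print(attr_name, attr_value)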
"""
attr_names = self.attr_names
attr_map = {}
for n in attr_names:
attr_type = self.desc.attr_type(n)
if attr_type == core.AttrType.BLOCK:
attr_map[n] = self._block_attr(n)
continue
if attr_type == core.AttrType.BLOCKS:
attr_map[n] = self._blocks_attr(n)
continue
attr_map[n] = self.attr(n)
return attr_map
def _is_optimize_op(self):
op_maker = core.op_proto_and_checker_maker
OPTIMIZE = core.op_proto_and_checker_maker.OpRole.Optimize
if not self.desc.has_attr(op_maker.kOpRoleAttrName()):
return False
op_role = self.desc.attr(op_maker.kOpRoleAttrName())
if op_role & int(OPTIMIZE):
return True
return False
def _is_backward_op(self):
op_maker = core.op_proto_and_checker_maker
BACKWARD = core.op_proto_and_checker_maker.OpRole.Backward
if not self.desc.has_attr(op_maker.kOpRoleAttrName()):
return False
op_role = self.desc.attr(op_maker.kOpRoleAttrName())
if op_role & int(BACKWARD):
return True
return False
class Block(object):
"""
    In Fluid, a Program consists of multiple Blocks, and a Block stores
    VarDesc and OpDesc. Within a specific Block, each VarDesc has a unique name.
    One block could have some child blocks, and a child block's name scopes
    should inherit the parent's so that an OpDesc in a child block can reference
    a VarDesc that is stored in the parent block.
Please reference the framework.proto for details.
Args:
program(Program): The Program that the Block belongs to.
idx(int): The block's id in the Program.
Notes:
The constructor of Block should not be invoked directly. Please
use `Program._create_block()` to create a block.
Examples:
.. code-block:: python
import paddle.fluid as fluid
cur_program = fluid.Program()
cur_block = cur_program.current_block()
var = cur_block.create_var(name="X",
shape=[-1, 23, 48],
dtype='float32')
cur_block.append_op(type="abs",
inputs={"X": [var]},
outputs={"Out": [var]})
"""
def __init__(self, program, idx):
self.desc = program.desc.block(idx)
self.vars = collections.OrderedDict() # var_name --> var
self.ops = list() # operator list
self.program = program
self.removed_vars = collections.OrderedDict()
def __str__(self):
return self._to_readable_code()
def _to_readable_code(self, skip_op_callstack=True):
"""
Get readable debug string of Block.
.. note::
If you want to get the debug string in protobuf format,
please use :code:`to_string` method.
Args:
skip_op_callstack(bool): whether to skip parsing Operator's attribute
op_callstack, default value is True
Returns:
string: The formatted Block string.
Examples:
.. code-block:: python
import paddle.fluid as fluid
cur_program = fluid.Program()
cur_block = cur_program.current_block()
new_var = cur_block.create_var(name="X",
shape=[-1, 23, 48],
dtype='float32')
new_op = cur_block.append_op(type="abs",
inputs={"X": [new_var]},
outputs={"Out": [new_var]})
print(cur_block._to_readable_code())
"""
        assert isinstance(
            skip_op_callstack, bool
        ), "skip_op_callstack parameter's type is wrong, expected bool, received {}.".format(
            type(skip_op_callstack))
block_str = "{ // block "
block_str += "{}\n".format(self.idx)
for var in list(self.vars.values()):
block_str += " {}\n".format(var._to_readable_code())
block_str += "\n"
for op in self.ops:
block_str += " {}\n".format(
op._to_readable_code(skip_op_callstack))
block_str += "}"
return block_str
def to_string(self, throw_on_error, with_details=False):
"""
Get debug string.
Args:
throw_on_error(bool): raise exception when self is not initialized
when throw_on_error is True.
with_details(bool): more details about variables and parameters
(e.g. trainable, optimize_attr, ...) will be printed when
with_details is True. Default False.
Returns:
str: The debug string.
"""
assert isinstance(throw_on_error, bool) and isinstance(with_details,
bool)
if with_details:
re_add_indent = re.compile(r"\n(.)")
res_str = "blocks {\n idx: %d\n parent_idx: %d" % (
self.idx, self.parent_idx)
for var in list(self.vars.values()):
res_str += "\n vars {\n %s }" % re_add_indent.sub(
r"\n \1", var.to_string(throw_on_error, with_details))
for op in self.ops:
res_str += "\n ops {\n %s }" % re_add_indent.sub(
r"\n \1", op.to_string(throw_on_error))
res_str += "\n}"
else:
protostr = self.desc.serialize_to_string()
proto = framework_pb2.BlockDesc.FromString(
six.binary_type(protostr))
res_str = _debug_string_(proto, throw_on_error)
return res_str
__repr__ = __str__
@property
def parent_idx(self):
return self.desc.parent
@property
def forward_block_idx(self):
return self.desc.get_forward_block_idx()
def _set_forward_block_idx(self, idx):
"""
Set the forward block Idx.
Args:
idx(int): the block index.
Returns:
None
"""
self.desc._set_forward_block_idx(idx)
@property
def backward_block_idx(self):
cur_block_idx = self.idx
for block in self.program.blocks:
if block.forward_block_idx == cur_block_idx:
return block.idx
return -1
@property
def idx(self):
return self.desc.id
def var(self, name):
"""
Get a Variable by name from this block.
Args:
name(str): the Variable's name.
Raises:
            ValueError: If the input's type is not str, or this block
                doesn't have a Variable with the given name.
        Returns:
            Variable: the Variable with the given name.
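        Examples:
            A minimal sketch (the program and variable names are illustrative):

            .. code-block:: python

                import paddle.fluid as fluid
                cur_program = fluid.Program()
                cur_block = cur_program.current_block()
                cur_block.create_var(name="X", shape=[-1, 23, 48], dtype='float32')
                x = cur_block.var("X")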
"""
if not isinstance(name, six.string_types):
raise TypeError(
"var require string as parameter, but get %s instead." %
(type(name)))
v = self.vars.get(name, None)
if v is None:
raise ValueError("var %s not in this block" % name)
return v
def _find_var_recursive(self, name):
"""
Get a Variable by name from this block recursively.
Args:
name(str): the Variable's name.
Returns:
            Variable: the Variable with the given name, or None if not found.
"""
frontier = list()
visited = set()
frontier.append(self)
prog = self.program
while len(frontier) != 0: # BFS
cur = frontier[0]
frontier = frontier[1:]
if id(cur) in visited:
continue
if cur.has_var(name):
return cur.var(name)
if cur.parent_idx != -1:
frontier.append(prog.block(cur.parent_idx))
if cur.forward_block_idx != -1:
frontier.append(prog.block(cur.forward_block_idx))
visited.add(id(cur))
return None
def _var_recursive(self, name):
"""
Get a Variable by name from this block recursively.
Args:
name(str): the Variable's name.
Raises:
            ValueError: neither this block nor its ancestor blocks
                have a Variable with the given name.
        Returns:
            Variable: the Variable with the given name.
"""
var = self._find_var_recursive(name)
if var:
return var
else:
raise ValueError("Var {0} is not found recursively".format(name))
def all_parameters(self):
return list(self.iter_parameters())
def iter_parameters(self):
return (item[1] for item in six.iteritems(self.vars)
if isinstance(item[1], Parameter))
def create_var(self, *args, **kwargs):
if in_dygraph_mode():
var = _varbase_creator(*args, **kwargs)
else:
var = Variable(block=self, *args, **kwargs)
if 'initializer' in kwargs:
kwargs['initializer'](var, self)
return var
def has_var(self, name):
return name in self.vars
def _rename_var(self, name, new_name):
"""
Rename variable in vars and ops' inputs and outputs
Args:
name(str): the name that need to be renamed.
new_name(str): the name that need to rename to.
Raises:
            ValueError: If this block doesn't have a variable with the given name,
                or the type of the var with the given name is neither Parameter
                nor Variable.
        Returns:
            Variable: the renamed Variable.
"""
name = cpt.to_text(name)
new_name = cpt.to_text(new_name)
if not self.has_var(name):
raise ValueError("var %s is not in current block" % name)
v = self.var(name)
if type(v) == Parameter:
var_type = "Parameter"
stop_gradient = v.stop_gradient
trainable = v.trainable
optimize_attr = v.optimize_attr
regularizer = v.regularizer
error_clip = v.error_clip
elif type(v) == Variable:
var_type = "Variable"
error_clip = v.error_clip
stop_gradient = v.stop_gradient
else:
raise ValueError("unsupported var type: %s", type(v))
orig_var_type = v.type
self.desc._rename_var(cpt.to_bytes(name), cpt.to_bytes(new_name))
# NOTE: v is destroyed by C++ after calling _rename_var.
d = self.desc.find_var(cpt.to_bytes(new_name))
if var_type == "Parameter":
if in_dygraph_mode():
var = ParamBase(
d.shape(),
d.dtype(),
type=orig_var_type,
name=new_name,
stop_gradient=stop_gradient,
trainable=trainable,
optimize_attr=optimize_attr,
regularizer=regularizer,
error_clip=error_clip)
else:
var = Parameter(
self,
d.shape(),
d.dtype(),
type=orig_var_type,
name=new_name,
stop_gradient=stop_gradient,
trainable=trainable,
optimize_attr=optimize_attr,
regularizer=regularizer,
error_clip=error_clip)
elif var_type == "Variable":
var = Variable(
self,
type=orig_var_type,
name=new_name,
error_clip=error_clip,
stop_gradient=stop_gradient)
# rename the python side, _sync_with_cpp will only add
# new vars/ops to python side.
self.vars[new_name] = var
del self.vars[name]
self._sync_with_cpp()
return var
def _remove_var(self, name, sync=True):
        if sync:
self._sync_with_cpp()
self.desc._remove_var(cpt.to_bytes(name))
del self.vars[name]
def create_parameter(self, *args, **kwargs):
global_block = self.program.global_block()
param = None
if in_dygraph_mode():
param = ParamBase(*args, **kwargs)
else:
param = Parameter(global_block, *args, **kwargs)
# NOTE: Why only set stop_gradient=False in static mode
# Because in dygraph mode, the `stop_gradient` and `trainable`
        # are related, and `trainable` default value is `True` or
# it is specified by users, there is no need to set
# `stop_gradient` for ParamBase here.
param.stop_gradient = False
if 'initializer' in kwargs:
def _is_inited_by(block, var):
init_ops = []
for op in block.ops:
if var.name in op.output_arg_names:
# In startup_program, "c_broadcast" and "c_sync_comm_stream"
# are treated as initialization ops that cause error.
# Think of "c_broadcast" and "c_sync_comm_stream" as a special case here.
# NOTE: "coalesce_tensor" is a special case for rnn with cudnn support
if op.type in [
"c_broadcast", "c_sync_comm_stream",
"coalesce_tensor"
]:
continue
init_ops.append(op)
return init_ops
initializer = kwargs['initializer']
init_ops = _is_inited_by(global_block, param)
init_ops_len = len(init_ops)
if init_ops_len > 1:
raise RuntimeError("param " + param.name +
" is inited by multiple init ops " + str(
init_ops))
elif init_ops_len == 1:
# TODO already inited, do nothing, should log a warning
pass
else:
initializer(param, self)
return param
def append_op(self, *args, **kwargs):
"""
        Appends a new Operator according to the given arguments.
        Returns:
            Operator: the appended Operator.
"""
if in_dygraph_mode():
attrs = kwargs.get("attrs", {})
type = kwargs.get("type", None)
op = Operator(
block=self,
desc=None,
type=type,
inputs=None,
outputs=None,
attrs=attrs)
# record ops in tracer rather than blocks
#
# TODO(minqiyang): add op stop_gradient support in static mode too.
# currently, we only support stop_gradient in dygraph mode.
_dygraph_tracer().trace_op(type,
kwargs.get("inputs", {}),
kwargs.get("outputs", {}), attrs
if attrs else {},
kwargs.get("stop_gradient", False))
else:
op_desc = self.desc.append_op()
op = Operator(
block=self,
desc=op_desc,
type=kwargs.get("type", None),
inputs=kwargs.get("inputs", None),
outputs=kwargs.get("outputs", None),
attrs=kwargs.get("attrs", None))
self.ops.append(op)
return op
def _insert_op(self, index, *args, **kwargs):
"""
        Insert an Operator according to the given arguments.
        Args:
            index(int): the index at which the operator is to be inserted.
        Returns:
            Operator: the inserted Operator.
"""
self._sync_with_cpp()
return self._insert_op_without_sync(index, *args, **kwargs)
def _insert_op_without_sync(self, index, *args, **kwargs):
"""
        Insert an Operator according to the given arguments,
        without sync_with_cpp to make the compilation faster.
        Args:
            index(int): the index at which the operator is to be inserted.
        Returns:
            Operator: the inserted Operator.
"""
op_desc = self.desc._insert_op(index)
op = Operator(block=self, desc=op_desc, *args, **kwargs)
self.ops.insert(index, op)
return op
def _remove_op(self, index, sync=True):
"""
        Remove the operator at the specified position.
        Args:
            index(int): the position of the operator to be removed.
Returns:
None
"""
        if sync:
self._sync_with_cpp()
self.desc._remove_op(index, index + 1)
del self.ops[index]
def _slice_ops(self, start, end):
"""
        Return the Operators between start and end.
Args:
start(int): the start position.
end(int): the end position.
Returns:
list: the Operators between start and end.
"""
return self.ops[start:end]
def _prepend_op(self, *args, **kwargs):
if in_dygraph_mode():
type = kwargs.get("type", None)
attrs = kwargs.get("attrs", {})
op = Operator(
self, None, type=type, inputs=None, outputs=None, attrs=attrs)
_dygraph_tracer().trace_op(type,
kwargs.get("inputs", {}),
kwargs.get("outputs", {}), attrs
if attrs else {},
kwargs.get("stop_gradient", False))
else:
op_desc = self.desc._prepend_op()
op = Operator(
self,
op_desc,
type=kwargs.get("type", None),
inputs=kwargs.get("inputs", None),
outputs=kwargs.get("outputs", None),
attrs=kwargs.get("attrs", None))
self.ops.insert(0, op)
return op
def _sync_with_cpp(self):
"""
Sync from the desc on the c++ end. This method is used to synchronize
the c++ desc instance generated by backward.
"""
# sync variables from cpp
for var in self.desc.all_vars():
if not self.has_var(var.name()):
self.create_var(name=var.name(), desc=var, type=var.type())
# sync variables removed from c++ end
for var in list(self.vars.keys()):
if not self.desc.find_var(cpt.to_bytes(var)):
self.vars.pop(var)
# sync operators from cpp
ops_in_cpp = []
for op_idx in range(0, self.desc.op_size()):
ops_in_cpp.append(self.desc.op(op_idx))
if len(self.ops) != 0:
first_op_in_python = self.ops[0].desc
last_op_in_python = self.ops[len(self.ops) - 1].desc
start_index = None
end_index = None
for index in range(len(ops_in_cpp)):
if first_op_in_python == ops_in_cpp[index]:
start_index = index
if last_op_in_python == ops_in_cpp[index]:
end_index = index
assert start_index is not None
assert end_index is not None
assert start_index <= end_index
else:
start_index = 0
end_index = -1
# sync ops append to the head of cpp_ops
for index in range((start_index - 1 - 1), -1, -1):
op_desc = ops_in_cpp[index]
op = Operator(self, op_desc)
self.ops.insert(0, op)
# sync ops append to the end of cpp_ops
for index in range((end_index + 1), len(ops_in_cpp)):
op_desc = ops_in_cpp[index]
op = Operator(self, op_desc)
self.ops.append(op)
# sync ops removed from c++ end
if end_index != -1 and end_index < len(self.ops):
ops_in_cpp_index = 0
ops_in_python_index = 0
while ops_in_python_index < len(
self.ops) and ops_in_cpp_index < len(ops_in_cpp):
if self.ops[ops_in_python_index].desc != ops_in_cpp[
ops_in_cpp_index]:
del self.ops[ops_in_python_index]
else:
ops_in_cpp_index += 1
ops_in_python_index += 1
assert len(self.ops) == len(ops_in_cpp)
for index in range(len(self.ops)):
assert self.ops[index].desc == ops_in_cpp[index]
def _copy_param_info_from(self, other):
"""
Copy the information of parameters from the other block.
Args:
other(Block): the other block.
Raises:
            ValueError: If the type of input is not Block, or the `other` and this
                block are not in the same topology.
Returns:
None
"""
if not isinstance(other, Block):
raise TypeError(
"_copy_param_info_from should be invoked with Block")
for p in other.iter_parameters():
assert isinstance(p, Parameter)
v = self.vars.get(p.name, None)
if v is None:
# if the Parameter is pruned, v may be None
continue
assert isinstance(v, Variable)
new_p = None
if in_dygraph_mode():
new_p = ParamBase(
shape=v.shape,
dtype=v.dtype,
type=v.type,
lod_level=v.lod_level,
stop_gradient=p.stop_gradient,
trainable=p.trainable,
optimize_attr=p.optimize_attr,
regularizer=p.regularizer,
error_clip=p.error_clip,
name=v.name)
else:
new_p = Parameter(
block=self,
shape=v.shape,
dtype=v.dtype,
type=v.type,
lod_level=v.lod_level
if v.type == core.VarDesc.VarType.LOD_TENSOR else None,
stop_gradient=p.stop_gradient,
trainable=p.trainable,
optimize_attr=p.optimize_attr,
regularizer=p.regularizer,
error_clip=p.error_clip,
name=v.name)
self.vars[new_p.name] = new_p
def _clone_variable(self, var, force_persistable=True):
"""
Clone a variable into current block.
Args:
var: the variable to be cloned.
            force_persistable(bool): True means setting the result variable to be persistable.
                False means keeping the persistable attribute the same as that of the input var.
default: True.
Returns:
Variable: the new variable cloned from 'var' in current block.
"""
assert isinstance(var, Variable)
ret_var = None
# make STEP_SCOPES var can be safely cloned.
if var.type == core.VarDesc.VarType.STEP_SCOPES:
ret_var = self.create_var(
name=var.name, persistable=var.persistable, type=var.type)
elif var.type == core.VarDesc.VarType.RAW:
ret_var = self.create_var(
name=var.name, persistable=var.persistable, type=var.type)
elif var.type == core.VarDesc.VarType.SELECTED_ROWS:
ret_var = self.create_var(
name=var.name,
shape=var.shape,
dtype=var.dtype,
type=var.type,
persistable=True if force_persistable else var.persistable,
is_data=var.is_data,
need_check_feed=var.desc.need_check_feed())
else:
ret_var = self.create_var(
name=var.name,
shape=var.shape,
dtype=var.dtype,
type=var.type,
lod_level=var.lod_level,
persistable=True if force_persistable else var.persistable,
is_data=var.is_data,
need_check_feed=var.desc.need_check_feed())
return ret_var
class IrNode(object):
"""
Python IrNode. Beneath it is a core.Node, which is used for Ir Pass.
"""
def __init__(self, node):
"""
Construct an IrNode using core.Node.
Args:
node(core.Node): C++ Node.
"""
assert isinstance(node,
core.Node), 'node must be the instance of core.Node.'
self.node = node
def name(self):
"""
Return the node name.
Returns:
str: node name.
"""
return self.node.name()
def node_type(self):
"""
Return the node type.
Returns:
core.Node.Type: node type(core.Node.Type.Operation or core.Node.Type.Variable).
"""
return self.node.node_type()
def var(self):
"""
Return the node variable description.
Returns:
core.VarDesc: node variable description.
"""
return self.node.var()
def op(self):
"""
Return the node operator description.
Returns:
core.OpDesc: node operator description.
"""
return self.node.op()
def id(self):
"""
Return the node id.
Returns:
int: node id.
"""
return self.node.id()
def is_op(self):
"""
If the node is an operator, then return true.
Returns:
bool: indicate whether the node is an operator.
"""
return self.node.is_op()
def is_var(self):
"""
If the node is a variable, then return true.
Returns:
bool: indicate whether the node is a variable.
"""
return self.node.is_var()
def is_ctrl_var(self):
"""
If the node is a control dependence variable, then return true.
Returns:
bool: indicate whether the node is a control dependence variable.
"""
return self.node.is_ctrl_var()
def clear_inputs(self):
"""
Clear the node inputs. After executing the `clear_inputs` function,
the node inputs will be empty.
"""
self.node.clear_inputs()
def remove_input_by_id(self, node_id):
"""
Remove a node from inputs by the given node id.
Args:
node_id(int): the given node id.
"""
self.node.remove_input(node_id)
def remove_input(self, node):
"""
Remove a node from inputs.
Args:
node(IrNode): the node being removed.
"""
self.node.remove_input(node.node)
def append_input(self, node):
"""
Append a node in inputs.
Args:
node(IrNode): the node being appended.
"""
self.node.append_input(node.node)
def clear_outputs(self):
"""
Clear the node outputs. After executing the `clear_outputs` function,
the node outputs will be empty.
"""
self.node.clear_outputs()
def remove_output_by_id(self, node_id):
"""
Remove a node from outputs by the given node id.
Args:
node_id(int): the given node id.
"""
self.node.remove_output(node_id)
def remove_output(self, node):
"""
Remove a node from outputs.
Args:
node(IrNode): the node being removed.
"""
self.node.remove_output(node.node)
def append_output(self, node):
"""
Append a node in outputs.
Args:
node(IrNode): the node being appended.
"""
self.node.append_output(node.node)
@property
def inputs(self):
"""
Return the node inputs.
Returns:
list(IrNode): node inputs wrapped by IrNode.
"""
return [IrNode(n) for n in self.node.inputs]
@property
def outputs(self):
"""
Return the node outputs.
Returns:
list(IrNode): node outputs wrapped by IrNode.
"""
return [IrNode(n) for n in self.node.outputs]
class IrVarNode(IrNode):
"""
Python IrVarNode. Beneath it is a core.Node, it inherits from IrNode.
"""
def __init__(self, node):
"""
Construct an IrVarNode using core.Node.
Args:
node(core.Node): C++ Node.
"""
assert isinstance(node, core.Node) and node.is_var(), \
'node must be the instance of core.Node and it must be a variable node.'
super(IrVarNode, self).__init__(node)
self.node = node
def set_shape(self, shape):
"""
Set the node variable shape.
Args:
shape(list): shape to be set.
"""
assert self.node.var() is not None, \
"The node variable description can not be None."
self.node.var().set_shape(shape)
def persistable(self):
"""
If the variable node is a persistable variable, then return true.
Returns:
bool: indicate whether the variable is persistable.
"""
assert self.node.var() is not None, \
"The node variable description can not be None."
return self.node.var().persistable()
def type(self):
"""
Return the variable type.
Returns:
core.VarDesc.VarType: the variable type.
"""
assert self.node.var() is not None, \
"The node variable description can not be None."
return self.node.var().type()
def dtype(self):
"""
Return the variable data type.
Returns:
core.VarDesc.VarType: the variable data type.
"""
assert self.node.var() is not None, \
"The node variable description can not be None."
return self.node.var().dtype()
def shape(self):
"""
Return the variable shape.
Returns:
list: the variable shape.
"""
assert self.node.var() is not None, \
"The node variable description can not be None."
return self.node.var().shape()
@property
def inputs(self):
"""
Return the node inputs.
Returns:
list(IrOpNode): node inputs wrapped by IrOpNode.
"""
return [IrOpNode(n) for n in self.node.inputs]
@property
def outputs(self):
"""
Return the node outputs.
Returns:
list(IrOpNode): node outputs wrapped by IrOpNode.
"""
return [IrOpNode(n) for n in self.node.outputs]
class IrOpNode(IrNode):
"""
Python IrOpNode. Beneath it is a core.Node, it inherits from IrNode.
"""
def __init__(self, node):
"""
Construct an IrOpNode using core.Node.
Args:
node(core.Node): C++ Node.
"""
assert isinstance(node, core.Node) and node.is_op(), \
            'node must be the instance of core.Node and it must be an operator node.'
super(IrOpNode, self).__init__(node)
self.node = node
def rename_input(self, old_input_name, new_input_name):
"""
Rename the input of this node.
Args:
old_input_name(str): the old input name.
new_input_name(str): the new input name.
"""
assert self.node.op() is not None, \
"The node operator description can not be None."
self.node.op()._rename_input(old_input_name, new_input_name)
def rename_output(self, old_output_name, new_output_name):
"""
Rename the output of this node.
Args:
old_output_name(str): the old output name.
new_output_name(str): the new output name.
"""
assert self.node.op() is not None, \
"The node operator description can not be None."
self.node.op()._rename_output(old_output_name, new_output_name)
def input(self, name):
"""
Get the argument name list by the parameter name for input.
Args:
name(str): the parameter name.
Returns:
list(str): the argument name list.
"""
assert self.node.op() is not None, \
"The node operator description can not be None."
return self.node.op().input(name)
def output(self, name):
"""
Get the argument name list by the parameter name for output.
Args:
name(str): the parameter name.
Returns:
list(str): the argument name list.
"""
assert self.node.op() is not None, \
"The node operator description can not be None."
return self.node.op().output(name)
def set_type(self, new_type):
"""
Change the operator type into new type.
Args:
new_type(str): new operator type to be set.
"""
assert self.node.op() is not None, \
"The node operator description can not be None."
return self.node.op().set_type(new_type)
def set_attr(self, name, val):
"""
Set the value of attribute by attribute's name.
Args:
name(str): the attribute name.
val(bool|int|str|float|list): the value of the attribute.
"""
self._update_desc_attr(name, val)
def _update_desc_attr(self, name, val):
"""
Update the value of the op desc's attribute by attribute's name.
"""
assert self.node.op() is not None, \
"The node operator description can not be None."
desc = self.node.op()
if isinstance(val, Block):
desc.set_block_attr(name, val.desc)
elif isinstance(val, list) and val and \
all(isinstance(v, Block) for v in val):
desc.set_blocks_attr(name, [v.desc for v in val])
elif isinstance(val, core.BlockDesc) or \
isinstance(val, core.ProgramDesc):
desc.set_serialized_attr(name, val.serialize_to_string())
else:
desc._set_attr(name, val)
def input_arg_names(self):
"""
Return input arguments' names of this op node.
Returns:
list(str): input arguments' names of this op node.
"""
assert self.node.op() is not None, \
"The node operator description can not be None."
return self.node.op().input_arg_names()
def output_arg_names(self):
"""
Return output arguments' names of this op node.
Returns:
list(str): output arguments' names of this op node.
"""
assert self.node.op() is not None, \
"The node operator description can not be None."
return self.node.op().output_arg_names()
@property
def inputs(self):
"""
Return the node inputs.
Returns:
list(IrVarNode): node inputs wrapped by IrVarNode.
"""
return [IrVarNode(n) for n in self.node.inputs]
@property
def outputs(self):
"""
Return the node outputs.
Returns:
list(IrVarNode): node outputs wrapped by IrVarNode.
"""
return [IrVarNode(n) for n in self.node.outputs]
class IrGraph(object):
"""
Python IrGraph. Beneath it is a core.Graph, which is used for
creating a c++ Ir Pass Graph. An IrGraph is just a graph view of
a Program. In an IrGraph, both Variables and Operators are graph
nodes.
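    Examples:
        A minimal construction sketch (assumes ``program`` is an existing Program;
        the variable names are illustrative):

        .. code-block:: python

            import paddle.fluid.core as core
            from paddle.fluid.framework import IrGraph
            graph = IrGraph(core.Graph(program.desc), for_test=False)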
"""
def __init__(self, graph, for_test=False):
"""
Construct an IrGraph using core.Graph.
Args:
graph(core.Graph): C++ Graph.
for_test(bool): True for the test graph and false for the train graph.
"""
assert isinstance(
graph, core.Graph), 'graph must be the instance of core.Graph.'
self.graph = graph
self._for_test = for_test
def clone(self):
"""
Create a new and duplicated IrGraph.
Warns:
The method only clones the graph structure, not its attributes.
Returns:
IrGraph: A new and duplicated graph.
"""
g = self.graph.clone()
return IrGraph(g, self._for_test)
def is_test(self):
"""
If the graph is used for testing, the function returns true. Otherwise, returns false.
"""
return self._for_test
def all_nodes(self):
"""
Return all nodes included in the graph as a set.
"""
return {IrNode(node) for node in self.graph.nodes()}
def all_var_nodes(self):
"""
Return all variable nodes included in the graph as a set.
"""
return {IrVarNode(node) for node in self.graph.nodes() if node.is_var()}
def all_persistable_nodes(self):
"""
Return all persistable variable nodes included in the graph as a set.
"""
persistable_nodes = set()
for node in self.graph.nodes():
if node.is_var() and node.var() is not None and node.var(
).persistable():
persistable_nodes.add(node)
return {IrVarNode(p) for p in persistable_nodes}
def all_op_nodes(self):
"""
Return all operator nodes included in the graph as a set.
"""
return {IrOpNode(node) for node in self.graph.nodes() if node.is_op()}
def create_persistable_node(self, name, var_type, shape, var_dtype):
"""
Create a persistable variable node in the graph. In IrGraph,
        it cannot distinguish between persistable variables and parameters.
Args:
name(str): the name of the persistable variable node.
            var_type(core.VarDesc.VarType): the type of the persistable variable node.
shape(list): the shape of the persistable variable node.
var_dtype(core.VarDesc.VarType): the data type of the persistable variable node.
Returns:
IrVarNode: the created persistable variable node.
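        Examples:
            A minimal sketch (the name, shape and dtype below are illustrative):

            .. code-block:: python

                import paddle.fluid.core as core
                w_node = graph.create_persistable_node(
                    name='fc_w',
                    var_type=core.VarDesc.VarType.LOD_TENSOR,
                    shape=[784, 10],
                    var_dtype=core.VarDesc.VarType.FP32)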
"""
var_desc = core.VarDesc(name)
var_desc.set_type(var_type)
var_desc.set_shape(shape)
var_desc.set_dtype(var_dtype)
var_desc.set_persistable(True)
return IrVarNode(self.graph.create_var_node(var_desc))
def create_var_node(self, name, var_type, shape, var_dtype):
"""
Create a variable node in the graph. The created variable node is
not persistable.
Args:
name(str): the name of the variable node.
            var_type(core.VarDesc.VarType): the type of the variable node.
shape(list): the shape of the variable node.
var_dtype(core.VarDesc.VarType): the data type of the variable node.
Returns:
IrVarNode: the created variable node.
"""
var_desc = core.VarDesc(name)
var_desc.set_type(var_type)
var_desc.set_shape(shape)
var_desc.set_dtype(var_dtype)
return IrVarNode(self.graph.create_var_node(var_desc))
def create_control_dep_var(self):
"""
create a control var
"""
return IrVarNode(self.graph.create_control_dep_var())
def create_var_node_from_desc(self, var_desc):
"""
Create a variable node by using an existing VarDesc in the graph.
        Depending on the given VarDesc, the created variable node may be persistable.
        Args:
            var_desc(core.VarDesc): the given variable description.
Returns:
IrVarNode: the created variable node.
"""
return IrVarNode(self.graph.create_var_node(var_desc))
def create_op_node(self, op_type, attrs, inputs, outputs):
"""
        Create an operator node in the graph.
Args:
op_type(str): the type of the operator node.
attrs(dict): the attributes of the operator node.
inputs(dict): the inputs of the operator node.
outputs(dict): the outputs of the operator node.
Returns:
IrOpNode: the created operator node.
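        Examples:
            A minimal sketch (assumes ``in_node`` and ``out_node`` are existing
            variable nodes in the graph; the op type and attribute are illustrative):

            .. code-block:: python

                scale_op = graph.create_op_node(
                    op_type='scale',
                    attrs={'scale': 2.0},
                    inputs={'X': [in_node]},
                    outputs={'Out': [out_node]})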
"""
op_desc = core.OpDesc()
op_desc.set_type(op_type)
for attr, value in six.iteritems(attrs):
self._update_desc_attr(op_desc, attr, value)
for input_name, var_nodes in six.iteritems(inputs):
if not isinstance(var_nodes, list):
var_nodes = [var_nodes]
op_desc.set_input(input_name,
[var_node.name() for var_node in var_nodes])
for output_name, var_nodes in six.iteritems(outputs):
if not isinstance(var_nodes, list):
var_nodes = [var_nodes]
op_desc.set_output(output_name,
[var_node.name() for var_node in var_nodes])
return IrOpNode(self.graph.create_op_node(op_desc))
def create_op_node_from_desc(self, op_desc):
"""
        Create an operator node by using an existing OpDesc in the graph.
        Args:
            op_desc(core.OpDesc): the given operator description.
Returns:
IrOpNode: the created operator node.
"""
return IrOpNode(self.graph.create_op_node(op_desc))
def update_input_link(self, old_input_node, new_input_node, op_node):
"""
        Update the input link of an operator node.
        Args:
            old_input_node(IrNode): the old input node of the given op_node.
            new_input_node(IrNode): the new input node of the given op_node.
            op_node(IrOpNode): the operator node whose input link needs to be updated.
"""
assert old_input_node.node in self.graph.nodes() and new_input_node.node in \
self.graph.nodes() and op_node.node in self.graph.nodes(), \
'The three arguments(old_input_node&new_input_node&op_node) must be in the graph nodes.'
old_input_node.remove_output(op_node)
op_node.remove_input(old_input_node)
new_input_node.append_output(op_node)
op_node.append_input(new_input_node)
op_node.rename_input(old_input_node.name(), new_input_node.name())
def update_output_link(self, old_output_node, new_output_node, op_node):
"""
        Update the output link of an operator node.
        Args:
            old_output_node(IrNode): the old output node of the given op_node.
            new_output_node(IrNode): the new output node of the given op_node.
            op_node(IrOpNode): the operator node whose output link needs to be updated.
"""
assert old_output_node.node in self.graph.nodes() and new_output_node.node in \
self.graph.nodes() and op_node.node in self.graph.nodes(), \
'The three arguments(old_output_node &new_output_node &op_node) must be in the graph nodes.'
old_output_node.remove_input(op_node)
op_node.remove_output(old_output_node)
new_output_node.append_input(op_node)
op_node.append_output(new_output_node)
op_node.rename_output(old_output_node.name(), new_output_node.name())
def link_to(self, node_in, node_out):
"""
Connect two nodes.
Args:
node_in(IrNode): the input node.
node_out(IrNode): the output node.
"""
assert node_in.node in self.graph.nodes() and node_out.node in self.graph.nodes(), \
'The two arguments(node_in&node_out) must be in the graph nodes.'
node_in.append_output(node_out)
node_out.append_input(node_in)
def safe_remove_nodes(self, remove_nodes):
"""
Remove nodes safely since links connected to these removed nodes are
also removed.
Args:
remove_nodes(set): the nodes prepared to be removed.
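        Examples:
            A minimal sketch (removes every dropout op node; the op type is illustrative):

            .. code-block:: python

                dropout_nodes = {n for n in graph.all_op_nodes() if n.name() == 'dropout'}
                graph.safe_remove_nodes(dropout_nodes)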
"""
if not isinstance(remove_nodes, set):
if isinstance(remove_nodes, Iterable):
remove_nodes = set(remove_nodes)
else:
remove_nodes = {remove_nodes}
original_nodes = {n.node for n in remove_nodes}
core.graph_safe_remove_nodes(self.graph, original_nodes)
def resolve_hazard(self):
ordered_nodes = core.topology_sort(self.graph)
var_nodes = dict()
for node in ordered_nodes:
if node.is_op() and node.op() is not None:
for each_var_name in node.op().input_arg_names():
if each_var_name not in var_nodes:
var_nodes[each_var_name] = [
self._find_node_by_name(node.inputs, each_var_name)
]
for each_var_name in node.op().output_arg_names():
if each_var_name not in var_nodes:
var_nodes[each_var_name] = [
self._find_node_by_name(node.outputs, each_var_name)
]
else:
var_nodes[each_var_name].append(
self._find_node_by_name(node.outputs,
each_var_name))
self.graph.resolve_hazard(var_nodes)
def has_circle(self):
"""
Check if the graph has a circle.
Returns:
bool: True if the graph has a circle else False.
"""
return core.has_circle(self.graph)
def graph_num(self):
"""
Count the number of unconnected graphs in this graph.
Returns:
int: the number of unconnected graphs.
"""
return core.graph_num(self.graph)
def topology_sort(self):
"""
Perform the topology sort operation on the graph.
Notes: the `graph` can not contain a circle.
Returns:
list(IrNode): nodes in topology order.
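        Examples:
            A minimal sketch (assumes ``graph`` is an acyclic IrGraph):

            .. code-block:: python

                for node in graph.topology_sort():
                    print(node.name())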
"""
ordered_nodes = core.topology_sort(self.graph)
return [IrNode(n) for n in ordered_nodes]
def build_adjacency_list(self):
"""
Build an adjacency list of operations for the `graph`.
Returns:
dict{IrNode: set(IrNode)}: the adjacency list.
"""
adj_list = core.build_adjacency_list(self.graph)
wrapped_adj_list = dict()
for k, v in six.iteritems(adj_list):
wrapped_adj_list[IrNode(k)] = {IrNode(n) for n in v}
return wrapped_adj_list
def draw(self, save_path, name, marked_nodes=None, remove_ctr_var=True):
"""
        Draw the graph. If the `dot` command is installed, the drawn graph
        will be saved in pdf format; otherwise the dot format is used.
Args:
save_path(str): the save path of drawn graph.
name(str): the name of drawn graph.
marked_nodes(set(IrNode)): nodes that are needed to be marked.
Default value is None.
remove_ctr_var(bool): If it is set True, all control variable nodes
in the graph will be removed. Default value is True.
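        Examples:
            A minimal sketch (the save path and name are illustrative; pdf output
            requires the ``dot`` command to be installed):

            .. code-block:: python

                graph.draw(save_path='/tmp/ir_graphs', name='network')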
"""
def _convert_to_pdf(dot_file_path):
pdf_save_path = os.path.splitext(dot_file_path)[0] + '.pdf'
exited_code = subprocess.call(
'dot -Tpdf ' + dot_file_path + ' -o ' + pdf_save_path,
shell=True)
if exited_code != 0:
print('The dot command is needed for creating pdf files.')
print('The {} is saved as the dot filetype.'.format(
dot_file_path))
remove_ctr_vars = set()
if remove_ctr_var:
for node in self.all_var_nodes():
if node.is_ctrl_var():
remove_ctr_vars.add(node)
self.safe_remove_nodes(remove_ctr_vars)
print('Total ops num = {}.'.format(len(self.all_op_nodes())))
if marked_nodes is not None:
if not isinstance(marked_nodes, set):
if isinstance(marked_nodes, Iterable):
marked_nodes = set(marked_nodes)
else:
marked_nodes = {marked_nodes}
marked_nodes = {n.node for n in marked_nodes}
remove_ctr_vars = {n.node for n in remove_ctr_vars}
marked_nodes = marked_nodes - remove_ctr_vars
if self.graph.has('__graphviz__marked_node__'):
self.graph.erase('__graphviz__marked_node__')
self.graph.set('__graphviz__marked_node__', marked_nodes)
if not os.path.exists(save_path):
os.makedirs(save_path)
viz_dot_path = os.path.join(save_path, name) + '.dot'
viz_pass = core.get_pass('graph_viz_pass')
viz_pass.set('graph_viz_path', viz_dot_path)
viz_pass.apply(self.graph)
_convert_to_pdf(viz_dot_path)
def to_program(self):
"""
Convert the graph into a Program.
WARN: When the graph includes backward operator nodes, the
        conversion process may fail. Usually, this function is
only used to convert a test graph.
Returns:
Program: a program converted from the graph.
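        Examples:
            A minimal sketch (assumes ``graph`` contains no backward op nodes):

            .. code-block:: python

                inference_program = graph.to_program()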
"""
convert_pass = core.get_pass('graph_to_program_pass')
desc = core.ProgramDesc()
convert_pass.set_not_owned('program', desc)
convert_pass.apply(self.graph)
program = Program._construct_from_desc(desc)
return program
def _find_node_by_name(self, nodes, node_name):
"""
        Find a node in the given node set by name.
"""
target_node = None
for n in nodes:
if n.name() == node_name:
target_node = n
assert target_node is not None, "Cannot find the target node in the giving set."
return target_node
def _update_desc_attr(self, desc, name, val):
"""
Update the value of desc's attribute by attribute's name.
"""
if isinstance(val, Block):
desc.set_block_attr(name, val.desc)
elif isinstance(val, list) and val and all(
isinstance(v, Block) for v in val):
desc.set_blocks_attr(name, [v.desc for v in val])
elif isinstance(val, core.BlockDesc) or \
isinstance(val, core.ProgramDesc):
desc.set_serialized_attr(name, val.serialize_to_string())
else:
desc._set_attr(name, val)
class Program(object):
"""
    Create a Python Program. It has at least one :ref:`api_guide_Block_en`; when a
    control flow op like conditional_block or :ref:`api_paddle_fluid_layers_While` is included,
    it will contain nested blocks.
    Please refer to
    `framework.proto <https://github.com/PaddlePaddle/Paddle/blob/develop/paddle/fluid/framework/framework.proto>`_
    for details.
    A set of Programs usually contains a startup program and a main program.
    The startup program performs initial work such as initializing the ``Parameter`` s, while the main
    program holds the network structure and variables used for training.
    A set of Programs can be used for training or testing. For training,
    Paddle keeps all the content needed to build the training network; for testing,
    Paddle prunes content that is irrelevant to testing, e.g.
    backward ops and variables.
**Notes**:
**we have** :ref:`api_paddle_fluid_framework_default_startup_program` **and** :ref:`api_paddle_fluid_framework_default_main_program`
    **by default; the pair shares the parameters. The** :ref:`api_paddle_fluid_framework_default_startup_program` **only runs once to initialize parameters,**
    :ref:`api_paddle_fluid_framework_default_main_program` **runs in every mini batch and adjusts the weights.**
Returns:
Program: An empty Program.
Examples:
.. code-block:: python
import paddle
import paddle.static as static
paddle.enable_static()
main_program = static.Program()
startup_program = static.Program()
with static.program_guard(main_program=main_program, startup_program=startup_program):
x = static.data(name="x", shape=[-1, 784], dtype='float32')
y = static.data(name="y", shape=[-1, 1], dtype='int32')
z = static.nn.fc(name="fc", x=x, size=10, activation="relu")
print("main program is: {}".format(main_program))
print("start up program is: {}".format(startup_program))
"""
def __init__(self):
self.desc = core.ProgramDesc()
self.blocks = [Block(self, 0)]
self.current_block_idx = 0
global global_prog_seed
self._seed = global_prog_seed
self._current_role = core.op_proto_and_checker_maker.OpRole.Forward
self.__op_role_var = []
# for distribute training
# _is_distributed = True if under distributed training
self._is_distributed = False
# _is_chief = True if the trainer is the first one, usually No.0
self._is_chief = False
# _parameters_on_pservers records all the parameters distributed on parameter servers.
self._parameters_on_pservers = None
# _endpoints is a list about parameter servers ip:port, such as ["ip:port","ip:port"]
self._endpoints = []
# if current role is parameter server, the _ps_endpoint is its "ip:port"
self._ps_endpoint = None
# trainers_endpoints, it is used for distribution.
self._trainers_endpoints = []
# the distributed lookup table names
self._distributed_lookup_table = None
        # use Deep Gradient Compression or not
self._enable_dgc = False
self._use_lamb = False
self._nccl_comm_num = 1
self._use_hierarchical_allreduce = False
self._hierarchical_allreduce_inter_nranks = 0
# if this program has been optimized by distributed optimizer
# fleet_opt will be given a value
self._fleet_opt = None
self._program_config = None
# assigned if this program has been parsed by a pipeline optimizer
self._pipeline_opt = None
# appending gradients times
self._appending_grad_times = 0
# identifier for auto checkpoint
self._auto_checkpoint_name = unique_name.generate(
"__auto_checkpoint_program__")
# compiled program, i.e. Graph
self._graph = None
def global_seed(self, seed=0):
"""
Set global seed for Program
Returns:
None.
Examples:
.. code-block:: python
import paddle
import paddle.static as static
paddle.enable_static()
prog = static.default_main_program()
print(prog.random_seed)
## 0
## the default random seed is 0
prog.global_seed(102)
prog1 = static.default_main_program()
print(prog1.random_seed)
## 102
## the random seed is 102
"""
global global_prog_seed
global_prog_seed = seed
self._seed = global_prog_seed
@property
def _op_role(self):
"""
        The operator role. An enum in {Forward, Backward, Optimize}.
Notes: this is a low level API. It is used only for ParallelExecutor to
duplicate or schedule operator to devices.
For example, the forward operator should be executed on every device.
The backward operator should be executed on every device and the
parameter gradient of backward (use :code:`_op_role_var` to get this
variable) operator should be merged to one device. The optimization
operators should be executed on only one device and broadcast the
optimization result, i.e., the new parameter, to every other device.
"""
return self._current_role
@_op_role.setter
def _op_role(self, role):
self._current_role = role
@property
def _op_role_var(self):
"""
The auxiliary variables for :code:`_op_role` property.
See Also: :code:`Program._op_role`'s documentation for details.
Notes: This is a very low-level API. Users should not use it directly.
"""
return self.__op_role_var
@signature_safe_contextmanager
def _backward_role_guard(self):
tmp_role = self._current_role
OpRole = core.op_proto_and_checker_maker.OpRole
self._current_role = OpRole.Backward
try:
yield
finally:
self._current_role = tmp_role
@signature_safe_contextmanager
def _optimized_guard(self, param_and_grads):
"""
A with guard to set :code:`Optimization` :code:`OpRole` and
:code:`OpRoleVar` automatically.
Notes: This is a very low level API. Users should not use it directly.
Args:
param_and_grads(list): The variables (names) to be optimized.
Examples:
>>> import paddle.fluid as fluid
>>> p, g = backward(...)
>>> with program._optimized_guard([p,g]):
>>> p = p - 0.001 * g
"""
tmp_role = self._current_role
tmp_var = self.__op_role_var
OpRole = core.op_proto_and_checker_maker.OpRole
self._current_role = OpRole.Optimize
self.__op_role_var = [
var.name if isinstance(var, Variable) else var
for var in param_and_grads
]
try:
yield
finally:
self.__op_role_var = tmp_var
self._current_role = tmp_role
@signature_safe_contextmanager
def _lr_schedule_guard(self, is_with_opt=False):
"""
A with guard to set :code:`LRSched` :code:`OpRole` and
:code:`OpRoleVar` automatically. The :code:`OpRoleVar` is
set to the target learning rate.
Notes: This is a very low level API. Users should not use it directly.
Args:
            is_with_opt: Only set to true if these ops are in the middle
                of a bunch of optimize ops so that they can be treated
                correctly. For example, sgd->lr_op->sgd->lr_op->sgd.
Examples:
>>> import paddle.fluid as fluid
>>> p, g = backward(...)
>>> with program.lr_schedule_guard():
>>> lr = lr * decay
"""
tmp_role = self._current_role
tmp_var = self.__op_role_var
OpRole = core.op_proto_and_checker_maker.OpRole
self._current_role = OpRole.LRSched
if is_with_opt:
self._current_role = int(OpRole.LRSched) | int(OpRole.Optimize)
# TODO(typhoonzero): how to set target learning rate var
self.__op_role_var = []
try:
yield
finally:
self.__op_role_var = tmp_var
self._current_role = tmp_role
def __str__(self):
"""
Get the protobuf debug string of this Program.
Returns:
(str): The protobuf debug string.
Raises:
ValueError: If any of required fields is not set.
"""
return self._to_readable_code()
def _to_readable_code(self, skip_op_callstack=True):
"""
Get readable debug string of Program.
.. note::
If you want to get the debug string in protobuf format,
please use :code:`to_string` method.
Args:
skip_op_callstack(bool): whether to skip parsing Operator's attribute
op_callstack, default value is True
Returns:
string: The formatted Program string.
Examples:
.. code-block:: python
import paddle
import paddle.static as static
paddle.enable_static()
cur_program = static.Program()
cur_block = cur_program.current_block()
new_var = cur_block.create_var(name="X",
shape=[-1, 23, 48],
dtype='float32')
new_op = cur_block.append_op(type="abs",
inputs={"X": [new_var]},
outputs={"Out": [new_var]})
print(cur_program._to_readable_code())
"""
assert isinstance(
skip_op_callstack, bool
), "skip_op_callstack parameter's type is error, expect bool, received %s".format(
type(skip_op_callstack))
program_str = ""
for block in self.blocks:
program_str += block._to_readable_code(skip_op_callstack)
program_str += '\n'
return program_str
def to_string(self, throw_on_error, with_details=False):
"""
To debug string.
Args:
throw_on_error (bool): raise Value error when any of required fields is not set.
with_details (bool): True if more details about variables and parameters, e.g., :code:`trainable`, :code:`optimize_attr`, need to print.
Returns:
str: The debug string describe current Program.
Raises:
ValueError: If any of required fields is not set and throw_on_error is True.
Examples:
.. code-block:: python
import paddle
import paddle.static as static
paddle.enable_static()
prog = static.default_main_program()
x = static.data(name="X", shape=[2,3], dtype="float32")
pred = static.nn.fc(x, size=3)
prog_string = prog.to_string(throw_on_error=True, with_details=False)
prog_string_with_details = prog.to_string(throw_on_error=False, with_details=True)
print("program string without detail: {}".format(prog_string))
print("program string with detail: {}".format(prog_string_with_details))
"""
assert isinstance(
throw_on_error, bool
), "The type of throw_on_error parameter is wrong, expected bool, but received {}.".format(
type(throw_on_error))
assert isinstance(
with_details, bool
), "The type of with_details parameter is wrong, expected bool, but received {}.".format(
type(with_details))
if with_details:
res_str = ""
for block in self.blocks:
res_str += block.to_string(throw_on_error, with_details)
else:
protostr = self.desc.serialize_to_string()
proto = framework_pb2.ProgramDesc.FromString(
six.binary_type(protostr))
res_str = _debug_string_(proto, throw_on_error)
return res_str
def _get_desc(self):
"""
Get the C++ side of `ProgramDesc` object pointer. The C++ object is
exposed by :code:`pybind`.
Notes: This is a very low level API. Users should not use this API
directly.
"""
return self.desc
def _version(self):
return self.desc._version()
def clone(self, for_test=False):
"""
.. note::
1. :code:`Program.clone()` method DOES NOT clone :ref:`api_paddle_io_DataLoader` .
2. We recommend using :code:`clone` before calling :code:`Optimizer.minimize` .
3. This API has no effect in Dygraph Mode.
Create a new Program with only the forward content of the original one when ``for_test=True``.
Create a new Program identical to the original one when ``for_test=False``.
Some operators, e.g., :ref:`api_paddle_fluid_layers_batch_norm` , behave differently between
training and testing. They have an attribute, :code:`is_test`, to
control this behaviour. This method will change the :code:`is_test`
attribute of them to :code:`True` when :code:`for_test=True`.
* Set for_test to False when you want to clone the program for training.
* Set for_test to True when you want to clone the program for testing.
We will prune the backward and optimize part of the program when you
use :code:`clone` after :code:`Optimizer.minimize`, but we still
recommend you to use :code:`clone` before calling :code:`Optimizer.minimize`.
For Example:
::
import paddle
import paddle.static as static
paddle.enable_static()
img = static.data(name='image', shape=[None, 784])
pred = static.nn.fc(x=img, size=10, activation='relu')
loss = paddle.mean(pred)
# Here we use clone before Momentum
test_program = static.default_main_program().clone(for_test=True)
optimizer = paddle.optimizer.Momentum(learning_rate=0.01, momentum=0.9)
optimizer.minimize(loss)
Args:
for_test (bool): True if change the :code:`is_test` attribute of operators to :code:`True`
and prune the backward and optimize part of the program. The default value is :code:`False` .
Returns:
Program: A new Program with only the forward content of the original one when ``for_test=True``. A new Program identical to the original one when ``for_test=False``.
Examples:
.. note::
The Program's op order may be different after :code:`clone` and
this will not affect your training or testing progress. In the following
example we give you a simple method :code:`print_prog(program)` to
print the Program Descs in order, so you can make sure you get the same print result
after :code:`clone`:
.. code-block:: python
import six
def print_prog(prog):
for name, value in sorted(six.iteritems(prog.block(0).vars)):
print(value)
for op in prog.block(0).ops:
print("op type is {}".format(op.type))
print("op inputs are {}".format(op.input_arg_names))
print("op outputs are {}".format(op.output_arg_names))
for key, value in sorted(six.iteritems(op.all_attrs())):
if key not in ['op_callstack', 'op_role_var']:
print(" [ attrs: {}: {} ]".format(key, value))
1. To clone a test program, the sample code is:
.. code-block:: python
import six
import paddle
import paddle.static as static
import paddle.utils as utils
import paddle.nn.functional as F
paddle.enable_static()
def print_prog(prog):
for name, value in sorted(six.iteritems(prog.block(0).vars)):
print(value)
for op in prog.block(0).ops:
print("op type is {}".format(op.type))
print("op inputs are {}".format(op.input_arg_names))
print("op outputs are {}".format(op.output_arg_names))
for key, value in sorted(six.iteritems(op.all_attrs())):
if key not in ['op_callstack', 'op_role_var']:
print(" [ attrs: {}: {} ]".format(key, value))
train_program = static.Program()
startup_program = static.Program()
# startup_program is used to do some parameter init work,
# and main program is used to hold the network
with static.program_guard(train_program, startup_program):
with utils.unique_name.guard():
img = static.data(name='image', shape=[None, 784])
hidden = static.nn.fc(x=img, size=200, activation='relu')
hidden = F.dropout(hidden, p=0.5)
loss = F.cross_entropy(
input=static.nn.fc(x=hidden, size=10, activation='softmax'),
label=static.data(name='label', shape=[1], dtype='int64'))
avg_loss = paddle.mean(loss)
test_program = train_program.clone(for_test=True)
print_prog(test_program)
# Due to parameter sharing usage for train and test, so we need to use startup program of train
# instead of using test startup program, while nothing is in test's startup program
# In Paddle we will share weights by using the same Tensor name. In train and test program
# all parameters will have the same name and this can make train and test program sharing parameters,
# that's why we need to use startup program of train. And for startup program of test, it has nothing,
# since it is a new program.
with static.program_guard(train_program, startup_program):
with utils.unique_name.guard():
sgd = paddle.optimizer.SGD(learning_rate=1e-3)
sgd.minimize(avg_loss)
2. The clone method can be avoided if you create the program for training and the program for testing individually.
.. code-block:: python
import six
import paddle
import paddle.static as static
import paddle.utils as utils
import paddle.nn.functional as F
paddle.enable_static()
def print_prog(prog):
for name, value in sorted(six.iteritems(prog.block(0).vars)):
print(value)
for op in prog.block(0).ops:
print("op type is {}".format(op.type))
print("op inputs are {}".format(op.input_arg_names))
print("op outputs are {}".format(op.output_arg_names))
for key, value in sorted(six.iteritems(op.all_attrs())):
if key not in ['op_callstack', 'op_role_var']:
print(" [ attrs: {}: {} ]".format(key, value))
def network():
img = static.data(name='image', shape=[None, 784])
hidden = static.nn.fc(x=img, size=200, activation='relu')
hidden = F.dropout(hidden, p=0.5)
loss = F.cross_entropy(
input=static.nn.fc(x=hidden, size=10, activation='softmax'),
label=static.data(name='label', shape=[1], dtype='int64'))
avg_loss = paddle.mean(loss)
return avg_loss
train_program_2 = static.Program()
startup_program_2 = static.Program()
test_program_2 = static.Program()
with static.program_guard(train_program_2, startup_program_2):
with utils.unique_name.guard():
avg_loss = network()
sgd = paddle.optimizer.SGD(learning_rate=1e-3)
sgd.minimize(avg_loss)
# the test startup program is not used.
with static.program_guard(test_program_2, startup_program_2):
with utils.unique_name.guard():
avg_loss = network()
print_prog(test_program_2)
The two code snippets above will generate and print the same programs.
"""
# NOTE(zhiqiu): we sync the original program first, since its Python blocks may differ from
# its desc due to the desc being modified in C++ space. E.g. the save op will add kLookupTablePath to the desc.
self._sync_with_cpp()
pruned_origin_block_id_map = None
if for_test:
forward_prog = Program()
forward_prog.desc, pruned_origin_block_id_map = core.prune_backward(
self.desc)
forward_prog.blocks = [
Block(forward_prog, i)
for i in six.moves.range(forward_prog.desc.num_blocks())
]
forward_prog._sync_with_cpp()
p = forward_prog._inference_optimize(prune_read_op=False)
else:
p = Program()
p.current_block_idx = self.current_block_idx
p._seed = self._seed
p.desc = core.ProgramDesc(self.desc)
p.blocks = [
Block(p, i) for i in six.moves.range(self.desc.num_blocks())
]
p._current_role = self._current_role
p.__op_role_var = self.__op_role_var
p._appending_grad_times = self._appending_grad_times
if hasattr(self, 'lr_sheduler'):
p.lr_sheduler = self.lr_sheduler
# NOTE(zhiqiu): we sync the cloned program, to update its program by
# its desc.
p._sync_with_cpp()
p._copy_param_info_from(self)
p._copy_data_info_from(self, pruned_origin_block_id_map)
p._copy_dist_param_info_from(self)
return p
def _prune(self, targets):
"""
Prune operators and variables which are not needed to generate
:code:`targets`.
Notes: This is a very low level API. Users should not use this API
directly. This API is in flux and not stable.
Args:
targets(list|Variable|Operator): A list of variables, operators, or variable names
that the pruned program must still be able to generate
Returns:
Program: A new, pruned program.
"""
return self._prune_with_input([], targets)
def _prune_with_input(self, feeded_var_names, targets):
"""
Prune operators and variables which are not needed to generate
:code:`targets`. Also prune the operators that generate the variables in
feeded_var_names, since those variables are fed directly.
Notes: This is a very low level API. Users should not use this API
directly. This API is in flux and not stable.
Args:
feeded_var_names(list|str): A list of variable names from where
pruning start. If it is set as [], this API works just like _prune()
targets(list|Variable|Operator): A list of variables, operators, or variable names
that the pruned program must still be able to generate
Returns:
Program: A new, pruned program.
"""
# NOTE(zhiqiu): we sync the original program first, since its Python blocks may differ from
# its desc due to the desc being modified in C++ space. E.g. the save op will add kLookupTablePath to the desc.
self._sync_with_cpp()
if not isinstance(feeded_var_names, list):
feeded_var_names = [feeded_var_names]
if not isinstance(targets, list):
targets = [targets]
for var in feeded_var_names:
if not isinstance(var, six.string_types):
raise ValueError(
"All feeded_var_names of Program._prune_with_input() can only be "
"str, but received %s." % type(var))
targets_idx = []
for t in targets:
if not isinstance(t, Operator):
if isinstance(t, Variable):
name = t.name
elif isinstance(t, six.string_types):
name = str(t)
else:
raise ValueError(
"All targets of Program._prune_with_input() can only be "
"Variable, Operator or str, but received %s." % type(t))
# NOTE(zhiqiu): For a variable that is both fed and in fetch_list, there are two cases:
# (1) the variable is a leaf, and no op generates it;
# (2) the variable is not a leaf, and we need to prune the op that generates it.
# In both cases, we can simply skip looking for its target_op.
if name in feeded_var_names:
continue
# After transpiler processing, the op that output this
# variable maybe has been changed, so t.op is not reliable
# and we need to find the current op that generate this
# variable here.
target_op = None
global_block = self.global_block()
for idx, op in enumerate(global_block.ops):
if name in op.output_arg_names:
# NOTE(zhiqiu): Find op that generate target name.
# Skip optimize op except for optimize op in targets,
# since optimize op generates parameters.
if op._is_optimize_op() and op not in targets:
continue
else:
target_op = op
break
if target_op is None:
raise ValueError(
"The target variable used for pruning should have an "
"associated operator that generates it.")
else:
targets_idx.append([target_op.block.idx, target_op.idx])
else:
targets_idx.append([t.block.idx, t.idx])
res = Program()
res.desc, pruned_origin_block_id_map = core.prune(self.desc,
set(feeded_var_names),
targets_idx)
res.blocks = [
Block(res, i) for i in six.moves.range(res.desc.num_blocks())
]
res._sync_with_cpp()
res._copy_param_info_from(self)
res._copy_data_info_from(self, pruned_origin_block_id_map)
res._copy_dist_param_info_from(self)
return res
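# A hedged sketch of how _prune / _prune_with_input might be used (for
# illustration only; `prog`, `loss` and 'image' are hypothetical objects,
# not defined in this module):
#
#     # keep only the ops needed to compute `loss`
#     pruned = prog._prune(targets=[loss])
#     # additionally treat 'image' as fed, so its producer ops are dropped too
#     pruned = prog._prune_with_input(feeded_var_names=['image'], targets=[loss])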
def _inference_optimize(self, prune_read_op=True):
"""
This method will create a new program and do following adjustments on it:
1. Remove all reader variables and their creator ops if exist.
2. Remove the :code:`read_op` if exists.
3. change the :code:`is_test`
attribute of operators to :code:`True`. All the :code:`Parameter`
information will be lost.
Args:
prune_read_op(bool): remove the read ops that are added by py_reader
for cpp inference library
Notes: This API is a very low level API. Use
:code:`Program.clone(for_test=True)` instead.
Returns:
Program: The new program.
"""
res = Program()
res.desc = core.ProgramDesc(self.desc)
# remove all readers and the read_op if exist
read_op_idx = 0
root_block = res.desc.block(0)
if prune_read_op:
while True:
if read_op_idx >= root_block.op_size() or root_block.op(
read_op_idx).type() == 'read':
break
read_op_idx += 1
if read_op_idx < root_block.op_size():
root_block._remove_op(0, read_op_idx + 1)
for var in root_block.all_vars():
if var.type() == core.VarDesc.VarType.READER:
root_block._remove_var(cpt.to_bytes(var.name()))
# change all `is_test` attributes to True
for i in six.moves.range(res.desc.num_blocks()):
block = res.desc.block(i)
for j in six.moves.range(block.op_size()):
op = block.op(j)
if op.has_attr('is_test'):
op._set_attr('is_test', True)
res.blocks = [
Block(res, i) for i in six.moves.range(res.desc.num_blocks())
]
res._sync_with_cpp()
return res
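# Illustrative note only: the public way to obtain an inference-friendly
# program is `prog.clone(for_test=True)`, which calls this method internally
# after the backward part has been pruned.
#
#     test_prog = train_prog.clone(for_test=True)   # preferred over calling
#                                                   # _inference_optimize() directly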
@staticmethod
def parse_from_string(binary_str):
"""
.. note::
1. All information about parameters will be lost after serialization;
2. This API has no effect in Dygraph mode.
Deserialize a Program from `protobuf <https://en.wikipedia.org/wiki/Protocol_Buffers>`_ binary string.
This method is usually used together with saving and loading a model.
Args:
binary_str (str): the binary protobuf string.
Returns:
Program: A deserialized Program.
Examples:
.. code-block:: python
import paddle
import paddle.static as static
paddle.enable_static()
startup_prog = static.Program()
main_prog = static.Program()
with static.program_guard(startup_prog, main_prog):
x = static.data(name='X', shape=[1000, 784], dtype='float32')
y = static.data(name='Y', shape=[784, 100], dtype='float32')
z = paddle.matmul(x=x, y=y)
binary_str = static.default_main_program().desc.serialize_to_string()
prog_restored = static.default_main_program().parse_from_string(binary_str)
print(static.default_main_program())
print(prog_restored)
"""
p = Program()
p.desc = core.ProgramDesc(binary_str)
p.blocks = [Block(p, i) for i in six.moves.range(p.desc.num_blocks())]
p._sync_with_cpp()
return p
@staticmethod
def _construct_from_desc(desc):
"""
Construct a program from program desc.
Args:
desc(core.ProgramDesc): The program desc for constructing.
Returns:
Program: A program.
"""
p = Program()
p.desc = desc
p.blocks = [Block(p, i) for i in six.moves.range(p.desc.num_blocks())]
p._sync_with_cpp()
return p
@property
def random_seed(self):
"""
The default random seed for random operators in Program. ``0`` means get
the random seed from random device.
.. note::
It must be set before the operators have been added.
Returns:
int64: Random seed in current Program
Examples:
.. code-block:: python
import paddle
import paddle.static as static
import paddle.nn.functional as F
paddle.enable_static()
prog = static.default_main_program()
random_seed = prog.random_seed
x_var = static.data(name="X", shape=[3,3], dtype="float32")
print(random_seed)
## 0
## the default random seed is 0
# Here we need to set random seed before we use paddle.nn.functional.dropout
prog.random_seed = 1
z_var = F.dropout(x_var, 0.7)
print(prog.random_seed)
## 1
## the random seed is changed to 1
"""
return self._seed
@property
def num_blocks(self):
"""
The number of :ref:`api_guide_Block_en` in this Program.
.. note::
This API has no effect in Dygraph mode.
Returns:
int(Platform-dependent size): num of :ref:`api_guide_Block_en` in current Program
Examples:
.. code-block:: python
import paddle
import paddle.static as static
paddle.enable_static()
prog = static.default_main_program()
num_blocks = prog.num_blocks
print(num_blocks)
# print result:
# 1
"""
return self.desc.num_blocks()
@random_seed.setter
def random_seed(self, seed):
if not isinstance(seed, int):
raise ValueError(
"Program.random_seed's input seed must be an integer, but received %s."
% type(seed))
self._seed = seed
def __repr__(self):
return self.__str__()
def global_block(self):
"""
.. note::
This API has no effect in Dygraph mode.
Get the first :ref:`api_guide_Block_en` of this Program.
Returns:
:ref:`api_guide_Block_en`: The first :ref:`api_guide_Block_en` of this Program.
Examples:
.. code-block:: python
import paddle
import paddle.static as static
paddle.enable_static()
prog = static.default_main_program()
gb_block = prog.global_block()
print(gb_block)
"""
return self.blocks[0]
def block(self, index):
"""
.. note::
This API has no effect in Dygraph mode.
Get the :code:`index` :ref:`api_guide_Block_en` of this Program
Args:
index (int): The index of the :ref:`api_guide_Block_en` to get
Returns:
:ref:`api_guide_Block_en`: The :code:`index` block
Examples:
.. code-block:: python
import paddle
import paddle.static as static
paddle.enable_static()
prog = static.default_main_program()
block_0 = prog.block(0)
print(block_0)
"""
return self.blocks[index]
def current_block(self):
"""
.. note::
This API has no effect in Dygraph mode.
Get the current :ref:`api_guide_Block_en` . The :code:`current` :ref:`api_guide_Block_en`
is the :ref:`api_guide_Block_en` to which operators are appended.
Returns:
:ref:`api_guide_Block_en`: The current :ref:`api_guide_Block_en`
Examples:
.. code-block:: python
import paddle
import paddle.static as static
paddle.enable_static()
prog = static.default_main_program()
current_blk = prog.current_block()
print(current_blk)
"""
return self.blocks[self.current_block_idx]
def _create_block(self, parent_idx=None):
"""
Create a new block with the :code:`parent_idx` and change the current block
to new block.
Args:
parent_idx(int): The parent block index.
Returns:
Block: The new block.
"""
new_block_idx = len(self.blocks)
parent = self.current_block() if parent_idx is None else self.block(
parent_idx)
self.desc.append_block(parent.desc)
self.current_block_idx = new_block_idx
self.blocks.append(Block(self, self.current_block_idx))
return self.current_block()
def _rollback(self):
"""
Exit a code block, i.e., roll back to the parent block.
Returns:
None
"""
self.current_block_idx = self.current_block().parent_idx
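# A minimal sketch (assumption: `prog` is a Program instance) of how nested
# blocks are created and exited; control-flow layers use this pattern
# internally:
#
#     parent = prog.current_block()
#     sub = prog._create_block()     # current block is now `sub`
#     ...                            # append ops into the sub-block
#     prog._rollback()               # current block is `parent` again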
def _sync_with_cpp(self):
"""
Synchronize Python instance to its binding C++ object instance.
If the program is modified in C++ space, this method should be invoked.
Notes: This is a very low level API. Users should not invoke it
directly.
Returns:
None
"""
for block_idx in range(len(self.blocks), self.desc.num_blocks()):
self.blocks.append(Block(self, block_idx))
for block in self.blocks:
block._sync_with_cpp()
def _copy_param_info_from(self, other):
"""
Copy the information of parameters from other program.
Notes: This is a very low level API. Users should not invoke it
directly.
Args:
other(Program): Other program
Returns:
None
"""
if not isinstance(other, Program):
raise TypeError(
"Function Program._copy_param_info_from() needs to pass in a source Program, but received %s"
% type(other))
self.global_block()._copy_param_info_from(other.global_block())
def _copy_dist_param_info_from(self, other):
"""
Copy the information of distributed information from other program.
Args:
other(Program): Other program
Returns:
None
"""
if not isinstance(other, Program):
raise TypeError(
"Function Program._copy_param_info_from() needs to pass in a source Program, but received %s"
% type(other))
self._is_distributed = other._is_distributed
self._is_chief = other._is_chief
self._parameters_on_pservers = other._parameters_on_pservers
self._endpoints = other._endpoints
self._ps_endpoint = other._ps_endpoint
self._distributed_lookup_table = other._distributed_lookup_table
def _copy_data_info_from(self, other, pruned_origin_block_id_map=None):
"""
Copy the information of data variables from other program.
Notes: This is a very low level API. Users should not invoke it
directly.
Args:
other(Program): Other program
pruned_origin_block_id_map(dict{int:int}): A dict which maps the block id in program
self to the block id in program other. For example, {0:0, 1:1, 2:3} means block 0 in self is
cloned from block 0 in other, etc. Default is None, which means default mapped,
{0:0, 1:1,..., n:n}.
Returns:
None
"""
if not isinstance(other, Program):
raise TypeError(
"Function Program._copy_param_info_from() needs to pass in a source Program, but received %s"
% type(other))
if not pruned_origin_block_id_map:
pruned_origin_block_id_map = {
i: i
for i in six.moves.range(self.desc.num_blocks())
}
# NOTE(zhiqiu): All vars in cloned program exist in original program.
# The reverse is not true, due to backward pruning.
for i, block in enumerate(self.blocks):
other_block = other.blocks[pruned_origin_block_id_map[i]]
for var in list(block.vars.values()):
other_var = other_block.var(var.name)
if other_var.is_data:
var.is_data = True
if other_var.desc.need_check_feed():
var.desc.set_need_check_feed(True)
if other_var.stop_gradient:
var.stop_gradient = True
def list_vars(self):
"""
Get all Tensors from this Program. An iterable object is returned.
Returns:
iterable Tensors: The Generator will yield every Tensor in this program.
Examples:
.. code-block:: python
import paddle
import paddle.static as static
paddle.enable_static()
prog = static.default_main_program()
img = static.data(name='img', shape=[None, 1,28,28], dtype='float32')
label = static.data(name='label', shape=[None,1], dtype='int64')
for var in prog.list_vars():
print(var)
# var img : paddle.VarType.LOD_TENSOR.shape(-1, 1, 28, 28).astype(VarType.FP32)
# var label : paddle.VarType.LOD_TENSOR.shape(-1, 1).astype(VarType.INT64)
"""
for each_block in self.blocks:
for each_var in list(each_block.vars.values()):
yield each_var
def all_parameters(self):
"""
Get all :ref:`api_guide_parameter_en` from this Program. A list object is returned.
Returns:
list[ :ref:`api_guide_parameter_en` ]: The list contains all parameters in this program.
Examples:
.. code-block:: python
import paddle
import paddle.static as static
paddle.enable_static()
program = static.default_main_program()
data = static.data(name='x', shape=[None, 13], dtype='float32')
hidden = static.nn.fc(x=data, size=10)
loss = paddle.mean(hidden)
paddle.optimizer.SGD(learning_rate=0.01).minimize(loss)
for param in program.all_parameters():
print(param)
# Here will print all parameters in current program, in this example,
# the result is like:
#
# persist trainable param fc_0.w_0 : paddle.VarType.LOD_TENSOR.shape(13, 10).astype(VarType.FP32)
# persist trainable param fc_0.b_0 : paddle.VarType.LOD_TENSOR.shape(10,).astype(VarType.FP32)
#
# Here print(param) will print out all the properties of a parameter,
# including name, type and persistable; you can access a specific
# property of a parameter, such as param.name or param.type
"""
parameters = []
for each_block in self.blocks:
parameters.extend(each_block.all_parameters())
return parameters
def state_dict(self, mode='all', scope=None):
"""
Get parameters and persistable buffers of program as a dict. The key is the name of the parameter or the name of the buffer.
The value is the tensor of this variable in the given scope.
.. note::
This function MUST be called after running the startup program.
Args:
mode(str, optional): Source of the obtained parameters and buffers.
'opt' : The return value only contains the variable in the optimizer.
'param' : The return value only contains the variable in the network, not the variable in the optimizer.
'all' : The return value contains the variable in the network and optimizer.
Default: 'all'
scope(Scope, optional) : If scope is None, the variables will be fetched from the global scope
obtained through 'paddle.static.global_scope()'. Otherwise, they will be fetched from the given scope.
Default: None
Returns:
dict: a dict contains the parameters and persistable buffers.
Examples:
.. code-block:: python
import paddle
import paddle.static as static
paddle.enable_static()
x = static.data(name="x", shape=[10, 10], dtype='float32')
y = static.nn.fc(x, 10)
z = static.nn.fc(y, 10)
place = paddle.CPUPlace()
exe = static.Executor(place)
exe.run(static.default_startup_program())
prog = static.default_main_program()
path = "./temp/model.pdparams"
paddle.save(prog.state_dict(), path)
"""
# 'framework' is a low-level module, so 'executor'
# cannot be imported at the beginning of this file.
# Therefore, it is imported dynamically here.
from .executor import global_scope
if scope is not None and not isinstance(scope, core._Scope):
raise TypeError(
"`scope` should be None or `paddle.static.Scope'` type, but received {}.".
format(type(scope)))
if scope is None:
scope = global_scope()
if not isinstance(mode, str):
raise TypeError("Type of `mode` should be string, but received {}.".
format(type(mode)))
def is_parameter(var):
return isinstance(var, Parameter)
def is_persistable(var):
if var.desc.type() == core.VarDesc.VarType.FEED_MINIBATCH or \
var.desc.type() == core.VarDesc.VarType.FETCH_LIST or \
var.desc.type() == core.VarDesc.VarType.READER:
return False
return var.persistable
def is_belong_to_optimizer(var):
if not (isinstance(var, Parameter) or var.desc.need_check_feed()):
return is_persistable(var)
return False
def condition(var):
if mode == 'param':
return is_parameter(var)
elif mode == 'opt':
return is_belong_to_optimizer(var)
elif mode == 'all':
return is_parameter(var) or is_belong_to_optimizer(var)
else:
raise ValueError(
"`mode` string should be 'param', 'opt' or 'all', but received {}.".
format(mode))
var_list = filter(condition, self.list_vars())
state_dict = dict()
for var in var_list:
var_temp = scope.find_var(var.name)
if var_temp is None:
raise ValueError(
"Can not find Variable '{}' in the scope. Make sure it is initialized".
format(var.name))
state_dict[var.name] = var_temp.get_tensor()
return state_dict
def set_state_dict(self, state_dict, scope=None):
"""
Set parameters and persistable buffers in state_dict to program.
An exception will be thrown if the shape or dtype of the parameters does not match.
.. note::
This function MUST be called after running the startup program.
Args:
state_dict(dict): the dict store parameters and persistable buffers.
The key is the name of the parameter or the name of the buffer.
The value is the tensor of this variable in the given scope.
scope(Scope, optional) : If scope is None, the values in state_dict will be set in the global scope
obtained through 'paddle.static.global_scope()'. Otherwise, they will be set in the given scope.
Default: None
Returns:
None
Examples:
.. code-block:: python
import paddle
import paddle.static as static
paddle.enable_static()
x = static.data(name="x", shape=[10, 10], dtype='float32')
y = static.nn.fc(x, 10)
z = static.nn.fc(y, 10)
place = paddle.CPUPlace()
exe = static.Executor(place)
exe.run(static.default_startup_program())
prog = static.default_main_program()
path = "./temp/model.pdparams"
paddle.save(prog.state_dict(), path)
state_dict_load = paddle.load(path)
prog.set_state_dict(state_dict_load)
"""
if not isinstance(state_dict, dict):
raise TypeError(
"Type of `state_dict` should be dict, but received {}.".format(
type(state_dict)))
vars_dict = {var.name: var for var in self.list_vars()}
condition = True if 'StructuredToParameterName@@' in state_dict else False
for name, value in state_dict.items():
if condition:
if name == "StructuredToParameterName@@":
continue
if name in state_dict['StructuredToParameterName@@']:
name = state_dict['StructuredToParameterName@@'][name]
if name in vars_dict:
try:
vars_dict[name].set_value(value, scope)
except ValueError as err:
warnings.warn(
("Skip loading for '{}'. ".format(name) + str(err)))
except TypeError as err:
warnings.warn(
("Skip loading for '{}'. ".format(name) + str(err)))
else:
warnings.warn((
"Skip loading for '{0}'. Because '{0}' not in the program.".
format(name)))
@six.add_metaclass(ParameterMetaClass)
class Parameter(Variable):
"""
Parameter is derived from Variable. A parameter is a persistable
Variable, and will be updated by optimizers after each iteration.
The training of a neural network is essentially the updating of
its parameters.
Compared to a general Variable, a Parameter has several member
variables of its own:
Args:
trainable(bool): True if the parameter needs to be updated after
iterations.
optimize_attr(map): Parameter attributes related with optimizing.
Currently, it only contains 'learning_rate'.
Default: {'learning_rate': 1.0}
regularizer(WeightDecayRegularizer): The Regularizer which will
be applied on the parameter. Default: None
do_model_average(bool): True if the model average strategy will
be applied on this parameter.
need_clip (bool): Whether the parameter gradient needs to be clipped
in the optimizer. Default is True.
"""
def __init__(self,
block,
shape,
dtype,
type=core.VarDesc.VarType.LOD_TENSOR,
**kwargs):
if shape is None:
raise ValueError("The shape of Parameter should not be None")
if dtype is None:
raise ValueError("The dtype of Parameter should not be None")
if len(shape) == 0:
raise ValueError(
"The dimensions of shape for Parameter must be greater than 0")
for each in shape:
if each < 0:
raise ValueError(
"Each dimension of shape for Parameter must be greater than 0, but received %s"
% list(shape))
Variable.__init__(
self,
block,
persistable=True,
shape=shape,
dtype=dtype,
type=type,
**kwargs)
self.trainable = kwargs.get('trainable', True)
self.optimize_attr = kwargs.get('optimize_attr', {'learning_rate': 1.0})
self.regularizer = kwargs.get('regularizer', None)
self.do_model_average = kwargs.get('do_model_average', None)
self.need_clip = kwargs.get('need_clip', True)
self.is_distributed = False
def __str__(self):
return self._to_readable_code()
def to_string(self, throw_on_error, with_details=False):
"""
To debug string.
Args:
throw_on_error(bool): raise exception when self is not initialized
when throw_on_error is True
with_details(bool): more details about variables and parameters
(e.g. trainable, optimize_attr, ...) will be printed when with_details is True
Returns(str): The debug string.
Examples:
.. code-block:: python
import paddle.fluid as fluid
prog = fluid.default_main_program()
rlt = fluid.layers.data("fake_data", shape=[1,1], dtype='float32')
debug_str = prog.to_string(throw_on_error=True, with_details=False)
print(debug_str)
"""
assert isinstance(throw_on_error, bool) and isinstance(with_details,
bool)
if with_details:
res_str = Variable.to_string(self, throw_on_error, True)
additional_attr = ("trainable", "optimize_attr", "regularizer",
"do_model_average", "need_clip")
for attr_name in additional_attr:
res_str += "%s: %s\n" % (attr_name,
cpt.to_text(getattr(self, attr_name)))
else:
res_str = Variable.to_string(self, throw_on_error, False)
return res_str
__repr__ = __str__
class ParamBase(core.VarBase):
"""
ParamBase is derived from Tensor (which is the concept in Dygraph Mode).
A ParamBase is a persistable Tensor, and will be updated by optimizers
after each iteration.
The training of a neural network is essentially the updating of
its ParamBase.
Compared to a general Tensor, a ParamBase has several member
variables of its own:
Args:
trainable(bool): True if the ParamBase needs to be updated after
iterations.
optimize_attr(map): ParamBase attributes related with optimizing.
Currently, it only contains 'learning_rate'.
Default: {'learning_rate': 1.0}
regularizer(WeightDecayRegularizer): The Regularizer which will
be applied on the ParamBase. Default: None
do_model_average(bool): True if the model average strategy will
be applied on this ParamBase.
need_clip (bool): Whether the parameter gradient needs to be clipped
in the optimizer. Default is True.
"""
@dygraph_only
def __init__(self, shape, dtype, **kwargs):
if shape is None:
raise ValueError("The shape of Parameter should not be None")
if dtype is None:
raise ValueError("The dtype of Parameter should not be None")
if len(shape) == 0:
raise ValueError(
"The dimensions of shape for Parameter must be greater than 0")
for each in shape:
if each < 0:
raise ValueError(
"Each dimension of shape for Parameter must be greater than 0, but received %s"
% list(shape))
if dtype is not None:
if not isinstance(dtype, core.VarDesc.VarType):
dtype = convert_np_dtype_to_dtype_(dtype)
name = kwargs.get('name', unique_name.generate('_param_base'))
super(ParamBase, self).__init__(dtype
if dtype else core.VarDesc.VarType.FP32,
list(shape) if shape else [], name,
core.VarDesc.VarType.LOD_TENSOR, True)
trainable = kwargs.get('trainable', True)
self.stop_gradient = not trainable
self.optimize_attr = kwargs.get('optimize_attr', {'learning_rate': 1.0})
self.regularizer = kwargs.get('regularizer', None)
self.do_model_average = kwargs.get('do_model_average', None)
self.need_clip = kwargs.get('need_clip', True)
self.is_distributed = False
# self.block = default_main_program().global_block()
@property
def trainable(self):
return not self.stop_gradient
@trainable.setter
def trainable(self, trainable):
if isinstance(trainable, bool):
self.stop_gradient = not trainable
else:
raise ValueError(
"The type of trainable MUST be bool, but the type is ",
type(trainable))
def __str__(self):
"""
Convert a ParamBase object to a readable string.
Returns(str): A readable string.
Examples:
.. code-block:: python
import paddle
linear = paddle.nn.Linear(3, 3)
print(linear.weight)
# Parameter containing:
# Tensor(shape=[3, 3], dtype=float32, place=CUDAPlace(0), stop_gradient=False,
# [[ 0.48948765, 0.05829060, -0.25524026],
# [-0.70368278, 0.52986908, -0.68742192],
# [-0.54217887, 0.48439729, 0.34082305]])
"""
return "Parameter containing:\n{tensor}".format(
tensor=super(ParamBase, self).__str__())
def __deepcopy__(self, memo):
"""
Deep copy the parameter; it always performs a Tensor copy.
Examples:
.. code-block:: python
import paddle
import copy
linear = paddle.nn.Linear(1, 3)
linear_copy = copy.deepcopy(linear)
print(linear.weight)
# Parameter containing:
# Tensor(shape=[1, 3], dtype=float32, place=CPUPlace, stop_gradient=False,
# [[-0.30929261, -0.90929240, -1.07851017]])
print(linear_copy.weight)
# Parameter containing:
# Tensor(shape=[1, 3], dtype=float32, place=CPUPlace, stop_gradient=False,
# [[-0.30929261, -0.90929240, -1.07851017]])
"""
state = copy.deepcopy(self.__dict__, memo)
state["name"] = self.name + unique_name.generate("_deepcopy")
new_param = ParamBase(self.shape, self.dtype, **state)
memo[id(self)] = new_param
new_param.copy_(self, True)
return new_param
__repr__ = __str__
# program is a global instance.
_main_program_ = Program()
_startup_program_ = Program()
def default_startup_program():
"""
Get default/global startup program.
The :code:`paddle.nn` function will append the initialization operators into startup program.
The :code:`startup_program` will initialize the parameters by the OPs.
This method will return the default or the current startup program. Users can use
:ref:`api_paddle_fluid_framework_program_guard` to switch :ref:`api_paddle_fluid_framework_Program` .
Returns:
Program: current default startup program.
Examples:
.. code-block:: python
import paddle
paddle.enable_static()
x = paddle.static.data(name="x", shape=[-1, 784], dtype='float32')
out = paddle.static.nn.fc(name="fc", x=x, size=10, activation="relu")
print("main program is: {}".format(paddle.static.default_main_program()))
print("start up program is: {}".format(paddle.static.default_startup_program()))
"""
return _startup_program_
def default_main_program():
"""
This API can be used to get the ``default main program``, which stores the
descriptions of Ops and tensors.
For example ``z = paddle.add(x, y)`` will create a new ``add``
Op and a new ``z`` tensor, and they will be recorded in ``default main program`` .
The ``default main program`` is the default value for ``Program`` parameter in
a lot of APIs. For example, the :code:`Executor.run()` will execute the
:code:`default_main_program` when the program is not specified.
If you want to switch the ``default main program``, you can use :ref:`api_paddle_fluid_framework_program_guard` .
Returns:
Program: A ``Program`` which holds the descriptions of OPs and tensors in the network.
Examples:
.. code-block:: python
import paddle
paddle.enable_static()
# Sample Network:
x = paddle.static.data(name='x', shape=[100, 100], dtype='float32')
y = paddle.static.data(name='y', shape=[100, 100], dtype='float32')
out = paddle.add(x, y)
#print the number of blocks in the program, 1 in this case
print(paddle.static.default_main_program().num_blocks) # 1
#print the default_main_program
print(paddle.static.default_main_program())
"""
return _main_program_
def switch_main_program(program):
"""
Switch the main program to a new program.
Args:
program(Program): The new main program
Returns:
Program: The previous main program
"""
global _main_program_
prev_program = _main_program_
_main_program_ = program
return prev_program
def switch_startup_program(program):
"""
Switch the startup program to a new program
Args:
program(Program): The new startup program
Returns:
Program: The previous startup program
"""
global _startup_program_
prev_program = _startup_program_
_startup_program_ = program
return prev_program
@signature_safe_contextmanager
def program_guard(main_program, startup_program=None):
"""
:api_attr: Static Graph
Change the global main program and startup program with ``with`` statement.
Layer functions in the Python ``with`` block will append operators and
Tensors to the new main programs.
Args:
main_program(Program): New main program inside ``with`` statement.
startup_program(Program, optional): New startup program inside ``with``
statement. :code:`None` means not changing startup program,
default_startup_program is still used.
Default: None.
Examples:
.. code-block:: python
import paddle
paddle.enable_static()
main_program = paddle.static.Program()
startup_program = paddle.static.Program()
with paddle.static.program_guard(main_program, startup_program):
data = paddle.static.data(name='image', shape=[None, 784, 784], dtype='float32')
hidden = paddle.static.nn.fc(x=data, size=10, activation='relu')
Notes: A temporary :code:`Program` can be used if the user does not need
to construct either a startup program or a main program.
Examples:
.. code-block:: python
import paddle
paddle.enable_static()
main_program = paddle.static.Program()
# does not care about startup program. Just pass a temporary value.
with paddle.static.program_guard(main_program, paddle.static.Program()):
data = paddle.static.data(name='image', shape=[None, 784, 784], dtype='float32')
"""
from .data_feeder import check_type
check_type(main_program, 'main_program', Program,
'paddle.static.program_guard')
main_program = switch_main_program(main_program)
if startup_program is not None:
check_type(startup_program, 'startup_program', Program,
'paddle.static.program_guard')
startup_program = switch_startup_program(startup_program)
try:
yield
finally:
switch_main_program(main_program)
if startup_program is not None:
switch_startup_program(startup_program)
def _get_var(name, program=None):
"""
Get a variable by name from the global block of a program.
Args:
name(str): name of the variable
program(Program|None): program object.
If None, default_global_program() will be used.
Returns:
Variable
"""
if program is None:
program = default_main_program()
assert isinstance(name, str)
assert isinstance(program, Program)
return program.global_block().var(name)
@signature_safe_contextmanager
def _dygraph_guard(tracer):
global _dygraph_tracer_
tmp_tracer = _dygraph_tracer_
_dygraph_tracer_ = tracer
core._switch_tracer(tracer)
try:
yield
finally:
core._switch_tracer(tmp_tracer)
_dygraph_tracer_ = tmp_tracer
@signature_safe_contextmanager
def _dygraph_place_guard(place):
global _global_expected_place_
tmp_place = _global_expected_place_
_global_expected_place_ = place
_set_dygraph_tracer_expected_place(place)
try:
yield
finally:
_global_expected_place_ = tmp_place
_set_dygraph_tracer_expected_place(tmp_place)
def switch_device(device):
global _current_device
pre_device = _current_device
_current_device = device
return pre_device
@signature_safe_contextmanager
def device_guard(device=None):
"""
**Notes**:
**The API only supports static mode.**
A context manager that specifies the device on which the OP will be placed.
Args:
device(str|None): Specify the device to use in the context. It should be 'cpu' or 'gpu',
When it is set to 'cpu' or 'gpu', all OPs created in the context will be
placed on CPUPlace or CUDAPlace. When 'gpu' is set and the program runs on
single-card, the device index will be the same as the device on which the
executor runs. Default: None, OPs in this context will be automatically
assigned devices.
Examples:
.. code-block:: python
import paddle
paddle.enable_static()
support_gpu = paddle.is_compiled_with_cuda()
place = paddle.CPUPlace()
if support_gpu:
place = paddle.CUDAPlace(0)
# if GPU is supported, the three OPs below will be automatically assigned to CUDAPlace(0)
data1 = paddle.full(shape=[1, 3, 8, 8], fill_value=0.5, dtype='float32')
data2 = paddle.full(shape=[1, 3, 64], fill_value=0.5, dtype='float32')
shape = paddle.shape(data2)
with paddle.static.device_guard("cpu"):
# Ops created here will be placed on CPUPlace
shape = paddle.slice(shape, axes=[0], starts=[0], ends=[4])
with paddle.static.device_guard('gpu'):
# if GPU is supported, OPs created here will be placed on CUDAPlace(0), otherwise on CPUPlace
out = paddle.reshape(data1, shape=shape)
exe = paddle.static.Executor(place)
exe.run(paddle.static.default_startup_program())
result = exe.run(fetch_list=[out])
"""
index = None
if device and ':' in device:
device, index = device.split(':')
if device == 'cpu':
raise ValueError("Should not set device id for cpu.")
if device not in ['cpu', 'gpu', '', None]:
raise ValueError(
"The Attr(device) should be 'cpu' or 'gpu', and it can also be empty string or None "
"when there is no need to specify device. But received %s" % device)
if index:
device = ":".join([device, index])
pre_device = switch_device(device)
try:
yield
finally:
switch_device(pre_device)
def set_flags(flags):
"""
This function sets the GFlags value in Paddle.
Args:
flags (dict): A dict contains flags and its value.
Examples:
.. code-block:: python
import paddle.fluid as fluid
fluid.set_flags({'FLAGS_eager_delete_tensor_gb': 1.0})
"""
if not isinstance(flags, dict):
raise TypeError('flags in set_flags should be a dict')
for key, value in flags.items():
if core.globals().is_public(key):
core.globals()[key] = value
else:
raise ValueError(
"Flag %s cannot set its value through this function." % (key))
def get_flags(flags):
"""
This function gets the GFlags value in Paddle.
Args:
flags(list|tuple|str): A list/tuple of string or a string which is the flag's name.
Returns:
flag's value in Paddle.
Examples:
.. code-block:: python
import paddle.fluid as fluid
flags = ['FLAGS_eager_delete_tensor_gb', 'FLAGS_check_nan_inf']
res = fluid.get_flags(flags)
print(res)
# {'FLAGS_eager_delete_tensor_gb': 0.0, 'FLAGS_check_nan_inf': False}
"""
flags_value = {}
if isinstance(flags, (list, tuple)):
for key in flags:
if (core.globals().is_public(key)):
value = core.globals()[key]
temp = {key: value}
flags_value.update(temp)
else:
raise ValueError(
'Flag %s cannot get its value through this function.' %
(key))
elif isinstance(flags, str):
if (core.globals().is_public(flags)):
value = core.globals()[flags]
temp = {flags: value}
flags_value.update(temp)
else:
raise ValueError(
'Flag %s cannot get its value through this function.' % (flags))
else:
raise TypeError('Flags in get_flags should be a list, tuple or string.')
return flags_value
def _get_paddle_place(place):
"convert the string to paddle Place"
if place is None:
return place
if isinstance(place, (core.Place, core.XPUPlace, core.CPUPlace,
core.CUDAPinnedPlace, core.CUDAPlace, core.NPUPlace)):
return place
if not isinstance(place, str):
raise ValueError(
"place only support string which is 'Place' and so on.")
place = place.lower()
if (place == "cpu"):
return core.CPUPlace()
if (place == "device"):
return core.Place()
# GPU
available_gpu_place = re.match(r'gpu:\d+', place)
if place == "gpu_pinned" or place == "gpu" or available_gpu_place:
if not core.is_compiled_with_cuda():
raise ValueError(
"The device should not be {}, since PaddlePaddle is " \
"not compiled with CUDA".format(place))
if place == "gpu_pinned":
return core.CUDAPinnedPlace()
elif place == "gpu":
return core.CUDAPlace(0)
else:
place_info_list = place.split(':', 1)
device_id = place_info_list[1]
device_id = int(device_id)
return core.CUDAPlace(device_id)
# XPU
available_xpu_place = re.match(r'xpu:\d+', place)
if available_xpu_place:
if not core.is_compiled_with_xpu():
raise ValueError(
"The device should not be {}, since PaddlePaddle is " \
"not compiled with XPU".format(place))
place_info_list = place.split(':', 1)
device_id = place_info_list[1]
device_id = int(device_id)
return core.XPUPlace(device_id)
# NPU
available_npu_place = re.match(r'npu:\d+', place)
if available_npu_place:
if not core.is_compiled_with_npu():
raise ValueError(
"The device should not be {}, since PaddlePaddle is " \
"not compiled with NPU".format(place))
place_info_list = place.split(':', 1)
device_id = place_info_list[1]
device_id = int(device_id)
return core.NPUPlace(device_id)
raise ValueError(
"Paddle supports CPUPlace, CUDAPlace,CUDAPinnedPlace, XPUPlace and NPUPlace, but received {}.".
format(place))
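# Illustrative examples of the mapping performed above (device strings are
# lower-cased first; the GPU/XPU/NPU forms require the matching compile
# options, otherwise a ValueError is raised):
#
#     _get_paddle_place("cpu")   -> core.CPUPlace()
#     _get_paddle_place("gpu")   -> core.CUDAPlace(0)
#     _get_paddle_place("gpu:1") -> core.CUDAPlace(1)
#     _get_paddle_place("xpu:0") -> core.XPUPlace(0)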
def _get_paddle_place_list(places):
if not isinstance(places, (list, tuple)):
raise TypeError("places must to be List or Tuple")
ret = []
for p in places:
p = _get_paddle_place(p)
ret.append(p)
return ret
|
Xeleste/namebench
|
refs/heads/master
|
libnamebench/version.py
|
174
|
VERSION = '1.5-DEVEL'
|
mateon1/servo
|
refs/heads/master
|
tests/wpt/web-platform-tests/tools/wptserve/tests/functional/test_request.py
|
23
|
import unittest
import pytest
wptserve = pytest.importorskip("wptserve")
from .base import TestUsingServer
class TestInputFile(TestUsingServer):
def test_seek(self):
@wptserve.handlers.handler
def handler(request, response):
rv = []
f = request.raw_input
f.seek(5)
rv.append(f.read(2))
rv.append(f.tell())
f.seek(0)
rv.append(f.readline())
rv.append(f.tell())
rv.append(f.read(-1))
rv.append(f.tell())
f.seek(0)
rv.append(f.read())
f.seek(0)
rv.extend(f.readlines())
return " ".join(str(item) for item in rv)
route = ("POST", "/test/test_seek", handler)
self.server.router.register(*route)
resp = self.request(route[1], method="POST", body="12345ab\ncdef")
self.assertEqual(200, resp.getcode())
self.assertEqual(["ab", "7", "12345ab\n", "8", "cdef", "12",
"12345ab\ncdef", "12345ab\n", "cdef"],
resp.read().split(" "))
def test_iter(self):
@wptserve.handlers.handler
def handler(request, response):
f = request.raw_input
return " ".join(line for line in f)
route = ("POST", "/test/test_iter", handler)
self.server.router.register(*route)
resp = self.request(route[1], method="POST", body="12345\nabcdef\r\nzyxwv")
self.assertEqual(200, resp.getcode())
self.assertEqual(["12345\n", "abcdef\r\n", "zyxwv"], resp.read().split(" "))
class TestRequest(TestUsingServer):
def test_body(self):
@wptserve.handlers.handler
def handler(request, response):
request.raw_input.seek(5)
return request.body
route = ("POST", "/test/test_body", handler)
self.server.router.register(*route)
resp = self.request(route[1], method="POST", body="12345ab\ncdef")
self.assertEqual("12345ab\ncdef", resp.read())
def test_route_match(self):
@wptserve.handlers.handler
def handler(request, response):
return request.route_match["match"] + " " + request.route_match["*"]
route = ("GET", "/test/{match}_*", handler)
self.server.router.register(*route)
resp = self.request("/test/some_route")
self.assertEqual("some route", resp.read())
class TestAuth(TestUsingServer):
def test_auth(self):
@wptserve.handlers.handler
def handler(request, response):
return " ".join((request.auth.username, request.auth.password))
route = ("GET", "/test/test_auth", handler)
self.server.router.register(*route)
resp = self.request(route[1], auth=("test", "PASS"))
self.assertEqual(200, resp.getcode())
self.assertEqual(["test", "PASS"], resp.read().split(" "))
if __name__ == '__main__':
unittest.main()
|
gauribhoite/personfinder
|
refs/heads/master
|
env/google_appengine/lib/django-1.5/django/contrib/localflavor/ar/forms.py
|
96
|
# -*- coding: utf-8 -*-
"""
AR-specific Form helpers.
"""
from __future__ import absolute_import, unicode_literals
from django.contrib.localflavor.ar.ar_provinces import PROVINCE_CHOICES
from django.core.validators import EMPTY_VALUES
from django.forms import ValidationError
from django.forms.fields import RegexField, CharField, Select
from django.utils.translation import ugettext_lazy as _
class ARProvinceSelect(Select):
"""
A Select widget that uses a list of Argentinean provinces/autonomous cities
as its choices.
"""
def __init__(self, attrs=None):
super(ARProvinceSelect, self).__init__(attrs, choices=PROVINCE_CHOICES)
class ARPostalCodeField(RegexField):
"""
A field that accepts a 'classic' NNNN Postal Code or a CPA.
See:
http://www.correoargentino.com.ar/cpa/que_es
http://www.correoargentino.com.ar/cpa/como_escribirlo
"""
default_error_messages = {
'invalid': _("Enter a postal code in the format NNNN or ANNNNAAA."),
}
def __init__(self, max_length=8, min_length=4, *args, **kwargs):
super(ARPostalCodeField, self).__init__(r'^\d{4}$|^[A-HJ-NP-Za-hj-np-z]\d{4}\D{3}$',
max_length, min_length, *args, **kwargs)
def clean(self, value):
value = super(ARPostalCodeField, self).clean(value)
if value in EMPTY_VALUES:
return ''
if len(value) not in (4, 8):
raise ValidationError(self.error_messages['invalid'])
if len(value) == 8:
return '%s%s%s' % (value[0].upper(), value[1:5], value[5:].upper())
return value
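# A worked example of the normalisation performed by clean() above
# (inputs are purely illustrative):
#
#     ARPostalCodeField().clean('c1054aaa') -> 'C1054AAA'
#     ARPostalCodeField().clean('1054')     -> '1054'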
class ARDNIField(CharField):
"""
A field that validates 'Documento Nacional de Identidad' (DNI) numbers.
"""
default_error_messages = {
'invalid': _("This field requires only numbers."),
'max_digits': _("This field requires 7 or 8 digits."),
}
def __init__(self, max_length=10, min_length=7, *args, **kwargs):
super(ARDNIField, self).__init__(max_length, min_length, *args,
**kwargs)
def clean(self, value):
"""
Value can be a string either in the [X]X.XXX.XXX or [X]XXXXXXX formats.
"""
value = super(ARDNIField, self).clean(value)
if value in EMPTY_VALUES:
return ''
if not value.isdigit():
value = value.replace('.', '')
if not value.isdigit():
raise ValidationError(self.error_messages['invalid'])
if len(value) not in (7, 8):
raise ValidationError(self.error_messages['max_digits'])
return value
class ARCUITField(RegexField):
"""
This field validates a CUIT (Código Único de Identificación Tributaria). A
CUIT is of the form XX-XXXXXXXX-V. The last digit is a check digit.
"""
default_error_messages = {
'invalid': _('Enter a valid CUIT in XX-XXXXXXXX-X or XXXXXXXXXXXX format.'),
'checksum': _("Invalid CUIT."),
'legal_type': _('Invalid legal type. Type must be 27, 20, 23 or 30.'),
}
def __init__(self, max_length=None, min_length=None, *args, **kwargs):
super(ARCUITField, self).__init__(r'^\d{2}-?\d{8}-?\d$',
max_length, min_length, *args, **kwargs)
def clean(self, value):
"""
Value can be either a string in the format XX-XXXXXXXX-X or an
11-digit number.
"""
value = super(ARCUITField, self).clean(value)
if value in EMPTY_VALUES:
return ''
value, cd = self._canon(value)
if not value[:2] in ['27', '20', '23', '30']:
raise ValidationError(self.error_messages['legal_type'])
if self._calc_cd(value) != cd:
raise ValidationError(self.error_messages['checksum'])
return self._format(value, cd)
def _canon(self, cuit):
cuit = cuit.replace('-', '')
return cuit[:-1], cuit[-1]
def _calc_cd(self, cuit):
# Calculation code based on:
# http://es.wikipedia.org/wiki/C%C3%B3digo_%C3%9Anico_de_Identificaci%C3%B3n_Tributaria
mults = (5, 4, 3, 2, 7, 6, 5, 4, 3, 2)
tmp = sum([m * int(cuit[idx]) for idx, m in enumerate(mults)])
result = 11 - (tmp % 11)
if result == 11:
result = 0
elif result == 10:
result = 9
return str(result)
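# Worked example of the check-digit computation above (digits chosen only
# for illustration): for the CUIT body '2012345678',
# sum(m * d) = 148, 148 % 11 = 5, 11 - 5 = 6, so _calc_cd('2012345678') == '6'.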
def _format(self, cuit, check_digit=None):
if check_digit is None:
check_digit = cuit[-1]
cuit = cuit[:-1]
return '%s-%s-%s' % (cuit[:2], cuit[2:], check_digit)
|
justinvh/gitpaste
|
refs/heads/master
|
saic/paste/management/commands/purge_expired_pastes.py
|
2
|
from django.core.management.base import BaseCommand, CommandError
from saic.paste.models import Set
from datetime import datetime
class Command(BaseCommand):
def handle(self, *args, **kwargs):
expired_sets = Set.objects.filter(expires__lte=datetime.now())
num_purged = expired_sets.count()
expired_sets.delete()
self.stdout.write(
str.format('{0} expired sets were purged.\n', num_purged)
)
|
pophaax/sailingrobot
|
refs/heads/master
|
NavigationSystem/Tests/cxxtest/python/cxxtest/cxx_parser.py
|
55
|
#-------------------------------------------------------------------------
# CxxTest: A lightweight C++ unit testing library.
# Copyright (c) 2008 Sandia Corporation.
# This software is distributed under the LGPL License v3
# For more information, see the COPYING file in the top CxxTest directory.
# Under the terms of Contract DE-AC04-94AL85000 with Sandia Corporation,
# the U.S. Government retains certain rights in this software.
#-------------------------------------------------------------------------
# vim: fileencoding=utf-8
#
# This is a PLY parser for the entire ANSI C++ grammar. This grammar was
# adapted from the FOG grammar developed by E. D. Willink. See
#
# http://www.computing.surrey.ac.uk/research/dsrg/fog/
#
# for further details.
#
# The goal of this grammar is to extract information about class, function and
# class method declarations, along with their associated scope. Thus, this
# grammar can be used to analyze classes in an inheritance hierarchy, and then
# enumerate the methods in a derived class.
#
# This grammar parses blocks of <>, (), [] and {} in a generic manner. Thus,
# There are several capabilities that this grammar does not support:
#
# 1. Ambiguous template specification. This grammar cannot parse template
# specifications that do not have paired <>'s in their declaration. In
# particular, ambiguous declarations like
#
# foo<A, c<3 >();
#
# cannot be correctly parsed.
#
# 2. Template class specialization. Although the goal of this grammar is to
# extract class information, specialization of templated classes is
# not supported. When a template class definition is parsed, its
# declaration is archived without information about the template
# parameters. Class specializations will be stored separately, and
# thus they can be processed after the fact. However, this grammar
# does not attempt to correctly process properties of class inheritance
# when template class specialization is employed.
#
#
# TODO: document usage of this file
#
from __future__ import division
import os
import ply.lex as lex
import ply.yacc as yacc
import re
try:
from collections import OrderedDict
except ImportError: #pragma: no cover
from ordereddict import OrderedDict
# global data
lexer = None
scope_lineno = 0
identifier_lineno = {}
_parse_info=None
_parsedata=None
noExceptionLogic = True
def ply_init(data):
global _parsedata
_parsedata=data
class Scope(object):
def __init__(self,name,abs_name,scope_t,base_classes,lineno):
self.function=[]
self.name=name
self.scope_t=scope_t
self.sub_scopes=[]
self.base_classes=base_classes
self.abs_name=abs_name
self.lineno=lineno
def insert(self,scope):
self.sub_scopes.append(scope)
class CppInfo(object):
def __init__(self, filter=None):
self.verbose=0
if filter is None:
self.filter=re.compile("[Tt][Ee][Ss][Tt]|createSuite|destroySuite")
else:
self.filter=filter
self.scopes=[""]
self.index=OrderedDict()
self.index[""]=Scope("","::","namespace",[],1)
self.function=[]
def push_scope(self,ns,scope_t,base_classes=[]):
name = self.scopes[-1]+"::"+ns
if self.verbose>=2:
print "-- Starting "+scope_t+" "+name
self.scopes.append(name)
self.index[name] = Scope(ns,name,scope_t,base_classes,scope_lineno-1)
def pop_scope(self):
scope = self.scopes.pop()
if self.verbose>=2:
print "-- Stopping "+scope
return scope
def add_function(self, fn):
fn = str(fn)
if self.filter.search(fn):
self.index[self.scopes[-1]].function.append((fn, identifier_lineno.get(fn,lexer.lineno-1)))
tmp = self.scopes[-1]+"::"+fn
if self.verbose==2:
print "-- Function declaration "+fn+" "+tmp
elif self.verbose==1:
print "-- Function declaration "+tmp
def get_functions(self,name,quiet=False):
if name == "::":
name = ""
scope = self.index[name]
fns=scope.function
for key in scope.base_classes:
cname = self.find_class(key,scope)
if cname is None:
if not quiet:
print "Defined classes: ",list(self.index.keys())
print "WARNING: Unknown class "+key
else:
fns += self.get_functions(cname,quiet)
return fns
def find_class(self,name,scope):
if ':' in name:
if name in self.index:
return name
else:
return None
tmp = scope.abs_name.split(':')
name1 = ":".join(tmp[:-1] + [name])
if name1 in self.index:
return name1
name2 = "::"+name
if name2 in self.index:
return name2
return None
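    # e.g. looking up 'Base' from a scope whose abs_name is '::Outer::Derived'
    # first tries '::Outer::Base' and then falls back to '::Base' (illustrative note).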
def __repr__(self):
return str(self)
def is_baseclass(self,cls,base):
'''Returns true if base is a base-class of cls'''
if cls in self.index:
bases = self.index[cls]
elif "::"+cls in self.index:
bases = self.index["::"+cls]
else:
return False
#raise IOError, "Unknown class "+cls
if base in bases.base_classes:
return True
for name in bases.base_classes:
if self.is_baseclass(name,base):
return True
return False
def __str__(self):
ans=""
keys = list(self.index.keys())
keys.sort()
for key in keys:
scope = self.index[key]
ans += scope.scope_t+" "+scope.abs_name+"\n"
if scope.scope_t == "class":
ans += " Base Classes: "+str(scope.base_classes)+"\n"
for fn in self.get_functions(scope.abs_name):
ans += " "+fn+"\n"
else:
for fn in scope.function:
ans += " "+fn+"\n"
return ans
def flatten(x):
"""Flatten nested list"""
try:
strtypes = basestring
except: # for python3 etc
strtypes = (str, bytes)
result = []
for el in x:
if hasattr(el, "__iter__") and not isinstance(el, strtypes):
result.extend(flatten(el))
else:
result.append(el)
return result
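# Illustrative example (not part of the original source):
#   flatten(['a', ['b', ['c', 1]]]) -> ['a', 'b', 'c', 1]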
#
# The lexer (and/or a preprocessor) is expected to identify the following
#
# Punctuation:
#
#
literals = "+-*/%^&|~!<>=:()?.\'\"\\@$;,"
#
reserved = {
'private' : 'PRIVATE',
'protected' : 'PROTECTED',
'public' : 'PUBLIC',
'bool' : 'BOOL',
'char' : 'CHAR',
'double' : 'DOUBLE',
'float' : 'FLOAT',
'int' : 'INT',
'long' : 'LONG',
'short' : 'SHORT',
'signed' : 'SIGNED',
'unsigned' : 'UNSIGNED',
'void' : 'VOID',
'wchar_t' : 'WCHAR_T',
'class' : 'CLASS',
'enum' : 'ENUM',
'namespace' : 'NAMESPACE',
'struct' : 'STRUCT',
'typename' : 'TYPENAME',
'union' : 'UNION',
'const' : 'CONST',
'volatile' : 'VOLATILE',
'auto' : 'AUTO',
'explicit' : 'EXPLICIT',
'export' : 'EXPORT',
'extern' : 'EXTERN',
'__extension__' : 'EXTENSION',
'friend' : 'FRIEND',
'inline' : 'INLINE',
'mutable' : 'MUTABLE',
'register' : 'REGISTER',
'static' : 'STATIC',
'template' : 'TEMPLATE',
'typedef' : 'TYPEDEF',
'using' : 'USING',
'virtual' : 'VIRTUAL',
'asm' : 'ASM',
'break' : 'BREAK',
'case' : 'CASE',
'catch' : 'CATCH',
'const_cast' : 'CONST_CAST',
'continue' : 'CONTINUE',
'default' : 'DEFAULT',
'delete' : 'DELETE',
'do' : 'DO',
'dynamic_cast' : 'DYNAMIC_CAST',
'else' : 'ELSE',
'false' : 'FALSE',
'for' : 'FOR',
'goto' : 'GOTO',
'if' : 'IF',
'new' : 'NEW',
'operator' : 'OPERATOR',
'reinterpret_cast' : 'REINTERPRET_CAST',
'return' : 'RETURN',
'sizeof' : 'SIZEOF',
'static_cast' : 'STATIC_CAST',
'switch' : 'SWITCH',
'this' : 'THIS',
'throw' : 'THROW',
'true' : 'TRUE',
'try' : 'TRY',
'typeid' : 'TYPEID',
'while' : 'WHILE',
'"C"' : 'CLiteral',
'"C++"' : 'CppLiteral',
'__attribute__' : 'ATTRIBUTE',
'__cdecl__' : 'CDECL',
'__typeof' : 'uTYPEOF',
'typeof' : 'TYPEOF',
'CXXTEST_STD' : 'CXXTEST_STD'
}
tokens = [
"CharacterLiteral",
"FloatingLiteral",
"Identifier",
"IntegerLiteral",
"StringLiteral",
"RBRACE",
"LBRACE",
"RBRACKET",
"LBRACKET",
"ARROW",
"ARROW_STAR",
"DEC",
"EQ",
"GE",
"INC",
"LE",
"LOG_AND",
"LOG_OR",
"NE",
"SHL",
"SHR",
"ASS_ADD",
"ASS_AND",
"ASS_DIV",
"ASS_MOD",
"ASS_MUL",
"ASS_OR",
"ASS_SHL",
"ASS_SHR",
"ASS_SUB",
"ASS_XOR",
"DOT_STAR",
"ELLIPSIS",
"SCOPE",
] + list(reserved.values())
t_ignore = " \t\r"
t_LBRACE = r"(\{)|(<%)"
t_RBRACE = r"(\})|(%>)"
t_LBRACKET = r"(\[)|(<:)"
t_RBRACKET = r"(\])|(:>)"
t_ARROW = r"->"
t_ARROW_STAR = r"->\*"
t_DEC = r"--"
t_EQ = r"=="
t_GE = r">="
t_INC = r"\+\+"
t_LE = r"<="
t_LOG_AND = r"&&"
t_LOG_OR = r"\|\|"
t_NE = r"!="
t_SHL = r"<<"
t_SHR = r">>"
t_ASS_ADD = r"\+="
t_ASS_AND = r"&="
t_ASS_DIV = r"/="
t_ASS_MOD = r"%="
t_ASS_MUL = r"\*="
t_ASS_OR = r"\|="
t_ASS_SHL = r"<<="
t_ASS_SHR = r">>="
t_ASS_SUB = r"-="
t_ASS_XOR = r"^="
t_DOT_STAR = r"\.\*"
t_ELLIPSIS = r"\.\.\."
t_SCOPE = r"::"
# Discard comments
def t_COMMENT(t):
r'(/\*(.|\n)*?\*/)|(//.*?\n)|(\#.*?\n)'
t.lexer.lineno += t.value.count("\n")
t_IntegerLiteral = r'(0x[0-9A-F]+)|([0-9]+(L){0,1})'
t_FloatingLiteral = r"[0-9]+[eE\.\+-]+[eE\.\+\-0-9]+"
t_CharacterLiteral = r'\'([^\'\\]|\\.)*\''
#t_StringLiteral = r'"([^"\\]|\\.)*"'
def t_StringLiteral(t):
r'"([^"\\]|\\.)*"'
t.type = reserved.get(t.value,'StringLiteral')
return t
def t_Identifier(t):
r"[a-zA-Z_][a-zA-Z_0-9\.]*"
t.type = reserved.get(t.value,'Identifier')
return t
def t_error(t):
print "Illegal character '%s'" % t.value[0]
#raise IOError, "Parse error"
#t.lexer.skip()
def t_newline(t):
r'[\n]+'
t.lexer.lineno += len(t.value)
precedence = (
( 'right', 'SHIFT_THERE', 'REDUCE_HERE_MOSTLY', 'SCOPE'),
( 'nonassoc', 'ELSE', 'INC', 'DEC', '+', '-', '*', '&', 'LBRACKET', 'LBRACE', '<', ':', ')')
)
start = 'translation_unit'
#
# The %prec resolves the 14.2-3 ambiguity:
# Identifier '<' is forced to go through the is-it-a-template-name test
# All names absorb TEMPLATE with the name, so that no template_test is
# performed for them. This requires all potential declarations within an
# expression to perpetuate this policy and thereby guarantee the ultimate
# coverage of explicit_instantiation.
#
# The %prec also resolves a conflict in identifier : which is forced to be a
# shift of a label for a labeled-statement rather than a reduction for the
# name of a bit-field or generalised constructor. This is pretty dubious
# syntactically but correct for all semantic possibilities. The shift is
# only activated when the ambiguity exists at the start of a statement.
# In this context a bit-field declaration or constructor definition are not
# allowed.
#
def p_identifier(p):
'''identifier : Identifier
| CXXTEST_STD '(' Identifier ')'
'''
if p[1][0] in ('t','T','c','d'):
identifier_lineno[p[1]] = p.lineno(1)
p[0] = p[1]
def p_id(p):
'''id : identifier %prec SHIFT_THERE
| template_decl
| TEMPLATE id
'''
p[0] = get_rest(p)
def p_global_scope(p):
'''global_scope : SCOPE
'''
p[0] = get_rest(p)
def p_id_scope(p):
'''id_scope : id SCOPE'''
p[0] = get_rest(p)
def p_id_scope_seq(p):
'''id_scope_seq : id_scope
| id_scope id_scope_seq
'''
p[0] = get_rest(p)
#
# A :: B :: C; is ambiguous. How much is type and how much is name?
# The %prec maximises the (type) length which is the 7.1-2 semantic constraint.
#
def p_nested_id(p):
'''nested_id : id %prec SHIFT_THERE
| id_scope nested_id
'''
p[0] = get_rest(p)
def p_scoped_id(p):
'''scoped_id : nested_id
| global_scope nested_id
| id_scope_seq
| global_scope id_scope_seq
'''
global scope_lineno
scope_lineno = lexer.lineno
data = flatten(get_rest(p))
if data[0] != None:
p[0] = "".join(data)
#
# destructor_id has to be held back to avoid a conflict with a one's
# complement as per 5.3.1-9, It gets put back only when scoped or in a
# declarator_id, which is only used as an explicit member name.
# Declarations of an unscoped destructor are always parsed as a one's
# complement.
#
def p_destructor_id(p):
'''destructor_id : '~' id
| TEMPLATE destructor_id
'''
p[0]=get_rest(p)
#def p_template_id(p):
# '''template_id : empty
# | TEMPLATE
# '''
# pass
def p_template_decl(p):
'''template_decl : identifier '<' nonlgt_seq_opt '>'
'''
#
# WEH: should we include the lt/gt symbols to indicate that this is a
# template class? How is that going to be used later???
#
#p[0] = [p[1] ,"<",">"]
p[0] = p[1]
def p_special_function_id(p):
'''special_function_id : conversion_function_id
| operator_function_id
| TEMPLATE special_function_id
'''
p[0]=get_rest(p)
def p_nested_special_function_id(p):
'''nested_special_function_id : special_function_id
| id_scope destructor_id
| id_scope nested_special_function_id
'''
p[0]=get_rest(p)
def p_scoped_special_function_id(p):
'''scoped_special_function_id : nested_special_function_id
| global_scope nested_special_function_id
'''
p[0]=get_rest(p)
# declarator-id is all names in all scopes, except reserved words
def p_declarator_id(p):
'''declarator_id : scoped_id
| scoped_special_function_id
| destructor_id
'''
p[0]=p[1]
#
# The standard defines pseudo-destructors in terms of type-name, which is
# class/enum/typedef, of which class-name is covered by a normal destructor.
# pseudo-destructors are supposed to support ~int() in templates, so the
# grammar here covers built-in names. Other names are covered by the lack
# of identifier/type discrimination.
#
def p_built_in_type_id(p):
'''built_in_type_id : built_in_type_specifier
| built_in_type_id built_in_type_specifier
'''
pass
def p_pseudo_destructor_id(p):
'''pseudo_destructor_id : built_in_type_id SCOPE '~' built_in_type_id
| '~' built_in_type_id
| TEMPLATE pseudo_destructor_id
'''
pass
def p_nested_pseudo_destructor_id(p):
'''nested_pseudo_destructor_id : pseudo_destructor_id
| id_scope nested_pseudo_destructor_id
'''
pass
def p_scoped_pseudo_destructor_id(p):
'''scoped_pseudo_destructor_id : nested_pseudo_destructor_id
| global_scope scoped_pseudo_destructor_id
'''
pass
#-------------------------------------------------------------------------------
# A.2 Lexical conventions
#-------------------------------------------------------------------------------
#
def p_literal(p):
'''literal : IntegerLiteral
| CharacterLiteral
| FloatingLiteral
| StringLiteral
| TRUE
| FALSE
'''
pass
#-------------------------------------------------------------------------------
# A.3 Basic concepts
#-------------------------------------------------------------------------------
def p_translation_unit(p):
'''translation_unit : declaration_seq_opt
'''
pass
#-------------------------------------------------------------------------------
# A.4 Expressions
#-------------------------------------------------------------------------------
#
# primary_expression covers an arbitrary sequence of all names with the
# exception of an unscoped destructor, which is parsed as its unary expression
# which is the correct disambiguation (when ambiguous). This eliminates the
# traditional A(B) meaning A B ambiguity, since we never have to tack an A
# onto the front of something that might start with (. The name length got
# maximised ab initio. The downside is that semantic interpretation must split
# the names up again.
#
# Unification of the declaration and expression syntax means that unary and
# binary pointer declarator operators:
# int * * name
# are parsed as binary and unary arithmetic operators (int) * (*name). Since
# type information is not used
# ambiguities resulting from a cast
# (cast)*(value)
# are resolved to favour the binary rather than the cast unary to ease AST
# clean-up. The cast-call ambiguity must be resolved to the cast to ensure
# that (a)(b)c can be parsed.
#
# The problem of the functional cast ambiguity
# name(arg)
# as call or declaration is avoided by maximising the name within the parsing
# kernel. So primary_id_expression picks up
# extern long int const var = 5;
# as an assignment to the syntax parsed as "extern long int const var". The
# presence of two names is parsed so that "extern long into const" is
# distinguished from "var" considerably simplifying subsequent
# semantic resolution.
#
# The generalised name is a concatenation of potential type-names (scoped
# identifiers or built-in sequences) plus optionally one of the special names
# such as an operator-function-id, conversion-function-id or destructor as the
# final name.
#
def get_rest(p):
return [p[i] for i in range(1, len(p))]
def p_primary_expression(p):
'''primary_expression : literal
| THIS
| suffix_decl_specified_ids
| abstract_expression %prec REDUCE_HERE_MOSTLY
'''
p[0] = get_rest(p)
#
# Abstract-expression covers the () and [] of abstract-declarators.
#
def p_abstract_expression(p):
'''abstract_expression : parenthesis_clause
| LBRACKET bexpression_opt RBRACKET
| TEMPLATE abstract_expression
'''
pass
def p_postfix_expression(p):
'''postfix_expression : primary_expression
| postfix_expression parenthesis_clause
| postfix_expression LBRACKET bexpression_opt RBRACKET
| postfix_expression LBRACKET bexpression_opt RBRACKET attributes
| postfix_expression '.' declarator_id
| postfix_expression '.' scoped_pseudo_destructor_id
| postfix_expression ARROW declarator_id
| postfix_expression ARROW scoped_pseudo_destructor_id
| postfix_expression INC
| postfix_expression DEC
| DYNAMIC_CAST '<' nonlgt_seq_opt '>' '(' expression ')'
| STATIC_CAST '<' nonlgt_seq_opt '>' '(' expression ')'
| REINTERPRET_CAST '<' nonlgt_seq_opt '>' '(' expression ')'
| CONST_CAST '<' nonlgt_seq_opt '>' '(' expression ')'
| TYPEID parameters_clause
'''
#print "HERE",str(p[1])
p[0] = get_rest(p)
def p_bexpression_opt(p):
'''bexpression_opt : empty
| bexpression
'''
pass
def p_bexpression(p):
'''bexpression : nonbracket_seq
| nonbracket_seq bexpression_seq bexpression_clause nonbracket_seq_opt
| bexpression_seq bexpression_clause nonbracket_seq_opt
'''
pass
def p_bexpression_seq(p):
'''bexpression_seq : empty
| bexpression_seq bexpression_clause nonbracket_seq_opt
'''
pass
def p_bexpression_clause(p):
'''bexpression_clause : LBRACKET bexpression_opt RBRACKET
'''
pass
def p_expression_list_opt(p):
'''expression_list_opt : empty
| expression_list
'''
pass
def p_expression_list(p):
'''expression_list : assignment_expression
| expression_list ',' assignment_expression
'''
pass
def p_unary_expression(p):
'''unary_expression : postfix_expression
| INC cast_expression
| DEC cast_expression
| ptr_operator cast_expression
| suffix_decl_specified_scope star_ptr_operator cast_expression
| '+' cast_expression
| '-' cast_expression
| '!' cast_expression
| '~' cast_expression
| SIZEOF unary_expression
| new_expression
| global_scope new_expression
| delete_expression
| global_scope delete_expression
'''
p[0] = get_rest(p)
def p_delete_expression(p):
'''delete_expression : DELETE cast_expression
'''
pass
def p_new_expression(p):
'''new_expression : NEW new_type_id new_initializer_opt
| NEW parameters_clause new_type_id new_initializer_opt
| NEW parameters_clause
| NEW parameters_clause parameters_clause new_initializer_opt
'''
pass
def p_new_type_id(p):
'''new_type_id : type_specifier ptr_operator_seq_opt
| type_specifier new_declarator
| type_specifier new_type_id
'''
pass
def p_new_declarator(p):
'''new_declarator : ptr_operator new_declarator
| direct_new_declarator
'''
pass
def p_direct_new_declarator(p):
'''direct_new_declarator : LBRACKET bexpression_opt RBRACKET
| direct_new_declarator LBRACKET bexpression RBRACKET
'''
pass
def p_new_initializer_opt(p):
'''new_initializer_opt : empty
| '(' expression_list_opt ')'
'''
pass
#
# cast-expression is generalised to support a [] as well as a () prefix. This covers the omission of
# DELETE[] which when followed by a parenthesised expression was ambiguous. It also covers the gcc
# indexed array initialisation for free.
#
def p_cast_expression(p):
'''cast_expression : unary_expression
| abstract_expression cast_expression
'''
p[0] = get_rest(p)
def p_pm_expression(p):
'''pm_expression : cast_expression
| pm_expression DOT_STAR cast_expression
| pm_expression ARROW_STAR cast_expression
'''
p[0] = get_rest(p)
def p_multiplicative_expression(p):
'''multiplicative_expression : pm_expression
| multiplicative_expression star_ptr_operator pm_expression
| multiplicative_expression '/' pm_expression
| multiplicative_expression '%' pm_expression
'''
p[0] = get_rest(p)
def p_additive_expression(p):
'''additive_expression : multiplicative_expression
| additive_expression '+' multiplicative_expression
| additive_expression '-' multiplicative_expression
'''
p[0] = get_rest(p)
def p_shift_expression(p):
'''shift_expression : additive_expression
| shift_expression SHL additive_expression
| shift_expression SHR additive_expression
'''
p[0] = get_rest(p)
# | relational_expression '<' shift_expression
# | relational_expression '>' shift_expression
# | relational_expression LE shift_expression
# | relational_expression GE shift_expression
def p_relational_expression(p):
'''relational_expression : shift_expression
'''
p[0] = get_rest(p)
def p_equality_expression(p):
'''equality_expression : relational_expression
| equality_expression EQ relational_expression
| equality_expression NE relational_expression
'''
p[0] = get_rest(p)
def p_and_expression(p):
'''and_expression : equality_expression
| and_expression '&' equality_expression
'''
p[0] = get_rest(p)
def p_exclusive_or_expression(p):
'''exclusive_or_expression : and_expression
| exclusive_or_expression '^' and_expression
'''
p[0] = get_rest(p)
def p_inclusive_or_expression(p):
'''inclusive_or_expression : exclusive_or_expression
| inclusive_or_expression '|' exclusive_or_expression
'''
p[0] = get_rest(p)
def p_logical_and_expression(p):
'''logical_and_expression : inclusive_or_expression
| logical_and_expression LOG_AND inclusive_or_expression
'''
p[0] = get_rest(p)
def p_logical_or_expression(p):
'''logical_or_expression : logical_and_expression
| logical_or_expression LOG_OR logical_and_expression
'''
p[0] = get_rest(p)
def p_conditional_expression(p):
'''conditional_expression : logical_or_expression
| logical_or_expression '?' expression ':' assignment_expression
'''
p[0] = get_rest(p)
#
# assignment-expression is generalised to cover the simple assignment of a braced initializer in order to
# contribute to the coverage of parameter-declaration and init-declaration.
#
# | logical_or_expression assignment_operator assignment_expression
def p_assignment_expression(p):
'''assignment_expression : conditional_expression
| logical_or_expression assignment_operator nonsemicolon_seq
| logical_or_expression '=' braced_initializer
| throw_expression
'''
p[0]=get_rest(p)
def p_assignment_operator(p):
'''assignment_operator : '='
| ASS_ADD
| ASS_AND
| ASS_DIV
| ASS_MOD
| ASS_MUL
| ASS_OR
| ASS_SHL
| ASS_SHR
| ASS_SUB
| ASS_XOR
'''
pass
#
# expression is widely used and usually single-element, so the reductions are arranged so that a
# single-element expression is returned as is. Multi-element expressions are parsed as a list that
# may then behave polymorphically as an element or be compacted to an element.
#
def p_expression(p):
'''expression : assignment_expression
| expression_list ',' assignment_expression
'''
p[0] = get_rest(p)
def p_constant_expression(p):
'''constant_expression : conditional_expression
'''
pass
#---------------------------------------------------------------------------------------------------
# A.5 Statements
#---------------------------------------------------------------------------------------------------
# Parsing statements is easy once simple_declaration has been generalised to cover expression_statement.
#
#
# The use of extern here is a hack. The 'extern "C" {}' block gets parsed
# as a function, so when nested 'extern "C"' declarations exist, they don't
# work because the block is viewed as a list of statements... :(
#
def p_statement(p):
'''statement : compound_statement
| declaration_statement
| try_block
| labeled_statement
| selection_statement
| iteration_statement
| jump_statement
'''
pass
def p_compound_statement(p):
'''compound_statement : LBRACE statement_seq_opt RBRACE
'''
pass
def p_statement_seq_opt(p):
'''statement_seq_opt : empty
| statement_seq_opt statement
'''
pass
#
# The dangling else conflict is resolved to the innermost if.
#
def p_selection_statement(p):
'''selection_statement : IF '(' condition ')' statement %prec SHIFT_THERE
| IF '(' condition ')' statement ELSE statement
| SWITCH '(' condition ')' statement
'''
pass
def p_condition_opt(p):
'''condition_opt : empty
| condition
'''
pass
def p_condition(p):
'''condition : nonparen_seq
| nonparen_seq condition_seq parameters_clause nonparen_seq_opt
| condition_seq parameters_clause nonparen_seq_opt
'''
pass
def p_condition_seq(p):
'''condition_seq : empty
| condition_seq parameters_clause nonparen_seq_opt
'''
pass
def p_labeled_statement(p):
'''labeled_statement : identifier ':' statement
| CASE constant_expression ':' statement
| DEFAULT ':' statement
'''
pass
def p_try_block(p):
'''try_block : TRY compound_statement handler_seq
'''
global noExceptionLogic
noExceptionLogic=False
def p_jump_statement(p):
'''jump_statement : BREAK ';'
| CONTINUE ';'
| RETURN nonsemicolon_seq ';'
| GOTO identifier ';'
'''
pass
def p_iteration_statement(p):
'''iteration_statement : WHILE '(' condition ')' statement
| DO statement WHILE '(' expression ')' ';'
| FOR '(' nonparen_seq_opt ')' statement
'''
pass
def p_declaration_statement(p):
'''declaration_statement : block_declaration
'''
pass
#---------------------------------------------------------------------------------------------------
# A.6 Declarations
#---------------------------------------------------------------------------------------------------
def p_compound_declaration(p):
'''compound_declaration : LBRACE declaration_seq_opt RBRACE
'''
pass
def p_declaration_seq_opt(p):
'''declaration_seq_opt : empty
| declaration_seq_opt declaration
'''
pass
def p_declaration(p):
'''declaration : block_declaration
| function_definition
| template_declaration
| explicit_specialization
| specialised_declaration
'''
pass
def p_specialised_declaration(p):
'''specialised_declaration : linkage_specification
| namespace_definition
| TEMPLATE specialised_declaration
'''
pass
def p_block_declaration(p):
'''block_declaration : simple_declaration
| specialised_block_declaration
'''
pass
def p_specialised_block_declaration(p):
'''specialised_block_declaration : asm_definition
| namespace_alias_definition
| using_declaration
| using_directive
| TEMPLATE specialised_block_declaration
'''
pass
def p_simple_declaration(p):
'''simple_declaration : ';'
| init_declaration ';'
| init_declarations ';'
| decl_specifier_prefix simple_declaration
'''
global _parse_info
if len(p) == 3:
if p[2] == ";":
decl = p[1]
else:
decl = p[2]
if decl is not None:
fp = flatten(decl)
if len(fp) >= 2 and fp[0] is not None and fp[0]!="operator" and fp[1] == '(':
p[0] = fp[0]
_parse_info.add_function(fp[0])
#
# A decl-specifier following a ptr_operator provokes a shift-reduce conflict for * const name which is resolved in favour of the pointer, and implemented by providing versions of decl-specifier guaranteed not to start with a cv_qualifier. decl-specifiers are implemented type-centrically. That is the semantic constraint that there must be a type is exploited to impose structure, but actually eliminate very little syntax. built-in types are multi-name and so need a different policy.
#
# non-type decl-specifiers are bound to the left-most type in a decl-specifier-seq, by parsing from the right and attaching suffixes to the right-hand type. Finally residual prefixes attach to the left.
#
def p_suffix_built_in_decl_specifier_raw(p):
'''suffix_built_in_decl_specifier_raw : built_in_type_specifier
| suffix_built_in_decl_specifier_raw built_in_type_specifier
| suffix_built_in_decl_specifier_raw decl_specifier_suffix
'''
pass
def p_suffix_built_in_decl_specifier(p):
'''suffix_built_in_decl_specifier : suffix_built_in_decl_specifier_raw
| TEMPLATE suffix_built_in_decl_specifier
'''
pass
# | id_scope_seq
# | SCOPE id_scope_seq
def p_suffix_named_decl_specifier(p):
'''suffix_named_decl_specifier : scoped_id
| elaborate_type_specifier
| suffix_named_decl_specifier decl_specifier_suffix
'''
p[0]=get_rest(p)
def p_suffix_named_decl_specifier_bi(p):
'''suffix_named_decl_specifier_bi : suffix_named_decl_specifier
| suffix_named_decl_specifier suffix_built_in_decl_specifier_raw
'''
p[0] = get_rest(p)
#print "HERE",get_rest(p)
def p_suffix_named_decl_specifiers(p):
'''suffix_named_decl_specifiers : suffix_named_decl_specifier_bi
| suffix_named_decl_specifiers suffix_named_decl_specifier_bi
'''
p[0] = get_rest(p)
def p_suffix_named_decl_specifiers_sf(p):
'''suffix_named_decl_specifiers_sf : scoped_special_function_id
| suffix_named_decl_specifiers
| suffix_named_decl_specifiers scoped_special_function_id
'''
#print "HERE",get_rest(p)
p[0] = get_rest(p)
def p_suffix_decl_specified_ids(p):
'''suffix_decl_specified_ids : suffix_built_in_decl_specifier
| suffix_built_in_decl_specifier suffix_named_decl_specifiers_sf
| suffix_named_decl_specifiers_sf
'''
if len(p) == 3:
p[0] = p[2]
else:
p[0] = p[1]
def p_suffix_decl_specified_scope(p):
'''suffix_decl_specified_scope : suffix_named_decl_specifiers SCOPE
| suffix_built_in_decl_specifier suffix_named_decl_specifiers SCOPE
| suffix_built_in_decl_specifier SCOPE
'''
p[0] = get_rest(p)
def p_decl_specifier_affix(p):
'''decl_specifier_affix : storage_class_specifier
| function_specifier
| FRIEND
| TYPEDEF
| cv_qualifier
'''
pass
def p_decl_specifier_suffix(p):
'''decl_specifier_suffix : decl_specifier_affix
'''
pass
def p_decl_specifier_prefix(p):
'''decl_specifier_prefix : decl_specifier_affix
| TEMPLATE decl_specifier_prefix
'''
pass
def p_storage_class_specifier(p):
'''storage_class_specifier : REGISTER
| STATIC
| MUTABLE
| EXTERN %prec SHIFT_THERE
| EXTENSION
| AUTO
'''
pass
def p_function_specifier(p):
'''function_specifier : EXPLICIT
| INLINE
| VIRTUAL
'''
pass
def p_type_specifier(p):
'''type_specifier : simple_type_specifier
| elaborate_type_specifier
| cv_qualifier
'''
pass
def p_elaborate_type_specifier(p):
'''elaborate_type_specifier : class_specifier
| enum_specifier
| elaborated_type_specifier
| TEMPLATE elaborate_type_specifier
'''
pass
def p_simple_type_specifier(p):
'''simple_type_specifier : scoped_id
| scoped_id attributes
| built_in_type_specifier
'''
p[0] = p[1]
def p_built_in_type_specifier(p):
'''built_in_type_specifier : Xbuilt_in_type_specifier
| Xbuilt_in_type_specifier attributes
'''
pass
def p_attributes(p):
'''attributes : attribute
| attributes attribute
'''
pass
def p_attribute(p):
'''attribute : ATTRIBUTE '(' parameters_clause ')'
'''
def p_Xbuilt_in_type_specifier(p):
'''Xbuilt_in_type_specifier : CHAR
| WCHAR_T
| BOOL
| SHORT
| INT
| LONG
| SIGNED
| UNSIGNED
| FLOAT
| DOUBLE
| VOID
| uTYPEOF parameters_clause
| TYPEOF parameters_clause
'''
pass
#
# The over-general use of declaration_expression to cover decl-specifier-seq_opt declarator in a function-definition means that
# class X { };
# could be a function-definition or a class-specifier.
# enum X { };
# could be a function-definition or an enum-specifier.
# The function-definition is not syntactically valid so resolving the false conflict in favour of the
# elaborated_type_specifier is correct.
#
def p_elaborated_type_specifier(p):
'''elaborated_type_specifier : class_key scoped_id %prec SHIFT_THERE
| elaborated_enum_specifier
| TYPENAME scoped_id
'''
pass
def p_elaborated_enum_specifier(p):
'''elaborated_enum_specifier : ENUM scoped_id %prec SHIFT_THERE
'''
pass
def p_enum_specifier(p):
'''enum_specifier : ENUM scoped_id enumerator_clause
| ENUM enumerator_clause
'''
pass
def p_enumerator_clause(p):
'''enumerator_clause : LBRACE enumerator_list_ecarb
| LBRACE enumerator_list enumerator_list_ecarb
| LBRACE enumerator_list ',' enumerator_definition_ecarb
'''
pass
def p_enumerator_list_ecarb(p):
'''enumerator_list_ecarb : RBRACE
'''
pass
def p_enumerator_definition_ecarb(p):
'''enumerator_definition_ecarb : RBRACE
'''
pass
def p_enumerator_definition_filler(p):
'''enumerator_definition_filler : empty
'''
pass
def p_enumerator_list_head(p):
'''enumerator_list_head : enumerator_definition_filler
| enumerator_list ',' enumerator_definition_filler
'''
pass
def p_enumerator_list(p):
'''enumerator_list : enumerator_list_head enumerator_definition
'''
pass
def p_enumerator_definition(p):
'''enumerator_definition : enumerator
| enumerator '=' constant_expression
'''
pass
def p_enumerator(p):
'''enumerator : identifier
'''
pass
def p_namespace_definition(p):
'''namespace_definition : NAMESPACE scoped_id push_scope compound_declaration
| NAMESPACE push_scope compound_declaration
'''
global _parse_info
scope = _parse_info.pop_scope()
def p_namespace_alias_definition(p):
'''namespace_alias_definition : NAMESPACE scoped_id '=' scoped_id ';'
'''
pass
def p_push_scope(p):
'''push_scope : empty'''
global _parse_info
if p[-2] == "namespace":
scope=p[-1]
else:
scope=""
_parse_info.push_scope(scope,"namespace")
def p_using_declaration(p):
'''using_declaration : USING declarator_id ';'
| USING TYPENAME declarator_id ';'
'''
pass
def p_using_directive(p):
'''using_directive : USING NAMESPACE scoped_id ';'
'''
pass
# '''asm_definition : ASM '(' StringLiteral ')' ';'
def p_asm_definition(p):
'''asm_definition : ASM '(' nonparen_seq_opt ')' ';'
'''
pass
def p_linkage_specification(p):
'''linkage_specification : EXTERN CLiteral declaration
| EXTERN CLiteral compound_declaration
| EXTERN CppLiteral declaration
| EXTERN CppLiteral compound_declaration
'''
pass
#---------------------------------------------------------------------------------------------------
# A.7 Declarators
#---------------------------------------------------------------------------------------------------
#
# init-declarator is named init_declaration to reflect the embedded decl-specifier-seq_opt
#
def p_init_declarations(p):
'''init_declarations : assignment_expression ',' init_declaration
| init_declarations ',' init_declaration
'''
p[0]=get_rest(p)
def p_init_declaration(p):
'''init_declaration : assignment_expression
'''
p[0]=get_rest(p)
def p_star_ptr_operator(p):
'''star_ptr_operator : '*'
| star_ptr_operator cv_qualifier
'''
pass
def p_nested_ptr_operator(p):
'''nested_ptr_operator : star_ptr_operator
| id_scope nested_ptr_operator
'''
pass
def p_ptr_operator(p):
'''ptr_operator : '&'
| nested_ptr_operator
| global_scope nested_ptr_operator
'''
pass
def p_ptr_operator_seq(p):
'''ptr_operator_seq : ptr_operator
| ptr_operator ptr_operator_seq
'''
pass
#
# Independently coded to localise the shift-reduce conflict: sharing just needs another %prec
#
def p_ptr_operator_seq_opt(p):
'''ptr_operator_seq_opt : empty %prec SHIFT_THERE
| ptr_operator ptr_operator_seq_opt
'''
pass
def p_cv_qualifier_seq_opt(p):
'''cv_qualifier_seq_opt : empty
| cv_qualifier_seq_opt cv_qualifier
'''
pass
# TODO: verify that we should include attributes here
def p_cv_qualifier(p):
'''cv_qualifier : CONST
| VOLATILE
| attributes
'''
pass
def p_type_id(p):
'''type_id : type_specifier abstract_declarator_opt
| type_specifier type_id
'''
pass
def p_abstract_declarator_opt(p):
'''abstract_declarator_opt : empty
| ptr_operator abstract_declarator_opt
| direct_abstract_declarator
'''
pass
def p_direct_abstract_declarator_opt(p):
'''direct_abstract_declarator_opt : empty
| direct_abstract_declarator
'''
pass
def p_direct_abstract_declarator(p):
'''direct_abstract_declarator : direct_abstract_declarator_opt parenthesis_clause
| direct_abstract_declarator_opt LBRACKET RBRACKET
| direct_abstract_declarator_opt LBRACKET bexpression RBRACKET
'''
pass
def p_parenthesis_clause(p):
'''parenthesis_clause : parameters_clause cv_qualifier_seq_opt
| parameters_clause cv_qualifier_seq_opt exception_specification
'''
p[0] = ['(',')']
def p_parameters_clause(p):
'''parameters_clause : '(' condition_opt ')'
'''
p[0] = ['(',')']
#
# A typed abstract qualifier such as
# Class * ...
# looks like a multiply, so pointers are parsed as their binary operation equivalents that
# ultimately terminate with a degenerate right hand term.
#
def p_abstract_pointer_declaration(p):
'''abstract_pointer_declaration : ptr_operator_seq
| multiplicative_expression star_ptr_operator ptr_operator_seq_opt
'''
pass
def p_abstract_parameter_declaration(p):
'''abstract_parameter_declaration : abstract_pointer_declaration
| and_expression '&'
| and_expression '&' abstract_pointer_declaration
'''
pass
def p_special_parameter_declaration(p):
'''special_parameter_declaration : abstract_parameter_declaration
| abstract_parameter_declaration '=' assignment_expression
| ELLIPSIS
'''
pass
def p_parameter_declaration(p):
'''parameter_declaration : assignment_expression
| special_parameter_declaration
| decl_specifier_prefix parameter_declaration
'''
pass
#
# function_definition includes constructor, destructor, implicit int definitions too. A local destructor is successfully parsed as a function-declaration but the ~ was treated as a unary operator. constructor_head is the prefix ambiguity between a constructor and a member-init-list starting with a bit-field.
#
def p_function_definition(p):
'''function_definition : ctor_definition
| func_definition
'''
pass
def p_func_definition(p):
'''func_definition : assignment_expression function_try_block
| assignment_expression function_body
| decl_specifier_prefix func_definition
'''
global _parse_info
if p[2] is not None and p[2][0] == '{':
decl = flatten(p[1])
#print "HERE",decl
if decl[-1] == ')':
decl=decl[-3]
else:
decl=decl[-1]
p[0] = decl
if decl != "operator":
_parse_info.add_function(decl)
else:
p[0] = p[2]
def p_ctor_definition(p):
'''ctor_definition : constructor_head function_try_block
| constructor_head function_body
| decl_specifier_prefix ctor_definition
'''
if p[2] is None or p[2][0] == "try" or p[2][0] == '{':
p[0]=p[1]
else:
p[0]=p[1]
def p_constructor_head(p):
'''constructor_head : bit_field_init_declaration
| constructor_head ',' assignment_expression
'''
p[0]=p[1]
def p_function_try_block(p):
'''function_try_block : TRY function_block handler_seq
'''
global noExceptionLogic
noExceptionLogic=False
p[0] = ['try']
def p_function_block(p):
'''function_block : ctor_initializer_opt function_body
'''
pass
def p_function_body(p):
'''function_body : LBRACE nonbrace_seq_opt RBRACE
'''
p[0] = ['{','}']
def p_initializer_clause(p):
'''initializer_clause : assignment_expression
| braced_initializer
'''
pass
def p_braced_initializer(p):
'''braced_initializer : LBRACE initializer_list RBRACE
| LBRACE initializer_list ',' RBRACE
| LBRACE RBRACE
'''
pass
def p_initializer_list(p):
'''initializer_list : initializer_clause
| initializer_list ',' initializer_clause
'''
pass
#---------------------------------------------------------------------------------------------------
# A.8 Classes
#---------------------------------------------------------------------------------------------------
#
# An anonymous bit-field declaration may look very like inheritance:
# const int B = 3;
# class A : B ;
# The two usages are too distant to try to create and enforce a common prefix so we have to resort to
# a parser hack by backtracking. Inheritance is much the most likely so we mark the input stream context
# and try to parse a base-clause. If we successfully reach a { the base-clause is ok and inheritance was
# the correct choice so we unmark and continue. If we fail to find the { an error token causes
# back-tracking to the alternative parse in elaborated_type_specifier which regenerates the : and
# declares unconditional success.
#
def p_class_specifier_head(p):
'''class_specifier_head : class_key scoped_id ':' base_specifier_list LBRACE
| class_key ':' base_specifier_list LBRACE
| class_key scoped_id LBRACE
| class_key LBRACE
'''
global _parse_info
base_classes=[]
if len(p) == 6:
scope = p[2]
base_classes = p[4]
elif len(p) == 4:
scope = p[2]
    elif len(p) == 5:
        scope = ""
        base_classes = p[3]
else:
scope = ""
_parse_info.push_scope(scope,p[1],base_classes)
def p_class_key(p):
'''class_key : CLASS
| STRUCT
| UNION
'''
p[0] = p[1]
def p_class_specifier(p):
'''class_specifier : class_specifier_head member_specification_opt RBRACE
'''
scope = _parse_info.pop_scope()
def p_member_specification_opt(p):
'''member_specification_opt : empty
| member_specification_opt member_declaration
'''
pass
def p_member_declaration(p):
'''member_declaration : accessibility_specifier
| simple_member_declaration
| function_definition
| using_declaration
| template_declaration
'''
p[0] = get_rest(p)
#print "Decl",get_rest(p)
#
# The generality of constructor names (there need be no parenthesised argument list) means that that
# name : f(g), h(i)
# could be the start of a constructor or the start of an anonymous bit-field. An ambiguity is avoided by
# parsing the ctor-initializer of a function_definition as a bit-field.
#
def p_simple_member_declaration(p):
'''simple_member_declaration : ';'
| assignment_expression ';'
| constructor_head ';'
| member_init_declarations ';'
| decl_specifier_prefix simple_member_declaration
'''
global _parse_info
decl = flatten(get_rest(p))
if len(decl) >= 4 and decl[-3] == "(":
_parse_info.add_function(decl[-4])
def p_member_init_declarations(p):
'''member_init_declarations : assignment_expression ',' member_init_declaration
| constructor_head ',' bit_field_init_declaration
| member_init_declarations ',' member_init_declaration
'''
pass
def p_member_init_declaration(p):
'''member_init_declaration : assignment_expression
| bit_field_init_declaration
'''
pass
def p_accessibility_specifier(p):
'''accessibility_specifier : access_specifier ':'
'''
pass
def p_bit_field_declaration(p):
'''bit_field_declaration : assignment_expression ':' bit_field_width
| ':' bit_field_width
'''
if len(p) == 4:
p[0]=p[1]
def p_bit_field_width(p):
'''bit_field_width : logical_or_expression
| logical_or_expression '?' bit_field_width ':' bit_field_width
'''
pass
def p_bit_field_init_declaration(p):
'''bit_field_init_declaration : bit_field_declaration
| bit_field_declaration '=' initializer_clause
'''
pass
#---------------------------------------------------------------------------------------------------
# A.9 Derived classes
#---------------------------------------------------------------------------------------------------
def p_base_specifier_list(p):
'''base_specifier_list : base_specifier
| base_specifier_list ',' base_specifier
'''
if len(p) == 2:
p[0] = [p[1]]
else:
p[0] = p[1]+[p[3]]
def p_base_specifier(p):
'''base_specifier : scoped_id
| access_specifier base_specifier
| VIRTUAL base_specifier
'''
if len(p) == 2:
p[0] = p[1]
else:
p[0] = p[2]
def p_access_specifier(p):
'''access_specifier : PRIVATE
| PROTECTED
| PUBLIC
'''
pass
#---------------------------------------------------------------------------------------------------
# A.10 Special member functions
#---------------------------------------------------------------------------------------------------
def p_conversion_function_id(p):
'''conversion_function_id : OPERATOR conversion_type_id
'''
p[0] = ['operator']
def p_conversion_type_id(p):
'''conversion_type_id : type_specifier ptr_operator_seq_opt
| type_specifier conversion_type_id
'''
pass
#
# Ctor-initialisers can look like a bit field declaration, given the generalisation of names:
# Class(Type) : m1(1), m2(2) { }
# NonClass(bit_field) : int(2), second_variable, ...
# The grammar below is used within a function_try_block or function_definition.
# See simple_member_declaration for use in normal member function_definition.
#
def p_ctor_initializer_opt(p):
'''ctor_initializer_opt : empty
| ctor_initializer
'''
pass
def p_ctor_initializer(p):
'''ctor_initializer : ':' mem_initializer_list
'''
pass
def p_mem_initializer_list(p):
'''mem_initializer_list : mem_initializer
| mem_initializer_list_head mem_initializer
'''
pass
def p_mem_initializer_list_head(p):
'''mem_initializer_list_head : mem_initializer_list ','
'''
pass
def p_mem_initializer(p):
'''mem_initializer : mem_initializer_id '(' expression_list_opt ')'
'''
pass
def p_mem_initializer_id(p):
'''mem_initializer_id : scoped_id
'''
pass
#---------------------------------------------------------------------------------------------------
# A.11 Overloading
#---------------------------------------------------------------------------------------------------
def p_operator_function_id(p):
'''operator_function_id : OPERATOR operator
| OPERATOR '(' ')'
| OPERATOR LBRACKET RBRACKET
| OPERATOR '<'
| OPERATOR '>'
| OPERATOR operator '<' nonlgt_seq_opt '>'
'''
p[0] = ["operator"]
#
# It is not clear from the ANSI standard whether spaces are permitted in delete[]. If not then it can
# be recognised and returned as DELETE_ARRAY by the lexer. Assuming spaces are permitted there is an
# ambiguity created by the over-generalised nature of expressions. operator new is a valid declarator-id
# which we may have an undimensioned array of. Semantic rubbish, but syntactically valid. Since the
# array form is covered by the declarator consideration we can exclude the operator here. The need
# for a semantic rescue can be eliminated at the expense of a couple of shift-reduce conflicts by
# removing the comments on the next four lines.
#
def p_operator(p):
'''operator : NEW
| DELETE
| '+'
| '-'
| '*'
| '/'
| '%'
| '^'
| '&'
| '|'
| '~'
| '!'
| '='
| ASS_ADD
| ASS_SUB
| ASS_MUL
| ASS_DIV
| ASS_MOD
| ASS_XOR
| ASS_AND
| ASS_OR
| SHL
| SHR
| ASS_SHR
| ASS_SHL
| EQ
| NE
| LE
| GE
| LOG_AND
| LOG_OR
| INC
| DEC
| ','
| ARROW_STAR
| ARROW
'''
p[0]=p[1]
# | IF
# | SWITCH
# | WHILE
# | FOR
# | DO
def p_reserved(p):
'''reserved : PRIVATE
| CLiteral
| CppLiteral
| IF
| SWITCH
| WHILE
| FOR
| DO
| PROTECTED
| PUBLIC
| BOOL
| CHAR
| DOUBLE
| FLOAT
| INT
| LONG
| SHORT
| SIGNED
| UNSIGNED
| VOID
| WCHAR_T
| CLASS
| ENUM
| NAMESPACE
| STRUCT
| TYPENAME
| UNION
| CONST
| VOLATILE
| AUTO
| EXPLICIT
| EXPORT
| EXTERN
| FRIEND
| INLINE
| MUTABLE
| REGISTER
| STATIC
| TEMPLATE
| TYPEDEF
| USING
| VIRTUAL
| ASM
| BREAK
| CASE
| CATCH
| CONST_CAST
| CONTINUE
| DEFAULT
| DYNAMIC_CAST
| ELSE
| FALSE
| GOTO
| OPERATOR
| REINTERPRET_CAST
| RETURN
| SIZEOF
| STATIC_CAST
| THIS
| THROW
| TRUE
| TRY
| TYPEID
| ATTRIBUTE
| CDECL
| TYPEOF
| uTYPEOF
'''
if p[1] in ('try', 'catch', 'throw'):
global noExceptionLogic
noExceptionLogic=False
#---------------------------------------------------------------------------------------------------
# A.12 Templates
#---------------------------------------------------------------------------------------------------
def p_template_declaration(p):
'''template_declaration : template_parameter_clause declaration
| EXPORT template_declaration
'''
pass
def p_template_parameter_clause(p):
'''template_parameter_clause : TEMPLATE '<' nonlgt_seq_opt '>'
'''
pass
#
# Generalised naming makes identifier a valid declaration, so TEMPLATE identifier is too.
# The TEMPLATE prefix is therefore folded into all names, parenthesis_clause and decl_specifier_prefix.
#
# explicit_instantiation: TEMPLATE declaration
#
def p_explicit_specialization(p):
'''explicit_specialization : TEMPLATE '<' '>' declaration
'''
pass
#---------------------------------------------------------------------------------------------------
# A.13 Exception Handling
#---------------------------------------------------------------------------------------------------
def p_handler_seq(p):
'''handler_seq : handler
| handler handler_seq
'''
pass
def p_handler(p):
'''handler : CATCH '(' exception_declaration ')' compound_statement
'''
global noExceptionLogic
noExceptionLogic=False
def p_exception_declaration(p):
'''exception_declaration : parameter_declaration
'''
pass
def p_throw_expression(p):
'''throw_expression : THROW
| THROW assignment_expression
'''
global noExceptionLogic
noExceptionLogic=False
def p_exception_specification(p):
'''exception_specification : THROW '(' ')'
| THROW '(' type_id_list ')'
'''
global noExceptionLogic
noExceptionLogic=False
def p_type_id_list(p):
'''type_id_list : type_id
| type_id_list ',' type_id
'''
pass
#---------------------------------------------------------------------------------------------------
# Misc productions
#---------------------------------------------------------------------------------------------------
def p_nonsemicolon_seq(p):
'''nonsemicolon_seq : empty
| nonsemicolon_seq nonsemicolon
'''
pass
def p_nonsemicolon(p):
'''nonsemicolon : misc
| '('
| ')'
| '<'
| '>'
| LBRACKET nonbracket_seq_opt RBRACKET
| LBRACE nonbrace_seq_opt RBRACE
'''
pass
def p_nonparen_seq_opt(p):
'''nonparen_seq_opt : empty
| nonparen_seq_opt nonparen
'''
pass
def p_nonparen_seq(p):
'''nonparen_seq : nonparen
| nonparen_seq nonparen
'''
pass
def p_nonparen(p):
'''nonparen : misc
| '<'
| '>'
| ';'
| LBRACKET nonbracket_seq_opt RBRACKET
| LBRACE nonbrace_seq_opt RBRACE
'''
pass
def p_nonbracket_seq_opt(p):
'''nonbracket_seq_opt : empty
| nonbracket_seq_opt nonbracket
'''
pass
def p_nonbracket_seq(p):
'''nonbracket_seq : nonbracket
| nonbracket_seq nonbracket
'''
pass
def p_nonbracket(p):
'''nonbracket : misc
| '<'
| '>'
| '('
| ')'
| ';'
| LBRACKET nonbracket_seq_opt RBRACKET
| LBRACE nonbrace_seq_opt RBRACE
'''
pass
def p_nonbrace_seq_opt(p):
'''nonbrace_seq_opt : empty
| nonbrace_seq_opt nonbrace
'''
pass
def p_nonbrace(p):
'''nonbrace : misc
| '<'
| '>'
| '('
| ')'
| ';'
| LBRACKET nonbracket_seq_opt RBRACKET
| LBRACE nonbrace_seq_opt RBRACE
'''
pass
def p_nonlgt_seq_opt(p):
'''nonlgt_seq_opt : empty
| nonlgt_seq_opt nonlgt
'''
pass
def p_nonlgt(p):
'''nonlgt : misc
| '('
| ')'
| LBRACKET nonbracket_seq_opt RBRACKET
| '<' nonlgt_seq_opt '>'
| ';'
'''
pass
def p_misc(p):
'''misc : operator
| identifier
| IntegerLiteral
| CharacterLiteral
| FloatingLiteral
| StringLiteral
| reserved
| '?'
| ':'
| '.'
| SCOPE
| ELLIPSIS
| EXTENSION
'''
pass
def p_empty(p):
'''empty : '''
pass
#
# Compute column.
# input is the input text string
# token is a token instance
#
def _find_column(input,token):
    '''Compute the column of the token within the current line of the input text.'''
i = token.lexpos
while i > 0:
if input[i] == '\n': break
i -= 1
column = (token.lexpos - i)+1
return column
def p_error(p):
if p is None:
tmp = "Syntax error at end of file."
else:
tmp = "Syntax error at token "
        if p.type == "":
tmp = tmp + "''"
else:
tmp = tmp + str(p.type)
tmp = tmp + " with value '"+str(p.value)+"'"
tmp = tmp + " in line " + str(lexer.lineno-1)
tmp = tmp + " at column "+str(_find_column(_parsedata,p))
raise IOError( tmp )
#
# The function that performs the parsing
#
def parse_cpp(data=None, filename=None, debug=0, optimize=0, verbose=False, func_filter=None):
#
# Reset global data
#
global lexer
lexer = None
global scope_lineno
scope_lineno = 0
    global identifier_lineno
identifier_lineno = {}
global _parse_info
_parse_info=None
global _parsedata
_parsedata=None
global noExceptionLogic
noExceptionLogic = True
#
if debug > 0:
print "Debugging parse_cpp!"
#
# Always remove the parser.out file, which is generated to create debugging
#
if os.path.exists("parser.out"):
os.remove("parser.out")
#
# Remove the parsetab.py* files. These apparently need to be removed
# to ensure the creation of a parser.out file.
#
if os.path.exists("parsetab.py"):
os.remove("parsetab.py")
if os.path.exists("parsetab.pyc"):
os.remove("parsetab.pyc")
global debugging
debugging=True
#
# Build lexer
#
lexer = lex.lex()
#
# Initialize parse object
#
_parse_info = CppInfo(filter=func_filter)
_parse_info.verbose=verbose
#
# Build yaccer
#
write_table = not os.path.exists("parsetab.py")
yacc.yacc(debug=debug, optimize=optimize, write_tables=write_table)
#
# Parse the file
#
    if data is not None:
_parsedata=data
ply_init(_parsedata)
yacc.parse(data,debug=debug)
    elif filename is not None:
f = open(filename)
data = f.read()
f.close()
_parsedata=data
ply_init(_parsedata)
yacc.parse(data, debug=debug)
else:
return None
#
    if not noExceptionLogic:
        _parse_info.noExceptionLogic = False
    else:
        _parse_info.noExceptionLogic = True
        for key in identifier_lineno:
            if 'ASSERT_THROWS' in key:
                _parse_info.noExceptionLogic = False
                break
#
return _parse_info
import sys
if __name__ == '__main__': #pragma: no cover
#
# This MAIN routine parses a sequence of files provided at the command
# line. If '-v' is included, then a verbose parsing output is
# generated.
#
for arg in sys.argv[1:]:
if arg == "-v":
continue
print "Parsing file '"+arg+"'"
if '-v' in sys.argv:
parse_cpp(filename=arg,debug=2,verbose=2)
else:
parse_cpp(filename=arg,verbose=2)
#
# Print the _parse_info object summary for this file.
# This illustrates how class inheritance can be used to
# deduce class members.
#
print str(_parse_info)
|
odoomrp/odoomrp-wip
|
refs/heads/8.0
|
machine_manager_preventive/tests/__init__.py
|
5
|
# -*- coding: utf-8 -*-
# Copyright 2015 Daniel Campos - Avanzosc S.L.
# License AGPL-3 - See http://www.gnu.org/licenses/agpl-3.0.html
from . import test_machine_manager_preventive
|
RAPD/RAPD
|
refs/heads/master
|
src/plugins/subcontractors/xdsme/pycgtypes/vec4.py
|
12
|
####################################################################
# vec4 - 4-dimensional vector
#
# Copyright (C) 2002, Matthias Baas ([email protected])
#
# You may distribute under the terms of the BSD license, as
# specified in the file license.txt.
####################################################################
import types, math
# vec4
class vec4:
"""Four-dimensional vector.
This class represents a 4D vector.
"""
def __init__(self, *args):
"""Constructor.
        There are several ways to initialize a vector:
v = vec4() -> v = <0,0,0,0>
v = vec4(a) -> v = <a,a,a,a>
v = vec4(x,y) -> v = <x,y,0,0>
v = vec4(x,y,z) -> v = <x,y,z,0>
v = vec4(x,y,z,w) -> v = <x,y,z,w>
Note that specifying just one value sets all four components to
that value.
Additionally you can wrap those values in a list or a tuple or
specify them as a string:
v = vec4([1,2,3]) -> v = <1,2,3,0>
v = vec4("4,5") -> v = <4,5,0,0>
"""
if len(args)==0:
self.x, self.y, self.z, self.w = (0.0, 0.0, 0.0, 0.0)
elif len(args)==1:
T = type(args[0])
# scalar
if T==types.FloatType or T==types.IntType or T==types.LongType:
self.x, self.y, self.z, self.w = (args[0], args[0], args[0], args[0])
# vec4
elif isinstance(args[0], vec4):
self.x, self.y, self.z, self.w = args[0]
# Tuple/List
elif T==types.TupleType or T==types.ListType:
if len(args[0])==0:
self.x = self.y = self.z = self.w = 0.0
elif len(args[0])==1:
self.x = self.y = self.z = args[0][0]
self.w = 0.0
elif len(args[0])==2:
self.x, self.y = args[0]
self.z = 0.0
self.w = 0.0
elif len(args[0])==3:
self.x, self.y, self.z = args[0]
self.w = 0.0
elif len(args[0])==4:
self.x, self.y, self.z, self.w = args[0]
else:
raise TypeError, "vec4() takes at most 4 arguments"
# String
elif T==types.StringType:
                s=args[0].replace(","," ").replace("  "," ").strip().split(" ")
if s==[""]:
s=[]
f=map(lambda x: float(x), s)
dummy = vec4(f)
self.x, self.y, self.z, self.w = dummy
# error
else:
raise TypeError,"vec4() arg can't be converted to vec4"
elif len(args)==2:
self.x, self.y = args
self.z, self.w = (0.0, 0.0)
elif len(args)==3:
self.x, self.y, self.z = args
self.w = 0.0
elif len(args)==4:
self.x, self.y, self.z, self.w = args
else:
raise TypeError, "vec4() takes at most 4 arguments"
def __repr__(self):
return 'vec4('+`self.x`+', '+`self.y`+', '+`self.z`+', '+`self.w`+')'
def __str__(self):
fmt="%1.4f"
return '('+fmt%self.x+', '+fmt%self.y+', '+fmt%self.z+', '+fmt%self.w+')'
def __eq__(self, other):
"""== operator
>>> a=vec4(1.0, 0.5, -1.8, 0.2)
>>> b=vec4(-0.3, 0.75, 0.5, 0.6)
>>> c=vec4(-0.3, 0.75, 0.5, 0.6)
>>> print a==b
0
>>> print b==c
1
>>> print a==None
0
"""
if isinstance(other, vec4):
            return self.x==other.x and self.y==other.y and self.z==other.z and self.w==other.w
else:
return 0
def __ne__(self, other):
"""!= operator
>>> a=vec4(1.0, 0.5, -1.8, 0.2)
>>> b=vec4(-0.3, 0.75, 0.5, 0.6)
>>> c=vec4(-0.3, 0.75, 0.5, 0.6)
>>> print a!=b
1
>>> print b!=c
0
>>> print a!=None
1
"""
if isinstance(other, vec4):
            return self.x!=other.x or self.y!=other.y or self.z!=other.z or self.w!=other.w
else:
return 1
def __add__(self, other):
"""Vector addition.
>>> a=vec4(1.0, 0.5, -1.8, 0.2)
>>> b=vec4(-0.3, 0.75, 0.5, 0.3)
>>> print a+b
(0.7000, 1.2500, -1.3000, 0.5000)
"""
if isinstance(other, vec4):
return vec4(self.x+other.x, self.y+other.y, self.z+other.z, self.w+other.w)
else:
raise TypeError, "unsupported operand type for +"
def __sub__(self, other):
"""Vector subtraction.
>>> a=vec4(1.0, 0.5, -1.8, 0.2)
>>> b=vec4(-0.3, 0.75, 0.5, 0.3)
>>> print a-b
(1.3000, -0.2500, -2.3000, -0.1000)
"""
if isinstance(other, vec4):
return vec4(self.x-other.x, self.y-other.y, self.z-other.z, self.w-other.w)
else:
raise TypeError, "unsupported operand type for -"
def __mul__(self, other):
"""Multiplication with a scalar or dot product.
>>> a=vec4(1.0, 0.5, -1.8, 0.2)
>>> b=vec4(-0.3, 0.75, 0.5, 0.3)
>>> print a*2.0
(2.0000, 1.0000, -3.6000, 0.4000)
>>> print 2.0*a
(2.0000, 1.0000, -3.6000, 0.4000)
>>> print a*b
-0.765
"""
T = type(other)
# vec4*scalar
if T==types.FloatType or T==types.IntType or T==types.LongType:
return vec4(self.x*other, self.y*other, self.z*other, self.w*other)
# vec4*vec4
if isinstance(other, vec4):
return self.x*other.x + self.y*other.y + self.z*other.z + self.w*other.w
# unsupported
else:
# Try to delegate the operation to the other operand
if getattr(other,"__rmul__",None)!=None:
return other.__rmul__(self)
else:
raise TypeError, "unsupported operand type for *"
__rmul__ = __mul__
def __div__(self, other):
"""Division by scalar
>>> a=vec4(1.0, 0.5, -1.8, 0.2)
>>> print a/2.0
(0.5000, 0.2500, -0.9000, 0.1000)
"""
T = type(other)
# vec4/scalar
if T==types.FloatType or T==types.IntType or T==types.LongType:
return vec4(self.x/other, self.y/other, self.z/other, self.w/other)
# unsupported
else:
raise TypeError, "unsupported operand type for /"
def __mod__(self, other):
"""Modulo (component wise)
>>> a=vec4(3.0, 2.5, -1.8, 0.2)
>>> print a%2.0
(1.0000, 0.5000, 0.2000, 0.2000)
"""
T = type(other)
# vec4%scalar
if T==types.FloatType or T==types.IntType or T==types.LongType:
return vec4(self.x%other, self.y%other, self.z%other, self.w%other)
# unsupported
else:
raise TypeError, "unsupported operand type for %"
def __iadd__(self, other):
"""Inline vector addition.
>>> a=vec4(1.0, 0.5, -1.8, 0.2)
>>> b=vec4(-0.3, 0.75, 0.5, 0.3)
>>> a+=b
>>> print a
(0.7000, 1.2500, -1.3000, 0.5000)
"""
if isinstance(other, vec4):
self.x+=other.x
self.y+=other.y
self.z+=other.z
self.w+=other.w
return self
else:
raise TypeError, "unsupported operand type for +="
def __isub__(self, other):
"""Inline vector subtraction.
>>> a=vec4(1.0, 0.5, -1.8, 0.2)
>>> b=vec4(-0.3, 0.75, 0.5, 0.3)
>>> a-=b
>>> print a
(1.3000, -0.2500, -2.3000, -0.1000)
"""
if isinstance(other, vec4):
self.x-=other.x
self.y-=other.y
self.z-=other.z
self.w-=other.w
return self
else:
raise TypeError, "unsupported operand type for -="
def __imul__(self, other):
"""Inline multiplication (only with scalar)
>>> a=vec4(1.0, 0.5, -1.8, 0.2)
>>> a*=2.0
>>> print a
(2.0000, 1.0000, -3.6000, 0.4000)
"""
T = type(other)
# vec4*=scalar
if T==types.FloatType or T==types.IntType or T==types.LongType:
self.x*=other
self.y*=other
self.z*=other
self.w*=other
return self
else:
raise TypeError, "unsupported operand type for *="
def __idiv__(self, other):
"""Inline division with scalar
>>> a=vec4(1.0, 0.5, -1.8, 0.2)
>>> a/=2.0
>>> print a
(0.5000, 0.2500, -0.9000, 0.1000)
"""
T = type(other)
# vec4/=scalar
if T==types.FloatType or T==types.IntType or T==types.LongType:
self.x/=other
self.y/=other
self.z/=other
self.w/=other
return self
else:
raise TypeError, "unsupported operand type for /="
def __imod__(self, other):
"""Inline modulo
>>> a=vec4(3.0, 2.5, -1.8, 0.2)
>>> a%=2.0
>>> print a
(1.0000, 0.5000, 0.2000, 0.2000)
"""
T = type(other)
# vec4%=scalar
if T==types.FloatType or T==types.IntType or T==types.LongType:
self.x%=other
self.y%=other
self.z%=other
self.w%=other
return self
else:
raise TypeError, "unsupported operand type for %="
def __neg__(self):
"""Negation
>>> a=vec4(3.0, 2.5, -1.8, 0.2)
>>> print -a
(-3.0000, -2.5000, 1.8000, -0.2000)
"""
return vec4(-self.x, -self.y, -self.z, -self.w)
def __pos__(self):
"""
>>> a=vec4(3.0, 2.5, -1.8, 0.2)
>>> print +a
(3.0000, 2.5000, -1.8000, 0.2000)
"""
return vec4(+self.x, +self.y, +self.z, +self.w)
def __abs__(self):
"""Return the length of the vector.
abs(v) is equivalent to v.length().
>>> a=vec4(1.0, 0.5, -1.8, 0.2)
>>> print abs(a)
2.12837966538
"""
return math.sqrt(self*self)
def __len__(self):
"""Length of the sequence (always 4)"""
return 4
def __getitem__(self, key):
"""Return a component by index (0-based)
>>> a=vec4(1.0, 0.5, -1.8, 0.2)
>>> print a[0]
1.0
>>> print a[1]
0.5
>>> print a[2]
-1.8
>>> print a[3]
0.2
"""
T=type(key)
if T!=types.IntType and T!=types.LongType:
raise TypeError, "index must be integer"
if key==0: return self.x
elif key==1: return self.y
elif key==2: return self.z
elif key==3: return self.w
else:
raise IndexError,"index out of range"
def __setitem__(self, key, value):
"""Set a component by index (0-based)
>>> a=vec4()
>>> a[0]=1.5; a[1]=0.7; a[2]=-0.3; a[3]=0.2
>>> print a
(1.5000, 0.7000, -0.3000, 0.2000)
"""
T=type(key)
if T!=types.IntType and T!=types.LongType:
raise TypeError, "index must be integer"
if key==0: self.x = value
elif key==1: self.y = value
elif key==2: self.z = value
elif key==3: self.w = value
else:
raise IndexError,"index out of range"
def length(self):
"""Return the length of the vector.
v.length() is equivalent to abs(v).
>>> a=vec4(1.0, 0.5, -1.8, 0.2)
>>> print a.length()
2.12837966538
"""
return math.sqrt(self*self)
def normalize(self):
"""Return normalized vector.
>>> a=vec4(1.0, 0.5, -1.8, 1.2)
>>> print a.normalize()
(0.4107, 0.2053, -0.7392, 0.4928)
"""
nlen = 1.0/math.sqrt(self*self)
return vec4(self.x*nlen, self.y*nlen, self.z*nlen, self.w*nlen)
######################################################################
def _test():
import doctest, vec4
failed, total = doctest.testmod(vec4)
print "%d/%d failed" % (failed, total)
if __name__=="__main__":
_test()
|
stefanv/scipy3
|
refs/heads/master
|
scipy/weave/tests/test_c_spec.py
|
2
|
import os
import sys
# Note: test_dir is global to this file.
# It is made by setup_location()
#globals
global test_dir
test_dir = ''
from numpy.testing import *
from scipy.weave import inline_tools,ext_tools,c_spec
from scipy.weave.build_tools import msvc_exists, gcc_exists
from scipy.weave.catalog import unique_file
def unique_mod(d,file_name):
f = os.path.basename(unique_file(d,file_name))
m = os.path.splitext(f)[0]
return m
def remove_whitespace(in_str):
out = in_str.replace(" ","")
out = out.replace("\t","")
out = out.replace("\n","")
return out
#----------------------------------------------------------------------------
# Scalar conversion test classes
# int, float, complex
#----------------------------------------------------------------------------
# compilers = []
# for c in ('gcc','msvc'):
# mod_name = 'empty' + c
# mod_name = unique_mod(test_dir,mod_name)
# mod = ext_tools.ext_module(mod_name)
# # a = 1
# # code = "a=2;"
# # test = ext_tools.ext_function('test',code,['a'])
# # mod.add_function(test)
# try:
# mod.compile(location = test_dir, compiler = c)
# except CompileError:
# print "Probably don't have Compiler: %s"%c
# else:
# compilers.append(c)
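# A minimal sketch of the round trip these converter tests exercise (assuming
# a working C/C++ compiler and a writable temp build directory): an int
# argument is matched by c_spec.int_converter, handed to the inlined C code,
# and a fresh Python object comes back through return_val.
#
#   a = 1
#   result = inline_tools.inline("return_val = PyInt_FromLong(a + 1);", ['a'])
#   assert result == 2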
class IntConverter(TestCase):
compiler = ''
@dec.slow
def test_type_match_string(self):
s = c_spec.int_converter()
assert( not s.type_match('string') )
@dec.slow
def test_type_match_int(self):
s = c_spec.int_converter()
assert(s.type_match(5))
@dec.slow
def test_type_match_float(self):
s = c_spec.int_converter()
assert(not s.type_match(5.))
@dec.slow
def test_type_match_complex(self):
s = c_spec.int_converter()
assert(not s.type_match(5.+1j))
@dec.slow
def test_var_in(self):
mod_name = 'int_var_in' + self.compiler
mod_name = unique_mod(test_dir,mod_name)
mod = ext_tools.ext_module(mod_name)
a = 1
code = "a=2;"
test = ext_tools.ext_function('test',code,['a'])
mod.add_function(test)
mod.compile(location = test_dir, compiler = self.compiler)
exec 'from ' + mod_name + ' import test'
b=1
test(b)
try:
b = 1.
test(b)
except TypeError:
pass
try:
b = 'abc'
test(b)
except TypeError:
pass
@dec.slow
def test_int_return(self):
mod_name = sys._getframe().f_code.co_name + self.compiler
mod_name = unique_mod(test_dir,mod_name)
mod = ext_tools.ext_module(mod_name)
a = 1
code = """
a=a+2;
return_val = PyInt_FromLong(a);
"""
test = ext_tools.ext_function('test',code,['a'])
mod.add_function(test)
mod.compile(location = test_dir, compiler = self.compiler)
exec 'from ' + mod_name + ' import test'
b=1
c = test(b)
assert( c == 3)
class FloatConverter(TestCase):
compiler = ''
@dec.slow
def test_type_match_string(self):
s = c_spec.float_converter()
assert( not s.type_match('string'))
@dec.slow
def test_type_match_int(self):
s = c_spec.float_converter()
assert(not s.type_match(5))
@dec.slow
def test_type_match_float(self):
s = c_spec.float_converter()
assert(s.type_match(5.))
@dec.slow
def test_type_match_complex(self):
s = c_spec.float_converter()
assert(not s.type_match(5.+1j))
@dec.slow
def test_float_var_in(self):
mod_name = sys._getframe().f_code.co_name + self.compiler
mod_name = unique_mod(test_dir,mod_name)
mod = ext_tools.ext_module(mod_name)
a = 1.
code = "a=2.;"
test = ext_tools.ext_function('test',code,['a'])
mod.add_function(test)
mod.compile(location = test_dir, compiler = self.compiler)
exec 'from ' + mod_name + ' import test'
b=1.
test(b)
try:
b = 1.
test(b)
except TypeError:
pass
try:
b = 'abc'
test(b)
except TypeError:
pass
@dec.slow
def test_float_return(self):
mod_name = sys._getframe().f_code.co_name + self.compiler
mod_name = unique_mod(test_dir,mod_name)
mod = ext_tools.ext_module(mod_name)
a = 1.
code = """
a=a+2.;
return_val = PyFloat_FromDouble(a);
"""
test = ext_tools.ext_function('test',code,['a'])
mod.add_function(test)
mod.compile(location = test_dir, compiler = self.compiler)
exec 'from ' + mod_name + ' import test'
b=1.
c = test(b)
assert( c == 3.)
class ComplexConverter(TestCase):
compiler = ''
@dec.slow
def test_type_match_string(self):
s = c_spec.complex_converter()
assert( not s.type_match('string') )
@dec.slow
def test_type_match_int(self):
s = c_spec.complex_converter()
assert(not s.type_match(5))
@dec.slow
def test_type_match_float(self):
s = c_spec.complex_converter()
assert(not s.type_match(5.))
@dec.slow
def test_type_match_complex(self):
s = c_spec.complex_converter()
assert(s.type_match(5.+1j))
@dec.slow
def test_complex_var_in(self):
mod_name = sys._getframe().f_code.co_name + self.compiler
mod_name = unique_mod(test_dir,mod_name)
mod = ext_tools.ext_module(mod_name)
a = 1.+1j
code = "a=std::complex<double>(2.,2.);"
test = ext_tools.ext_function('test',code,['a'])
mod.add_function(test)
mod.compile(location = test_dir, compiler = self.compiler)
exec 'from ' + mod_name + ' import test'
b=1.+1j
test(b)
try:
b = 1.
test(b)
except TypeError:
pass
try:
b = 'abc'
test(b)
except TypeError:
pass
@dec.slow
def test_complex_return(self):
mod_name = sys._getframe().f_code.co_name + self.compiler
mod_name = unique_mod(test_dir,mod_name)
mod = ext_tools.ext_module(mod_name)
a = 1.+1j
code = """
a= a + std::complex<double>(2.,2.);
return_val = PyComplex_FromDoubles(a.real(),a.imag());
"""
test = ext_tools.ext_function('test',code,['a'])
mod.add_function(test)
mod.compile(location = test_dir, compiler = self.compiler)
exec 'from ' + mod_name + ' import test'
b=1.+1j
c = test(b)
assert( c == 3.+3j)
#----------------------------------------------------------------------------
# File conversion tests
#----------------------------------------------------------------------------
class FileConverter(TestCase):
compiler = ''
@dec.slow
def test_py_to_file(self):
import tempfile
file_name = tempfile.mktemp()
file = open(file_name,'w')
code = """
fprintf(file,"hello bob");
"""
inline_tools.inline(code,['file'],compiler=self.compiler,force=1)
file.close()
file = open(file_name,'r')
assert(file.read() == "hello bob")
@dec.slow
def test_file_to_py(self):
import tempfile
file_name = tempfile.mktemp()
        # not sure I like Py::String as default -- might move to std::string
# or just plain char*
code = """
const char* _file_name = file_name.c_str();
FILE* file = fopen(_file_name, "w");
return_val = file_to_py(file, _file_name, "w");
"""
file = inline_tools.inline(code,['file_name'], compiler=self.compiler,
force=1)
file.write("hello fred")
file.close()
file = open(file_name,'r')
assert(file.read() == "hello fred")
#----------------------------------------------------------------------------
# Instance conversion tests
#----------------------------------------------------------------------------
class InstanceConverter(TestCase):
pass
#----------------------------------------------------------------------------
# Callable object conversion tests
#----------------------------------------------------------------------------
class CallableConverter(TestCase):
compiler=''
@dec.slow
def test_call_function(self):
import string
func = string.find
search_str = "hello world hello"
sub_str = "world"
# * Not sure about ref counts on search_str and sub_str.
        # * Is the Py::String necessary? (it works anyway...)
code = """
py::tuple args(2);
args[0] = search_str;
args[1] = sub_str;
return_val = func.call(args);
"""
actual = inline_tools.inline(code,['func','search_str','sub_str'],
compiler=self.compiler,force=1)
desired = func(search_str,sub_str)
assert(desired == actual)
class SequenceConverter(TestCase):
compiler = ''
@dec.slow
def test_convert_to_dict(self):
d = {}
inline_tools.inline("",['d'],compiler=self.compiler,force=1)
@dec.slow
def test_convert_to_list(self):
l = []
inline_tools.inline("",['l'],compiler=self.compiler,force=1)
@dec.slow
def test_convert_to_string(self):
s = 'hello'
inline_tools.inline("",['s'],compiler=self.compiler,force=1)
@dec.slow
def test_convert_to_tuple(self):
t = ()
inline_tools.inline("",['t'],compiler=self.compiler,force=1)
class StringConverter(TestCase):
compiler = ''
@dec.slow
def test_type_match_string(self):
s = c_spec.string_converter()
assert( s.type_match('string') )
@dec.slow
def test_type_match_int(self):
s = c_spec.string_converter()
assert(not s.type_match(5))
@dec.slow
def test_type_match_float(self):
s = c_spec.string_converter()
assert(not s.type_match(5.))
@dec.slow
def test_type_match_complex(self):
s = c_spec.string_converter()
assert(not s.type_match(5.+1j))
@dec.slow
def test_var_in(self):
mod_name = 'string_var_in'+self.compiler
mod_name = unique_mod(test_dir,mod_name)
mod = ext_tools.ext_module(mod_name)
a = 'string'
code = 'a=std::string("hello");'
test = ext_tools.ext_function('test',code,['a'])
mod.add_function(test)
mod.compile(location = test_dir, compiler = self.compiler)
exec 'from ' + mod_name + ' import test'
b='bub'
test(b)
try:
b = 1.
test(b)
except TypeError:
pass
try:
b = 1
test(b)
except TypeError:
pass
@dec.slow
def test_return(self):
mod_name = 'string_return'+self.compiler
mod_name = unique_mod(test_dir,mod_name)
mod = ext_tools.ext_module(mod_name)
a = 'string'
code = """
a= std::string("hello");
return_val = PyString_FromString(a.c_str());
"""
test = ext_tools.ext_function('test',code,['a'])
mod.add_function(test)
mod.compile(location = test_dir, compiler = self.compiler)
exec 'from ' + mod_name + ' import test'
b='bub'
c = test(b)
assert( c == 'hello')
class ListConverter(TestCase):
compiler = ''
@dec.slow
def test_type_match_bad(self):
s = c_spec.list_converter()
objs = [{},(),'',1,1.,1+1j]
for i in objs:
assert( not s.type_match(i) )
@dec.slow
def test_type_match_good(self):
s = c_spec.list_converter()
assert(s.type_match([]))
@dec.slow
def test_var_in(self):
mod_name = 'list_var_in'+self.compiler
mod_name = unique_mod(test_dir,mod_name)
mod = ext_tools.ext_module(mod_name)
a = [1]
code = 'a=py::list();'
test = ext_tools.ext_function('test',code,['a'])
mod.add_function(test)
mod.compile(location = test_dir, compiler = self.compiler)
exec 'from ' + mod_name + ' import test'
b=[1,2]
test(b)
try:
b = 1.
test(b)
except TypeError:
pass
try:
b = 'string'
test(b)
except TypeError:
pass
@dec.slow
def test_return(self):
mod_name = 'list_return'+self.compiler
mod_name = unique_mod(test_dir,mod_name)
mod = ext_tools.ext_module(mod_name)
a = [1]
code = """
a=py::list();
a.append("hello");
return_val = a;
"""
test = ext_tools.ext_function('test',code,['a'])
mod.add_function(test)
mod.compile(location = test_dir, compiler = self.compiler)
exec 'from ' + mod_name + ' import test'
b=[1,2]
c = test(b)
assert( c == ['hello'])
@dec.slow
def test_speed(self):
mod_name = 'list_speed'+self.compiler
mod_name = unique_mod(test_dir,mod_name)
mod = ext_tools.ext_module(mod_name)
a = range(1000000);
code = """
int v, sum = 0;
for(int i = 0; i < a.len(); i++)
{
v = a[i];
if (v % 2)
sum += v;
else
sum -= v;
}
return_val = sum;
"""
with_cxx = ext_tools.ext_function('with_cxx',code,['a'])
mod.add_function(with_cxx)
code = """
int vv, sum = 0;
PyObject *v;
for(int i = 0; i < a.len(); i++)
{
v = PyList_GetItem(py_a,i);
//didn't set error here -- just speed test
vv = py_to_int(v,"list item");
if (vv % 2)
sum += vv;
else
sum -= vv;
}
return_val = sum;
"""
no_checking = ext_tools.ext_function('no_checking',code,['a'])
mod.add_function(no_checking)
mod.compile(location = test_dir, compiler = self.compiler)
exec 'from ' + mod_name + ' import with_cxx, no_checking'
import time
t1 = time.time()
sum1 = with_cxx(a)
t2 = time.time()
print 'speed test for list access'
print 'compiler:', self.compiler
print 'scxx:', t2 - t1
t1 = time.time()
sum2 = no_checking(a)
t2 = time.time()
print 'C, no checking:', t2 - t1
sum3 = 0
t1 = time.time()
for i in a:
if i % 2:
sum3 += i
else:
sum3 -= i
t2 = time.time()
print 'python:', t2 - t1
assert( sum1 == sum2 and sum1 == sum3)
class TupleConverter(TestCase):
compiler = ''
@dec.slow
def test_type_match_bad(self):
s = c_spec.tuple_converter()
objs = [{},[],'',1,1.,1+1j]
for i in objs:
assert( not s.type_match(i) )
@dec.slow
def test_type_match_good(self):
s = c_spec.tuple_converter()
assert(s.type_match((1,)))
@dec.slow
def test_var_in(self):
mod_name = 'tuple_var_in'+self.compiler
mod_name = unique_mod(test_dir,mod_name)
mod = ext_tools.ext_module(mod_name)
a = (1,)
code = 'a=py::tuple();'
test = ext_tools.ext_function('test',code,['a'])
mod.add_function(test)
mod.compile(location = test_dir, compiler = self.compiler)
exec 'from ' + mod_name + ' import test'
b=(1,2)
test(b)
try:
b = 1.
test(b)
except TypeError:
pass
try:
b = 'string'
test(b)
except TypeError:
pass
@dec.slow
def test_return(self):
mod_name = 'tuple_return'+self.compiler
mod_name = unique_mod(test_dir,mod_name)
mod = ext_tools.ext_module(mod_name)
a = (1,)
code = """
a=py::tuple(2);
a[0] = "hello";
a.set_item(1,py::None);
return_val = a;
"""
test = ext_tools.ext_function('test',code,['a'])
mod.add_function(test)
mod.compile(location = test_dir, compiler = self.compiler)
exec 'from ' + mod_name + ' import test'
b=(1,2)
c = test(b)
assert( c == ('hello',None))
class DictConverter(TestCase):
""" Base Class for dictionary conversion tests.
"""
# Default string specifying the compiler to use. While this is set
# in all sub-classes, this base test class is found by the test
# infrastructure and run. Therefore, we give it a default value
# so that it can run on its own.
compiler=''
@dec.slow
def test_type_match_bad(self):
s = c_spec.dict_converter()
objs = [[],(),'',1,1.,1+1j]
for i in objs:
assert( not s.type_match(i) )
@dec.slow
def test_type_match_good(self):
s = c_spec.dict_converter()
assert(s.type_match({}))
@dec.slow
def test_var_in(self):
mod_name = 'dict_var_in'+self.compiler
mod_name = unique_mod(test_dir,mod_name)
mod = ext_tools.ext_module(mod_name)
a = {'z':1}
code = 'a=py::dict();' # This just checks to make sure the type is correct
test = ext_tools.ext_function('test',code,['a'])
mod.add_function(test)
mod.compile(location = test_dir, compiler = self.compiler)
exec 'from ' + mod_name + ' import test'
b={'y':2}
test(b)
try:
b = 1.
test(b)
except TypeError:
pass
try:
b = 'string'
test(b)
except TypeError:
pass
@dec.slow
def test_return(self):
mod_name = 'dict_return'+self.compiler
mod_name = unique_mod(test_dir,mod_name)
mod = ext_tools.ext_module(mod_name)
a = {'z':1}
code = """
a=py::dict();
a["hello"] = 5;
return_val = a;
"""
test = ext_tools.ext_function('test',code,['a'])
mod.add_function(test)
mod.compile(location = test_dir, compiler = self.compiler)
exec 'from ' + mod_name + ' import test'
b = {'z':2}
c = test(b)
assert( c['hello'] == 5)
# for compiler in compilers:
# for name,klass in globals().iteritems():
# if name[:4]=="Test" and name[-9:] == "Converter":
# exec("class %s%s(%s):\n compiler = '%s'"%(name,compiler,name,compiler))
# for converter in
for _n in dir():
if _n[-9:]=='Converter':
if msvc_exists():
exec "class Test%sMsvc(%s):\n compiler = 'msvc'"%(_n,_n)
else:
exec "class Test%sUnix(%s):\n compiler = ''"%(_n,_n)
if gcc_exists():
exec "class Test%sGcc(%s):\n compiler = 'gcc'"%(_n,_n)
# class TestMsvcIntConverter(TestIntConverter):
# compiler = 'msvc'
# class TestUnixIntConverter(TestIntConverter):
# compiler = ''
# class TestGccIntConverter(TestIntConverter):
# compiler = 'gcc'
#
# class TestMsvcFloatConverter(TestFloatConverter):
# compiler = 'msvc'
#
# class TestMsvcFloatConverter(TestFloatConverter):
# compiler = 'msvc'
# class TestUnixFloatConverter(TestFloatConverter):
# compiler = ''
# class TestGccFloatConverter(TestFloatConverter):
# compiler = 'gcc'
#
# class TestMsvcComplexConverter(TestComplexConverter):
# compiler = 'msvc'
# class TestUnixComplexConverter(TestComplexConverter):
# compiler = ''
# class TestGccComplexConverter(TestComplexConverter):
# compiler = 'gcc'
#
# class TestMsvcFileConverter(TestFileConverter):
# compiler = 'msvc'
# class TestUnixFileConverter(TestFileConverter):
# compiler = ''
# class TestGccFileConverter(TestFileConverter):
# compiler = 'gcc'
#
# class TestMsvcCallableConverter(TestCallableConverter):
# compiler = 'msvc'
# class TestUnixCallableConverter(TestCallableConverter):
# compiler = ''
# class TestGccCallableConverter(TestCallableConverter):
# compiler = 'gcc'
#
# class TestMsvcSequenceConverter(TestSequenceConverter):
# compiler = 'msvc'
# class TestUnixSequenceConverter(TestSequenceConverter):
# compiler = ''
# class TestGccSequenceConverter(TestSequenceConverter):
# compiler = 'gcc'
#
# class TestMsvcStringConverter(TestStringConverter):
# compiler = 'msvc'
# class TestUnixStringConverter(TestStringConverter):
# compiler = ''
# class TestGccStringConverter(TestStringConverter):
# compiler = 'gcc'
#
# class TestMsvcListConverter(TestListConverter):
# compiler = 'msvc'
# class TestUnixListConverter(TestListConverter):
# compiler = ''
# class TestGccListConverter(TestListConverter):
# compiler = 'gcc'
#
# class TestMsvcTupleConverter(TestTupleConverter):
# compiler = 'msvc'
# class TestUnixTupleConverter(TestTupleConverter):
# compiler = ''
# class TestGccTupleConverter(TestTupleConverter):
# compiler = 'gcc'
#
# class TestMsvcDictConverter(TestDictConverter):
# compiler = 'msvc'
# class TestUnixDictConverter(TestDictConverter):
# compiler = ''
# class TestGccDictConverter(TestDictConverter):
# compiler = 'gcc'
#
# class TestMsvcInstanceConverter(TestInstanceConverter):
# compiler = 'msvc'
# class TestUnixInstanceConverter(TestInstanceConverter):
# compiler = ''
# class TestGccInstanceConverter(TestInstanceConverter):
# compiler = 'gcc'
def setup_location():
import tempfile
#test_dir = os.path.join(tempfile.gettempdir(),'test_files')
test_dir = tempfile.mktemp()
if not os.path.exists(test_dir):
os.mkdir(test_dir)
sys.path.insert(0,test_dir)
return test_dir
test_dir = setup_location()
def teardown_location():
import tempfile
test_dir = os.path.join(tempfile.gettempdir(),'test_files')
if sys.path[0] == test_dir:
sys.path = sys.path[1:]
return test_dir
def remove_file(name):
test_dir = os.path.abspath(name)
# if not msvc_exists():
# for _n in dir():
# if _n[:8]=='TestMsvc': exec 'del '+_n
# else:
# for _n in dir():
# if _n[:8]=='TestUnix': exec 'del '+_n
#
# if not (gcc_exists() and msvc_exists() and sys.platform == 'win32'):
# for _n in dir():
# if _n[:7]=='TestGcc': exec 'del '+_n
#
if __name__ == "__main__":
import nose
nose.run(argv=['', __file__])
|
sasukeh/cinder
|
refs/heads/master
|
cinder/tests/unit/volume/drivers/test_hgst.py
|
7
|
# Copyright (c) 2015 HGST Inc
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import mock
from oslo_concurrency import processutils
from cinder import context
from cinder import exception
from cinder import test
from cinder.volume import configuration as conf
from cinder.volume.drivers.hgst import HGSTDriver
from cinder.volume import volume_types
class HGSTTestCase(test.TestCase):
# Need to mock these since we use them on driver creation
@mock.patch('pwd.getpwnam', return_value=1)
@mock.patch('grp.getgrnam', return_value=1)
@mock.patch('socket.gethostbyname', return_value='123.123.123.123')
def setUp(self, mock_ghn, mock_grnam, mock_pwnam):
"""Set up UUT and all the flags required for later fake_executes."""
super(HGSTTestCase, self).setUp()
self.stubs.Set(processutils, 'execute', self._fake_execute)
self._fail_vgc_cluster = False
self._fail_ip = False
self._fail_network_list = False
self._fail_domain_list = False
self._empty_domain_list = False
self._fail_host_storage = False
self._fail_space_list = False
self._fail_space_delete = False
self._fail_set_apphosts = False
self._fail_extend = False
self._request_cancel = False
self._return_blocked = 0
self.configuration = mock.Mock(spec=conf.Configuration)
self.configuration.safe_get = self._fake_safe_get
self._reset_configuration()
self.driver = HGSTDriver(configuration=self.configuration,
execute=self._fake_execute)
def _fake_safe_get(self, value):
"""Don't throw exception on missing parameters, return None."""
try:
val = getattr(self.configuration, value)
except AttributeError:
val = None
return val
def _reset_configuration(self):
"""Set safe and sane values for config params."""
self.configuration.num_volume_device_scan_tries = 1
self.configuration.volume_dd_blocksize = '1M'
self.configuration.volume_backend_name = 'hgst-1'
self.configuration.hgst_storage_servers = 'stor1:gbd0,stor2:gbd0'
self.configuration.hgst_net = 'net1'
self.configuration.hgst_redundancy = '0'
self.configuration.hgst_space_user = 'kane'
self.configuration.hgst_space_group = 'xanadu'
self.configuration.hgst_space_mode = '0777'
def _parse_space_create(self, *cmd):
"""Eats a vgc-cluster space-create command line to a dict."""
self.created = {'storageserver': ''}
cmd = list(*cmd)
while cmd:
param = cmd.pop(0)
if param == "-n":
self.created['name'] = cmd.pop(0)
elif param == "-N":
self.created['net'] = cmd.pop(0)
elif param == "-s":
self.created['size'] = cmd.pop(0)
elif param == "--redundancy":
self.created['redundancy'] = cmd.pop(0)
            elif param == "--user":
                self.created['user'] = cmd.pop(0)
elif param == "--group":
self.created['group'] = cmd.pop(0)
elif param == "--mode":
self.created['mode'] = cmd.pop(0)
elif param == "-S":
self.created['storageserver'] += cmd.pop(0) + ","
else:
pass
def _parse_space_extend(self, *cmd):
"""Eats a vgc-cluster space-extend commandline to a dict."""
self.extended = {'storageserver': ''}
cmd = list(*cmd)
while cmd:
param = cmd.pop(0)
if param == "-n":
self.extended['name'] = cmd.pop(0)
elif param == "-s":
self.extended['size'] = cmd.pop(0)
elif param == "-S":
self.extended['storageserver'] += cmd.pop(0) + ","
else:
pass
if self._fail_extend:
raise processutils.ProcessExecutionError(exit_code=1)
else:
return '', ''
def _parse_space_delete(self, *cmd):
"""Eats a vgc-cluster space-delete commandline to a dict."""
self.deleted = {}
cmd = list(*cmd)
while cmd:
param = cmd.pop(0)
if param == "-n":
self.deleted['name'] = cmd.pop(0)
else:
pass
if self._fail_space_delete:
raise processutils.ProcessExecutionError(exit_code=1)
else:
return '', ''
def _parse_space_list(self, *cmd):
"""Eats a vgc-cluster space-list commandline to a dict."""
json = False
nameOnly = False
cmd = list(*cmd)
while cmd:
param = cmd.pop(0)
if param == "--json":
json = True
elif param == "--name-only":
nameOnly = True
elif param == "-n":
pass # Don't use the name here...
else:
pass
if self._fail_space_list:
raise processutils.ProcessExecutionError(exit_code=1)
elif nameOnly:
return "space1\nspace2\nvolume1\n", ''
elif json:
return HGST_SPACE_JSON, ''
else:
return '', ''
def _parse_network_list(self, *cmd):
"""Eat a network-list command and return error or results."""
if self._fail_network_list:
raise processutils.ProcessExecutionError(exit_code=1)
else:
return NETWORK_LIST, ''
def _parse_domain_list(self, *cmd):
"""Eat a domain-list command and return error, empty, or results."""
if self._fail_domain_list:
raise processutils.ProcessExecutionError(exit_code=1)
elif self._empty_domain_list:
return '', ''
else:
return "thisserver\nthatserver\nanotherserver\n", ''
def _fake_execute(self, *cmd, **kwargs):
"""Sudo hook to catch commands to allow running on all hosts."""
cmdlist = list(cmd)
exe = cmdlist.pop(0)
if exe == 'vgc-cluster':
exe = cmdlist.pop(0)
if exe == "request-cancel":
self._request_cancel = True
if self._return_blocked > 0:
return 'Request cancelled', ''
else:
raise processutils.ProcessExecutionError(exit_code=1)
elif self._fail_vgc_cluster:
raise processutils.ProcessExecutionError(exit_code=1)
elif exe == "--version":
return "HGST Solutions V2.5.0.0.x.x.x.x.x", ''
elif exe == "space-list":
return self._parse_space_list(cmdlist)
elif exe == "space-create":
self._parse_space_create(cmdlist)
if self._return_blocked > 0:
self._return_blocked = self._return_blocked - 1
out = "VGC_CREATE_000002\nBLOCKED\n"
raise processutils.ProcessExecutionError(stdout=out,
exit_code=1)
return '', ''
elif exe == "space-delete":
return self._parse_space_delete(cmdlist)
elif exe == "space-extend":
return self._parse_space_extend(cmdlist)
elif exe == "host-storage":
if self._fail_host_storage:
raise processutils.ProcessExecutionError(exit_code=1)
return HGST_HOST_STORAGE, ''
elif exe == "domain-list":
return self._parse_domain_list()
elif exe == "network-list":
return self._parse_network_list()
elif exe == "space-set-apphosts":
if self._fail_set_apphosts:
raise processutils.ProcessExecutionError(exit_code=1)
return '', ''
else:
raise NotImplementedError
elif exe == 'ip':
if self._fail_ip:
raise processutils.ProcessExecutionError(exit_code=1)
else:
return IP_OUTPUT, ''
elif exe == 'dd':
self.dd_count = -1
for p in cmdlist:
if 'count=' in p:
self.dd_count = int(p[6:])
return DD_OUTPUT, ''
else:
return '', ''
@mock.patch('pwd.getpwnam', return_value=1)
@mock.patch('grp.getgrnam', return_value=1)
@mock.patch('socket.gethostbyname', return_value='123.123.123.123')
def test_vgc_cluster_not_present(self, mock_ghn, mock_grnam, mock_pwnam):
"""Test exception when vgc-cluster returns an error."""
# Should pass
self._fail_vgc_cluster = False
self.driver.check_for_setup_error()
# Should throw exception
self._fail_vgc_cluster = True
self.assertRaises(exception.VolumeDriverException,
self.driver.check_for_setup_error)
@mock.patch('pwd.getpwnam', return_value=1)
@mock.patch('grp.getgrnam', return_value=1)
@mock.patch('socket.gethostbyname', return_value='123.123.123.123')
def test_parameter_redundancy_invalid(self, mock_ghn, mock_grnam,
mock_pwnam):
"""Test when hgst_redundancy config parameter not 0 or 1."""
# Should pass
self.driver.check_for_setup_error()
# Should throw exceptions
self.configuration.hgst_redundancy = ''
self.assertRaises(exception.VolumeDriverException,
self.driver.check_for_setup_error)
self.configuration.hgst_redundancy = 'Fred'
self.assertRaises(exception.VolumeDriverException,
self.driver.check_for_setup_error)
@mock.patch('pwd.getpwnam', return_value=1)
@mock.patch('grp.getgrnam', return_value=1)
@mock.patch('socket.gethostbyname', return_value='123.123.123.123')
def test_parameter_user_invalid(self, mock_ghn, mock_grnam, mock_pwnam):
"""Test exception when hgst_space_user doesn't map to UNIX user."""
# Should pass
self.driver.check_for_setup_error()
# Should throw exceptions
mock_pwnam.side_effect = KeyError()
self.configuration.hgst_space_user = ''
self.assertRaises(exception.VolumeDriverException,
self.driver.check_for_setup_error)
self.configuration.hgst_space_user = 'Fred!`'
self.assertRaises(exception.VolumeDriverException,
self.driver.check_for_setup_error)
@mock.patch('pwd.getpwnam', return_value=1)
@mock.patch('grp.getgrnam', return_value=1)
@mock.patch('socket.gethostbyname', return_value='123.123.123.123')
def test_parameter_group_invalid(self, mock_ghn, mock_grnam, mock_pwnam):
"""Test exception when hgst_space_group doesn't map to UNIX group."""
# Should pass
self.driver.check_for_setup_error()
# Should throw exceptions
mock_grnam.side_effect = KeyError()
self.configuration.hgst_space_group = ''
self.assertRaises(exception.VolumeDriverException,
self.driver.check_for_setup_error)
self.configuration.hgst_space_group = 'Fred!`'
self.assertRaises(exception.VolumeDriverException,
self.driver.check_for_setup_error)
@mock.patch('pwd.getpwnam', return_value=1)
@mock.patch('grp.getgrnam', return_value=1)
@mock.patch('socket.gethostbyname', return_value='123.123.123.123')
def test_parameter_mode_invalid(self, mock_ghn, mock_grnam, mock_pwnam):
"""Test exception when mode for created spaces isn't proper format."""
# Should pass
self.driver.check_for_setup_error()
# Should throw exceptions
self.configuration.hgst_space_mode = ''
self.assertRaises(exception.VolumeDriverException,
self.driver.check_for_setup_error)
self.configuration.hgst_space_mode = 'Fred'
self.assertRaises(exception.VolumeDriverException,
self.driver.check_for_setup_error)
@mock.patch('pwd.getpwnam', return_value=1)
@mock.patch('grp.getgrnam', return_value=1)
@mock.patch('socket.gethostbyname', return_value='123.123.123.123')
def test_parameter_net_invalid(self, mock_ghn, mock_grnam, mock_pwnam):
"""Test exception when hgst_net not in the domain."""
# Should pass
self.driver.check_for_setup_error()
# Should throw exceptions
self._fail_network_list = True
self.configuration.hgst_net = 'Fred'
self.assertRaises(exception.VolumeDriverException,
self.driver.check_for_setup_error)
self._fail_network_list = False
@mock.patch('pwd.getpwnam', return_value=1)
@mock.patch('grp.getgrnam', return_value=1)
@mock.patch('socket.gethostbyname', return_value='123.123.123.123')
def test_ip_addr_fails(self, mock_ghn, mock_grnam, mock_pwnam):
"""Test exception when IP ADDR command fails."""
# Should pass
self.driver.check_for_setup_error()
# Throw exception, need to clear internal cached host in driver
self._fail_ip = True
self.driver._vgc_host = None
self.assertRaises(exception.VolumeDriverException,
self.driver.check_for_setup_error)
@mock.patch('pwd.getpwnam', return_value=1)
@mock.patch('grp.getgrnam', return_value=1)
@mock.patch('socket.gethostbyname', return_value='123.123.123.123')
def test_domain_list_fails(self, mock_ghn, mock_grnam, mock_pwnam):
"""Test exception when domain-list fails for the domain."""
# Should pass
self.driver.check_for_setup_error()
# Throw exception, need to clear internal cached host in driver
self._fail_domain_list = True
self.driver._vgc_host = None
self.assertRaises(exception.VolumeDriverException,
self.driver.check_for_setup_error)
@mock.patch('pwd.getpwnam', return_value=1)
@mock.patch('grp.getgrnam', return_value=1)
@mock.patch('socket.gethostbyname', return_value='123.123.123.123')
def test_not_in_domain(self, mock_ghn, mock_grnam, mock_pwnam):
"""Test exception when Cinder host not domain member."""
# Should pass
self.driver.check_for_setup_error()
# Throw exception, need to clear internal cached host in driver
self._empty_domain_list = True
self.driver._vgc_host = None
self.assertRaises(exception.VolumeDriverException,
self.driver.check_for_setup_error)
@mock.patch('pwd.getpwnam', return_value=1)
@mock.patch('grp.getgrnam', return_value=1)
@mock.patch('socket.gethostbyname', return_value='123.123.123.123')
def test_parameter_storageservers_invalid(self, mock_ghn, mock_grnam,
mock_pwnam):
"""Test exception when the storage servers are invalid/missing."""
# Should pass
self.driver.check_for_setup_error()
# Storage_hosts missing
self.configuration.hgst_storage_servers = ''
self.assertRaises(exception.VolumeDriverException,
self.driver.check_for_setup_error)
# missing a : between host and devnode
self.configuration.hgst_storage_servers = 'stor1,stor2'
self.assertRaises(exception.VolumeDriverException,
self.driver.check_for_setup_error)
        # second server missing a : and devnode
self.configuration.hgst_storage_servers = 'stor1:gbd0,stor2'
self.assertRaises(exception.VolumeDriverException,
self.driver.check_for_setup_error)
# Host not in cluster
self.configuration.hgst_storage_servers = 'stor1:gbd0'
self._fail_host_storage = True
self.assertRaises(exception.VolumeDriverException,
self.driver.check_for_setup_error)
def test_update_volume_stats(self):
"""Get cluster space available, should pass."""
actual = self.driver.get_volume_stats(True)
self.assertEqual('HGST', actual['vendor_name'])
self.assertEqual('hgst', actual['storage_protocol'])
self.assertEqual(90, actual['total_capacity_gb'])
self.assertEqual(87, actual['free_capacity_gb'])
self.assertEqual(0, actual['reserved_percentage'])
def test_update_volume_stats_redundancy(self):
"""Get cluster space available, half-sized - 1 for mirrors."""
self.configuration.hgst_redundancy = '1'
actual = self.driver.get_volume_stats(True)
self.assertEqual('HGST', actual['vendor_name'])
self.assertEqual('hgst', actual['storage_protocol'])
self.assertEqual(44, actual['total_capacity_gb'])
self.assertEqual(43, actual['free_capacity_gb'])
self.assertEqual(0, actual['reserved_percentage'])
def test_update_volume_stats_cached(self):
"""Get cached cluster space, should not call executable."""
self._fail_host_storage = True
actual = self.driver.get_volume_stats(False)
self.assertEqual('HGST', actual['vendor_name'])
self.assertEqual('hgst', actual['storage_protocol'])
self.assertEqual(90, actual['total_capacity_gb'])
self.assertEqual(87, actual['free_capacity_gb'])
self.assertEqual(0, actual['reserved_percentage'])
def test_update_volume_stats_error(self):
"""Test that when host-storage gives an error, return unknown."""
self._fail_host_storage = True
actual = self.driver.get_volume_stats(True)
self.assertEqual('HGST', actual['vendor_name'])
self.assertEqual('hgst', actual['storage_protocol'])
self.assertEqual('unknown', actual['total_capacity_gb'])
self.assertEqual('unknown', actual['free_capacity_gb'])
self.assertEqual(0, actual['reserved_percentage'])
@mock.patch('socket.gethostbyname', return_value='123.123.123.123')
def test_create_volume(self, mock_ghn):
"""Test volume creation, ensure appropriate size expansion/name."""
ctxt = context.get_admin_context()
extra_specs = {}
type_ref = volume_types.create(ctxt, 'hgst-1', extra_specs)
volume = {'id': '1', 'name': 'volume1',
'display_name': '',
'volume_type_id': type_ref['id'],
'size': 10}
ret = self.driver.create_volume(volume)
expected = {'redundancy': '0', 'group': 'xanadu',
'name': 'volume10', 'mode': '0777',
'user': 'kane', 'net': 'net1',
'storageserver': 'stor1:gbd0,stor2:gbd0,',
'size': '12'}
self.assertDictMatch(expected, self.created)
        # Check the returned provider, note that the provider_id is hashed
expected_pid = {'provider_id': 'volume10'}
self.assertDictMatch(expected_pid, ret)
@mock.patch('socket.gethostbyname', return_value='123.123.123.123')
def test_create_volume_name_creation_fail(self, mock_ghn):
"""Test volume creation exception when can't make a hashed name."""
ctxt = context.get_admin_context()
extra_specs = {}
type_ref = volume_types.create(ctxt, 'hgst-1', extra_specs)
volume = {'id': '1', 'name': 'volume1',
'display_name': '',
'volume_type_id': type_ref['id'],
'size': 10}
self._fail_space_list = True
self.assertRaises(exception.VolumeDriverException,
self.driver.create_volume, volume)
@mock.patch('socket.gethostbyname', return_value='123.123.123.123')
def test_create_snapshot(self, mock_ghn):
"""Test creating a snapshot, ensure full data of original copied."""
# Now snapshot the volume and check commands
snapshot = {'volume_name': 'volume10',
'volume_id': 'xxx', 'display_name': 'snap10',
'name': '123abc', 'volume_size': 10, 'id': '123abc',
'volume': {'provider_id': 'space10'}}
ret = self.driver.create_snapshot(snapshot)
        # We must copy entire underlying storage, ~12GB, not just 10GB
self.assertEqual(11444, self.dd_count)
# Check space-create command
expected = {'redundancy': '0', 'group': 'xanadu',
'name': snapshot['display_name'], 'mode': '0777',
'user': 'kane', 'net': 'net1',
'storageserver': 'stor1:gbd0,stor2:gbd0,',
'size': '12'}
self.assertDictMatch(expected, self.created)
# Check the returned provider
expected_pid = {'provider_id': 'snap10'}
self.assertDictMatch(expected_pid, ret)
@mock.patch('socket.gethostbyname', return_value='123.123.123.123')
def test_create_cloned_volume(self, mock_ghn):
"""Test creating a clone, ensure full size is copied from original."""
ctxt = context.get_admin_context()
extra_specs = {}
type_ref = volume_types.create(ctxt, 'hgst-1', extra_specs)
orig = {'id': '1', 'name': 'volume1', 'display_name': '',
'volume_type_id': type_ref['id'], 'size': 10,
'provider_id': 'space_orig'}
clone = {'id': '2', 'name': 'clone1', 'display_name': '',
'volume_type_id': type_ref['id'], 'size': 10}
pid = self.driver.create_cloned_volume(clone, orig)
        # We must copy entire underlying storage, ~12GB, not just 10GB
self.assertEqual(11444, self.dd_count)
# Check space-create command
expected = {'redundancy': '0', 'group': 'xanadu',
'name': 'clone1', 'mode': '0777',
'user': 'kane', 'net': 'net1',
'storageserver': 'stor1:gbd0,stor2:gbd0,',
'size': '12'}
self.assertDictMatch(expected, self.created)
# Check the returned provider
expected_pid = {'provider_id': 'clone1'}
self.assertDictMatch(expected_pid, pid)
@mock.patch('socket.gethostbyname', return_value='123.123.123.123')
def test_add_cinder_apphosts_fails(self, mock_ghn):
"""Test exception when set-apphost can't connect volume to host."""
ctxt = context.get_admin_context()
extra_specs = {}
type_ref = volume_types.create(ctxt, 'hgst-1', extra_specs)
orig = {'id': '1', 'name': 'volume1', 'display_name': '',
'volume_type_id': type_ref['id'], 'size': 10,
'provider_id': 'space_orig'}
clone = {'id': '2', 'name': 'clone1', 'display_name': '',
'volume_type_id': type_ref['id'], 'size': 10}
self._fail_set_apphosts = True
self.assertRaises(exception.VolumeDriverException,
self.driver.create_cloned_volume, clone, orig)
@mock.patch('socket.gethostbyname', return_value='123.123.123.123')
def test_create_volume_from_snapshot(self, mock_ghn):
"""Test creating volume from snapshot, ensure full space copy."""
ctxt = context.get_admin_context()
extra_specs = {}
type_ref = volume_types.create(ctxt, 'hgst-1', extra_specs)
snap = {'id': '1', 'name': 'volume1', 'display_name': '',
'volume_type_id': type_ref['id'], 'size': 10,
'provider_id': 'space_orig'}
volume = {'id': '2', 'name': 'volume2', 'display_name': '',
'volume_type_id': type_ref['id'], 'size': 10}
pid = self.driver.create_volume_from_snapshot(volume, snap)
        # We must copy entire underlying storage, ~12GB, not just 10GB
self.assertEqual(11444, self.dd_count)
# Check space-create command
expected = {'redundancy': '0', 'group': 'xanadu',
'name': 'volume2', 'mode': '0777',
'user': 'kane', 'net': 'net1',
'storageserver': 'stor1:gbd0,stor2:gbd0,',
'size': '12'}
self.assertDictMatch(expected, self.created)
# Check the returned provider
expected_pid = {'provider_id': 'volume2'}
self.assertDictMatch(expected_pid, pid)
@mock.patch('socket.gethostbyname', return_value='123.123.123.123')
def test_create_volume_blocked(self, mock_ghn):
"""Test volume creation where only initial space-create is blocked.
        This should actually pass because we are blocked but request-cancel
        returns an error, meaning that the create got unblocked before we
        could kill the space request.
"""
ctxt = context.get_admin_context()
extra_specs = {}
type_ref = volume_types.create(ctxt, 'hgst-1', extra_specs)
volume = {'id': '1', 'name': 'volume1',
'display_name': '',
'volume_type_id': type_ref['id'],
'size': 10}
self._return_blocked = 1 # Block & fail cancel => create succeeded
ret = self.driver.create_volume(volume)
expected = {'redundancy': '0', 'group': 'xanadu',
'name': 'volume10', 'mode': '0777',
'user': 'kane', 'net': 'net1',
'storageserver': 'stor1:gbd0,stor2:gbd0,',
'size': '12'}
self.assertDictMatch(expected, self.created)
# Check the returned provider
expected_pid = {'provider_id': 'volume10'}
self.assertDictMatch(expected_pid, ret)
self.assertEqual(True, self._request_cancel)
@mock.patch('socket.gethostbyname', return_value='123.123.123.123')
def test_create_volume_blocked_and_fail(self, mock_ghn):
"""Test volume creation where space-create blocked permanently.
This should fail because the initial create was blocked and the
request-cancel succeeded, meaning the create operation never
completed.
"""
ctxt = context.get_admin_context()
extra_specs = {}
type_ref = volume_types.create(ctxt, 'hgst-1', extra_specs)
volume = {'id': '1', 'name': 'volume1',
'display_name': '',
'volume_type_id': type_ref['id'],
'size': 10}
self._return_blocked = 2 # Block & pass cancel => create failed. :(
self.assertRaises(exception.VolumeDriverException,
self.driver.create_volume, volume)
self.assertEqual(True, self._request_cancel)
def test_delete_volume(self):
"""Test deleting existing volume, ensure proper name used."""
ctxt = context.get_admin_context()
extra_specs = {}
type_ref = volume_types.create(ctxt, 'hgst-1', extra_specs)
volume = {'id': '1', 'name': 'volume1',
'display_name': '',
'volume_type_id': type_ref['id'],
'size': 10,
'provider_id': 'volume10'}
self.driver.delete_volume(volume)
expected = {'name': 'volume10'}
self.assertDictMatch(expected, self.deleted)
def test_delete_volume_failure_modes(self):
"""Test cases where space-delete fails, but OS delete is still OK."""
ctxt = context.get_admin_context()
extra_specs = {}
type_ref = volume_types.create(ctxt, 'hgst-1', extra_specs)
volume = {'id': '1', 'name': 'volume1',
'display_name': '',
'volume_type_id': type_ref['id'],
'size': 10,
'provider_id': 'volume10'}
self._fail_space_delete = True
# This should not throw an exception, space-delete failure not problem
self.driver.delete_volume(volume)
self._fail_space_delete = False
volume['provider_id'] = None
# This should also not throw an exception
self.driver.delete_volume(volume)
def test_delete_snapshot(self):
"""Test deleting a snapshot, ensure proper name is removed."""
ctxt = context.get_admin_context()
extra_specs = {}
type_ref = volume_types.create(ctxt, 'hgst-1', extra_specs)
snapshot = {'id': '1', 'name': 'volume1',
'display_name': '',
'volume_type_id': type_ref['id'],
'size': 10,
'provider_id': 'snap10'}
self.driver.delete_snapshot(snapshot)
expected = {'name': 'snap10'}
self.assertDictMatch(expected, self.deleted)
def test_extend_volume(self):
"""Test extending a volume, check the size in GB vs. GiB."""
ctxt = context.get_admin_context()
extra_specs = {}
type_ref = volume_types.create(ctxt, 'hgst-1', extra_specs)
volume = {'id': '1', 'name': 'volume1',
'display_name': '',
'volume_type_id': type_ref['id'],
'size': 10,
'provider_id': 'volume10'}
self.extended = {'name': '', 'size': '0',
'storageserver': ''}
self.driver.extend_volume(volume, 12)
expected = {'name': 'volume10', 'size': '2',
'storageserver': 'stor1:gbd0,stor2:gbd0,'}
self.assertDictMatch(expected, self.extended)
def test_extend_volume_noextend(self):
"""Test extending a volume where Space does not need to be enlarged.
Because Spaces are generated somewhat larger than the requested size
from OpenStack due to the base10(HGST)/base2(OS) mismatch, they can
sometimes be larger than requested from OS. In that case a
volume_extend may actually be a noop since the volume is already large
enough to satisfy OS's request.
"""
ctxt = context.get_admin_context()
extra_specs = {}
type_ref = volume_types.create(ctxt, 'hgst-1', extra_specs)
volume = {'id': '1', 'name': 'volume1',
'display_name': '',
'volume_type_id': type_ref['id'],
'size': 10,
'provider_id': 'volume10'}
self.extended = {'name': '', 'size': '0',
'storageserver': ''}
self.driver.extend_volume(volume, 10)
expected = {'name': '', 'size': '0',
'storageserver': ''}
self.assertDictMatch(expected, self.extended)
def test_space_list_fails(self):
"""Test exception is thrown when we can't call space-list."""
ctxt = context.get_admin_context()
extra_specs = {}
type_ref = volume_types.create(ctxt, 'hgst-1', extra_specs)
volume = {'id': '1', 'name': 'volume1',
'display_name': '',
'volume_type_id': type_ref['id'],
'size': 10,
'provider_id': 'volume10'}
self.extended = {'name': '', 'size': '0',
'storageserver': ''}
self._fail_space_list = True
self.assertRaises(exception.VolumeDriverException,
self.driver.extend_volume, volume, 12)
def test_cli_error_not_blocked(self):
"""Test the _blocked handler's handlinf of a non-blocked error.
The _handle_blocked handler is called on any process errors in the
code. If the error was not caused by a blocked command condition
(syntax error, out of space, etc.) then it should just throw the
exception and not try and retry the command.
"""
ctxt = context.get_admin_context()
extra_specs = {}
type_ref = volume_types.create(ctxt, 'hgst-1', extra_specs)
volume = {'id': '1', 'name': 'volume1',
'display_name': '',
'volume_type_id': type_ref['id'],
'size': 10,
'provider_id': 'volume10'}
self.extended = {'name': '', 'size': '0',
'storageserver': ''}
self._fail_extend = True
self.assertRaises(exception.VolumeDriverException,
self.driver.extend_volume, volume, 12)
self.assertEqual(False, self._request_cancel)
@mock.patch('socket.gethostbyname', return_value='123.123.123.123')
    def test_initialize_connection(self, mock_ghn):
"""Test that the connection_info for Nova makes sense."""
volume = {'name': '123', 'provider_id': 'spacey'}
conn = self.driver.initialize_connection(volume, None)
expected = {'name': 'spacey', 'noremovehost': 'thisserver'}
self.assertDictMatch(expected, conn['data'])
# Below are some command outputs we emulate
IP_OUTPUT = """
3: em2: <BROADCAST,MULTICAST,UP,LOWER_UP> mtu 1500 qdisc mq state
link/ether 00:25:90:d9:18:09 brd ff:ff:ff:ff:ff:ff
inet 192.168.0.23/24 brd 192.168.0.255 scope global em2
valid_lft forever preferred_lft forever
inet6 fe80::225:90ff:fed9:1809/64 scope link
valid_lft forever preferred_lft forever
1: lo: <LOOPBACK,UP,LOWER_UP> mtu 65536 qdisc noqueue state UNKNOWN
link/loopback 00:00:00:00:00:00 brd 00:00:00:00:00:00
inet 123.123.123.123/8 scope host lo
valid_lft forever preferred_lft forever
inet 169.254.169.254/32 scope link lo
valid_lft forever preferred_lft forever
inet6 ::1/128 scope host
valid_lft forever preferred_lft forever
2: em1: <BROADCAST,MULTICAST,UP,LOWER_UP> mtu 1500 qdisc mq master
link/ether 00:25:90:d9:18:08 brd ff:ff:ff:ff:ff:ff
inet6 fe80::225:90ff:fed9:1808/64 scope link
valid_lft forever preferred_lft forever
"""
HGST_HOST_STORAGE = """
{
"hostStatus": [
{
"node": "tm33.virident.info",
"up": true,
"isManager": true,
"cardStatus": [
{
"cardName": "/dev/sda3",
"cardSerialNumber": "002f09b4037a9d521c007ee4esda3",
"cardStatus": "Good",
"cardStateDetails": "Normal",
"cardActionRequired": "",
"cardTemperatureC": 0,
"deviceType": "Generic",
"cardTemperatureState": "Safe",
"partitionStatus": [
{
"partName": "/dev/gbd0",
"partitionState": "READY",
"usableCapacityBytes": 98213822464,
"totalReadBytes": 0,
"totalWriteBytes": 0,
"remainingLifePCT": 100,
"flashReservesLeftPCT": 100,
"fmc": true,
"vspaceCapacityAvailable": 94947041280,
"vspaceReducedCapacityAvailable": 87194279936,
"_partitionID": "002f09b4037a9d521c007ee4esda3:0",
"_usedSpaceBytes": 3266781184,
"_enabledSpaceBytes": 3266781184,
"_disabledSpaceBytes": 0
}
]
}
],
"driverStatus": {
"vgcdriveDriverLoaded": true,
"vhaDriverLoaded": true,
"vcacheDriverLoaded": true,
"vlvmDriverLoaded": true,
"ipDataProviderLoaded": true,
"ibDataProviderLoaded": false,
"driverUptimeSecs": 4800,
"rVersion": "20368.d55ec22.master"
},
"totalCapacityBytes": 98213822464,
"totalUsedBytes": 3266781184,
"totalEnabledBytes": 3266781184,
"totalDisabledBytes": 0
},
{
"node": "tm32.virident.info",
"up": true,
"isManager": false,
"cardStatus": [],
"driverStatus": {
"vgcdriveDriverLoaded": true,
"vhaDriverLoaded": true,
"vcacheDriverLoaded": true,
"vlvmDriverLoaded": true,
"ipDataProviderLoaded": true,
"ibDataProviderLoaded": false,
"driverUptimeSecs": 0,
"rVersion": "20368.d55ec22.master"
},
"totalCapacityBytes": 0,
"totalUsedBytes": 0,
"totalEnabledBytes": 0,
"totalDisabledBytes": 0
}
],
"totalCapacityBytes": 98213822464,
"totalUsedBytes": 3266781184,
"totalEnabledBytes": 3266781184,
"totalDisabledBytes": 0
}
"""
HGST_SPACE_JSON = """
{
"resources": [
{
"resourceType": "vLVM-L",
"resourceID": "vLVM-L:698cdb43-54da-863e-1699-294a080ce4db",
"state": "OFFLINE",
"instanceStates": {},
"redundancy": 0,
"sizeBytes": 12000000000,
"name": "volume10",
"nodes": [],
"networks": [
"net1"
],
"components": [
{
"resourceType": "vLVM-S",
"resourceID": "vLVM-S:698cdb43-54da-863e-eb10-6275f47b8ed2",
"redundancy": 0,
"order": 0,
"sizeBytes": 12000000000,
"numStripes": 1,
"stripeSizeBytes": null,
"name": "volume10s00",
"state": "OFFLINE",
"instanceStates": {},
"components": [
{
"name": "volume10h00",
"resourceType": "vHA",
"resourceID": "vHA:3e86da54-40db-8c69-0300-0000ac10476e",
"redundancy": 0,
"sizeBytes": 12000000000,
"state": "GOOD",
"components": [
{
"name": "volume10h00",
"vspaceType": "vHA",
"vspaceRole": "primary",
"storageObjectID": "vHA:3e86da54-40db-8c69--18130019e486",
"state": "Disconnected (DCS)",
"node": "tm33.virident.info",
"partName": "/dev/gbd0"
}
],
"crState": "GOOD"
},
{
"name": "volume10v00",
"resourceType": "vShare",
"resourceID": "vShare:3f86da54-41db-8c69-0300-ecf4bbcc14cc",
"redundancy": 0,
"order": 0,
"sizeBytes": 12000000000,
"state": "GOOD",
"components": [
{
"name": "volume10v00",
"vspaceType": "vShare",
"vspaceRole": "target",
"storageObjectID": "vShare:3f86da54-41db-8c64bbcc14cc:T",
"state": "Started",
"node": "tm33.virident.info",
"partName": "/dev/gbd0_volume10h00"
}
]
}
]
}
],
"_size": "12GB",
"_state": "OFFLINE",
"_ugm": "",
"_nets": "net1",
"_hosts": "tm33.virident.info(12GB,NC)",
"_ahosts": "",
"_shosts": "tm33.virident.info(12GB)",
"_name": "volume10",
"_node": "",
"_type": "vLVM-L",
"_detail": "vLVM-L:698cdb43-54da-863e-1699-294a080ce4db",
"_device": ""
}
]
}
"""
NETWORK_LIST = """
Network Name Type Flags Description
------------ ---- ---------- ------------------------
net1 IPv4 autoConfig 192.168.0.0/24 1Gb/s
net2 IPv4 autoConfig 192.168.10.0/24 10Gb/s
"""
DD_OUTPUT = """
1+0 records in
1+0 records out
1024 bytes (1.0 kB) copied, 0.000427529 s, 2.4 MB/s
"""
|
Carmezim/tensorflow
|
refs/heads/master
|
tensorflow/contrib/tensor_forest/client/random_forest.py
|
15
|
# Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""A tf.learn implementation of tensor_forest (extremely random forests)."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow.contrib import framework as contrib_framework
from tensorflow.contrib.learn.python.learn.estimators import constants
from tensorflow.contrib.learn.python.learn.estimators import estimator
from tensorflow.contrib.learn.python.learn.estimators import model_fn as model_fn_lib
from tensorflow.contrib.learn.python.learn.estimators import prediction_key
from tensorflow.contrib.tensor_forest.client import eval_metrics
from tensorflow.contrib.tensor_forest.python import tensor_forest
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import ops
from tensorflow.python.ops import control_flow_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import state_ops
from tensorflow.python.platform import tf_logging as logging
from tensorflow.python.training import basic_session_run_hooks
from tensorflow.python.training import monitored_session
from tensorflow.python.training import session_run_hook
KEYS_NAME = 'keys'
LOSS_NAME = 'rf_training_loss'
def _assert_float32(tensors):
"""Assert all tensors are float32.
Args:
tensors: `Tensor` or `dict` of `Tensor` objects.
Raises:
TypeError: if any tensor is not float32.
"""
if not isinstance(tensors, dict):
tensors = [tensors]
else:
tensors = tensors.values()
for tensor in tensors:
if tensor.dtype.base_dtype != dtypes.float32:
raise TypeError('Expected dtype=float32, %s.' % tensor)
class TensorForestLossHook(session_run_hook.SessionRunHook):
"""Monitor to request stop when loss stops decreasing."""
def __init__(self, early_stopping_rounds):
self.early_stopping_rounds = early_stopping_rounds
self.min_loss = None
self.last_step = -1
# self.steps records the number of steps for which the loss has been
# non-decreasing
self.steps = 0
def before_run(self, run_context):
return session_run_hook.SessionRunArgs(
{'global_step': contrib_framework.get_global_step(),
'current_loss': run_context.session.graph.get_operation_by_name(
LOSS_NAME).outputs[0]})
def after_run(self, run_context, run_values):
current_loss = run_values.results['current_loss']
current_step = run_values.results['global_step']
self.steps += 1
# Guard against the global step going backwards, which might happen
# if we recover from something.
if self.last_step == -1 or self.last_step > current_step:
logging.info('TensorForestLossHook resetting last_step.')
self.last_step = current_step
self.steps = 0
self.min_loss = None
return
self.last_step = current_step
if self.min_loss is None or current_loss < self.min_loss:
self.min_loss = current_loss
self.steps = 0
if self.steps > self.early_stopping_rounds:
logging.info('TensorForestLossHook requesting stop.')
run_context.request_stop()
class EveryCheckpointPreSaveListener(
basic_session_run_hooks.CheckpointSaverListener):
"""Runs a given op before each checkpoint save."""
def __init__(self, op):
"""Initializes the object.
Args:
op: An op to run before each checkpoint save.
"""
self._op = op
def before_save(self, session, global_step_value):
session.run(self._op)
def get_model_fn(params,
graph_builder_class,
device_assigner,
weights_name=None,
keys_name=None,
early_stopping_rounds=100,
num_trainers=1,
trainer_id=0,
report_feature_importances=False,
model_dir=None,
local_eval=False):
"""Return a model function given a way to construct a graph builder."""
def _model_fn(features, labels, mode):
"""Function that returns predictions, training loss, and training op."""
weights = None
if weights_name and weights_name in features:
weights = features.pop(weights_name)
keys = None
if keys_name and keys_name in features:
keys = features.pop(keys_name)
# If we're doing eval, optionally ignore device_assigner.
# Also ignore device assigner if we're exporting (mode == INFER)
dev_assn = device_assigner
if (mode == model_fn_lib.ModeKeys.INFER or
(local_eval and mode == model_fn_lib.ModeKeys.EVAL)):
dev_assn = None
graph_builder = graph_builder_class(params,
device_assigner=dev_assn)
inference = {}
output_alternatives = None
if (mode == model_fn_lib.ModeKeys.EVAL or
mode == model_fn_lib.ModeKeys.INFER):
inference[eval_metrics.INFERENCE_PROB_NAME] = (
graph_builder.inference_graph(features))
if params.regression:
predictions = {
None: inference[eval_metrics.INFERENCE_PROB_NAME]}
output_alternatives = {
None: (constants.ProblemType.LINEAR_REGRESSION, predictions)}
else:
inference[eval_metrics.INFERENCE_PRED_NAME] = math_ops.argmax(
inference[eval_metrics.INFERENCE_PROB_NAME], 1)
predictions = {
prediction_key.PredictionKey.PROBABILITIES:
inference[eval_metrics.INFERENCE_PROB_NAME],
prediction_key.PredictionKey.CLASSES:
inference[eval_metrics.INFERENCE_PRED_NAME]}
output_alternatives = {
None: (constants.ProblemType.CLASSIFICATION, predictions)}
if report_feature_importances:
inference[eval_metrics.FEATURE_IMPORTANCE_NAME] = (
graph_builder.feature_importances())
if keys is not None:
inference[keys_name] = keys
# labels might be None if we're doing prediction (which brings up the
# question of why we force everything to adhere to a single model_fn).
loss_deps = []
training_graph = None
training_hooks = []
scaffold = None
if labels is not None and mode == model_fn_lib.ModeKeys.TRAIN:
training_graph = control_flow_ops.group(
graph_builder.training_graph(
features, labels, input_weights=weights,
num_trainers=num_trainers,
trainer_id=trainer_id),
state_ops.assign_add(contrib_framework.get_global_step(), 1))
loss_deps.append(training_graph)
if hasattr(graph_builder, 'finalize_training'):
finalize_listener = EveryCheckpointPreSaveListener(
graph_builder.finalize_training())
scaffold = monitored_session.Scaffold()
training_hooks.append(
basic_session_run_hooks.CheckpointSaverHook(
model_dir, save_secs=600, save_steps=None,
scaffold=scaffold,
listeners=[finalize_listener]))
training_loss = None
if (mode == model_fn_lib.ModeKeys.EVAL or
mode == model_fn_lib.ModeKeys.TRAIN):
with ops.control_dependencies(loss_deps):
training_loss = graph_builder.training_loss(
features, labels, name=LOSS_NAME)
# Put weights back in
if weights is not None:
features[weights_name] = weights
if early_stopping_rounds:
training_hooks.append(TensorForestLossHook(early_stopping_rounds))
return model_fn_lib.ModelFnOps(
mode=mode,
predictions=inference,
loss=training_loss,
train_op=training_graph,
training_hooks=training_hooks,
scaffold=scaffold,
output_alternatives=output_alternatives)
return _model_fn
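# Illustrative sketch (an assumption, not part of the original file): the model
# function returned by get_model_fn is normally handed to an Estimator, as
# TensorForestEstimator does below. `my_params` and `my_model_dir` are
# hypothetical names.
def _example_estimator_from_model_fn(my_params, my_model_dir):
  model_fn = get_model_fn(
      my_params.fill(),
      graph_builder_class=tensor_forest.RandomForestGraphs,
      device_assigner=None,
      early_stopping_rounds=100,
      model_dir=my_model_dir)
  return estimator.Estimator(model_fn=model_fn, model_dir=my_model_dir)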
class TensorForestEstimator(estimator.Estimator):
"""An estimator that can train and evaluate a random forest.
Example:
```python
params = tf.contrib.tensor_forest.python.tensor_forest.ForestHParams(
num_classes=2, num_features=40, num_trees=10, max_nodes=1000)
# Estimator using the default graph builder.
estimator = TensorForestEstimator(params, model_dir=model_dir)
# Or estimator using TrainingLossForest as the graph builder.
estimator = TensorForestEstimator(
params, graph_builder_class=tensor_forest.TrainingLossForest,
model_dir=model_dir)
# Input builders
  def input_fn_train():  # returns x, y
    ...
  def input_fn_eval():  # returns x, y
    ...
estimator.fit(input_fn=input_fn_train)
estimator.evaluate(input_fn=input_fn_eval)
# Predict returns an iterable of dicts.
results = list(estimator.predict(x=x))
prob0 = results[0][eval_metrics.INFERENCE_PROB_NAME]
prediction0 = results[0][eval_metrics.INFERENCE_PRED_NAME]
```
"""
def __init__(self, params, device_assigner=None, model_dir=None,
graph_builder_class=tensor_forest.RandomForestGraphs,
config=None, weights_name=None, keys_name=None,
feature_engineering_fn=None,
early_stopping_rounds=100,
num_trainers=1, trainer_id=0,
report_feature_importances=False,
local_eval=False):
"""Initializes a TensorForestEstimator instance.
Args:
params: ForestHParams object that holds random forest hyperparameters.
These parameters will be passed into `model_fn`.
device_assigner: An `object` instance that controls how trees get
assigned to devices. If `None`, will use
`tensor_forest.RandomForestDeviceAssigner`.
model_dir: Directory to save model parameters, graph, etc. To continue
training a previously saved model, load checkpoints saved to this
directory into an estimator.
graph_builder_class: An `object` instance that defines how TF graphs for
random forest training and inference are built. By default will use
`tensor_forest.RandomForestGraphs`.
config: `RunConfig` object to configure the runtime settings.
weights_name: A string defining feature column name representing
weights. Will be multiplied by the loss of the example. Used to
downweight or boost examples during training.
keys_name: A string naming one of the features to strip out and
pass through into the inference/eval results dict. Useful for
associating specific examples with their prediction.
feature_engineering_fn: Feature engineering function. Takes features and
labels which are the output of `input_fn` and returns features and
labels which will be fed into the model.
early_stopping_rounds: Allows training to terminate early if the forest is
no longer growing. 100 by default. Set to a Falsy value to disable
the default training hook.
num_trainers: Number of training jobs, which will partition trees
among them.
trainer_id: Which trainer this instance is.
report_feature_importances: If True, print out feature importances
during evaluation.
local_eval: If True, don't use a device assigner for eval. This is to
support some common setups where eval is done on a single machine, even
though training might be distributed.
Returns:
A `TensorForestEstimator` instance.
"""
super(TensorForestEstimator, self).__init__(
model_fn=get_model_fn(
params.fill(),
graph_builder_class,
device_assigner,
weights_name=weights_name,
keys_name=keys_name,
early_stopping_rounds=early_stopping_rounds,
num_trainers=num_trainers,
trainer_id=trainer_id,
report_feature_importances=report_feature_importances,
model_dir=model_dir,
local_eval=local_eval),
model_dir=model_dir,
config=config,
feature_engineering_fn=feature_engineering_fn)
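# Illustrative sketch (not part of the original file): a minimal input_fn for
# the class docstring example above, assuming a toy batch of 8 examples with
# the 40 float32 features and 2 classes used there. Data values are made up.
def _example_input_fn():
  import numpy as np
  import tensorflow as tf
  x = tf.constant(np.random.rand(8, 40).astype(np.float32))
  y = tf.constant(np.random.randint(0, 2, size=8))
  return x, y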
|
billbonney/ardupilot
|
refs/heads/master
|
Tools/LogAnalyzer/VehicleType.py
|
187
|
class VehicleType():
Plane = 17
Copter = 23
Rover = 37
# these should really be "Plane", "Copter" and "Rover", but many
# things use these values as triggers in their code:
VehicleTypeString = {
17: "ArduPlane",
23: "ArduCopter",
37: "ArduRover"
}
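
# Illustrative usage sketch (an addition, not part of the original file):
# map a numeric vehicle-type code back to the string used in the logs.
if __name__ == "__main__":
    print(VehicleTypeString[VehicleType.Copter])  # prints "ArduCopter"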
|