repo_name | ref | path | copies | content
---|---|---|---|---|
saurabh6790/aimobilize-app-backup | refs/heads/master | accounts/report/bank_reconciliation_statement/bank_reconciliation_statement.py | 6 | # Copyright (c) 2013, Web Notes Technologies Pvt. Ltd. and Contributors
# License: GNU General Public License v3. See license.txt
from __future__ import unicode_literals
import webnotes
from webnotes.utils import flt
def execute(filters=None):
if not filters: filters = {}
debit_or_credit = webnotes.conn.get_value("Account", filters["account"], "debit_or_credit")
columns = get_columns()
data = get_entries(filters)
from accounts.utils import get_balance_on
balance_as_per_company = get_balance_on(filters["account"], filters["report_date"])
total_debit, total_credit = 0, 0
for d in data:
total_debit += flt(d[4])
total_credit += flt(d[5])
if debit_or_credit == 'Debit':
bank_bal = flt(balance_as_per_company) - flt(total_debit) + flt(total_credit)
else:
bank_bal = flt(balance_as_per_company) + flt(total_debit) - flt(total_credit)
data += [
get_balance_row("Balance as per company books", balance_as_per_company, debit_or_credit),
["", "", "", "Amounts not reflected in bank", total_debit, total_credit],
get_balance_row("Balance as per bank", bank_bal, debit_or_credit)
]
return columns, data
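# Illustrative invocation (editor's sketch; the filter keys match the
# lookups above, but the account name itself is hypothetical):
#   columns, data = execute({"account": "HDFC Bank - Co",
#                            "report_date": "2013-12-31"})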
def get_columns():
return ["Journal Voucher:Link/Journal Voucher:140", "Posting Date:Date:100",
"Clearance Date:Date:110", "Against Account:Link/Account:200",
"Debit:Currency:120", "Credit:Currency:120"
]
def get_entries(filters):
entries = webnotes.conn.sql("""select
jv.name, jv.posting_date, jv.clearance_date, jvd.against_account, jvd.debit, jvd.credit
from
`tabJournal Voucher Detail` jvd, `tabJournal Voucher` jv
	where jvd.parent = jv.name and jv.docstatus = 1 and ifnull(jv.cheque_no, '') != ''
and jvd.account = %(account)s and jv.posting_date <= %(report_date)s
and ifnull(jv.clearance_date, '4000-01-01') > %(report_date)s
order by jv.name DESC""", filters, as_list=1)
return entries
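# Entries qualify as "not reflected in bank" when their clearance_date is
# NULL (mapped to the far-future sentinel '4000-01-01') or falls after the
# report date, i.e. the cheque had not cleared as of report_date.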
def get_balance_row(label, amount, debit_or_credit):
if debit_or_credit == "Debit":
return ["", "", "", label, amount, 0]
else:
return ["", "", "", label, 0, amount] |
plotly/python-api | refs/heads/master | packages/python/plotly/plotly/validators/layout/mapbox/layer/_maxzoom.py | 1 | import _plotly_utils.basevalidators
class MaxzoomValidator(_plotly_utils.basevalidators.NumberValidator):
def __init__(
self, plotly_name="maxzoom", parent_name="layout.mapbox.layer", **kwargs
):
super(MaxzoomValidator, self).__init__(
plotly_name=plotly_name,
parent_name=parent_name,
edit_type=kwargs.pop("edit_type", "plot"),
max=kwargs.pop("max", 24),
min=kwargs.pop("min", 0),
role=kwargs.pop("role", "info"),
**kwargs
)
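# Hedged usage sketch (not part of the generated file): plotly constructs
# this validator internally for layout.mapbox.layer.maxzoom, roughly:
#   MaxzoomValidator().validate_coerce(12.5)  # accepted: within [0, 24]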
|
abadger/ansible | refs/heads/devel | test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/module_utils/compat/ipaddress.py | 47 | # -*- coding: utf-8 -*-
# This code is part of Ansible, but is an independent component.
# This particular file, and this file only, is based on
# Lib/ipaddress.py of cpython
# It is licensed under the PYTHON SOFTWARE FOUNDATION LICENSE VERSION 2
#
# 1. This LICENSE AGREEMENT is between the Python Software Foundation
# ("PSF"), and the Individual or Organization ("Licensee") accessing and
# otherwise using this software ("Python") in source or binary form and
# its associated documentation.
#
# 2. Subject to the terms and conditions of this License Agreement, PSF hereby
# grants Licensee a nonexclusive, royalty-free, world-wide license to reproduce,
# analyze, test, perform and/or display publicly, prepare derivative works,
# distribute, and otherwise use Python alone or in any derivative version,
# provided, however, that PSF's License Agreement and PSF's notice of copyright,
# i.e., "Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010,
# 2011, 2012, 2013, 2014, 2015 Python Software Foundation; All Rights Reserved"
# are retained in Python alone or in any derivative version prepared by Licensee.
#
# 3. In the event Licensee prepares a derivative work that is based on
# or incorporates Python or any part thereof, and wants to make
# the derivative work available to others as provided herein, then
# Licensee hereby agrees to include in any such work a brief summary of
# the changes made to Python.
#
# 4. PSF is making Python available to Licensee on an "AS IS"
# basis. PSF MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR
# IMPLIED. BY WAY OF EXAMPLE, BUT NOT LIMITATION, PSF MAKES NO AND
# DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS
# FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF PYTHON WILL NOT
# INFRINGE ANY THIRD PARTY RIGHTS.
#
# 5. PSF SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF PYTHON
# FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS AS
# A RESULT OF MODIFYING, DISTRIBUTING, OR OTHERWISE USING PYTHON,
# OR ANY DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF.
#
# 6. This License Agreement will automatically terminate upon a material
# breach of its terms and conditions.
#
# 7. Nothing in this License Agreement shall be deemed to create any
# relationship of agency, partnership, or joint venture between PSF and
# Licensee. This License Agreement does not grant permission to use PSF
# trademarks or trade name in a trademark sense to endorse or promote
# products or services of Licensee, or any third party.
#
# 8. By copying, installing or otherwise using Python, Licensee
# agrees to be bound by the terms and conditions of this License
# Agreement.
# Copyright 2007 Google Inc.
# Licensed to PSF under a Contributor Agreement.
"""A fast, lightweight IPv4/IPv6 manipulation library in Python.
This library is used to create/poke/manipulate IPv4 and IPv6 addresses
and networks.
"""
from __future__ import unicode_literals
import itertools
import struct
# The following makes it easier for us to script updates of the bundled code and is not part of
# upstream
_BUNDLED_METADATA = {"pypi_name": "ipaddress", "version": "1.0.22"}
__version__ = "1.0.22"
# Compatibility functions
_compat_int_types = (int,)
try:
_compat_int_types = (int, long)
except NameError:
pass
try:
_compat_str = unicode
except NameError:
_compat_str = str
assert bytes != str
if b"\0"[0] == 0: # Python 3 semantics
def _compat_bytes_to_byte_vals(byt):
return byt
else:
def _compat_bytes_to_byte_vals(byt):
return [struct.unpack(b"!B", b)[0] for b in byt]
try:
_compat_int_from_byte_vals = int.from_bytes
except AttributeError:
def _compat_int_from_byte_vals(bytvals, endianess):
assert endianess == "big"
res = 0
for bv in bytvals:
assert isinstance(bv, _compat_int_types)
res = (res << 8) + bv
return res
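# For example, _compat_int_from_byte_vals([192, 0, 2, 1], "big")
# == 0xC0000201 == 3221225985, i.e. the integer form of 192.0.2.1.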
def _compat_to_bytes(intval, length, endianess):
assert isinstance(intval, _compat_int_types)
assert endianess == "big"
if length == 4:
if intval < 0 or intval >= 2 ** 32:
raise struct.error("integer out of range for 'I' format code")
return struct.pack(b"!I", intval)
elif length == 16:
if intval < 0 or intval >= 2 ** 128:
raise struct.error("integer out of range for 'QQ' format code")
return struct.pack(b"!QQ", intval >> 64, intval & 0xFFFFFFFFFFFFFFFF)
else:
raise NotImplementedError()
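# e.g. _compat_to_bytes(3221225985, 4, "big") == b"\xc0\x00\x02\x01",
# the packed form of 192.0.2.1; lengths other than 4 and 16 are rejected.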
if hasattr(int, "bit_length"):
    # Not int.bit_length, since that won't work in 2.7 where long exists
def _compat_bit_length(i):
return i.bit_length()
else:
def _compat_bit_length(i):
for res in itertools.count():
if i >> res == 0:
return res
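# Both branches agree with int.bit_length on Python >= 2.7:
# _compat_bit_length(255) == 8 and _compat_bit_length(256) == 9.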
def _compat_range(start, end, step=1):
assert step > 0
i = start
while i < end:
yield i
i += step
class _TotalOrderingMixin(object):
__slots__ = ()
# Helper that derives the other comparison operations from
# __lt__ and __eq__
# We avoid functools.total_ordering because it doesn't handle
# NotImplemented correctly yet (http://bugs.python.org/issue10042)
def __eq__(self, other):
raise NotImplementedError
def __ne__(self, other):
equal = self.__eq__(other)
if equal is NotImplemented:
return NotImplemented
return not equal
def __lt__(self, other):
raise NotImplementedError
def __le__(self, other):
less = self.__lt__(other)
if less is NotImplemented or not less:
return self.__eq__(other)
return less
def __gt__(self, other):
less = self.__lt__(other)
if less is NotImplemented:
return NotImplemented
equal = self.__eq__(other)
if equal is NotImplemented:
return NotImplemented
return not (less or equal)
def __ge__(self, other):
less = self.__lt__(other)
if less is NotImplemented:
return NotImplemented
return not less
IPV4LENGTH = 32
IPV6LENGTH = 128
class AddressValueError(ValueError):
"""A Value Error related to the address."""
class NetmaskValueError(ValueError):
"""A Value Error related to the netmask."""
def ip_address(address):
"""Take an IP string/int and return an object of the correct type.
Args:
address: A string or integer, the IP address. Either IPv4 or
IPv6 addresses may be supplied; integers less than 2**32 will
be considered to be IPv4 by default.
Returns:
An IPv4Address or IPv6Address object.
Raises:
ValueError: if the *address* passed isn't either a v4 or a v6
address
"""
try:
return IPv4Address(address)
except (AddressValueError, NetmaskValueError):
pass
try:
return IPv6Address(address)
except (AddressValueError, NetmaskValueError):
pass
if isinstance(address, bytes):
raise AddressValueError(
"%r does not appear to be an IPv4 or IPv6 address. "
"Did you pass in a bytes (str in Python 2) instead of"
" a unicode object?" % address
)
raise ValueError(
"%r does not appear to be an IPv4 or IPv6 address" % address
)
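# e.g. ip_address(u"192.0.2.1") and ip_address(3221225985) both yield
# IPv4Address(u"192.0.2.1"); ip_address(u"2001:db8::1") yields an IPv6Address.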
def ip_network(address, strict=True):
"""Take an IP string/int and return an object of the correct type.
Args:
address: A string or integer, the IP network. Either IPv4 or
IPv6 networks may be supplied; integers less than 2**32 will
be considered to be IPv4 by default.
Returns:
An IPv4Network or IPv6Network object.
Raises:
ValueError: if the string passed isn't either a v4 or a v6
address. Or if the network has host bits set.
"""
try:
return IPv4Network(address, strict)
except (AddressValueError, NetmaskValueError):
pass
try:
return IPv6Network(address, strict)
except (AddressValueError, NetmaskValueError):
pass
if isinstance(address, bytes):
raise AddressValueError(
"%r does not appear to be an IPv4 or IPv6 network. "
"Did you pass in a bytes (str in Python 2) instead of"
" a unicode object?" % address
)
raise ValueError(
"%r does not appear to be an IPv4 or IPv6 network" % address
)
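# e.g. ip_network(u"192.0.2.0/24") returns an IPv4Network, while
# ip_network(u"192.0.2.1/24") raises ValueError unless strict=False,
# in which case the host bits are masked off.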
def ip_interface(address):
"""Take an IP string/int and return an object of the correct type.
Args:
address: A string or integer, the IP address. Either IPv4 or
IPv6 addresses may be supplied; integers less than 2**32 will
be considered to be IPv4 by default.
Returns:
An IPv4Interface or IPv6Interface object.
Raises:
ValueError: if the string passed isn't either a v4 or a v6
address.
Notes:
The IPv?Interface classes describe an Address on a particular
Network, so they're basically a combination of both the Address
and Network classes.
"""
try:
return IPv4Interface(address)
except (AddressValueError, NetmaskValueError):
pass
try:
return IPv6Interface(address)
except (AddressValueError, NetmaskValueError):
pass
raise ValueError(
"%r does not appear to be an IPv4 or IPv6 interface" % address
)
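# e.g. ip_interface(u"192.0.2.1/24") keeps both the host address and its
# network, unlike ip_address (no mask) and ip_network (no host bits).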
def v4_int_to_packed(address):
"""Represent an address as 4 packed bytes in network (big-endian) order.
Args:
address: An integer representation of an IPv4 IP address.
Returns:
The integer address packed as 4 bytes in network (big-endian) order.
Raises:
ValueError: If the integer is negative or too large to be an
IPv4 IP address.
"""
try:
return _compat_to_bytes(address, 4, "big")
except (struct.error, OverflowError):
raise ValueError("Address negative or too large for IPv4")
def v6_int_to_packed(address):
"""Represent an address as 16 packed bytes in network (big-endian) order.
Args:
address: An integer representation of an IPv6 IP address.
Returns:
The integer address packed as 16 bytes in network (big-endian) order.
"""
try:
return _compat_to_bytes(address, 16, "big")
except (struct.error, OverflowError):
raise ValueError("Address negative or too large for IPv6")
def _split_optional_netmask(address):
"""Helper to split the netmask and raise AddressValueError if needed"""
addr = _compat_str(address).split("/")
if len(addr) > 2:
raise AddressValueError("Only one '/' permitted in %r" % address)
return addr
def _find_address_range(addresses):
"""Find a sequence of sorted deduplicated IPv#Address.
Args:
addresses: a list of IPv#Address objects.
Yields:
A tuple containing the first and last IP addresses in the sequence.
"""
it = iter(addresses)
first = last = next(it) # pylint: disable=stop-iteration-return
for ip in it:
if ip._ip != last._ip + 1:
yield first, last
first = ip
last = ip
yield first, last
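# e.g. for [192.0.2.1, 192.0.2.2, 192.0.2.3, 192.0.2.7] this yields
# (192.0.2.1, 192.0.2.3) and then (192.0.2.7, 192.0.2.7).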
def _count_righthand_zero_bits(number, bits):
"""Count the number of zero bits on the right hand side.
Args:
number: an integer.
bits: maximum number of bits to count.
Returns:
The number of zero bits on the right hand side of the number.
"""
if number == 0:
return bits
return min(bits, _compat_bit_length(~number & (number - 1)))
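# e.g. _count_righthand_zero_bits(0b101000, 32) == 3, and a zero input
# returns the full width, which the netmask logic below relies on.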
def summarize_address_range(first, last):
"""Summarize a network range given the first and last IP addresses.
Example:
>>> list(summarize_address_range(IPv4Address('192.0.2.0'),
... IPv4Address('192.0.2.130')))
... #doctest: +NORMALIZE_WHITESPACE
[IPv4Network('192.0.2.0/25'), IPv4Network('192.0.2.128/31'),
IPv4Network('192.0.2.130/32')]
Args:
first: the first IPv4Address or IPv6Address in the range.
last: the last IPv4Address or IPv6Address in the range.
Returns:
An iterator of the summarized IPv(4|6) network objects.
Raise:
TypeError:
If the first and last objects are not IP addresses.
If the first and last objects are not the same version.
ValueError:
If the last object is not greater than the first.
If the version of the first address is not 4 or 6.
"""
if not (
isinstance(first, _BaseAddress) and isinstance(last, _BaseAddress)
):
raise TypeError("first and last must be IP addresses, not networks")
if first.version != last.version:
raise TypeError(
"%s and %s are not of the same version" % (first, last)
)
if first > last:
raise ValueError("last IP address must be greater than first")
if first.version == 4:
ip = IPv4Network
elif first.version == 6:
ip = IPv6Network
else:
raise ValueError("unknown IP version")
ip_bits = first._max_prefixlen
first_int = first._ip
last_int = last._ip
while first_int <= last_int:
nbits = min(
_count_righthand_zero_bits(first_int, ip_bits),
_compat_bit_length(last_int - first_int + 1) - 1,
)
net = ip((first_int, ip_bits - nbits))
yield net
first_int += 1 << nbits
if first_int - 1 == ip._ALL_ONES:
break
def _collapse_addresses_internal(addresses):
"""Loops through the addresses, collapsing concurrent netblocks.
Example:
ip1 = IPv4Network('192.0.2.0/26')
ip2 = IPv4Network('192.0.2.64/26')
ip3 = IPv4Network('192.0.2.128/26')
ip4 = IPv4Network('192.0.2.192/26')
_collapse_addresses_internal([ip1, ip2, ip3, ip4]) ->
[IPv4Network('192.0.2.0/24')]
This shouldn't be called directly; it is called via
collapse_addresses([]).
Args:
addresses: A list of IPv4Network's or IPv6Network's
Returns:
A list of IPv4Network's or IPv6Network's depending on what we were
passed.
"""
# First merge
to_merge = list(addresses)
subnets = {}
while to_merge:
net = to_merge.pop()
supernet = net.supernet()
existing = subnets.get(supernet)
if existing is None:
subnets[supernet] = net
elif existing != net:
# Merge consecutive subnets
del subnets[supernet]
to_merge.append(supernet)
# Then iterate over resulting networks, skipping subsumed subnets
last = None
for net in sorted(subnets.values()):
if last is not None:
# Since they are sorted,
# last.network_address <= net.network_address is a given.
if last.broadcast_address >= net.broadcast_address:
continue
yield net
last = net
def collapse_addresses(addresses):
"""Collapse a list of IP objects.
Example:
collapse_addresses([IPv4Network('192.0.2.0/25'),
IPv4Network('192.0.2.128/25')]) ->
[IPv4Network('192.0.2.0/24')]
Args:
addresses: An iterator of IPv4Network or IPv6Network objects.
Returns:
An iterator of the collapsed IPv(4|6)Network objects.
Raises:
TypeError: If passed a list of mixed version objects.
"""
addrs = []
ips = []
nets = []
# split IP addresses and networks
for ip in addresses:
if isinstance(ip, _BaseAddress):
if ips and ips[-1]._version != ip._version:
raise TypeError(
"%s and %s are not of the same version" % (ip, ips[-1])
)
ips.append(ip)
elif ip._prefixlen == ip._max_prefixlen:
if ips and ips[-1]._version != ip._version:
raise TypeError(
"%s and %s are not of the same version" % (ip, ips[-1])
)
try:
ips.append(ip.ip)
except AttributeError:
ips.append(ip.network_address)
else:
if nets and nets[-1]._version != ip._version:
raise TypeError(
"%s and %s are not of the same version" % (ip, nets[-1])
)
nets.append(ip)
# sort and dedup
ips = sorted(set(ips))
# find consecutive address ranges in the sorted sequence and summarize them
if ips:
for first, last in _find_address_range(ips):
addrs.extend(summarize_address_range(first, last))
return _collapse_addresses_internal(addrs + nets)
def get_mixed_type_key(obj):
"""Return a key suitable for sorting between networks and addresses.
Address and Network objects are not sortable by default; they're
fundamentally different so the expression
IPv4Address('192.0.2.0') <= IPv4Network('192.0.2.0/24')
doesn't make any sense. There are some times however, where you may wish
to have ipaddress sort these for you anyway. If you need to do this, you
can use this function as the key= argument to sorted().
Args:
obj: either a Network or Address object.
Returns:
appropriate key.
"""
if isinstance(obj, _BaseNetwork):
return obj._get_networks_key()
elif isinstance(obj, _BaseAddress):
return obj._get_address_key()
return NotImplemented
class _IPAddressBase(_TotalOrderingMixin):
"""The mother class."""
__slots__ = ()
@property
def exploded(self):
"""Return the longhand version of the IP address as a string."""
return self._explode_shorthand_ip_string()
@property
def compressed(self):
"""Return the shorthand version of the IP address as a string."""
return _compat_str(self)
@property
def reverse_pointer(self):
"""The name of the reverse DNS pointer for the IP address, e.g.:
>>> ipaddress.ip_address("127.0.0.1").reverse_pointer
'1.0.0.127.in-addr.arpa'
>>> ipaddress.ip_address("2001:db8::1").reverse_pointer
'1.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.8.b.d.0.1.0.0.2.ip6.arpa'
"""
return self._reverse_pointer()
@property
def version(self):
msg = "%200s has no version specified" % (type(self),)
raise NotImplementedError(msg)
def _check_int_address(self, address):
if address < 0:
msg = "%d (< 0) is not permitted as an IPv%d address"
raise AddressValueError(msg % (address, self._version))
if address > self._ALL_ONES:
msg = "%d (>= 2**%d) is not permitted as an IPv%d address"
raise AddressValueError(
msg % (address, self._max_prefixlen, self._version)
)
def _check_packed_address(self, address, expected_len):
address_len = len(address)
if address_len != expected_len:
msg = (
"%r (len %d != %d) is not permitted as an IPv%d address. "
"Did you pass in a bytes (str in Python 2) instead of"
" a unicode object?"
)
raise AddressValueError(
msg % (address, address_len, expected_len, self._version)
)
@classmethod
def _ip_int_from_prefix(cls, prefixlen):
"""Turn the prefix length into a bitwise netmask
Args:
prefixlen: An integer, the prefix length.
Returns:
An integer.
"""
return cls._ALL_ONES ^ (cls._ALL_ONES >> prefixlen)
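# e.g. for IPv4 (_ALL_ONES == 2**32 - 1), _ip_int_from_prefix(24)
# == 0xFFFFFF00, i.e. 255.255.255.0.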
@classmethod
def _prefix_from_ip_int(cls, ip_int):
"""Return prefix length from the bitwise netmask.
Args:
ip_int: An integer, the netmask in expanded bitwise format
Returns:
An integer, the prefix length.
Raises:
ValueError: If the input intermingles zeroes & ones
"""
trailing_zeroes = _count_righthand_zero_bits(
ip_int, cls._max_prefixlen
)
prefixlen = cls._max_prefixlen - trailing_zeroes
leading_ones = ip_int >> trailing_zeroes
all_ones = (1 << prefixlen) - 1
if leading_ones != all_ones:
byteslen = cls._max_prefixlen // 8
details = _compat_to_bytes(ip_int, byteslen, "big")
msg = "Netmask pattern %r mixes zeroes & ones"
raise ValueError(msg % details)
return prefixlen
@classmethod
def _report_invalid_netmask(cls, netmask_str):
msg = "%r is not a valid netmask" % netmask_str
raise NetmaskValueError(msg)
@classmethod
def _prefix_from_prefix_string(cls, prefixlen_str):
"""Return prefix length from a numeric string
Args:
prefixlen_str: The string to be converted
Returns:
An integer, the prefix length.
Raises:
NetmaskValueError: If the input is not a valid netmask
"""
# int allows a leading +/- as well as surrounding whitespace,
# so we ensure that isn't the case
if not _BaseV4._DECIMAL_DIGITS.issuperset(prefixlen_str):
cls._report_invalid_netmask(prefixlen_str)
try:
prefixlen = int(prefixlen_str)
except ValueError:
cls._report_invalid_netmask(prefixlen_str)
if not (0 <= prefixlen <= cls._max_prefixlen):
cls._report_invalid_netmask(prefixlen_str)
return prefixlen
@classmethod
def _prefix_from_ip_string(cls, ip_str):
"""Turn a netmask/hostmask string into a prefix length
Args:
ip_str: The netmask/hostmask to be converted
Returns:
An integer, the prefix length.
Raises:
NetmaskValueError: If the input is not a valid netmask/hostmask
"""
# Parse the netmask/hostmask like an IP address.
try:
ip_int = cls._ip_int_from_string(ip_str)
except AddressValueError:
cls._report_invalid_netmask(ip_str)
# Try matching a netmask (this would be /1*0*/ as a bitwise regexp).
# Note that the two ambiguous cases (all-ones and all-zeroes) are
# treated as netmasks.
try:
return cls._prefix_from_ip_int(ip_int)
except ValueError:
pass
# Invert the bits, and try matching a /0+1+/ hostmask instead.
ip_int ^= cls._ALL_ONES
try:
return cls._prefix_from_ip_int(ip_int)
except ValueError:
cls._report_invalid_netmask(ip_str)
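# e.g. both "255.255.255.0" (a netmask) and "0.0.0.255" (a hostmask) map
# to a prefix length of 24.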
def __reduce__(self):
return self.__class__, (_compat_str(self),)
class _BaseAddress(_IPAddressBase):
"""A generic IP object.
This IP class contains the version independent methods which are
used by single IP addresses.
"""
__slots__ = ()
def __int__(self):
return self._ip
def __eq__(self, other):
try:
return self._ip == other._ip and self._version == other._version
except AttributeError:
return NotImplemented
def __lt__(self, other):
if not isinstance(other, _IPAddressBase):
return NotImplemented
if not isinstance(other, _BaseAddress):
raise TypeError(
"%s and %s are not of the same type" % (self, other)
)
if self._version != other._version:
raise TypeError(
"%s and %s are not of the same version" % (self, other)
)
if self._ip != other._ip:
return self._ip < other._ip
return False
# Shorthand for Integer addition and subtraction. This is not
# meant to ever support addition/subtraction of addresses.
def __add__(self, other):
if not isinstance(other, _compat_int_types):
return NotImplemented
return self.__class__(int(self) + other)
def __sub__(self, other):
if not isinstance(other, _compat_int_types):
return NotImplemented
return self.__class__(int(self) - other)
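# e.g. IPv4Address(u"192.0.2.1") + 1 == IPv4Address(u"192.0.2.2"); adding
# two addresses together is rejected (TypeError) by design.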
def __repr__(self):
return "%s(%r)" % (self.__class__.__name__, _compat_str(self))
def __str__(self):
return _compat_str(self._string_from_ip_int(self._ip))
def __hash__(self):
return hash(hex(int(self._ip)))
def _get_address_key(self):
return (self._version, self)
def __reduce__(self):
return self.__class__, (self._ip,)
class _BaseNetwork(_IPAddressBase):
"""A generic IP network object.
This IP class contains the version independent methods which are
used by networks.
"""
def __init__(self, address):
self._cache = {}
def __repr__(self):
return "%s(%r)" % (self.__class__.__name__, _compat_str(self))
def __str__(self):
return "%s/%d" % (self.network_address, self.prefixlen)
def hosts(self):
"""Generate Iterator over usable hosts in a network.
This is like __iter__ except it doesn't return the network
or broadcast addresses.
"""
network = int(self.network_address)
broadcast = int(self.broadcast_address)
for x in _compat_range(network + 1, broadcast):
yield self._address_class(x)
def __iter__(self):
network = int(self.network_address)
broadcast = int(self.broadcast_address)
for x in _compat_range(network, broadcast + 1):
yield self._address_class(x)
def __getitem__(self, n):
network = int(self.network_address)
broadcast = int(self.broadcast_address)
if n >= 0:
if network + n > broadcast:
raise IndexError("address out of range")
return self._address_class(network + n)
else:
n += 1
if broadcast + n < network:
raise IndexError("address out of range")
return self._address_class(broadcast + n)
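# e.g. IPv4Network(u"192.0.2.0/24")[1] == IPv4Address(u"192.0.2.1") and
# [-1] == IPv4Address(u"192.0.2.255"), the broadcast address.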
def __lt__(self, other):
if not isinstance(other, _IPAddressBase):
return NotImplemented
if not isinstance(other, _BaseNetwork):
raise TypeError(
"%s and %s are not of the same type" % (self, other)
)
if self._version != other._version:
raise TypeError(
"%s and %s are not of the same version" % (self, other)
)
if self.network_address != other.network_address:
return self.network_address < other.network_address
if self.netmask != other.netmask:
return self.netmask < other.netmask
return False
def __eq__(self, other):
try:
return (
self._version == other._version
and self.network_address == other.network_address
and int(self.netmask) == int(other.netmask)
)
except AttributeError:
return NotImplemented
def __hash__(self):
return hash(int(self.network_address) ^ int(self.netmask))
def __contains__(self, other):
# always false if one is v4 and the other is v6.
if self._version != other._version:
return False
# dealing with another network.
if isinstance(other, _BaseNetwork):
return False
# dealing with another address
else:
# address
return (
int(self.network_address)
<= int(other._ip)
<= int(self.broadcast_address)
)
def overlaps(self, other):
"""Tell if self is partly contained in other."""
return self.network_address in other or (
self.broadcast_address in other
or (
other.network_address in self
or (other.broadcast_address in self)
)
)
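# e.g. IPv4Network(u"192.0.2.0/24").overlaps(IPv4Network(u"192.0.2.128/25"))
# is True, while disjoint blocks such as 198.51.100.0/24 give False.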
@property
def broadcast_address(self):
x = self._cache.get("broadcast_address")
if x is None:
x = self._address_class(
int(self.network_address) | int(self.hostmask)
)
self._cache["broadcast_address"] = x
return x
@property
def hostmask(self):
x = self._cache.get("hostmask")
if x is None:
x = self._address_class(int(self.netmask) ^ self._ALL_ONES)
self._cache["hostmask"] = x
return x
@property
def with_prefixlen(self):
return "%s/%d" % (self.network_address, self._prefixlen)
@property
def with_netmask(self):
return "%s/%s" % (self.network_address, self.netmask)
@property
def with_hostmask(self):
return "%s/%s" % (self.network_address, self.hostmask)
@property
def num_addresses(self):
"""Number of hosts in the current subnet."""
return int(self.broadcast_address) - int(self.network_address) + 1
@property
def _address_class(self):
# Returning bare address objects (rather than interfaces) allows for
# more consistent behaviour across the network address, broadcast
# address and individual host addresses.
msg = "%200s has no associated address class" % (type(self),)
raise NotImplementedError(msg)
@property
def prefixlen(self):
return self._prefixlen
def address_exclude(self, other):
"""Remove an address from a larger block.
For example:
addr1 = ip_network('192.0.2.0/28')
addr2 = ip_network('192.0.2.1/32')
list(addr1.address_exclude(addr2)) =
[IPv4Network('192.0.2.0/32'), IPv4Network('192.0.2.2/31'),
IPv4Network('192.0.2.4/30'), IPv4Network('192.0.2.8/29')]
or IPv6:
addr1 = ip_network('2001:db8::1/32')
addr2 = ip_network('2001:db8::1/128')
list(addr1.address_exclude(addr2)) =
[ip_network('2001:db8::1/128'),
ip_network('2001:db8::2/127'),
ip_network('2001:db8::4/126'),
ip_network('2001:db8::8/125'),
...
ip_network('2001:db8:8000::/33')]
Args:
other: An IPv4Network or IPv6Network object of the same type.
Returns:
An iterator of the IPv(4|6)Network objects which is self
minus other.
Raises:
TypeError: If self and other are of differing address
versions, or if other is not a network object.
ValueError: If other is not completely contained by self.
"""
if not self._version == other._version:
raise TypeError(
"%s and %s are not of the same version" % (self, other)
)
if not isinstance(other, _BaseNetwork):
raise TypeError("%s is not a network object" % other)
if not other.subnet_of(self):
raise ValueError("%s not contained in %s" % (other, self))
if other == self:
return
# Make sure we're comparing the network of other.
other = other.__class__(
"%s/%s" % (other.network_address, other.prefixlen)
)
s1, s2 = self.subnets()
while s1 != other and s2 != other:
if other.subnet_of(s1):
yield s2
s1, s2 = s1.subnets()
elif other.subnet_of(s2):
yield s1
s1, s2 = s2.subnets()
else:
# If we got here, there's a bug somewhere.
raise AssertionError(
"Error performing exclusion: "
"s1: %s s2: %s other: %s" % (s1, s2, other)
)
if s1 == other:
yield s2
elif s2 == other:
yield s1
else:
# If we got here, there's a bug somewhere.
raise AssertionError(
"Error performing exclusion: "
"s1: %s s2: %s other: %s" % (s1, s2, other)
)
def compare_networks(self, other):
"""Compare two IP objects.
This is only concerned about the comparison of the integer
representation of the network addresses. This means that the
host bits aren't considered at all in this method. If you want
to compare host bits, you can easily enough do a
'HostA._ip < HostB._ip'
Args:
other: An IP object.
Returns:
If the IP versions of self and other are the same, returns:
-1 if self < other:
eg: IPv4Network('192.0.2.0/25') < IPv4Network('192.0.2.128/25')
IPv6Network('2001:db8::1000/124') <
IPv6Network('2001:db8::2000/124')
0 if self == other
eg: IPv4Network('192.0.2.0/24') == IPv4Network('192.0.2.0/24')
IPv6Network('2001:db8::1000/124') ==
IPv6Network('2001:db8::1000/124')
1 if self > other
eg: IPv4Network('192.0.2.128/25') > IPv4Network('192.0.2.0/25')
IPv6Network('2001:db8::2000/124') >
IPv6Network('2001:db8::1000/124')
Raises:
TypeError if the IP versions are different.
"""
# does this need to raise a ValueError?
if self._version != other._version:
raise TypeError(
"%s and %s are not of the same type" % (self, other)
)
# self._version == other._version below here:
if self.network_address < other.network_address:
return -1
if self.network_address > other.network_address:
return 1
# self.network_address == other.network_address below here:
if self.netmask < other.netmask:
return -1
if self.netmask > other.netmask:
return 1
return 0
def _get_networks_key(self):
"""Network-only key function.
Returns an object that identifies this address' network and
netmask. This function is a suitable "key" argument for sorted()
and list.sort().
"""
return (self._version, self.network_address, self.netmask)
def subnets(self, prefixlen_diff=1, new_prefix=None):
"""The subnets which join to make the current subnet.
In the case that self contains only one IP
(self._prefixlen == 32 for IPv4 or self._prefixlen == 128
for IPv6), yield an iterator with just ourself.
Args:
prefixlen_diff: An integer, the amount the prefix length
should be increased by. This should not be set if
new_prefix is also set.
new_prefix: The desired new prefix length. This must be a
larger number (smaller prefix) than the existing prefix.
This should not be set if prefixlen_diff is also set.
Returns:
An iterator of IPv(4|6) objects.
Raises:
ValueError: The prefixlen_diff is too small or too large.
OR
prefixlen_diff and new_prefix are both set or new_prefix
is a smaller number than the current prefix (smaller
number means a larger network)
"""
if self._prefixlen == self._max_prefixlen:
yield self
return
if new_prefix is not None:
if new_prefix < self._prefixlen:
raise ValueError("new prefix must be longer")
if prefixlen_diff != 1:
raise ValueError("cannot set prefixlen_diff and new_prefix")
prefixlen_diff = new_prefix - self._prefixlen
if prefixlen_diff < 0:
raise ValueError("prefix length diff must be > 0")
new_prefixlen = self._prefixlen + prefixlen_diff
if new_prefixlen > self._max_prefixlen:
raise ValueError(
"prefix length diff %d is invalid for netblock %s"
% (new_prefixlen, self)
)
start = int(self.network_address)
end = int(self.broadcast_address) + 1
step = (int(self.hostmask) + 1) >> prefixlen_diff
for new_addr in _compat_range(start, end, step):
current = self.__class__((new_addr, new_prefixlen))
yield current
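# e.g. list(IPv4Network(u"192.0.2.0/24").subnets(new_prefix=26)) gives the
# four /26 blocks at 192.0.2.0, .64, .128 and .192.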
def supernet(self, prefixlen_diff=1, new_prefix=None):
"""The supernet containing the current network.
Args:
prefixlen_diff: An integer, the amount the prefix length of
the network should be decreased by. For example, given a
/24 network and a prefixlen_diff of 3, a supernet with a
/21 netmask is returned.
Returns:
An IPv4 network object.
Raises:
ValueError: If self.prefixlen - prefixlen_diff < 0. I.e., you have
a negative prefix length.
OR
If prefixlen_diff and new_prefix are both set or new_prefix is a
larger number than the current prefix (larger number means a
smaller network)
"""
if self._prefixlen == 0:
return self
if new_prefix is not None:
if new_prefix > self._prefixlen:
raise ValueError("new prefix must be shorter")
if prefixlen_diff != 1:
raise ValueError("cannot set prefixlen_diff and new_prefix")
prefixlen_diff = self._prefixlen - new_prefix
new_prefixlen = self.prefixlen - prefixlen_diff
if new_prefixlen < 0:
raise ValueError(
"current prefixlen is %d, cannot have a prefixlen_diff of %d"
% (self.prefixlen, prefixlen_diff)
)
return self.__class__(
(
int(self.network_address)
& (int(self.netmask) << prefixlen_diff),
new_prefixlen,
)
)
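# e.g. IPv4Network(u"192.0.2.0/25").supernet() == IPv4Network(u"192.0.2.0/24"),
# and supernet(new_prefix=22) == IPv4Network(u"192.0.0.0/22").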
@property
def is_multicast(self):
"""Test if the address is reserved for multicast use.
Returns:
A boolean, True if the address is a multicast address.
See RFC 2373 2.7 for details.
"""
return (
self.network_address.is_multicast
and self.broadcast_address.is_multicast
)
@staticmethod
def _is_subnet_of(a, b):
try:
# Always false if one is v4 and the other is v6.
if a._version != b._version:
raise TypeError(
"%s and %s are not of the same version" % (a, b)
)
return (
b.network_address <= a.network_address
and b.broadcast_address >= a.broadcast_address
)
except AttributeError:
raise TypeError(
"Unable to test subnet containment "
"between %s and %s" % (a, b)
)
def subnet_of(self, other):
"""Return True if this network is a subnet of other."""
return self._is_subnet_of(self, other)
def supernet_of(self, other):
"""Return True if this network is a supernet of other."""
return self._is_subnet_of(other, self)
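# e.g. IPv4Network(u"192.0.2.0/25").subnet_of(IPv4Network(u"192.0.2.0/24"))
# is True, and the /24 is correspondingly a supernet_of the /25.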
@property
def is_reserved(self):
"""Test if the address is otherwise IETF reserved.
Returns:
A boolean, True if the address is within one of the
reserved IPv6 Network ranges.
"""
return (
self.network_address.is_reserved
and self.broadcast_address.is_reserved
)
@property
def is_link_local(self):
"""Test if the address is reserved for link-local.
Returns:
A boolean, True if the address is reserved per RFC 4291.
"""
return (
self.network_address.is_link_local
and self.broadcast_address.is_link_local
)
@property
def is_private(self):
"""Test if this address is allocated for private networks.
Returns:
A boolean, True if the address is reserved per
iana-ipv4-special-registry or iana-ipv6-special-registry.
"""
return (
self.network_address.is_private
and self.broadcast_address.is_private
)
@property
def is_global(self):
"""Test if this address is allocated for public networks.
Returns:
A boolean, True if the address is not reserved per
iana-ipv4-special-registry or iana-ipv6-special-registry.
"""
return not self.is_private
@property
def is_unspecified(self):
"""Test if the address is unspecified.
Returns:
A boolean, True if this is the unspecified address as defined in
RFC 2373 2.5.2.
"""
return (
self.network_address.is_unspecified
and self.broadcast_address.is_unspecified
)
@property
def is_loopback(self):
"""Test if the address is a loopback address.
Returns:
A boolean, True if the address is a loopback address as defined in
RFC 2373 2.5.3.
"""
return (
self.network_address.is_loopback
and self.broadcast_address.is_loopback
)
class _BaseV4(object):
"""Base IPv4 object.
The following methods are used by IPv4 objects in both single IP
addresses and networks.
"""
__slots__ = ()
_version = 4
# Equivalent to 255.255.255.255 or 32 bits of 1's.
_ALL_ONES = (2 ** IPV4LENGTH) - 1
_DECIMAL_DIGITS = frozenset("0123456789")
# the valid octets for host and netmasks. only useful for IPv4.
_valid_mask_octets = frozenset([255, 254, 252, 248, 240, 224, 192, 128, 0])
_max_prefixlen = IPV4LENGTH
# There are only a handful of valid v4 netmasks, so we cache them all
# when constructed (see _make_netmask()).
_netmask_cache = {}
def _explode_shorthand_ip_string(self):
return _compat_str(self)
@classmethod
def _make_netmask(cls, arg):
"""Make a (netmask, prefix_len) tuple from the given argument.
Argument can be:
- an integer (the prefix length)
- a string representing the prefix length (e.g. "24")
- a string representing the prefix netmask (e.g. "255.255.255.0")
"""
if arg not in cls._netmask_cache:
if isinstance(arg, _compat_int_types):
prefixlen = arg
else:
try:
# Check for a netmask in prefix length form
prefixlen = cls._prefix_from_prefix_string(arg)
except NetmaskValueError:
# Check for a netmask or hostmask in dotted-quad form.
# This may raise NetmaskValueError.
prefixlen = cls._prefix_from_ip_string(arg)
netmask = IPv4Address(cls._ip_int_from_prefix(prefixlen))
cls._netmask_cache[arg] = netmask, prefixlen
return cls._netmask_cache[arg]
@classmethod
def _ip_int_from_string(cls, ip_str):
"""Turn the given IP string into an integer for comparison.
Args:
ip_str: A string, the IP ip_str.
Returns:
The IP ip_str as an integer.
Raises:
AddressValueError: if ip_str isn't a valid IPv4 Address.
"""
if not ip_str:
raise AddressValueError("Address cannot be empty")
octets = ip_str.split(".")
if len(octets) != 4:
raise AddressValueError("Expected 4 octets in %r" % ip_str)
try:
return _compat_int_from_byte_vals(
map(cls._parse_octet, octets), "big"
)
except ValueError as exc:
raise AddressValueError("%s in %r" % (exc, ip_str))
@classmethod
def _parse_octet(cls, octet_str):
"""Convert a decimal octet into an integer.
Args:
octet_str: A string, the number to parse.
Returns:
The octet as an integer.
Raises:
ValueError: if the octet isn't strictly a decimal from [0..255].
"""
if not octet_str:
raise ValueError("Empty octet not permitted")
# Whitelist the characters, since int() allows a lot of bizarre stuff.
if not cls._DECIMAL_DIGITS.issuperset(octet_str):
msg = "Only decimal digits permitted in %r"
raise ValueError(msg % octet_str)
# We do the length check second, since the invalid character error
# is likely to be more informative for the user
if len(octet_str) > 3:
msg = "At most 3 characters permitted in %r"
raise ValueError(msg % octet_str)
# Convert to integer (we know digits are legal)
octet_int = int(octet_str, 10)
# Any octets that look like they *might* be written in octal,
# and which don't look exactly the same in both octal and
# decimal are rejected as ambiguous
if octet_int > 7 and octet_str[0] == "0":
msg = "Ambiguous (octal/decimal) value in %r not permitted"
raise ValueError(msg % octet_str)
if octet_int > 255:
raise ValueError("Octet %d (> 255) not permitted" % octet_int)
return octet_int
@classmethod
def _string_from_ip_int(cls, ip_int):
"""Turns a 32-bit integer into dotted decimal notation.
Args:
ip_int: An integer, the IP address.
Returns:
The IP address as a string in dotted decimal notation.
"""
return ".".join(
_compat_str(
struct.unpack(b"!B", b)[0] if isinstance(b, bytes) else b
)
for b in _compat_to_bytes(ip_int, 4, "big")
)
def _is_hostmask(self, ip_str):
"""Test if the IP string is a hostmask (rather than a netmask).
Args:
ip_str: A string, the potential hostmask.
Returns:
A boolean, True if the IP string is a hostmask.
"""
bits = ip_str.split(".")
try:
parts = [x for x in map(int, bits) if x in self._valid_mask_octets]
except ValueError:
return False
if len(parts) != len(bits):
return False
if parts[0] < parts[-1]:
return True
return False
def _reverse_pointer(self):
"""Return the reverse DNS pointer name for the IPv4 address.
This implements the method described in RFC1035 3.5.
"""
reverse_octets = _compat_str(self).split(".")[::-1]
return ".".join(reverse_octets) + ".in-addr.arpa"
@property
def max_prefixlen(self):
return self._max_prefixlen
@property
def version(self):
return self._version
class IPv4Address(_BaseV4, _BaseAddress):
"""Represent and manipulate single IPv4 Addresses."""
__slots__ = ("_ip", "__weakref__")
def __init__(self, address):
"""
Args:
address: A string or integer representing the IP
Additionally, an integer can be passed, so
IPv4Address('192.0.2.1') == IPv4Address(3221225985).
or, more generally
IPv4Address(int(IPv4Address('192.0.2.1'))) ==
IPv4Address('192.0.2.1')
Raises:
AddressValueError: If ipaddress isn't a valid IPv4 address.
"""
# Efficient constructor from integer.
if isinstance(address, _compat_int_types):
self._check_int_address(address)
self._ip = address
return
# Constructing from a packed address
if isinstance(address, bytes):
self._check_packed_address(address, 4)
bvs = _compat_bytes_to_byte_vals(address)
self._ip = _compat_int_from_byte_vals(bvs, "big")
return
# Assume input argument to be string or any object representation
# which converts into a formatted IP string.
addr_str = _compat_str(address)
if "/" in addr_str:
raise AddressValueError("Unexpected '/' in %r" % address)
self._ip = self._ip_int_from_string(addr_str)
@property
def packed(self):
"""The binary representation of this address."""
return v4_int_to_packed(self._ip)
@property
def is_reserved(self):
"""Test if the address is otherwise IETF reserved.
Returns:
A boolean, True if the address is within the
reserved IPv4 Network range.
"""
return self in self._constants._reserved_network
@property
def is_private(self):
"""Test if this address is allocated for private networks.
Returns:
A boolean, True if the address is reserved per
iana-ipv4-special-registry.
"""
return any(self in net for net in self._constants._private_networks)
@property
def is_global(self):
return (
self not in self._constants._public_network and not self.is_private
)
@property
def is_multicast(self):
"""Test if the address is reserved for multicast use.
Returns:
A boolean, True if the address is multicast.
See RFC 3171 for details.
"""
return self in self._constants._multicast_network
@property
def is_unspecified(self):
"""Test if the address is unspecified.
Returns:
A boolean, True if this is the unspecified address as defined in
RFC 5735 3.
"""
return self == self._constants._unspecified_address
@property
def is_loopback(self):
"""Test if the address is a loopback address.
Returns:
A boolean, True if the address is a loopback per RFC 3330.
"""
return self in self._constants._loopback_network
@property
def is_link_local(self):
"""Test if the address is reserved for link-local.
Returns:
A boolean, True if the address is link-local per RFC 3927.
"""
return self in self._constants._linklocal_network
class IPv4Interface(IPv4Address):
def __init__(self, address):
if isinstance(address, (bytes, _compat_int_types)):
IPv4Address.__init__(self, address)
self.network = IPv4Network(self._ip)
self._prefixlen = self._max_prefixlen
return
if isinstance(address, tuple):
IPv4Address.__init__(self, address[0])
if len(address) > 1:
self._prefixlen = int(address[1])
else:
self._prefixlen = self._max_prefixlen
self.network = IPv4Network(address, strict=False)
self.netmask = self.network.netmask
self.hostmask = self.network.hostmask
return
addr = _split_optional_netmask(address)
IPv4Address.__init__(self, addr[0])
self.network = IPv4Network(address, strict=False)
self._prefixlen = self.network._prefixlen
self.netmask = self.network.netmask
self.hostmask = self.network.hostmask
def __str__(self):
return "%s/%d" % (
self._string_from_ip_int(self._ip),
self.network.prefixlen,
)
def __eq__(self, other):
address_equal = IPv4Address.__eq__(self, other)
if not address_equal or address_equal is NotImplemented:
return address_equal
try:
return self.network == other.network
except AttributeError:
# An interface with an associated network is NOT the
# same as an unassociated address. That's why the hash
# takes the extra info into account.
return False
def __lt__(self, other):
address_less = IPv4Address.__lt__(self, other)
if address_less is NotImplemented:
return NotImplemented
try:
return (
self.network < other.network
or self.network == other.network
and address_less
)
except AttributeError:
# We *do* allow addresses and interfaces to be sorted. The
# unassociated address is considered less than all interfaces.
return False
def __hash__(self):
return self._ip ^ self._prefixlen ^ int(self.network.network_address)
__reduce__ = _IPAddressBase.__reduce__
@property
def ip(self):
return IPv4Address(self._ip)
@property
def with_prefixlen(self):
return "%s/%s" % (self._string_from_ip_int(self._ip), self._prefixlen)
@property
def with_netmask(self):
return "%s/%s" % (self._string_from_ip_int(self._ip), self.netmask)
@property
def with_hostmask(self):
return "%s/%s" % (self._string_from_ip_int(self._ip), self.hostmask)
class IPv4Network(_BaseV4, _BaseNetwork):
"""This class represents and manipulates 32-bit IPv4 network + addresses..
Attributes: [examples for IPv4Network('192.0.2.0/27')]
.network_address: IPv4Address('192.0.2.0')
.hostmask: IPv4Address('0.0.0.31')
        .broadcast_address: IPv4Address('192.0.2.31')
.netmask: IPv4Address('255.255.255.224')
.prefixlen: 27
"""
# Class to use when creating address objects
_address_class = IPv4Address
def __init__(self, address, strict=True):
"""Instantiate a new IPv4 network object.
Args:
address: A string or integer representing the IP [& network].
'192.0.2.0/24'
'192.0.2.0/255.255.255.0'
'192.0.0.2/0.0.0.255'
are all functionally the same in IPv4. Similarly,
'192.0.2.1'
'192.0.2.1/255.255.255.255'
'192.0.2.1/32'
are also functionally equivalent. That is to say, failing to
provide a subnetmask will create an object with a mask of /32.
If the mask (portion after the / in the argument) is given in
dotted quad form, it is treated as a netmask if it starts with a
non-zero field (e.g. /255.0.0.0 == /8) and as a hostmask if it
starts with a zero field (e.g. 0.255.255.255 == /8), with the
single exception of an all-zero mask which is treated as a
netmask == /0. If no mask is given, a default of /32 is used.
Additionally, an integer can be passed, so
IPv4Network('192.0.2.1') == IPv4Network(3221225985)
or, more generally
IPv4Interface(int(IPv4Interface('192.0.2.1'))) ==
IPv4Interface('192.0.2.1')
Raises:
AddressValueError: If ipaddress isn't a valid IPv4 address.
NetmaskValueError: If the netmask isn't valid for
an IPv4 address.
ValueError: If strict is True and a network address is not
supplied.
"""
_BaseNetwork.__init__(self, address)
# Constructing from a packed address or integer
if isinstance(address, (_compat_int_types, bytes)):
self.network_address = IPv4Address(address)
self.netmask, self._prefixlen = self._make_netmask(
self._max_prefixlen
)
# fixme: address/network test here.
return
if isinstance(address, tuple):
if len(address) > 1:
arg = address[1]
else:
# We weren't given an address[1]
arg = self._max_prefixlen
self.network_address = IPv4Address(address[0])
self.netmask, self._prefixlen = self._make_netmask(arg)
packed = int(self.network_address)
if packed & int(self.netmask) != packed:
if strict:
raise ValueError("%s has host bits set" % self)
else:
self.network_address = IPv4Address(
packed & int(self.netmask)
)
return
# Assume input argument to be string or any object representation
# which converts into a formatted IP prefix string.
addr = _split_optional_netmask(address)
self.network_address = IPv4Address(self._ip_int_from_string(addr[0]))
if len(addr) == 2:
arg = addr[1]
else:
arg = self._max_prefixlen
self.netmask, self._prefixlen = self._make_netmask(arg)
if strict:
if (
IPv4Address(int(self.network_address) & int(self.netmask))
!= self.network_address
):
raise ValueError("%s has host bits set" % self)
self.network_address = IPv4Address(
int(self.network_address) & int(self.netmask)
)
if self._prefixlen == (self._max_prefixlen - 1):
self.hosts = self.__iter__
@property
def is_global(self):
"""Test if this address is allocated for public networks.
Returns:
A boolean, True if the address is not reserved per
iana-ipv4-special-registry.
"""
return (
not (
self.network_address in IPv4Network("100.64.0.0/10")
and self.broadcast_address in IPv4Network("100.64.0.0/10")
)
and not self.is_private
)
class _IPv4Constants(object):
_linklocal_network = IPv4Network("169.254.0.0/16")
_loopback_network = IPv4Network("127.0.0.0/8")
_multicast_network = IPv4Network("224.0.0.0/4")
_public_network = IPv4Network("100.64.0.0/10")
_private_networks = [
IPv4Network("0.0.0.0/8"),
IPv4Network("10.0.0.0/8"),
IPv4Network("127.0.0.0/8"),
IPv4Network("169.254.0.0/16"),
IPv4Network("172.16.0.0/12"),
IPv4Network("192.0.0.0/29"),
IPv4Network("192.0.0.170/31"),
IPv4Network("192.0.2.0/24"),
IPv4Network("192.168.0.0/16"),
IPv4Network("198.18.0.0/15"),
IPv4Network("198.51.100.0/24"),
IPv4Network("203.0.113.0/24"),
IPv4Network("240.0.0.0/4"),
IPv4Network("255.255.255.255/32"),
]
_reserved_network = IPv4Network("240.0.0.0/4")
_unspecified_address = IPv4Address("0.0.0.0")
IPv4Address._constants = _IPv4Constants
class _BaseV6(object):
"""Base IPv6 object.
The following methods are used by IPv6 objects in both single IP
addresses and networks.
"""
__slots__ = ()
_version = 6
_ALL_ONES = (2 ** IPV6LENGTH) - 1
_HEXTET_COUNT = 8
_HEX_DIGITS = frozenset("0123456789ABCDEFabcdef")
_max_prefixlen = IPV6LENGTH
# There are only a bunch of valid v6 netmasks, so we cache them all
# when constructed (see _make_netmask()).
_netmask_cache = {}
@classmethod
def _make_netmask(cls, arg):
"""Make a (netmask, prefix_len) tuple from the given argument.
Argument can be:
- an integer (the prefix length)
- a string representing the prefix length (e.g. "24")
- a string representing the prefix netmask (e.g. "255.255.255.0")
"""
if arg not in cls._netmask_cache:
if isinstance(arg, _compat_int_types):
prefixlen = arg
else:
prefixlen = cls._prefix_from_prefix_string(arg)
netmask = IPv6Address(cls._ip_int_from_prefix(prefixlen))
cls._netmask_cache[arg] = netmask, prefixlen
return cls._netmask_cache[arg]
@classmethod
def _ip_int_from_string(cls, ip_str):
"""Turn an IPv6 ip_str into an integer.
Args:
ip_str: A string, the IPv6 ip_str.
Returns:
An int, the IPv6 address
Raises:
AddressValueError: if ip_str isn't a valid IPv6 Address.
"""
if not ip_str:
raise AddressValueError("Address cannot be empty")
parts = ip_str.split(":")
# An IPv6 address needs at least 2 colons (3 parts).
_min_parts = 3
if len(parts) < _min_parts:
msg = "At least %d parts expected in %r" % (_min_parts, ip_str)
raise AddressValueError(msg)
# If the address has an IPv4-style suffix, convert it to hexadecimal.
if "." in parts[-1]:
try:
ipv4_int = IPv4Address(parts.pop())._ip
except AddressValueError as exc:
raise AddressValueError("%s in %r" % (exc, ip_str))
parts.append("%x" % ((ipv4_int >> 16) & 0xFFFF))
parts.append("%x" % (ipv4_int & 0xFFFF))
# An IPv6 address can't have more than 8 colons (9 parts).
# The extra colon comes from using the "::" notation for a single
# leading or trailing zero part.
_max_parts = cls._HEXTET_COUNT + 1
if len(parts) > _max_parts:
msg = "At most %d colons permitted in %r" % (
_max_parts - 1,
ip_str,
)
raise AddressValueError(msg)
# Disregarding the endpoints, find '::' with nothing in between.
# This indicates that a run of zeroes has been skipped.
skip_index = None
for i in _compat_range(1, len(parts) - 1):
if not parts[i]:
if skip_index is not None:
# Can't have more than one '::'
msg = "At most one '::' permitted in %r" % ip_str
raise AddressValueError(msg)
skip_index = i
# parts_hi is the number of parts to copy from above/before the '::'
# parts_lo is the number of parts to copy from below/after the '::'
if skip_index is not None:
# If we found a '::', then check if it also covers the endpoints.
parts_hi = skip_index
parts_lo = len(parts) - skip_index - 1
if not parts[0]:
parts_hi -= 1
if parts_hi:
msg = "Leading ':' only permitted as part of '::' in %r"
raise AddressValueError(msg % ip_str) # ^: requires ^::
if not parts[-1]:
parts_lo -= 1
if parts_lo:
msg = "Trailing ':' only permitted as part of '::' in %r"
raise AddressValueError(msg % ip_str) # :$ requires ::$
parts_skipped = cls._HEXTET_COUNT - (parts_hi + parts_lo)
if parts_skipped < 1:
msg = "Expected at most %d other parts with '::' in %r"
raise AddressValueError(msg % (cls._HEXTET_COUNT - 1, ip_str))
else:
# Otherwise, allocate the entire address to parts_hi. The
# endpoints could still be empty, but _parse_hextet() will check
# for that.
if len(parts) != cls._HEXTET_COUNT:
msg = "Exactly %d parts expected without '::' in %r"
raise AddressValueError(msg % (cls._HEXTET_COUNT, ip_str))
if not parts[0]:
msg = "Leading ':' only permitted as part of '::' in %r"
raise AddressValueError(msg % ip_str) # ^: requires ^::
if not parts[-1]:
msg = "Trailing ':' only permitted as part of '::' in %r"
raise AddressValueError(msg % ip_str) # :$ requires ::$
parts_hi = len(parts)
parts_lo = 0
parts_skipped = 0
try:
# Now, parse the hextets into a 128-bit integer.
ip_int = 0
for i in range(parts_hi):
ip_int <<= 16
ip_int |= cls._parse_hextet(parts[i])
ip_int <<= 16 * parts_skipped
for i in range(-parts_lo, 0):
ip_int <<= 16
ip_int |= cls._parse_hextet(parts[i])
return ip_int
except ValueError as exc:
raise AddressValueError("%s in %r" % (exc, ip_str))
@classmethod
def _parse_hextet(cls, hextet_str):
"""Convert an IPv6 hextet string into an integer.
Args:
hextet_str: A string, the number to parse.
Returns:
The hextet as an integer.
Raises:
ValueError: if the input isn't strictly a hex number from
[0..FFFF].
"""
# Whitelist the characters, since int() allows a lot of bizarre stuff.
if not cls._HEX_DIGITS.issuperset(hextet_str):
raise ValueError("Only hex digits permitted in %r" % hextet_str)
# We do the length check second, since the invalid character error
# is likely to be more informative for the user
if len(hextet_str) > 4:
msg = "At most 4 characters permitted in %r"
raise ValueError(msg % hextet_str)
# Length check means we can skip checking the integer value
return int(hextet_str, 16)
@classmethod
def _compress_hextets(cls, hextets):
"""Compresses a list of hextets.
Compresses a list of strings, replacing the longest continuous
sequence of "0" in the list with "" and adding empty strings at
the beginning or at the end of the string such that subsequently
calling ":".join(hextets) will produce the compressed version of
the IPv6 address.
Args:
hextets: A list of strings, the hextets to compress.
Returns:
A list of strings.
"""
best_doublecolon_start = -1
best_doublecolon_len = 0
doublecolon_start = -1
doublecolon_len = 0
for index, hextet in enumerate(hextets):
if hextet == "0":
doublecolon_len += 1
if doublecolon_start == -1:
# Start of a sequence of zeros.
doublecolon_start = index
if doublecolon_len > best_doublecolon_len:
# This is the longest sequence of zeros so far.
best_doublecolon_len = doublecolon_len
best_doublecolon_start = doublecolon_start
else:
doublecolon_len = 0
doublecolon_start = -1
if best_doublecolon_len > 1:
best_doublecolon_end = (
best_doublecolon_start + best_doublecolon_len
)
# For zeros at the end of the address.
if best_doublecolon_end == len(hextets):
hextets += [""]
hextets[best_doublecolon_start:best_doublecolon_end] = [""]
# For zeros at the beginning of the address.
if best_doublecolon_start == 0:
hextets = [""] + hextets
return hextets
@classmethod
def _string_from_ip_int(cls, ip_int=None):
"""Turns a 128-bit integer into hexadecimal notation.
Args:
ip_int: An integer, the IP address.
Returns:
A string, the hexadecimal representation of the address.
Raises:
ValueError: The address is bigger than 128 bits of all ones.
"""
if ip_int is None:
ip_int = int(cls._ip)
if ip_int > cls._ALL_ONES:
raise ValueError("IPv6 address is too large")
hex_str = "%032x" % ip_int
hextets = ["%x" % int(hex_str[x : x + 4], 16) for x in range(0, 32, 4)]
hextets = cls._compress_hextets(hextets)
return ":".join(hextets)
def _explode_shorthand_ip_string(self):
"""Expand a shortened IPv6 address.
Args:
ip_str: A string, the IPv6 address.
Returns:
A string, the expanded IPv6 address.
"""
if isinstance(self, IPv6Network):
ip_str = _compat_str(self.network_address)
elif isinstance(self, IPv6Interface):
ip_str = _compat_str(self.ip)
else:
ip_str = _compat_str(self)
ip_int = self._ip_int_from_string(ip_str)
hex_str = "%032x" % ip_int
parts = [hex_str[x : x + 4] for x in range(0, 32, 4)]
if isinstance(self, (_BaseNetwork, IPv6Interface)):
return "%s/%d" % (":".join(parts), self._prefixlen)
return ":".join(parts)
def _reverse_pointer(self):
"""Return the reverse DNS pointer name for the IPv6 address.
This implements the method described in RFC3596 2.5.
"""
reverse_chars = self.exploded[::-1].replace(":", "")
return ".".join(reverse_chars) + ".ip6.arpa"
@property
def max_prefixlen(self):
return self._max_prefixlen
@property
def version(self):
return self._version
class IPv6Address(_BaseV6, _BaseAddress):
"""Represent and manipulate single IPv6 Addresses."""
__slots__ = ("_ip", "__weakref__")
def __init__(self, address):
"""Instantiate a new IPv6 address object.
Args:
address: A string or integer representing the IP
Additionally, an integer can be passed, so
IPv6Address('2001:db8::') ==
IPv6Address(42540766411282592856903984951653826560)
or, more generally
IPv6Address(int(IPv6Address('2001:db8::'))) ==
IPv6Address('2001:db8::')
Raises:
AddressValueError: If address isn't a valid IPv6 address.
"""
# Efficient constructor from integer.
if isinstance(address, _compat_int_types):
self._check_int_address(address)
self._ip = address
return
# Constructing from a packed address
if isinstance(address, bytes):
self._check_packed_address(address, 16)
bvs = _compat_bytes_to_byte_vals(address)
self._ip = _compat_int_from_byte_vals(bvs, "big")
return
# Assume input argument to be string or any object representation
# which converts into a formatted IP string.
addr_str = _compat_str(address)
if "/" in addr_str:
raise AddressValueError("Unexpected '/' in %r" % address)
self._ip = self._ip_int_from_string(addr_str)
@property
def packed(self):
"""The binary representation of this address."""
return v6_int_to_packed(self._ip)
@property
def is_multicast(self):
"""Test if the address is reserved for multicast use.
Returns:
A boolean, True if the address is a multicast address.
See RFC 2373 2.7 for details.
"""
return self in self._constants._multicast_network
@property
def is_reserved(self):
"""Test if the address is otherwise IETF reserved.
Returns:
A boolean, True if the address is within one of the
reserved IPv6 Network ranges.
"""
return any(self in x for x in self._constants._reserved_networks)
@property
def is_link_local(self):
"""Test if the address is reserved for link-local.
Returns:
A boolean, True if the address is reserved per RFC 4291.
"""
return self in self._constants._linklocal_network
@property
def is_site_local(self):
"""Test if the address is reserved for site-local.
Note that the site-local address space has been deprecated by RFC 3879.
Use is_private to test if this address is in the space of unique local
addresses as defined by RFC 4193.
Returns:
A boolean, True if the address is reserved per RFC 3513 2.5.6.
"""
return self in self._constants._sitelocal_network
@property
def is_private(self):
"""Test if this address is allocated for private networks.
Returns:
A boolean, True if the address is reserved per
iana-ipv6-special-registry.
"""
return any(self in net for net in self._constants._private_networks)
@property
def is_global(self):
"""Test if this address is allocated for public networks.
Returns:
A boolean, true if the address is not reserved per
iana-ipv6-special-registry.
"""
return not self.is_private
@property
def is_unspecified(self):
"""Test if the address is unspecified.
Returns:
A boolean, True if this is the unspecified address as defined in
RFC 2373 2.5.2.
"""
return self._ip == 0
@property
def is_loopback(self):
"""Test if the address is a loopback address.
Returns:
A boolean, True if the address is a loopback address as defined in
RFC 2373 2.5.3.
"""
return self._ip == 1
@property
def ipv4_mapped(self):
"""Return the IPv4 mapped address.
Returns:
If the IPv6 address is a v4 mapped address, return the
IPv4 mapped address. Return None otherwise.
"""
if (self._ip >> 32) != 0xFFFF:
return None
return IPv4Address(self._ip & 0xFFFFFFFF)
@property
def teredo(self):
"""Tuple of embedded teredo IPs.
Returns:
Tuple of the (server, client) IPs or None if the address
doesn't appear to be a teredo address (doesn't start with
2001::/32)
"""
if (self._ip >> 96) != 0x20010000:
return None
return (
IPv4Address((self._ip >> 64) & 0xFFFFFFFF),
IPv4Address(~self._ip & 0xFFFFFFFF),
)
@property
def sixtofour(self):
"""Return the IPv4 6to4 embedded address.
Returns:
The IPv4 6to4-embedded address if present or None if the
address doesn't appear to contain a 6to4 embedded address.
"""
if (self._ip >> 112) != 0x2002:
return None
return IPv4Address((self._ip >> 80) & 0xFFFFFFFF)
class IPv6Interface(IPv6Address):
def __init__(self, address):
if isinstance(address, (bytes, _compat_int_types)):
IPv6Address.__init__(self, address)
self.network = IPv6Network(self._ip)
self._prefixlen = self._max_prefixlen
return
if isinstance(address, tuple):
IPv6Address.__init__(self, address[0])
if len(address) > 1:
self._prefixlen = int(address[1])
else:
self._prefixlen = self._max_prefixlen
self.network = IPv6Network(address, strict=False)
self.netmask = self.network.netmask
self.hostmask = self.network.hostmask
return
addr = _split_optional_netmask(address)
IPv6Address.__init__(self, addr[0])
self.network = IPv6Network(address, strict=False)
self.netmask = self.network.netmask
self._prefixlen = self.network._prefixlen
self.hostmask = self.network.hostmask
def __str__(self):
return "%s/%d" % (
self._string_from_ip_int(self._ip),
self.network.prefixlen,
)
def __eq__(self, other):
address_equal = IPv6Address.__eq__(self, other)
if not address_equal or address_equal is NotImplemented:
return address_equal
try:
return self.network == other.network
except AttributeError:
# An interface with an associated network is NOT the
# same as an unassociated address. That's why the hash
# takes the extra info into account.
return False
def __lt__(self, other):
address_less = IPv6Address.__lt__(self, other)
if address_less is NotImplemented:
return NotImplemented
try:
return (
self.network < other.network
or self.network == other.network
and address_less
)
except AttributeError:
# We *do* allow addresses and interfaces to be sorted. The
# unassociated address is considered less than all interfaces.
return False
def __hash__(self):
return self._ip ^ self._prefixlen ^ int(self.network.network_address)
__reduce__ = _IPAddressBase.__reduce__
@property
def ip(self):
return IPv6Address(self._ip)
@property
def with_prefixlen(self):
return "%s/%s" % (self._string_from_ip_int(self._ip), self._prefixlen)
@property
def with_netmask(self):
return "%s/%s" % (self._string_from_ip_int(self._ip), self.netmask)
@property
def with_hostmask(self):
return "%s/%s" % (self._string_from_ip_int(self._ip), self.hostmask)
@property
def is_unspecified(self):
return self._ip == 0 and self.network.is_unspecified
@property
def is_loopback(self):
return self._ip == 1 and self.network.is_loopback
class IPv6Network(_BaseV6, _BaseNetwork):
"""This class represents and manipulates 128-bit IPv6 networks.
Attributes: [examples for IPv6('2001:db8::1000/124')]
.network_address: IPv6Address('2001:db8::1000')
.hostmask: IPv6Address('::f')
.broadcast_address: IPv6Address('2001:db8::100f')
.netmask: IPv6Address('ffff:ffff:ffff:ffff:ffff:ffff:ffff:fff0')
.prefixlen: 124
"""
# Class to use when creating address objects
_address_class = IPv6Address
def __init__(self, address, strict=True):
"""Instantiate a new IPv6 Network object.
Args:
address: A string or integer representing the IPv6 network or the
IP and prefix/netmask.
'2001:db8::/128'
'2001:db8:0000:0000:0000:0000:0000:0000/128'
'2001:db8::'
are all functionally the same in IPv6. That is to say,
failing to provide a subnetmask will create an object with
a mask of /128.
Additionally, an integer can be passed, so
IPv6Network('2001:db8::') ==
IPv6Network(42540766411282592856903984951653826560)
or, more generally
IPv6Network(int(IPv6Network('2001:db8::'))) ==
IPv6Network('2001:db8::')
            strict: A boolean. If true, ensure that we have been passed
              a true network address, e.g., 2001:db8::1000/124 and not an
              IP address on a network, e.g., 2001:db8::1/124.
Raises:
AddressValueError: If address isn't a valid IPv6 address.
NetmaskValueError: If the netmask isn't valid for
an IPv6 address.
ValueError: If strict was True and a network address was not
supplied.
"""
_BaseNetwork.__init__(self, address)
# Efficient constructor from integer or packed address
if isinstance(address, (bytes, _compat_int_types)):
self.network_address = IPv6Address(address)
self.netmask, self._prefixlen = self._make_netmask(
self._max_prefixlen
)
return
if isinstance(address, tuple):
if len(address) > 1:
arg = address[1]
else:
arg = self._max_prefixlen
self.netmask, self._prefixlen = self._make_netmask(arg)
self.network_address = IPv6Address(address[0])
packed = int(self.network_address)
if packed & int(self.netmask) != packed:
if strict:
raise ValueError("%s has host bits set" % self)
else:
self.network_address = IPv6Address(
packed & int(self.netmask)
)
return
# Assume input argument to be string or any object representation
# which converts into a formatted IP prefix string.
addr = _split_optional_netmask(address)
self.network_address = IPv6Address(self._ip_int_from_string(addr[0]))
if len(addr) == 2:
arg = addr[1]
else:
arg = self._max_prefixlen
self.netmask, self._prefixlen = self._make_netmask(arg)
if strict:
if (
IPv6Address(int(self.network_address) & int(self.netmask))
!= self.network_address
):
raise ValueError("%s has host bits set" % self)
self.network_address = IPv6Address(
int(self.network_address) & int(self.netmask)
)
if self._prefixlen == (self._max_prefixlen - 1):
self.hosts = self.__iter__
def hosts(self):
"""Generate Iterator over usable hosts in a network.
This is like __iter__ except it doesn't return the
Subnet-Router anycast address.
"""
network = int(self.network_address)
broadcast = int(self.broadcast_address)
for x in _compat_range(network + 1, broadcast + 1):
yield self._address_class(x)
@property
def is_site_local(self):
"""Test if the address is reserved for site-local.
Note that the site-local address space has been deprecated by RFC 3879.
Use is_private to test if this address is in the space of unique local
addresses as defined by RFC 4193.
Returns:
A boolean, True if the address is reserved per RFC 3513 2.5.6.
"""
return (
self.network_address.is_site_local
and self.broadcast_address.is_site_local
)
class _IPv6Constants(object):
_linklocal_network = IPv6Network("fe80::/10")
_multicast_network = IPv6Network("ff00::/8")
_private_networks = [
IPv6Network("::1/128"),
IPv6Network("::/128"),
IPv6Network("::ffff:0:0/96"),
IPv6Network("100::/64"),
IPv6Network("2001::/23"),
IPv6Network("2001:2::/48"),
IPv6Network("2001:db8::/32"),
IPv6Network("2001:10::/28"),
IPv6Network("fc00::/7"),
IPv6Network("fe80::/10"),
]
_reserved_networks = [
IPv6Network("::/8"),
IPv6Network("100::/8"),
IPv6Network("200::/7"),
IPv6Network("400::/6"),
IPv6Network("800::/5"),
IPv6Network("1000::/4"),
IPv6Network("4000::/3"),
IPv6Network("6000::/3"),
IPv6Network("8000::/3"),
IPv6Network("A000::/3"),
IPv6Network("C000::/3"),
IPv6Network("E000::/4"),
IPv6Network("F000::/5"),
IPv6Network("F800::/6"),
IPv6Network("FE00::/9"),
]
_sitelocal_network = IPv6Network("fec0::/10")
IPv6Address._constants = _IPv6Constants
|
aladdinwang/django-cms | refs/heads/master | cms/plugins/text/models.py | 5 | from cms.models import CMSPlugin
from cms.plugins.text.utils import (plugin_admin_html_to_tags,
plugin_tags_to_admin_html, plugin_tags_to_id_list, replace_plugin_tags)
from cms.utils.html import clean_html
from django.db import models
from django.utils.html import strip_tags
from django.utils.text import Truncator
from django.utils.translation import ugettext_lazy as _
_old_tree_cache = {}
class AbstractText(CMSPlugin):
"""Abstract Text Plugin Class"""
body = models.TextField(_("body"))
class Meta:
abstract = True
def _set_body_admin(self, text):
self.body = plugin_admin_html_to_tags(text)
def _get_body_admin(self):
return plugin_tags_to_admin_html(self.body)
body_for_admin = property(_get_body_admin, _set_body_admin, None,
"""
body attribute, but with transformations
applied to allow editing in the
admin. Read/write.
""")
search_fields = ('body',)
def __unicode__(self):
return Truncator(strip_tags(self.body)).chars(30)
def clean(self):
self.body = clean_html(self.body, full=False)
def clean_plugins(self):
ids = plugin_tags_to_id_list(self.body)
plugins = CMSPlugin.objects.filter(parent=self)
for plugin in plugins:
            if plugin.pk not in ids:
                plugin.delete()  # delete plugins that are not referenced in the text anymore
def post_copy(self, old_instance, ziplist):
"""
Fix references to plugins
"""
replace_ids = {}
for new, old in ziplist:
replace_ids[old.pk] = new.pk
self.body = replace_plugin_tags(old_instance.get_plugin_instance()[0].body, replace_ids)
self.save()
class Text(AbstractText):
"""
Actual Text Class
"""
|
dzan/xenOnArm | refs/heads/master | tools/python/xen/xm/dumppolicy.py | 49 | #============================================================================
# This library is free software; you can redistribute it and/or
# modify it under the terms of version 2.1 of the GNU Lesser General Public
# License as published by the Free Software Foundation.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
#============================================================================
# Copyright (C) 2006 International Business Machines Corp.
# Author: Reiner Sailer <[email protected]>
#============================================================================
"""Display currently enforced policy (low-level hypervisor representation).
"""
import os
import sys
import base64
import tempfile
import commands
from xen.util.xsm.xsm import XSMError, err, dump_policy, dump_policy_file
from xen.xm.opts import OptionError
from xen.xm import main as xm_main
from xen.xm.main import server
from xen.util import xsconstants
DOM0_UUID = "00000000-0000-0000-0000-000000000000"
def help():
return """
Retrieve and print currently enforced hypervisor policy information
(low-level)."""
def main(argv):
if len(argv) != 1:
raise OptionError("No arguments expected.")
if xm_main.serverType == xm_main.SERVER_XEN_API:
try:
bin_pol = server.xenapi.ACMPolicy.get_enforced_binary()
if bin_pol:
dom0_ssid = server.xenapi.ACMPolicy.get_VM_ssidref(DOM0_UUID)
bin = base64.b64decode(bin_pol)
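                # Write the decoded binary policy to a temporary file so the
                # file-based dump helper can parse it, then remove the file.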
try:
fd, filename = tempfile.mkstemp(suffix=".bin")
os.write(fd, bin)
os.close(fd)
dump_policy_file(filename, dom0_ssid)
finally:
os.unlink(filename)
else:
err("No policy is installed.")
except Exception, e:
err("An error occurred getting the running policy: %s" % str(e))
else:
dump_policy()
if __name__ == '__main__':
try:
main(sys.argv)
except Exception, e:
sys.stderr.write('Error: %s\n' % str(e))
sys.exit(-1)
|
Koonkie/MetaPathways_Python_Koonkie.3.0 | refs/heads/master | libs/python_modules/utils/utils.py | 2 | #!/usr/bin/env python
__author__ = "Kishori M Konwar"
__copyright__ = "Copyright 2013, MetaPathways"
__credits__ = ["r"]
__version__ = "1.0"
__maintainer__ = "Kishori M Konwar"
__status__ = "Release"
"""Contains general utility code for the metapaths project"""
try:
from shutil import rmtree
from StringIO import StringIO
from os import getenv, makedirs, path, remove
from operator import itemgetter
from os.path import abspath, exists, dirname, join, isdir
from collections import defaultdict
from optparse import make_option
from glob import glob
    import sys, os, re, traceback, shutil
from libs.python_modules.parsers.fastareader import FastaReader
from libs.python_modules.utils.sysutil import pathDelim
except:
print """ Could not load some user defined module functions"""
print """ Make sure your typed \'source MetaPathwaysrc\'"""
print """ """
print traceback.print_exc(10)
sys.exit(3)
def fprintf(file, fmt, *args):
file.write(fmt % args)
def printf(fmt, *args):
sys.stdout.write(fmt % args)
sys.stdout.flush()
def eprintf(fmt, *args):
sys.stderr.write(fmt % args)
sys.stderr.flush()
PATHDELIM = pathDelim()
def which(program):
import os
def is_exe(fpath):
return os.path.isfile(fpath) and os.access(fpath, os.X_OK)
fpath, fname = os.path.split(program)
if fpath:
if is_exe(program):
return program
else:
for path in os.environ["PATH"].split(os.pathsep):
path = path.strip('"')
exe_file = os.path.join(path, program)
if is_exe(exe_file):
return exe_file
return None
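# Illustrative use: locate an executable on the PATH, e.g.
#   blast_path = which("blastp")   # absolute path, or None if not found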
def isFastaFile(filename):
''' this function checks if the given file is a fasta file
        by examining the first 500 lines of the file
'''
fastaNamePATT = re.compile(r'^>')
fastaAlphabetPATT = re.compile(r'[a-zA-Z]+')
isFasta = True
seenNamePatt = False
try:
c = 0
with open(filename) as fp:
for line in fp:
'''trim the line'''
line_trimmed = line.strip()
if line_trimmed:
if fastaNamePATT.search(line_trimmed):
''' is a name line '''
seenNamePatt = True
else:
''' not a seq name '''
if fastaAlphabetPATT.search(line_trimmed):
''' it is of the alphabet'''
if not seenNamePatt:
''' am i seeing sequence before the name'''
isFasta = False
else:
isFasta = False
c+=1
if c > 500:
break
fp.close()
except:
eprintf("ERROR:\tCannot open filee " + filename)
print traceback.print_exc(10)
return False
if seenNamePatt==False:
isFasta = False
return isFasta
def isGenbank(filename):
    ''' this function decides if a file is in genbank format or not
        by reading the first 500 lines and looking for the keywords that
        usually appear in the genbank file format
    '''
locusPATT = re.compile(r'^\s*LOCUS')
versionPATT = re.compile(r'^\s*VERSION')
featuresPATT = re.compile(r'^\s*FEATURES')
originPATT = re.compile(r'\s*ORIGIN')
accessionPATT = re.compile(r'^\s*ACCESSION')
sourcePATT = re.compile(r'^\s*SOURCE')
patterns = [locusPATT, versionPATT, featuresPATT, originPATT, accessionPATT, sourcePATT ]
countPatterns = [ 0 for i in range(0, len(patterns)) ]
try:
c = 0
with open(filename) as fp:
for line in fp:
'''trim the line'''
line_trimmed = line.strip()
if line_trimmed:
for i in range(0, len(patterns) ):
if patterns[i].search(line_trimmed.upper()):
countPatterns[i] = 1
c+=1
if c > 500:
break
except:
eprintf("ERROR:\tCannot open filex " + filename)
print traceback.print_exc(10)
return False
numPattsSeen = 0
for val in countPatterns:
numPattsSeen += val
if numPattsSeen >= 3:
        '''if we have seen at least 3 of the above patterns
           then we decide that it is a genbank file
           '''
return True
return False
def isNucleotide( filename):
''' checks if a fasta file is a nucleotide file format'''
fastaNamePATT = re.compile(r'^>')
isFasta = True
nucCount = 0.0
nonNucCount = 0.0
try:
c = 0
with open(filename) as fp:
for line in fp:
'''trim the line'''
line_trimmed = line.strip()
if line_trimmed:
if not fastaNamePATT.search(line_trimmed):
for a in line_trimmed.upper():
if a in ['A', 'T', 'C', 'G', 'N' ]:
nucCount+= 1
else:
nonNucCount+= 1
c+=1
if c > 500:
break
except:
eprintf("ERROR:\tCannot open file " + filename)
return False
if nucCount ==0:
return False
if float(nucCount)/float(nonNucCount + nucCount) > 0.9 :
return True
return False
def check_file_types(filenames):
filetypes={}
for filename in filenames:
        if not path.exists(filename):
            filetypes[filename] = ['UNKNOWN', 'UNKNOWN', False]
            continue
if isFastaFile(filename):
if isNucleotide(filename):
filetypes[filename] = ['FASTA', 'NUCL', False]
else: # assume amino
filetypes[filename] = ['FASTA', 'AMINO', False]
elif isGenbank(filename):
filetypes[filename] = ['GENBANK', 'NOT-USED', False]
else:
filetypes[filename] = ['UNKNOWN', 'UNKNOWN', False]
return filetypes
def load_job_status_file(filename, A) :
if path.exists(filename):
listfile = open(filename, 'r')
lines = listfile.readlines()
listfile.close()
for line in lines:
fields = [ x.strip() for x in line.strip().split('\t') ]
if len(fields) == 6:
if not fields[0] in A:
A[fields[0]] = {}
if not fields[1] in A[fields[0]]:
A[fields[0]][fields[1]] = {}
if not fields[2] in A[fields[0]][fields[1]]:
A[fields[0]][fields[1]][fields[2]] = {}
if not fields[3] in A[fields[0]][fields[1]][fields[2]]:
A[fields[0]][fields[1]][fields[2]][fields[3]] = {}
A[fields[0]][fields[1]][fields[2]][fields[3]][fields[4]]=int(fields[5])
def remove_files(dir, filenames):
for file in filenames:
try:
if path.exists(dir + PATHDELIM + file):
remove(dir + PATHDELIM + file)
except IOError:
print "Cannot remove file " + dir + PATHDELIM + file + " !"
sys.exit(0)
# (Re)create the sequence blocks along with the necessary log files
def create_splits(outputdir, listfilename, input_filename, maxMBytes, maxSize, splitPrefix = 'split', splitSuffix=''):
maxBytes = 1024*1024*maxMBytes
if splitSuffix:
suffix = '.' + splitSuffix
else:
suffix = ''
try:
if path.exists( listfilename):
listfile = open( listfilename, 'r')
listfilenames = [ x.strip() for x in listfile.readlines() ]
remove_files(outputdir, listfilenames)
listfile.close()
except IOError:
print "Cannot read file " + listfilename + " !"
sys.exit(0)
try:
listfile = open(listfilename, 'w')
except IOError:
print "Cannot read file " + listfilename + " !"
sys.exit(0)
fragments= []
seq_beg_pattern = re.compile(">")
splitno = 0
currblocksize = 0
currblockbyteSize = 0
fastareader = FastaReader(input_filename)
# Read sequences from sorted sequence file and write them to block files
for name in fastareader:
fragments.append(fastareader.seqname)
fragments.append(fastareader.sequence)
if currblocksize >= maxSize -1 or currblockbyteSize >= maxBytes:
splitfile = open(outputdir + PATHDELIM + splitPrefix + str(splitno) + suffix, 'w')
fprintf(splitfile, "%s",'\n'.join(fragments))
fragments=[]
splitfile.close()
# Add this block name to the blocklistfile
fprintf(listfile, "%s\n", splitPrefix + str(splitno) + suffix)
splitno += 1
currblocksize = 0
currblockbyteSize = 0
else:
currblocksize += 1
currblockbyteSize += len(fastareader.sequence)
if fragments:
splitfile = open(outputdir + PATHDELIM + splitPrefix + str(splitno) + suffix, 'w')
fprintf(splitfile, "%s",'\n'.join(fragments))
splitfile.close()
fragments = []
fprintf(listfile, "%s\n", splitPrefix + str(splitno) + suffix)
splitno += 1
#Add this block name to the blocklistfile
currblocksize = 0
currblockbyteSize = 0
listfile.close()
return True
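# Illustrative use (paths are assumed): split a sample into blocks of at most
# 32 MB or 500 sequences each, recording the block names in a list file:
#   create_splits("outdir", "outdir/split.list", "sample.fasta", 32, 500)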
def countNoOfSequencesInFile(file):
fastareader = FastaReader(file)
count = 0
for record in fastareader:
count+=1
return count
def number_of_lines_in_file(filename):
try:
file = open(filename, 'r')
lines = file.readlines()
file.close()
size = len(lines)
except:
return 0
return size
def read_one_column(listfilename, dictionary, col=0) :
try:
listfile = open(listfilename, 'r')
lines = listfile.readlines()
for line in lines:
fields = [ x.strip() for x in line.strip().split('\t') ]
if len(fields) > col:
dictionary[fields[col]] = True
listfile.close()
except:
traceback.print_exc(1)
def enforce_number_of_fields_per_row(listfilename, col):
needsSanitization = False
try:
listfile = open(listfilename, 'r+')
lines = listfile.readlines()
for line in lines:
fields = [ x.strip() for x in line.strip().split('\t') if len(x.strip()) ]
if len(fields) != col:
needsSanitization = True
if needsSanitization:
listfile.seek(0)
listfile.truncate()
for line in lines:
fields = [ x.strip() for x in line.strip().split('\t') if len(x.strip()) ]
if len(fields) == col:
fprintf(listfile, line)
listfile.close()
except:
traceback.print_exc(1)
return needsSanitization
# if the folder is found, remove all the files
# in the folder but DO NOT delete the folder itself
def clearFolderIfExists(folderName):
if path.exists(folderName) :
        files = glob(folderName + PATHDELIM + '*')
for f in files:
remove(f)
# if the folder is found, remove all the files
# in the folder and then delete the folder too
def removeFolderIfFound(folderName):
if path.exists(folderName) :
        files = glob(folderName + PATHDELIM + '*')
        for f in files:
            remove(f)
    if path.exists(folderName):
        shutil.rmtree(folderName)
# if folder does not exist then create one
def createFolderIfNotFound( folderName ):
if not path.exists(folderName) :
makedirs(folderName)
return False
else:
return True
# does the folder exist?
def doesFolderExist( folderName ):
if not path.exists(folderName) :
return False
else:
return True
# does the file exist?
def doesFileExist( fileName ):
if not path.exists(fileName) :
return False
else:
return True
def does_plain_or_gz_FileExist( fileName ):
if path.exists(fileName) or path.exists(fileName + '.gz') :
return True
return False
#"""This module defines classes for working with GenBank records."""
import re
import sys
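# Note: this local FastaReader shadows the FastaReader imported from
# libs.python_modules.parsers.fastareader at the top of this module.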
class FastaReader():
"""Parses a GenBank record from a string or file."""
stop = False
START_PATTERN = re.compile(r'^>')
name = None
future_name =None
sequence=""
def __init__(self, fasta_filename):
try:
self.file = open(fasta_filename, 'r')
except IOError:
print "Cannot open fasta file " + fasta_filename
def __iter__(self):
return self
def next(self):
if self.stop:
raise StopIteration
try:
if not self.name:
self.name = self.file.readline().strip()
line = self.file.readline().strip()
except:
line = None
if not line:
self.stop = True
raise StopIteration
fragments = []
while line and not self.START_PATTERN.search(line):
fragments.append(line.strip())
line = self.file.readline()
# print line
if self.future_name:
self.name = self.future_name
if line:
self.future_name = line.strip()
self.sequence =''.join(fragments)
self.seqname = self.name
return self.name
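# Illustrative use (path is assumed): iterate over the records of a FASTA
# file, reading the current sequence from the reader after each name:
#   reader = FastaReader("sample.fasta")
#   for name in reader:
#       print name, len(reader.sequence)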
def read_list(listfilename, dictionary, col=0) :
""" Read the contents of a file into a dictionary (col begin with 0) """
try:
listfile = open(listfilename, 'r')
lines = listfile.readlines()
for line in lines:
fields = [ x.strip() for x in line.strip().split('\t') ]
if len(fields) > col:
dictionary[fields[0]] = fields[col]
listfile.close()
except:
        traceback.print_exc()
def hasInput(expected_input):
""" checks if the expected input, a file or folder is present"""
if path.exists(expected_input):
return True
else:
return False
def sQuote(string):
""" Puts double quotes around a string"""
return "\'" + string + "\'"
def shouldRunStep1(run_type, dir , expected_outputs):
""" decide if a command should be run if it is overlay,
when the expected outputs are present """
if run_type =='overlay' and doFilesExist(expected_outputs, dir = dir):
return False
else:
return True
def shouldRunStep(run_type, expected_output):
""" decide if a command should be run if it is overlay,
when results are alread computed decide not to run """
if run_type =='overlay' and path.exists(expected_output):
return False
else:
return True
def hasResults(expected_output):
""" has the results to use """
if path.exists(expected_output):
return True
else:
return False
def hasResults1(dir , expected_outputs):
""" has the results to use """
if doFilesExist(expected_outputs, dir = dir):
return True
else:
return False
def shouldRunStepOnDirectory(run_type, dirName):
"""if the directory is empty then there is not precomputed results
and so you should decide to run the command
"""
dirName = dirName + PATHDELIM + '*'
files = glob(dirName)
if len(files)==0:
return True
else:
return False
def removeDirOnRedo(command_Status, origFolderName):
""" if the command is "redo" then delete all the files
in the folder and then delete the folder too """
if command_Status=='redo' and path.exists(origFolderName) :
folderName = origFolderName + PATHDELIM + '*'
files = glob(folderName)
for f in files:
remove(f)
if path.exists(origFolderName):
shutil.rmtree(origFolderName)
def removeFileOnRedo(command_Status, fileName):
""" if the command is "redo" then delete the file """
if command_Status=='redo' and path.exists(fileName) :
remove(fileName)
return True
else:
return False
def cleanDirOnRedo(command_Status, folderName):
""" remove all the files in the directory on Redo """
if command_Status=='redo':
cleanDirectory(folderName)
def cleanDirectory( folderName):
""" remove all the files in the directory """
folderName = folderName + PATHDELIM + '*'
files = glob(folderName)
for f in files:
remove(f)
def checkOrCreateFolder( folderName ):
""" if folder does not exist then create one """
if not path.exists(folderName) :
makedirs(folderName)
return False
else:
return True
def doFilesExist( fileNames, dir="" ):
""" does the file Exist? """
for fileName in fileNames:
file = fileName
if dir!='':
file = dir + PATHDELIM + fileName
if not path.exists(file):
return False
return True
def Singleton(class_):
instances = {}
def getinstance(*args, **kwargs):
if class_ not in instances:
instances[class_] = class_(*args, **kwargs)
return instances[class_]
return getinstance
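# Illustrative use: decorate a class so every construction returns the same
# shared instance:
#   @Singleton
#   class Config(object):
#       pass
#   assert Config() is Config()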
def extractSampleName(sampleName, type = None):
sample_name = sampleName
if type == 'fasta' or type==None:
        # note: re.I must be passed as the flags keyword; as a fourth
        # positional argument it would be interpreted as the count
        sample_name = re.sub(r'^.*/', '', sample_name, flags=re.I)
        sample_name = re.sub(r'^.*\\', '', sample_name, flags=re.I)
        sample_name = re.sub(r'\.fasta$', '', sample_name, flags=re.I)
        sample_name = re.sub(r'\.fna$', '', sample_name, flags=re.I)
        sample_name = re.sub(r'\.faa$', '', sample_name, flags=re.I)
        sample_name = re.sub(r'\.fas$', '', sample_name, flags=re.I)
        sample_name = re.sub(r'\.fa$', '', sample_name, flags=re.I)
elif type in ['gbk-unannotated', 'gbk-annotated'] or type==None:
        sample_name = re.sub(r'^.*/', '', sample_name, flags=re.I)
        sample_name = re.sub(r'^.*\\', '', sample_name, flags=re.I)
        sample_name = re.sub(r'\.gbk$', '', sample_name, flags=re.I)
else:
eprintf("ERROR: Incorrect type %s to function extractSampleName\n", sQuote(type))
return sample_name
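# For example, extractSampleName("/data/run1/sample1.fasta", "fasta")
# returns "sample1".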
def createDummyFile(absfilename):
try:
f = open(absfilename, 'w')
f.close()
except:
return False
return True
#
|
xdevelsistemas/taiga-back-community | refs/heads/stable | tests/integration/test_webhooks_wikipages.py | 2 | # -*- coding: utf-8 -*-
# Copyright (C) 2014-2016 Andrey Antukh <[email protected]>
# Copyright (C) 2014-2016 Jesús Espino <[email protected]>
# Copyright (C) 2014-2016 David Barragán <[email protected]>
# Copyright (C) 2014-2016 Alejandro Alonso <[email protected]>
# Copyright (C) 2014-2016 Anler Hernández <[email protected]>
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import pytest
from unittest.mock import patch
from unittest.mock import Mock
from .. import factories as f
from taiga.projects.history import services
pytestmark = pytest.mark.django_db(transaction=True)
from taiga.base.utils import json
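# Each test below patches taiga.webhooks.tasks._send_request and inspects the
# (webhook_id, url, key, data) positional arguments of its last call.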
def test_webhooks_when_create_wiki_page(settings):
settings.WEBHOOKS_ENABLED = True
project = f.ProjectFactory()
f.WebhookFactory.create(project=project)
f.WebhookFactory.create(project=project)
obj = f.WikiPageFactory.create(project=project)
with patch('taiga.webhooks.tasks._send_request') as send_request_mock:
services.take_snapshot(obj, user=obj.owner)
assert send_request_mock.call_count == 2
(webhook_id, url, key, data) = send_request_mock.call_args[0]
assert data["action"] == "create"
assert data["type"] == "wikipage"
assert data["by"]["id"] == obj.owner.id
assert "date" in data
assert data["data"]["id"] == obj.id
def test_webhooks_when_update_wiki_page(settings):
settings.WEBHOOKS_ENABLED = True
project = f.ProjectFactory()
f.WebhookFactory.create(project=project)
f.WebhookFactory.create(project=project)
obj = f.WikiPageFactory.create(project=project)
with patch('taiga.webhooks.tasks._send_request') as send_request_mock:
services.take_snapshot(obj, user=obj.owner)
assert send_request_mock.call_count == 2
obj.content = "test webhook update"
obj.save()
with patch('taiga.webhooks.tasks._send_request') as send_request_mock:
services.take_snapshot(obj, user=obj.owner, comment="test_comment")
assert send_request_mock.call_count == 2
(webhook_id, url, key, data) = send_request_mock.call_args[0]
assert data["action"] == "change"
assert data["type"] == "wikipage"
assert data["by"]["id"] == obj.owner.id
assert "date" in data
assert data["data"]["id"] == obj.id
assert data["data"]["content"] == obj.content
assert data["change"]["comment"] == "test_comment"
assert data["change"]["diff"]["content_html"]["from"] != data["change"]["diff"]["content_html"]["to"]
assert obj.content in data["change"]["diff"]["content_html"]["to"]
def test_webhooks_when_delete_wiki_page(settings):
settings.WEBHOOKS_ENABLED = True
project = f.ProjectFactory()
f.WebhookFactory.create(project=project)
f.WebhookFactory.create(project=project)
obj = f.WikiPageFactory.create(project=project)
with patch('taiga.webhooks.tasks._send_request') as send_request_mock:
services.take_snapshot(obj, user=obj.owner, delete=True)
assert send_request_mock.call_count == 2
(webhook_id, url, key, data) = send_request_mock.call_args[0]
assert data["action"] == "delete"
assert data["type"] == "wikipage"
assert data["by"]["id"] == obj.owner.id
assert "date" in data
assert "data" in data
def test_webhooks_when_update_wiki_page_attachments(settings):
settings.WEBHOOKS_ENABLED = True
project = f.ProjectFactory()
f.WebhookFactory.create(project=project)
f.WebhookFactory.create(project=project)
obj = f.WikiPageFactory.create(project=project)
with patch('taiga.webhooks.tasks._send_request') as send_request_mock:
services.take_snapshot(obj, user=obj.owner)
assert send_request_mock.call_count == 2
# Create attachments
attachment1 = f.WikiAttachmentFactory(project=obj.project, content_object=obj, owner=obj.owner)
attachment2 = f.WikiAttachmentFactory(project=obj.project, content_object=obj, owner=obj.owner)
with patch('taiga.webhooks.tasks._send_request') as send_request_mock:
services.take_snapshot(obj, user=obj.owner, comment="test_comment")
assert send_request_mock.call_count == 2
(webhook_id, url, key, data) = send_request_mock.call_args[0]
assert data["action"] == "change"
assert data["type"] == "wikipage"
assert data["by"]["id"] == obj.owner.id
assert "date" in data
assert data["data"]["id"] == obj.id
assert data["change"]["comment"] == "test_comment"
assert len(data["change"]["diff"]["attachments"]["new"]) == 2
assert len(data["change"]["diff"]["attachments"]["changed"]) == 0
assert len(data["change"]["diff"]["attachments"]["deleted"]) == 0
# Update attachment
attachment1.description = "new attachment description"
attachment1.save()
with patch('taiga.webhooks.tasks._send_request') as send_request_mock:
services.take_snapshot(obj, user=obj.owner, comment="test_comment")
assert send_request_mock.call_count == 2
(webhook_id, url, key, data) = send_request_mock.call_args[0]
assert data["action"] == "change"
assert data["type"] == "wikipage"
assert data["by"]["id"] == obj.owner.id
assert "date" in data
assert data["data"]["id"] == obj.id
assert data["change"]["comment"] == "test_comment"
assert len(data["change"]["diff"]["attachments"]["new"]) == 0
assert len(data["change"]["diff"]["attachments"]["changed"]) == 1
assert len(data["change"]["diff"]["attachments"]["deleted"]) == 0
# Delete attachment
attachment2.delete()
with patch('taiga.webhooks.tasks._send_request') as send_request_mock:
services.take_snapshot(obj, user=obj.owner, comment="test_comment")
assert send_request_mock.call_count == 2
(webhook_id, url, key, data) = send_request_mock.call_args[0]
assert data["action"] == "change"
assert data["type"] == "wikipage"
assert data["by"]["id"] == obj.owner.id
assert "date" in data
assert data["data"]["id"] == obj.id
assert data["change"]["comment"] == "test_comment"
assert len(data["change"]["diff"]["attachments"]["new"]) == 0
assert len(data["change"]["diff"]["attachments"]["changed"]) == 0
assert len(data["change"]["diff"]["attachments"]["deleted"]) == 1
|
porduna/labmanager | refs/heads/master | labmanager/views/embed.py | 4 | import urlparse
import traceback
import datetime
import certifi
import requests
from bs4 import BeautifulSoup
from flask import Blueprint, render_template, make_response, redirect, url_for, request, session, jsonify, current_app, Response
from labmanager.views.authn import requires_golab_login, current_golab_user
from labmanager.application import SSL_DOMAIN_WHITELIST
from labmanager.db import db
from labmanager.babel import gettext, lazy_gettext
from labmanager.models import EmbedApplication, EmbedApplicationTranslation, GoLabOAuthUser, UseLog
from labmanager.models import HttpsUnsupportedUrl
from labmanager.rlms import find_smartgateway_link, find_smartgateway_opensocial_link
from labmanager.translator.languages import obtain_languages
from labmanager.utils import remote_addr, anonymize_ip_address
from flask.ext.wtf import Form
from wtforms import TextField, BooleanField, HiddenField, SelectMultipleField
from wtforms.validators import required
from wtforms.fields.html5 import URLField
from wtforms.widgets import HiddenInput, TextInput, CheckboxInput, html_params, HTMLString
from wtforms.widgets.html5 import URLInput
embed_blueprint = Blueprint('embed', __name__)
@embed_blueprint.context_processor
def inject_variables():
return dict(current_golab_user=current_golab_user())
class AngularJSInput(object):
def __init__(self, **kwargs):
self._internal_kwargs = kwargs
super(AngularJSInput, self).__init__()
# Support render_field(form.field, ng_value="foo")
# http://stackoverflow.com/questions/20440056/custom-attributes-for-flask-wtforms
def __call__(self, field, **kwargs):
for key in list(kwargs):
if key.startswith('ng_'):
kwargs['ng-' + key[3:]] = kwargs.pop(key)
for key in list(self._internal_kwargs):
if key.startswith('ng_'):
kwargs['ng-' + key[3:]] = self._internal_kwargs[key]
return super(AngularJSInput, self).__call__(field, **kwargs)
class AngularJSTextInput(AngularJSInput, TextInput):
pass
class AngularJSURLInput(AngularJSInput, URLInput):
pass
class AngularJSHiddenInput(AngularJSInput, HiddenInput):
pass
class AngularJSCheckboxInput(AngularJSInput, CheckboxInput):
pass
class DivWidget(object):
def __init__(self, padding = '10px'):
self.padding = padding
def __call__(self, field, **kwargs):
kwargs.setdefault('id', field.id)
html = ['<div %s>' % (html_params(**kwargs))]
for subfield in field:
html.append('<label class="checkbox-inline">%s %s</label>' % (subfield(), subfield.label.text))
html.append('</div>')
return HTMLString(''.join(html))
class MultiCheckboxField(SelectMultipleField):
widget = DivWidget()
option_widget = CheckboxInput()
CERTIFICATES_CHECKED = False
def check_certificates():
"""Some comodo certificates are wrong."""
global CERTIFICATES_CHECKED
if CERTIFICATES_CHECKED:
return
ca_file = certifi.where()
with open('utils/comodo_domain_server_ca.crt', 'rb') as infile:
comodo_ca = infile.read()
with open(ca_file, 'rb') as infile:
ca_file_contents = infile.read()
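    # If certifi's bundle lacks the Comodo intermediate, probe a host known to
    # require it; only if that probe fails, append the certificate in place.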
if comodo_ca not in ca_file_contents:
try:
requests.get("https://cosci.tw/run/", timeout=(10, 10)).close()
except:
with open(ca_file, 'ab') as outfile:
outfile.write(comodo_ca)
CERTIFICATES_CHECKED = True
#
# App Composer checker
#
@embed_blueprint.route('/https-limitations/', methods=['GET', 'POST'])
def allowed_hosts():
if request.method == 'POST':
data = request.get_json(force=True, silent=True)
if request.headers.get('gw4labs-auth') != current_app.config.get('ALLOWED_HOSTS_CREDENTIAL', object()):
return "Invalid gw4labs-auth credentials", 403
# Unsupported https URLs
unsupported_urls = data['hosts']
processed_hosts = []
for huu in db.session.query(HttpsUnsupportedUrl).all():
if huu.url in processed_hosts:
huu.update()
else:
db.session.delete(huu)
processed_hosts.append(huu.url)
for missing_host in set(unsupported_urls).difference(set(processed_hosts)):
huu = HttpsUnsupportedUrl(missing_host)
db.session.add(huu)
db.session.commit()
all_hosts = [ {
'url': huu.url,
'when': huu.last_update.strftime("%Y-%m-%d %H:%M:%S")
} for huu in db.session.query(HttpsUnsupportedUrl).all() ]
return jsonify(hosts=all_hosts)
#
# Public URLs
#
@embed_blueprint.route('/apps/')
def apps():
applications = db.session.query(EmbedApplication).order_by(EmbedApplication.last_update).all()
return render_template("embed/apps.html", user = current_golab_user(), applications = applications, title = gettext("List of applications"))
@embed_blueprint.route('/apps/<identifier>/')
def app(identifier):
application = db.session.query(EmbedApplication).filter_by(identifier = identifier).first()
if application is None:
return render_template("embed/error.html", message = gettext("Application '{identifier}' not found").format(identifier=identifier), user = current_golab_user()), 404
return render_template("embed/app.html", user = current_golab_user(), app = application, title = gettext("Application {name}").format(name=application.name))
@embed_blueprint.route('/apps/<identifier>/app-legacy.html')
def app_legacy_html(identifier):
application = db.session.query(EmbedApplication).filter_by(identifier = identifier).first()
if application is None:
return jsonify(error=True, message="App not found")
apps_per_language = {
'en': application.full_url,
}
for translation in application.translations:
apps_per_language[translation.language] = translation.full_url
return render_template("embed/app-embedded.html", apps=apps_per_language)
@embed_blueprint.route('/apps/<identifier>/params.txt')
def params_txt(identifier):
# This is only for https://create.nyu.edu/dream/apps/mmSimDiffusion/params.txt
return 'pressuregauge=1&useIcons=1&runMode=0&saveActions=1¶mLoaded=1'
@embed_blueprint.route('/popup.html')
def popup():
return render_template("embed/popup.html", identifier='', name='N/A', title='N/A')
@embed_blueprint.route('/popup/messages.xml')
def popup_translations():
return Response("""<?xml version='1.0' encoding='UTF-8'?><messagebundle>
<msg name="embed.title">Load in another page</msg>
<msg name="embed.body">For security reasons this online lab cannot be opened in this page directly. Click on the button below to open the lab in a separate page.</msg>
<msg name="embed.link">Open in a new page</msg>
</messagebundle>""", mimetype='application/xml')
@embed_blueprint.route('/apps/<identifier>/app.html')
def app_html(identifier):
application = db.session.query(EmbedApplication).filter_by(identifier = identifier).first()
if application is None:
return render_template("embed/error.html", user = current_golab_user(), message = gettext("Application '{identifier}' not found").format(identifier=identifier)), 404
apps_per_language = {}
languages = ['en']
for translation in application.translations:
apps_per_language[translation.language] = {
'url': translation.url,
'full_url': translation.full_url,
}
languages.append(translation.language)
author = application.owner.display_name
domain = urlparse.urlparse(application.url).netloc
unsupported_url = db.session.query(HttpsUnsupportedUrl).filter_by(url=domain).first()
# TODO: is this really useful? (unsupported_url)
supports_https = application.url.startswith('https://') or application.uses_proxy
requires_https = False
if (request.args.get('requires_https') or '').lower() in ['true', '1']:
requires_https = True
if request.environ.get('old_wsgi.url_scheme') == 'https':
requires_https = True
print(requires_https, supports_https)
if requires_https and not supports_https:
return render_template("embed/popup.html", identifier=identifier, app=application, apps_per_language=apps_per_language, name=application.name, title=application.name)
return render_template("embed/app-embed.html", author = author, user = current_golab_user(),
identifier=identifier, app = application, languages=languages,
apps_per_language = apps_per_language, supports_https=supports_https,
requires_https = requires_https,
title = gettext("Application {name}").format(name=application.name))
@embed_blueprint.route('/apps/<identifier>/app.xml')
def app_xml(identifier):
application = db.session.query(EmbedApplication).filter_by(identifier = identifier).first()
if application is None:
return render_template("embed/error.xml", user = current_golab_user(), message = gettext("Application '{identifier}' not found").format(identifier=identifier)), 404
apps_per_language = {}
languages = ['en']
for translation in application.translations:
apps_per_language[translation.language] = {
'url': translation.url,
'full_url': translation.full_url,
}
languages.append(translation.language)
author = application.owner.display_name
response = make_response(render_template("embed/app.xml", author = author, user = current_golab_user(), identifier=identifier, app = application, languages=languages, apps_per_language = apps_per_language, title = gettext("Application {name}").format(name=application.name)))
response.content_type = 'application/xml'
return response
#
# Management URLs
#
@embed_blueprint.route('/')
@requires_golab_login
def index():
applications = db.session.query(EmbedApplication).filter_by(owner = current_golab_user()).order_by(EmbedApplication.last_update).all()
return render_template("embed/index.html", applications = applications, user = current_golab_user())
class SimplifiedApplicationForm(Form):
name = TextField(lazy_gettext("Name:"), validators=[required()], widget = AngularJSTextInput(ng_model='embed.name', ng_enter="submitForm()"), description=lazy_gettext("Name of the resource"))
age_ranges_range = HiddenField(lazy_gettext("Age ranges:"), validators=[], description=lazy_gettext("Select the age ranges this tool is useful for"))
# The following are NOT REQUIRED
description = TextField(lazy_gettext("Description:"), validators=[], widget = AngularJSTextInput(ng_model='embed.description', ng_enter="submitForm()"), description=lazy_gettext("Describe the resource in a few words"))
domains_text = TextField(lazy_gettext("Domains:"), validators=[], widget = AngularJSTextInput(ng_enter="submitForm()"), description=lazy_gettext("Say in which domains apply to the resource (separated by commas): e.g., physics, electronics..."))
url = URLField(lazy_gettext("Web:"), widget = AngularJSURLInput(ng_model='embed.url', ng_enter="submitForm()"), description=lazy_gettext("Web address of the resource"))
height = HiddenField(lazy_gettext("Height:"), widget = AngularJSHiddenInput(ng_model='embed.height'))
scale = HiddenField(lazy_gettext("Scale:"), widget = AngularJSHiddenInput(ng_model='embed.scale'))
class ApplicationForm(SimplifiedApplicationForm):
url = URLField(lazy_gettext("Web:"), validators=[required()], widget = AngularJSURLInput(ng_model='embed.url', ng_enter="submitForm()"), description=lazy_gettext("Web address of the resource"))
height = HiddenField(lazy_gettext("Height:"), validators=[required()], widget = AngularJSHiddenInput(ng_model='embed.height'))
scale = HiddenField(lazy_gettext("Scale:"), validators=[required()], widget = AngularJSHiddenInput(ng_model='embed.scale'))
uses_proxy = BooleanField(lazy_gettext("Try https proxy?"))
def obtain_formatted_languages(existing_language_codes):
languages = [ (lang.split('_')[0], name) for lang, name in obtain_languages().items() if lang != 'en_ALL' and name != 'DEFAULT']
return [ { 'code' : language, 'name' : name } for language, name in languages if language not in existing_language_codes]
def list_of_languages():
return { key.split('_')[0] : value for key, value in obtain_languages().items() }
def _get_scale_value(form):
if form.scale.data:
try:
scale = int(100 * float(form.scale.data))
except ValueError:
pass
else:
form.scale.data = unicode(scale)
return scale
return None
def get_url_metadata(url, timeout = 3):
name = ''
description = ''
code = None
x_frame_options = ''
error_retrieving = False
content_type = ''
request_kwargs = {}
if url.startswith('https://'):
netloc = urlparse.urlparse(url).netloc
if netloc in SSL_DOMAIN_WHITELIST:
request_kwargs['verify'] = False
try:
req = requests.get(url, timeout=(timeout, timeout), stream=True, **request_kwargs)
except:
traceback.print_exc()
error_retrieving = True
else:
try:
code = req.status_code
x_frame_options = req.headers.get('X-Frame-Options', '').lower()
content_type = req.headers.get('content-type', '').lower()
if req.status_code == 200 and 'html' in req.headers.get('content-type', '').lower():
# First megabyte maximum
content = req.iter_content(1024 * 1024).next()
soup = BeautifulSoup(content, 'lxml')
name = (soup.find("title").text or '').strip()
meta_description = soup.find("meta", attrs={'name': 'description'})
if meta_description is not None:
meta_description_text = meta_description.attrs.get('content')
if meta_description_text:
description = (meta_description_text or '').strip()
req.close()
except:
traceback.print_exc()
return { 'name' : name, 'description': description, 'code': code, 'x_frame_options' : x_frame_options, 'error_retrieving' : error_retrieving, 'content_type' : content_type }
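# Illustrative use (URL is assumed): fetch page metadata and inspect it:
#   meta = get_url_metadata("http://example.com", timeout=5)
#   if not meta['error_retrieving'] and meta['code'] == 200:
#       print meta['name'], meta['x_frame_options']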
@embed_blueprint.route('/stats', methods = ['POST'])
def stats():
url = request.args.get('url')
timezone_minutes = request.args.get('timezone_minutes')
ip_address = anonymize_ip_address(remote_addr())
log = UseLog(url = url, ip_address = ip_address, web_browser = request.headers.get('User-Agent'), user_agent = request.user_agent, lang_header=request.headers.get('Accept-Language'), timezone_minutes=timezone_minutes)
db.session.add(log)
db.session.commit()
return "This is only for local statistics. No personal information is stored."
@embed_blueprint.route('/sync', methods = ['GET'])
def sync():
return "Not used anymore"
composer_contents = requests.get('http://composer.golabz.eu/export-embed.json').json()
current_users = { user.email: user for user in db.session.query(GoLabOAuthUser).all() }
users_modified = 0
users_added = 0
for user in composer_contents['users']:
if user['email'] in current_users:
if current_users[user['email']].display_name != user['display_name']:
current_users[user['email']].display_name = user['display_name']
users_modified += 1
else:
db.session.add(GoLabOAuthUser(email=user['email'], display_name=user['display_name']))
users_added += 1
db.session.commit()
# Users sync'ed
current_apps_by_public_id = { app.identifier: app for app in db.session.query(EmbedApplication).all() }
public_identifiers_by_db_id = { app.id : app.identifier for app in current_apps_by_public_id.values() }
current_translation_urls = {
# public_identifier: {
# 'es': obj
# }
}
for translation_db in db.session.query(EmbedApplicationTranslation).all():
public_identifier = public_identifiers_by_db_id[translation_db.embed_application_id]
if public_identifier not in current_translation_urls:
current_translation_urls[public_identifier] = {}
current_translation_urls[public_identifier][translation_db.language] = translation_db
current_users = { user.email: user for user in db.session.query(GoLabOAuthUser).all() }
# Now we have everything in memory. Let's process it
apps_added = 0
apps_modified = 0
FORMAT = '%Y-%m-%dT%H:%M:%S'
for app in composer_contents['apps']:
creation = datetime.datetime.strptime(app['creation'], FORMAT)
last_update = datetime.datetime.strptime(app['last_update'], FORMAT)
owner = current_users[app['owner_mail']]
if app['identifier'] in current_apps_by_public_id:
modified = False
current_app = current_apps_by_public_id[app['identifier']]
if current_app.url != app['url']:
modified = True
current_app.url = app['url']
if current_app.name != app['name']:
modified = True
current_app.name = app['name']
if current_app.height != app['height']:
modified = True
current_app.height = app['height']
if current_app.scale != app['scale']:
modified = True
current_app.scale = app['scale']
if current_app.last_update != last_update:
modified = True
current_app.last_update = last_update
if current_app.creation != creation:
modified = True
current_app.creation = creation
current_translations = current_translation_urls.get(app['identifier'], {})
for translation in app['translations']:
if translation['language'] not in current_translations:
new_translation = EmbedApplicationTranslation(embed_application = current_app, url = translation['url'], language = translation['language'])
db.session.add(new_translation)
modified = True
else:
if current_translations[translation['language']].url != translation['url']:
modified = True
current_translations[translation['language']].url = translation['url']
if modified:
apps_modified += 1
else:
new_app = EmbedApplication(url = app['url'], name = app['name'], owner = owner, height = app['height'], identifier = app['identifier'], creation = creation, last_update = last_update, scale = app['scale'])
db.session.add(new_app)
apps_added += 1
for translation in app['translations']:
new_translation = EmbedApplicationTranslation(embed_application = new_app, url = translation['url'], language = translation['language'])
db.session.add(new_translation)
db.session.commit()
return "<html><body><p>Sync completed. Users modified: %s; Users added: %s; Apps modified: %s; Apps added: %s</p></body></html>" % (users_modified, users_added, apps_modified, apps_added)
def find_replacement(app):
sg_replacement = find_smartgateway_opensocial_link(app.url)
if sg_replacement:
return sg_replacement
return 'http://gateway.golabz.eu/embed/apps/{}/app.xml'.format(app.identifier)
@embed_blueprint.route('/migrations/appcomp2gw/graasp.json', methods = ['GET'])
def appcomp2gw_graasp_migration():
replacements = {}
for app in db.session.query(EmbedApplication).all():
original_url = 'http://composer.golabz.eu/embed/apps/{}/app.xml'.format(app.identifier)
replacements[original_url] = find_replacement(app)
return jsonify(replacements=replacements, total=len(replacements))
@embed_blueprint.route('/migrations/appcomp2gw/golabz.json', methods = ['GET'])
def appcomp2gw_golabz_migration():
try:
labs = requests.get("http://www.golabz.eu/rest/labs/retrieve.json").json()
except:
return "Couldn't connect to golabz"
lab_urls = set()
for lab in labs:
for lab_app in lab['lab_apps']:
lab_urls.add(lab_app['app_url'])
replacements = {}
for app in db.session.query(EmbedApplication).all():
original_url = 'http://composer.golabz.eu/embed/apps/{}/app.xml'.format(app.identifier)
if original_url in lab_urls:
replacements[original_url] = find_replacement(app)
return jsonify(replacements=replacements, total=len(replacements))
def obtain_golabz_manual_data():
try:
labs = requests.get("http://www.golabz.eu/rest/labs/retrieve.json").json()
except:
return "Couldn't connect to golabz"
lab_urls = set()
labs_by_lab_url = {}
for lab in labs:
for lab_app in lab['lab_apps']:
lab_urls.add(lab_app['app_url'])
labs_by_lab_url[lab_app['app_url']] = lab
replacements = []
for app in db.session.query(EmbedApplication).all():
original_url = 'http://composer.golabz.eu/embed/apps/{}/app.xml'.format(app.identifier)
original2_url = 'http://gateway.golabz.eu/embed/apps/{}/app.xml'.format(app.identifier)
if original_url in lab_urls or original2_url in lab_urls:
sg_replacement = find_smartgateway_opensocial_link(app.url)
if sg_replacement:
if original_url in lab_urls:
current_url = original_url
else:
current_url = original2_url
replacements.append({
'old_url': current_url,
'new_url': sg_replacement,
'golabz_page': labs_by_lab_url[current_url]['lab_golabz_page'],
'golabz_author': labs_by_lab_url[current_url]['author'],
'title': labs_by_lab_url[current_url]['title'],
'gateway_author_name': app.owner.display_name,
'gateway_author_email': app.owner.email,
})
return replacements
@embed_blueprint.route('/migrations/appcomp2gw/golabz-manual.json', methods = ['GET'])
def appcomp2gw_golabz_manual_migration_json():
replacements = obtain_golabz_manual_data()
return jsonify(replacements=replacements, total=len(replacements))
@embed_blueprint.route('/migrations/appcomp2gw/golabz-manual.html', methods = ['GET'])
def appcomp2gw_golabz_manual_migration_html():
replacements = obtain_golabz_manual_data()
return render_template('embed/migration_appcomp2gw_golabz_manual.html', replacements=replacements)
@embed_blueprint.route('/create', methods = ['GET', 'POST'])
@requires_golab_login
def create():
check_certificates()
original_url = request.args.get('url')
if original_url:
bookmarklet_from = original_url
else:
bookmarklet_from = None
original_application = None
if original_url:
applications = db.session.query(EmbedApplication).filter_by(url=original_url).all()
if applications:
original_application = applications[0]
for app in applications:
if len(app.translations) > len(original_application.translations):
original_application = app
if app.name and not original_application.name:
original_application = app
continue
if app.description and not original_application.description:
original_application = app
continue
if original_application is not None:
form = ApplicationForm(obj=original_application)
else:
form = ApplicationForm()
if not form.url.data and original_url:
form.url.data = original_url
if not form.name.data:
result = get_url_metadata(original_url, timeout = 5)
if result['name']:
form.name.data = result['name']
if result['description'] and not form.description.data:
form.description.data = result['description']
if form.url.data:
form.url.data = form.url.data.strip()
if form.validate_on_submit():
form_scale = _get_scale_value(form)
application = EmbedApplication(url = form.url.data, name = form.name.data, owner = current_golab_user(), height=form.height.data, scale=form_scale, description=form.description.data, age_ranges_range = form.age_ranges_range.data)
application.domains_text = form.domains_text.data
db.session.add(application)
try:
db.session.commit()
except Exception as e:
traceback.print_exc()
return render_template("embed/error.html", message = gettext("There was an error creating an application"), user = current_golab_user()), 500
else:
kwargs = {}
if bookmarklet_from:
kwargs['url'] = bookmarklet_from
return redirect(url_for('.edit', identifier=application.identifier, **kwargs))
return render_template("embed/create.html", form=form, header_message=gettext("Add a web"), user = current_golab_user(), bookmarklet_from=bookmarklet_from, create=True, edit=False)
@embed_blueprint.route('/check.json')
def check_json():
url = request.args.get('url')
if not url:
return jsonify(error=True, message=gettext("No URL provided"), url=url)
if not url.startswith(('http://', 'https://')):
return jsonify(error=True, message=gettext("URL doesn't start by http:// or https://"), url=url)
if url == 'http://':
return jsonify(error=False, url=url)
sg_link = find_smartgateway_link(url, request.referrer)
if sg_link:
return jsonify(error=False, sg_link=sg_link, url=url)
metadata = get_url_metadata(url, timeout = 5)
if metadata['error_retrieving']:
return jsonify(error=True, message=gettext("Error retrieving URL"), url=url)
if metadata['code'] != 200:
return jsonify(error=True, message=gettext("Error accessing to the URL"), url=url)
    if (metadata['x_frame_options'] or '') in ('deny', 'sameorigin') or (metadata['x_frame_options'] or '').startswith('allow'):
return jsonify(error=True, message=gettext("This website does not support being loaded from a different site, so it is unavailable for Go-Lab"), url=url)
if 'html' not in metadata['content_type']:
if 'shockwave' in metadata['content_type'] or 'flash' in metadata['content_type']:
return jsonify(error=False, url=url)
return jsonify(error=True, message=gettext("URL is not HTML"), url=url)
return jsonify(error=False, url=url, name = metadata['name'], description = metadata['description'])
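# A minimal sketch of the exchange this endpoint implements (the values shown
# are illustrative, not captured from a real response):
#   GET /check.json?url=https://example.com/lab
#   -> {"error": false, "url": "...", "name": "...", "description": "..."}
# or, when the target forbids framing via X-Frame-Options:
#   -> {"error": true, "message": "This website does not support ...", "url": "..."}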
@embed_blueprint.route('/edit/<identifier>/', methods = ['GET', 'POST'])
@requires_golab_login
def edit(identifier):
existing_languages = {
# lang: {
# 'code': 'es',
# 'name': 'Spanish',
# 'url': 'http://....'
# }
}
existing_languages_db = {
# lang: db_instance
}
all_languages = list_of_languages()
# Obtain from the database
application = db.session.query(EmbedApplication).filter_by(identifier = identifier).first()
if application is None:
return "Application does not exist", 404
for translation in application.translations:
existing_languages_db[translation.language] = translation
existing_languages[translation.language] = {
'code': translation.language,
'name': all_languages.get(translation.language) or 'Language not supported anymore',
'url': translation.url
}
# languages added by the UI
posted_languages = {
# 'es' : 'http://.../'
}
if request.method == 'POST':
for key in request.form:
if key.startswith('language.'):
lang_code = key[len('language.'):]
if lang_code in all_languages:
posted_languages[lang_code] = request.form[key]
form = ApplicationForm(obj=application)
if form.validate_on_submit():
# Check for new ones or changed
for posted_language, url in posted_languages.items():
if posted_language in existing_languages_db:
translation = existing_languages_db[posted_language]
if translation.url != url: # Don't trigger unnecessary UPDATEs
translation.url = url
else:
translation = EmbedApplicationTranslation(embed_application = application, url=url, language=posted_language)
db.session.add(translation)
# Delete old ones
for existing_language, translation in existing_languages_db.items():
if existing_language not in posted_languages:
existing_languages.pop(existing_language)
db.session.delete(translation)
form_scale = _get_scale_value(form)
application.update(url=form.url.data, name=form.name.data, height=form.height.data, scale=form_scale, age_ranges_range=form.age_ranges_range.data, description=form.description.data, domains_text=form.domains_text.data)
db.session.commit()
# TODO: does this still make sense?
# if request.form.get('action') == 'publish':
# return _post_contents(app_to_json(application), application.url)
# Add the posted languages to the existing ones
for lang_code, url in posted_languages.items():
existing_languages[lang_code] = {
'code' : lang_code,
'name' : all_languages[lang_code],
'url' : url
}
# Obtain the languages formatted as required but excluding those already added
languages = obtain_formatted_languages(existing_languages)
bookmarklet_from = request.args.get('url')
return render_template("embed/create.html", user = current_golab_user(), form=form, identifier=identifier, header_message=gettext("Edit web"), languages=languages, existing_languages=list(existing_languages.values()), all_languages=all_languages, bookmarklet_from=bookmarklet_from, edit=True, create=False)
from labmanager.views.repository import app_to_json
|
lmazuel/azure-sdk-for-python | refs/heads/master | azure-mgmt-machinelearningcompute/azure/mgmt/machinelearningcompute/models/auto_scale_configuration.py | 2 | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from msrest.serialization import Model
class AutoScaleConfiguration(Model):
"""AutoScale configuration properties.
    :param status: Whether auto-scale is enabled for all services. Each service can
turn it off individually. Possible values include: 'Enabled', 'Disabled'.
Default value: "Disabled" .
:type status: str or ~azure.mgmt.machinelearningcompute.models.Status
:param min_replicas: The minimum number of replicas for each service.
Default value: 1 .
:type min_replicas: int
:param max_replicas: The maximum number of replicas for each service.
Default value: 100 .
:type max_replicas: int
:param target_utilization: The target utilization.
:type target_utilization: float
:param refresh_period_in_seconds: Refresh period in seconds.
:type refresh_period_in_seconds: int
"""
_validation = {
'min_replicas': {'minimum': 1},
'max_replicas': {'minimum': 1},
}
_attribute_map = {
'status': {'key': 'status', 'type': 'str'},
'min_replicas': {'key': 'minReplicas', 'type': 'int'},
'max_replicas': {'key': 'maxReplicas', 'type': 'int'},
'target_utilization': {'key': 'targetUtilization', 'type': 'float'},
'refresh_period_in_seconds': {'key': 'refreshPeriodInSeconds', 'type': 'int'},
}
def __init__(self, status="Disabled", min_replicas=1, max_replicas=100, target_utilization=None, refresh_period_in_seconds=None):
super(AutoScaleConfiguration, self).__init__()
self.status = status
self.min_replicas = min_replicas
self.max_replicas = max_replicas
self.target_utilization = target_utilization
self.refresh_period_in_seconds = refresh_period_in_seconds
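# A minimal construction sketch (the values below are illustrative, not
# recommendations; wire serialization is handled by msrest through the
# _attribute_map above):
#   config = AutoScaleConfiguration(status="Enabled", min_replicas=2,
#                                   max_replicas=10, target_utilization=0.7)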
|
arcyfelix/ML-DL-AI | refs/heads/master | Supervised Learning/GANs/dcgan-tensorflayer/tensorlayer/visualize.py | 1 | #! /usr/bin/python
# -*- coding: utf8 -*-
import matplotlib
matplotlib.use('Agg')
import matplotlib.pyplot as plt
# import matplotlib.pyplot as plt
import numpy as np
import os
## Save images
import scipy.misc
def save_images(images, size, image_path):
"""Save mutiple images into one single image.
Parameters
-----------
images : numpy array [batch, w, h, c]
size : list of two int, row and column number.
number of images should be equal or less than size[0] * size[1]
image_path : string.
Examples
---------
>>> images = np.random.rand(64, 100, 100, 3)
>>> tl.visualize.save_images(images, [8, 8], 'temp.png')
"""
def merge(images, size):
h, w = images.shape[1], images.shape[2]
img = np.zeros((h * size[0], w * size[1], 3))
for idx, image in enumerate(images):
i = idx % size[1]
j = idx // size[1]
img[j*h:j*h+h, i*w:i*w+w, :] = image
return img
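    # e.g. with size == [8, 8] and 100x100 RGB inputs, merge returns an
    # (800, 800, 3) canvas; image idx lands at row idx // 8, column idx % 8.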
def imsave(images, size, path):
return scipy.misc.imsave(path, merge(images, size))
assert len(images) <= size[0] * size[1], "number of images should be equal or less than size[0] * size[1] {}".format(len(images))
return imsave(images, size, image_path)
def W(W=None, second=10, saveable=True, shape=[28,28], name='mnist', fig_idx=2396512):
"""Visualize every columns of the weight matrix to a group of Greyscale img.
Parameters
----------
W : numpy.array
The weight matrix
second : int
The display second(s) for the image(s), if saveable is False.
saveable : boolean
Save or plot the figure.
shape : a list with 2 int
        The shape of feature image, MNIST is [28, 28].
name : a string
A name to save the image, if saveable is True.
fig_idx : int
matplotlib figure index.
Examples
--------
>>> tl.visualize.W(network.all_params[0].eval(), second=10, saveable=True, name='weight_of_1st_layer', fig_idx=2012)
"""
if saveable is False:
plt.ion()
fig = plt.figure(fig_idx) # show all feature images
size = W.shape[0]
n_units = W.shape[1]
    num_r = int(np.sqrt(n_units))  # units shown per row, e.g. 25 hidden units -> 5 per row
num_c = int(np.ceil(n_units/num_r))
count = int(1)
for row in range(1, num_r+1):
for col in range(1, num_c+1):
if count > n_units:
break
a = fig.add_subplot(num_r, num_c, count)
# ------------------------------------------------------------
# plt.imshow(np.reshape(W[:,count-1],(28,28)), cmap='gray')
# ------------------------------------------------------------
feature = W[:,count-1] / np.sqrt( (W[:,count-1]**2).sum())
# feature[feature<0.0001] = 0 # value threshold
# if count == 1 or count == 2:
# print(np.mean(feature))
# if np.std(feature) < 0.03: # condition threshold
# feature = np.zeros_like(feature)
# if np.mean(feature) < -0.015: # condition threshold
# feature = np.zeros_like(feature)
plt.imshow(np.reshape(feature ,(shape[0],shape[1])),
cmap='gray', interpolation="nearest")#, vmin=np.min(feature), vmax=np.max(feature))
# plt.title(name)
# ------------------------------------------------------------
# plt.imshow(np.reshape(W[:,count-1] ,(np.sqrt(size),np.sqrt(size))), cmap='gray', interpolation="nearest")
            plt.gca().xaxis.set_major_locator(plt.NullLocator())    # disable ticks
plt.gca().yaxis.set_major_locator(plt.NullLocator())
count = count + 1
if saveable:
plt.savefig(name+'.pdf',format='pdf')
else:
plt.draw()
plt.pause(second)
def frame(I=None, second=5, saveable=True, name='frame', cmap=None, fig_idx=12836):
"""Display a frame(image). Make sure OpenAI Gym render() is disable before using it.
Parameters
----------
I : numpy.array
The image
second : int
The display second(s) for the image(s), if saveable is False.
saveable : boolean
Save or plot the figure.
name : a string
A name to save the image, if saveable is True.
cmap : None or string
'gray' for greyscale, None for default, etc.
fig_idx : int
matplotlib figure index.
Examples
--------
>>> env = gym.make("Pong-v0")
>>> observation = env.reset()
>>> tl.visualize.frame(observation)
"""
if saveable is False:
plt.ion()
fig = plt.figure(fig_idx) # show all feature images
if len(I.shape) and I.shape[-1]==1: # (10,10,1) --> (10,10)
I = I[:,:,0]
plt.imshow(I, cmap)
plt.title(name)
    # plt.gca().xaxis.set_major_locator(plt.NullLocator())    # disable ticks
# plt.gca().yaxis.set_major_locator(plt.NullLocator())
if saveable:
plt.savefig(name+'.pdf',format='pdf')
else:
plt.draw()
plt.pause(second)
def CNN2d(CNN=None, second=10, saveable=True, name='cnn', fig_idx=3119362):
"""Display a group of RGB or Greyscale CNN masks.
Parameters
----------
CNN : numpy.array
The image. e.g: 64 5x5 RGB images can be (5, 5, 3, 64).
second : int
The display second(s) for the image(s), if saveable is False.
saveable : boolean
Save or plot the figure.
name : a string
A name to save the image, if saveable is True.
fig_idx : int
matplotlib figure index.
Examples
--------
>>> tl.visualize.CNN2d(network.all_params[0].eval(), second=10, saveable=True, name='cnn1_mnist', fig_idx=2012)
"""
# print(CNN.shape) # (5, 5, 3, 64)
# exit()
n_mask = CNN.shape[3]
n_row = CNN.shape[0]
n_col = CNN.shape[1]
n_color = CNN.shape[2]
row = int(np.sqrt(n_mask))
col = int(np.ceil(n_mask/row))
    plt.ion()  # interactive mode
fig = plt.figure(fig_idx)
count = 1
for ir in range(1, row+1):
for ic in range(1, col+1):
if count > n_mask:
break
a = fig.add_subplot(col, row, count)
# print(CNN[:,:,:,count-1].shape, n_row, n_col) # (5, 1, 32) 5 5
# exit()
# plt.imshow(
# np.reshape(CNN[count-1,:,:,:], (n_row, n_col)),
# cmap='gray', interpolation="nearest") # theano
if n_color == 1:
plt.imshow(
np.reshape(CNN[:,:,:,count-1], (n_row, n_col)),
cmap='gray', interpolation="nearest")
elif n_color == 3:
plt.imshow(
np.reshape(CNN[:,:,:,count-1], (n_row, n_col, n_color)),
cmap='gray', interpolation="nearest")
else:
raise Exception("Unknown n_color")
            plt.gca().xaxis.set_major_locator(plt.NullLocator())    # disable ticks
plt.gca().yaxis.set_major_locator(plt.NullLocator())
count = count + 1
if saveable:
plt.savefig(name+'.pdf',format='pdf')
else:
plt.draw()
plt.pause(second)
def images2d(images=None, second=10, saveable=True, name='images', dtype=None,
fig_idx=3119362):
"""Display a group of RGB or Greyscale images.
Parameters
----------
images : numpy.array
The images.
second : int
The display second(s) for the image(s), if saveable is False.
saveable : boolean
Save or plot the figure.
name : a string
A name to save the image, if saveable is True.
dtype : None or numpy data type
The data type for displaying the images.
fig_idx : int
matplotlib figure index.
Examples
--------
>>> X_train, y_train, X_test, y_test = tl.files.load_cifar10_dataset(shape=(-1, 32, 32, 3), plotable=False)
>>> tl.visualize.images2d(X_train[0:100,:,:,:], second=10, saveable=False, name='cifar10', dtype=np.uint8, fig_idx=20212)
"""
# print(images.shape) # (50000, 32, 32, 3)
# exit()
if dtype:
images = np.asarray(images, dtype=dtype)
n_mask = images.shape[0]
n_row = images.shape[1]
n_col = images.shape[2]
n_color = images.shape[3]
row = int(np.sqrt(n_mask))
col = int(np.ceil(n_mask/row))
    plt.ion()  # interactive mode
fig = plt.figure(fig_idx)
count = 1
for ir in range(1, row+1):
for ic in range(1, col+1):
if count > n_mask:
break
a = fig.add_subplot(col, row, count)
# print(images[:,:,:,count-1].shape, n_row, n_col) # (5, 1, 32) 5 5
# plt.imshow(
# np.reshape(images[count-1,:,:,:], (n_row, n_col)),
# cmap='gray', interpolation="nearest") # theano
if n_color == 1:
plt.imshow(
np.reshape(images[count-1,:,:], (n_row, n_col)),
cmap='gray', interpolation="nearest")
# plt.title(name)
elif n_color == 3:
plt.imshow(images[count-1,:,:],
cmap='gray', interpolation="nearest")
# plt.title(name)
else:
raise Exception("Unknown n_color")
            plt.gca().xaxis.set_major_locator(plt.NullLocator())    # disable ticks
plt.gca().yaxis.set_major_locator(plt.NullLocator())
count = count + 1
if saveable:
plt.savefig(name+'.pdf',format='pdf')
else:
plt.draw()
plt.pause(second)
def tsne_embedding(embeddings, reverse_dictionary, plot_only=500,
second=5, saveable=False, name='tsne', fig_idx=9862):
"""Visualize the embeddings by using t-SNE.
Parameters
----------
embeddings : a matrix
        The embedding matrix.
reverse_dictionary : a dictionary
id_to_word, mapping id to unique word.
plot_only : int
        The number of examples to plot, chosen from the most common words.
second : int
The display second(s) for the image(s), if saveable is False.
saveable : boolean
Save or plot the figure.
name : a string
A name to save the image, if saveable is True.
fig_idx : int
matplotlib figure index.
Examples
--------
>>> see 'tutorial_word2vec_basic.py'
>>> final_embeddings = normalized_embeddings.eval()
>>> tl.visualize.tsne_embedding(final_embeddings, labels, reverse_dictionary,
... plot_only=500, second=5, saveable=False, name='tsne')
"""
def plot_with_labels(low_dim_embs, labels, figsize=(18, 18), second=5,
saveable=True, name='tsne', fig_idx=9862):
assert low_dim_embs.shape[0] >= len(labels), "More labels than embeddings"
if saveable is False:
plt.ion()
plt.figure(fig_idx)
plt.figure(figsize=figsize) #in inches
for i, label in enumerate(labels):
x, y = low_dim_embs[i,:]
plt.scatter(x, y)
plt.annotate(label,
xy=(x, y),
xytext=(5, 2),
textcoords='offset points',
ha='right',
va='bottom')
if saveable:
plt.savefig(name+'.pdf',format='pdf')
else:
plt.draw()
plt.pause(second)
try:
from sklearn.manifold import TSNE
import matplotlib.pyplot as plt
from six.moves import xrange
tsne = TSNE(perplexity=30, n_components=2, init='pca', n_iter=5000)
# plot_only = 500
low_dim_embs = tsne.fit_transform(embeddings[:plot_only,:])
labels = [reverse_dictionary[i] for i in xrange(plot_only)]
plot_with_labels(low_dim_embs, labels, second=second, saveable=saveable, \
name=name, fig_idx=fig_idx)
except ImportError:
print("Please install sklearn and matplotlib to visualize embeddings.")
#
|
cypreess/django-plans | refs/heads/master | plans/quota.py | 1 | def get_user_quota(user):
"""
Tiny helper for getting quota dict for user
If user has expired plan, return default plan or None
"""
from .models import Plan
plan = Plan.get_current_plan(user)
return plan.get_quota_dict()
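# A minimal usage sketch (the quota key below is hypothetical; real keys come
# from the plan's quota definitions):
#   quota = get_user_quota(request.user)
#   if quota.get('MAX_PROJECT_COUNT', 0) <= current_project_count:
#       ...  # deny creating another project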
|
sammyshj/gci | refs/heads/master | tests/dbmigration/new_models/db.py | 33 | if not request.env.web2py_runtime_gae:
db = DAL("sqlite://storage.sqlite")
else:
db = DAL("google:datastore")
session.connect(request, response, db=db)
# This should detect one table disappeared, one field disappeared and one field added
db.define_table("main",
Field("remove_name")
)
# This should detect one field disappeared and one field added
db.define_table("renaming",
Field("renamed2")
)
# This should detect one field disappeared, one field added and one table added
db.define_table("edit",
Field("new_id", "integer"),
Field("name",
default = "1")
)
db.define_table("added",
Field("name")
)
# END =========================================================================
|
vsajip/yowsup | refs/heads/master | yowsup/layers/axolotl/protocolentities/test_iq_keys_set.py | 68 | from yowsup.layers.protocol_iq.protocolentities.test_iq import IqProtocolEntityTest
from yowsup.layers.axolotl.protocolentities import SetKeysIqProtocolEntity
from yowsup.structs import ProtocolTreeNode
class SetKeysIqProtocolEntityTest(IqProtocolEntityTest):
def setUp(self):
super(SetKeysIqProtocolEntityTest, self).setUp()
# self.ProtocolEntity = SetKeysIqProtocolEntity
#
# regNode = ProtocolTreeNode("registration", data = "abcd")
# idNode = ProtocolTreeNode("identity", data = "efgh")
# typeNode = ProtocolTreeNode("type", data = "ijkl")
# listNode = ProtocolTreeNode("list")
# for i in range(0, 2):
# keyNode = ProtocolTreeNode("key", children=[
# ProtocolTreeNode("id", data = "id_%s" % i),
# ProtocolTreeNode("value", data = "val_%s" % i)
# ])
# listNode.addChild(keyNode)
#
# self.node.addChildren([regNode, idNode, typeNode, listNode])
|
yangchandle/FlaskTaskr | refs/heads/master | env/lib/python3.5/site-packages/werkzeug/local.py | 148 | # -*- coding: utf-8 -*-
"""
werkzeug.local
~~~~~~~~~~~~~~
This module implements context-local objects.
:copyright: (c) 2014 by the Werkzeug Team, see AUTHORS for more details.
:license: BSD, see LICENSE for more details.
"""
from functools import update_wrapper
from werkzeug.wsgi import ClosingIterator
from werkzeug._compat import PY2, implements_bool
# since each thread has its own greenlet we can just use those as identifiers
# for the context. If greenlets are not available we fall back to the
# current thread ident depending on where it is.
try:
from greenlet import getcurrent as get_ident
except ImportError:
try:
from thread import get_ident
except ImportError:
from _thread import get_ident
def release_local(local):
"""Releases the contents of the local for the current context.
This makes it possible to use locals without a manager.
Example::
>>> loc = Local()
>>> loc.foo = 42
>>> release_local(loc)
>>> hasattr(loc, 'foo')
False
With this function one can release :class:`Local` objects as well
as :class:`LocalStack` objects. However it is not possible to
release data held by proxies that way, one always has to retain
a reference to the underlying local object in order to be able
to release it.
.. versionadded:: 0.6.1
"""
local.__release_local__()
class Local(object):
__slots__ = ('__storage__', '__ident_func__')
def __init__(self):
object.__setattr__(self, '__storage__', {})
object.__setattr__(self, '__ident_func__', get_ident)
def __iter__(self):
return iter(self.__storage__.items())
def __call__(self, proxy):
"""Create a proxy for a name."""
return LocalProxy(self, proxy)
def __release_local__(self):
self.__storage__.pop(self.__ident_func__(), None)
def __getattr__(self, name):
try:
return self.__storage__[self.__ident_func__()][name]
except KeyError:
raise AttributeError(name)
def __setattr__(self, name, value):
ident = self.__ident_func__()
storage = self.__storage__
try:
storage[ident][name] = value
except KeyError:
storage[ident] = {name: value}
def __delattr__(self, name):
try:
del self.__storage__[self.__ident_func__()][name]
except KeyError:
raise AttributeError(name)
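# A minimal usage sketch for Local (each thread/greenlet ident sees its own
# attribute namespace):
#   data = Local()
#   data.user = 'alice'   # stored under the current context's ident
#   release_local(data)   # drops everything stored for this context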
class LocalStack(object):
"""This class works similar to a :class:`Local` but keeps a stack
of objects instead. This is best explained with an example::
>>> ls = LocalStack()
>>> ls.push(42)
>>> ls.top
42
>>> ls.push(23)
>>> ls.top
23
>>> ls.pop()
23
>>> ls.top
42
They can be force released by using a :class:`LocalManager` or with
the :func:`release_local` function but the correct way is to pop the
item from the stack after using. When the stack is empty it will
no longer be bound to the current context (and as such released).
By calling the stack without arguments it returns a proxy that resolves to
the topmost item on the stack.
.. versionadded:: 0.6.1
"""
def __init__(self):
self._local = Local()
def __release_local__(self):
self._local.__release_local__()
def _get__ident_func__(self):
return self._local.__ident_func__
def _set__ident_func__(self, value):
object.__setattr__(self._local, '__ident_func__', value)
__ident_func__ = property(_get__ident_func__, _set__ident_func__)
del _get__ident_func__, _set__ident_func__
def __call__(self):
def _lookup():
rv = self.top
if rv is None:
raise RuntimeError('object unbound')
return rv
return LocalProxy(_lookup)
def push(self, obj):
"""Pushes a new item to the stack"""
rv = getattr(self._local, 'stack', None)
if rv is None:
self._local.stack = rv = []
rv.append(obj)
return rv
def pop(self):
"""Removes the topmost item from the stack, will return the
old value or `None` if the stack was already empty.
"""
stack = getattr(self._local, 'stack', None)
if stack is None:
return None
elif len(stack) == 1:
release_local(self._local)
return stack[-1]
else:
return stack.pop()
@property
def top(self):
"""The topmost item on the stack. If the stack is empty,
`None` is returned.
"""
try:
return self._local.stack[-1]
except (AttributeError, IndexError):
return None
class LocalManager(object):
"""Local objects cannot manage themselves. For that you need a local
manager. You can pass a local manager multiple locals or add them later
    by appending them to `manager.locals`. Every time the manager cleans up,
    it will clean up all the data left in the locals for this context.
The `ident_func` parameter can be added to override the default ident
function for the wrapped locals.
.. versionchanged:: 0.6.1
Instead of a manager the :func:`release_local` function can be used
as well.
.. versionchanged:: 0.7
`ident_func` was added.
"""
def __init__(self, locals=None, ident_func=None):
if locals is None:
self.locals = []
elif isinstance(locals, Local):
self.locals = [locals]
else:
self.locals = list(locals)
if ident_func is not None:
self.ident_func = ident_func
for local in self.locals:
object.__setattr__(local, '__ident_func__', ident_func)
else:
self.ident_func = get_ident
def get_ident(self):
"""Return the context identifier the local objects use internally for
this context. You cannot override this method to change the behavior
but use it to link other context local objects (such as SQLAlchemy's
scoped sessions) to the Werkzeug locals.
.. versionchanged:: 0.7
You can pass a different ident function to the local manager that
will then be propagated to all the locals passed to the
constructor.
"""
return self.ident_func()
def cleanup(self):
"""Manually clean up the data in the locals for this context. Call
this at the end of the request or use `make_middleware()`.
"""
for local in self.locals:
release_local(local)
def make_middleware(self, app):
"""Wrap a WSGI application so that cleaning up happens after
request end.
"""
def application(environ, start_response):
return ClosingIterator(app(environ, start_response), self.cleanup)
return application
def middleware(self, func):
"""Like `make_middleware` but for decorating functions.
Example usage::
@manager.middleware
def application(environ, start_response):
...
The difference to `make_middleware` is that the function passed
will have all the arguments copied from the inner application
(name, docstring, module).
"""
return update_wrapper(self.make_middleware(func), func)
def __repr__(self):
return '<%s storages: %d>' % (
self.__class__.__name__,
len(self.locals)
)
@implements_bool
class LocalProxy(object):
"""Acts as a proxy for a werkzeug local. Forwards all operations to
a proxied object. The only operations not supported for forwarding
are right handed operands and any kind of assignment.
Example usage::
from werkzeug.local import Local
l = Local()
# these are proxies
request = l('request')
user = l('user')
from werkzeug.local import LocalStack
_response_local = LocalStack()
# this is a proxy
response = _response_local()
Whenever something is bound to l.user / l.request the proxy objects
will forward all operations. If no object is bound a :exc:`RuntimeError`
will be raised.
To create proxies to :class:`Local` or :class:`LocalStack` objects,
call the object as shown above. If you want to have a proxy to an
object looked up by a function, you can (as of Werkzeug 0.6.1) pass
a function to the :class:`LocalProxy` constructor::
session = LocalProxy(lambda: get_current_request().session)
.. versionchanged:: 0.6.1
        The class can be instantiated with a callable as well now.
"""
__slots__ = ('__local', '__dict__', '__name__')
def __init__(self, local, name=None):
object.__setattr__(self, '_LocalProxy__local', local)
object.__setattr__(self, '__name__', name)
def _get_current_object(self):
"""Return the current object. This is useful if you want the real
object behind the proxy at a time for performance reasons or because
you want to pass the object into a different context.
"""
if not hasattr(self.__local, '__release_local__'):
return self.__local()
try:
return getattr(self.__local, self.__name__)
except AttributeError:
raise RuntimeError('no object bound to %s' % self.__name__)
@property
def __dict__(self):
try:
return self._get_current_object().__dict__
except RuntimeError:
raise AttributeError('__dict__')
def __repr__(self):
try:
obj = self._get_current_object()
except RuntimeError:
return '<%s unbound>' % self.__class__.__name__
return repr(obj)
def __bool__(self):
try:
return bool(self._get_current_object())
except RuntimeError:
return False
def __unicode__(self):
try:
return unicode(self._get_current_object())
except RuntimeError:
return repr(self)
def __dir__(self):
try:
return dir(self._get_current_object())
except RuntimeError:
return []
def __getattr__(self, name):
if name == '__members__':
return dir(self._get_current_object())
return getattr(self._get_current_object(), name)
def __setitem__(self, key, value):
self._get_current_object()[key] = value
def __delitem__(self, key):
del self._get_current_object()[key]
if PY2:
__getslice__ = lambda x, i, j: x._get_current_object()[i:j]
def __setslice__(self, i, j, seq):
self._get_current_object()[i:j] = seq
def __delslice__(self, i, j):
del self._get_current_object()[i:j]
__setattr__ = lambda x, n, v: setattr(x._get_current_object(), n, v)
__delattr__ = lambda x, n: delattr(x._get_current_object(), n)
__str__ = lambda x: str(x._get_current_object())
__lt__ = lambda x, o: x._get_current_object() < o
__le__ = lambda x, o: x._get_current_object() <= o
__eq__ = lambda x, o: x._get_current_object() == o
__ne__ = lambda x, o: x._get_current_object() != o
__gt__ = lambda x, o: x._get_current_object() > o
__ge__ = lambda x, o: x._get_current_object() >= o
__cmp__ = lambda x, o: cmp(x._get_current_object(), o)
__hash__ = lambda x: hash(x._get_current_object())
__call__ = lambda x, *a, **kw: x._get_current_object()(*a, **kw)
__len__ = lambda x: len(x._get_current_object())
__getitem__ = lambda x, i: x._get_current_object()[i]
__iter__ = lambda x: iter(x._get_current_object())
__contains__ = lambda x, i: i in x._get_current_object()
__add__ = lambda x, o: x._get_current_object() + o
__sub__ = lambda x, o: x._get_current_object() - o
__mul__ = lambda x, o: x._get_current_object() * o
__floordiv__ = lambda x, o: x._get_current_object() // o
__mod__ = lambda x, o: x._get_current_object() % o
__divmod__ = lambda x, o: x._get_current_object().__divmod__(o)
__pow__ = lambda x, o: x._get_current_object() ** o
__lshift__ = lambda x, o: x._get_current_object() << o
__rshift__ = lambda x, o: x._get_current_object() >> o
__and__ = lambda x, o: x._get_current_object() & o
__xor__ = lambda x, o: x._get_current_object() ^ o
__or__ = lambda x, o: x._get_current_object() | o
__div__ = lambda x, o: x._get_current_object().__div__(o)
__truediv__ = lambda x, o: x._get_current_object().__truediv__(o)
__neg__ = lambda x: -(x._get_current_object())
__pos__ = lambda x: +(x._get_current_object())
__abs__ = lambda x: abs(x._get_current_object())
__invert__ = lambda x: ~(x._get_current_object())
__complex__ = lambda x: complex(x._get_current_object())
__int__ = lambda x: int(x._get_current_object())
__long__ = lambda x: long(x._get_current_object())
__float__ = lambda x: float(x._get_current_object())
__oct__ = lambda x: oct(x._get_current_object())
__hex__ = lambda x: hex(x._get_current_object())
__index__ = lambda x: x._get_current_object().__index__()
__coerce__ = lambda x, o: x._get_current_object().__coerce__(x, o)
__enter__ = lambda x: x._get_current_object().__enter__()
__exit__ = lambda x, *a, **kw: x._get_current_object().__exit__(*a, **kw)
__radd__ = lambda x, o: o + x._get_current_object()
__rsub__ = lambda x, o: o - x._get_current_object()
__rmul__ = lambda x, o: o * x._get_current_object()
__rdiv__ = lambda x, o: o / x._get_current_object()
if PY2:
__rtruediv__ = lambda x, o: x._get_current_object().__rtruediv__(o)
else:
__rtruediv__ = __rdiv__
__rfloordiv__ = lambda x, o: o // x._get_current_object()
__rmod__ = lambda x, o: o % x._get_current_object()
__rdivmod__ = lambda x, o: x._get_current_object().__rdivmod__(o)
|
devendermishrajio/nova | refs/heads/master | nova/volume/encryptors/nop.py | 61 | # Copyright (c) 2013 The Johns Hopkins University/Applied Physics Laboratory
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_log import log as logging
from nova.volume.encryptors import base
LOG = logging.getLogger(__name__)
class NoOpEncryptor(base.VolumeEncryptor):
"""A VolumeEncryptor that does nothing.
This class exists solely to wrap regular (i.e., unencrypted) volumes so
that they do not require special handling with respect to an encrypted
volume. This implementation performs no action when a volume is attached
or detached.
"""
def __init__(self, connection_info, **kwargs):
super(NoOpEncryptor, self).__init__(connection_info, **kwargs)
def attach_volume(self, context):
pass
def detach_volume(self):
pass
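# A minimal usage sketch (connection_info contents are driver-specific; the
# dict below is illustrative only):
#   encryptor = NoOpEncryptor(connection_info={'data': {}})
#   encryptor.attach_volume(context)  # intentionally does nothing
#   encryptor.detach_volume()         # intentionally does nothing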
|
Microvellum/Fluid-Designer | refs/heads/master | win64-vc/2.78/python/lib/urllib/robotparser.py | 9 | """ robotparser.py
Copyright (C) 2000 Bastian Kleineidam
You can choose between two licenses when using this package:
1) GNU GPLv2
2) PSF license for Python 2.2
The robots.txt Exclusion Protocol is implemented as specified in
http://www.robotstxt.org/norobots-rfc.txt
"""
import urllib.parse, urllib.request
__all__ = ["RobotFileParser"]
class RobotFileParser:
""" This class provides a set of methods to read, parse and answer
questions about a single robots.txt file.
"""
def __init__(self, url=''):
self.entries = []
self.default_entry = None
self.disallow_all = False
self.allow_all = False
self.set_url(url)
self.last_checked = 0
def mtime(self):
"""Returns the time the robots.txt file was last fetched.
This is useful for long-running web spiders that need to
check for new robots.txt files periodically.
"""
return self.last_checked
def modified(self):
"""Sets the time the robots.txt file was last fetched to the
current time.
"""
import time
self.last_checked = time.time()
def set_url(self, url):
"""Sets the URL referring to a robots.txt file."""
self.url = url
self.host, self.path = urllib.parse.urlparse(url)[1:3]
def read(self):
"""Reads the robots.txt URL and feeds it to the parser."""
try:
f = urllib.request.urlopen(self.url)
except urllib.error.HTTPError as err:
if err.code in (401, 403):
self.disallow_all = True
elif err.code >= 400 and err.code < 500:
self.allow_all = True
else:
raw = f.read()
self.parse(raw.decode("utf-8").splitlines())
def _add_entry(self, entry):
if "*" in entry.useragents:
# the default entry is considered last
if self.default_entry is None:
# the first default entry wins
self.default_entry = entry
else:
self.entries.append(entry)
def parse(self, lines):
"""Parse the input lines from a robots.txt file.
We allow that a user-agent: line is not preceded by
one or more blank lines.
"""
# states:
# 0: start state
# 1: saw user-agent line
# 2: saw an allow or disallow line
state = 0
entry = Entry()
self.modified()
for line in lines:
if not line:
if state == 1:
entry = Entry()
state = 0
elif state == 2:
self._add_entry(entry)
entry = Entry()
state = 0
# remove optional comment and strip line
i = line.find('#')
if i >= 0:
line = line[:i]
line = line.strip()
if not line:
continue
line = line.split(':', 1)
if len(line) == 2:
line[0] = line[0].strip().lower()
line[1] = urllib.parse.unquote(line[1].strip())
if line[0] == "user-agent":
if state == 2:
self._add_entry(entry)
entry = Entry()
entry.useragents.append(line[1])
state = 1
elif line[0] == "disallow":
if state != 0:
entry.rulelines.append(RuleLine(line[1], False))
state = 2
elif line[0] == "allow":
if state != 0:
entry.rulelines.append(RuleLine(line[1], True))
state = 2
if state == 2:
self._add_entry(entry)
def can_fetch(self, useragent, url):
"""using the parsed robots.txt decide if useragent can fetch url"""
if self.disallow_all:
return False
if self.allow_all:
return True
# Until the robots.txt file has been read or found not
# to exist, we must assume that no url is allowable.
        # This prevents false positives when a user erroneously
# calls can_fetch() before calling read().
if not self.last_checked:
return False
# search for given user agent matches
# the first match counts
parsed_url = urllib.parse.urlparse(urllib.parse.unquote(url))
url = urllib.parse.urlunparse(('','',parsed_url.path,
parsed_url.params,parsed_url.query, parsed_url.fragment))
url = urllib.parse.quote(url)
if not url:
url = "/"
for entry in self.entries:
if entry.applies_to(useragent):
return entry.allowance(url)
# try the default entry last
if self.default_entry:
return self.default_entry.allowance(url)
# agent not found ==> access granted
return True
def __str__(self):
return ''.join([str(entry) + "\n" for entry in self.entries])
class RuleLine:
"""A rule line is a single "Allow:" (allowance==True) or "Disallow:"
(allowance==False) followed by a path."""
def __init__(self, path, allowance):
if path == '' and not allowance:
# an empty value means allow all
allowance = True
path = urllib.parse.urlunparse(urllib.parse.urlparse(path))
self.path = urllib.parse.quote(path)
self.allowance = allowance
def applies_to(self, filename):
return self.path == "*" or filename.startswith(self.path)
def __str__(self):
return ("Allow" if self.allowance else "Disallow") + ": " + self.path
class Entry:
"""An entry has one or more user-agents and zero or more rulelines"""
def __init__(self):
self.useragents = []
self.rulelines = []
def __str__(self):
ret = []
for agent in self.useragents:
ret.extend(["User-agent: ", agent, "\n"])
for line in self.rulelines:
ret.extend([str(line), "\n"])
return ''.join(ret)
def applies_to(self, useragent):
"""check if this entry applies to the specified agent"""
# split the name token and make it lower case
useragent = useragent.split("/")[0].lower()
for agent in self.useragents:
if agent == '*':
# we have the catch-all agent
return True
agent = agent.lower()
if agent in useragent:
return True
return False
def allowance(self, filename):
"""Preconditions:
- our agent applies to this entry
- filename is URL decoded"""
for line in self.rulelines:
if line.applies_to(filename):
return line.allowance
return True
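# A minimal, self-contained demo of the parser (no network access; the
# robots.txt content below is hypothetical). Rule lines are checked in
# order, so the more specific Allow rule is listed first:
if __name__ == '__main__':
    rp = RobotFileParser()
    rp.parse([
        "User-agent: *",
        "Allow: /private/open/",
        "Disallow: /private/",
    ])
    print(rp.can_fetch("MyCrawler", "/private/open/page"))  # True
    print(rp.can_fetch("MyCrawler", "/private/page"))       # False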
|
jfhumann/servo | refs/heads/master | tests/wpt/web-platform-tests/tools/pytest/doc/en/genapi.py | 203 | import textwrap
import inspect
class Writer:
def __init__(self, clsname):
self.clsname = clsname
def __enter__(self):
self.file = open("%s.api" % self.clsname, "w")
return self
def __exit__(self, *args):
self.file.close()
print "wrote", self.file.name
def line(self, line):
self.file.write(line+"\n")
def docmethod(self, method):
doc = " ".join(method.__doc__.split())
indent = " "
w = textwrap.TextWrapper(initial_indent=indent,
subsequent_indent=indent)
spec = inspect.getargspec(method)
del spec.args[0]
self.line(".. py:method:: " + method.__name__ +
inspect.formatargspec(*spec))
self.line("")
self.line(w.fill(doc))
self.line("")
def pytest_funcarg__a(request):
with Writer("request") as writer:
writer.docmethod(request.getfuncargvalue)
writer.docmethod(request.cached_setup)
writer.docmethod(request.addfinalizer)
writer.docmethod(request.applymarker)
def test_hello(a):
pass
|
vitan/hue | refs/heads/master | desktop/core/ext-py/Django-1.6.10/tests/admin_ordering/tests.py | 49 | from __future__ import absolute_import, unicode_literals
from django.test import TestCase, RequestFactory
from django.contrib import admin
from django.contrib.admin.options import ModelAdmin
from django.contrib.auth.models import User
from .models import (Band, Song, SongInlineDefaultOrdering,
SongInlineNewOrdering, DynOrderingBandAdmin)
class MockRequest(object):
pass
class MockSuperUser(object):
def has_perm(self, perm):
return True
request = MockRequest()
request.user = MockSuperUser()
class TestAdminOrdering(TestCase):
"""
Let's make sure that ModelAdmin.get_queryset uses the ordering we define
    in ModelAdmin rather than the ordering defined in the model's inner Meta
class.
"""
def setUp(self):
self.request_factory = RequestFactory()
b1 = Band(name='Aerosmith', bio='', rank=3)
b1.save()
b2 = Band(name='Radiohead', bio='', rank=1)
b2.save()
b3 = Band(name='Van Halen', bio='', rank=2)
b3.save()
def test_default_ordering(self):
"""
The default ordering should be by name, as specified in the inner Meta
class.
"""
ma = ModelAdmin(Band, None)
names = [b.name for b in ma.get_queryset(request)]
self.assertEqual(['Aerosmith', 'Radiohead', 'Van Halen'], names)
def test_specified_ordering(self):
"""
Let's use a custom ModelAdmin that changes the ordering, and make sure
it actually changes.
"""
class BandAdmin(ModelAdmin):
ordering = ('rank',) # default ordering is ('name',)
ma = BandAdmin(Band, None)
names = [b.name for b in ma.get_queryset(request)]
self.assertEqual(['Radiohead', 'Van Halen', 'Aerosmith'], names)
def test_dynamic_ordering(self):
"""
        Let's use a custom ModelAdmin that changes the ordering dynamically.
"""
super_user = User.objects.create(username='admin', is_superuser=True)
other_user = User.objects.create(username='other')
request = self.request_factory.get('/')
request.user = super_user
ma = DynOrderingBandAdmin(Band, None)
names = [b.name for b in ma.get_queryset(request)]
self.assertEqual(['Radiohead', 'Van Halen', 'Aerosmith'], names)
request.user = other_user
names = [b.name for b in ma.get_queryset(request)]
self.assertEqual(['Aerosmith', 'Radiohead', 'Van Halen'], names)
class TestInlineModelAdminOrdering(TestCase):
"""
Let's make sure that InlineModelAdmin.get_queryset uses the ordering we
define in InlineModelAdmin.
"""
def setUp(self):
b = Band(name='Aerosmith', bio='', rank=3)
b.save()
self.b = b
s1 = Song(band=b, name='Pink', duration=235)
s1.save()
s2 = Song(band=b, name='Dude (Looks Like a Lady)', duration=264)
s2.save()
s3 = Song(band=b, name='Jaded', duration=214)
s3.save()
def test_default_ordering(self):
"""
The default ordering should be by name, as specified in the inner Meta
class.
"""
inline = SongInlineDefaultOrdering(self.b, None)
names = [s.name for s in inline.get_queryset(request)]
self.assertEqual(['Dude (Looks Like a Lady)', 'Jaded', 'Pink'], names)
def test_specified_ordering(self):
"""
Let's check with ordering set to something different than the default.
"""
inline = SongInlineNewOrdering(self.b, None)
names = [s.name for s in inline.get_queryset(request)]
self.assertEqual(['Jaded', 'Pink', 'Dude (Looks Like a Lady)'], names)
class TestRelatedFieldsAdminOrdering(TestCase):
def setUp(self):
self.b1 = Band(name='Pink Floyd', bio='', rank=1)
self.b1.save()
self.b2 = Band(name='Foo Fighters', bio='', rank=5)
self.b2.save()
# we need to register a custom ModelAdmin (instead of just using
# ModelAdmin) because the field creator tries to find the ModelAdmin
# for the related model
class SongAdmin(admin.ModelAdmin):
pass
admin.site.register(Song, SongAdmin)
def check_ordering_of_field_choices(self, correct_ordering):
fk_field = admin.site._registry[Song].formfield_for_foreignkey(Song.band.field)
m2m_field = admin.site._registry[Song].formfield_for_manytomany(Song.other_interpreters.field)
self.assertEqual(list(fk_field.queryset), correct_ordering)
self.assertEqual(list(m2m_field.queryset), correct_ordering)
def test_no_admin_fallback_to_model_ordering(self):
# should be ordered by name (as defined by the model)
self.check_ordering_of_field_choices([self.b2, self.b1])
def test_admin_with_no_ordering_fallback_to_model_ordering(self):
class NoOrderingBandAdmin(admin.ModelAdmin):
pass
admin.site.register(Band, NoOrderingBandAdmin)
# should be ordered by name (as defined by the model)
self.check_ordering_of_field_choices([self.b2, self.b1])
def test_admin_ordering_beats_model_ordering(self):
class StaticOrderingBandAdmin(admin.ModelAdmin):
ordering = ('rank', )
admin.site.register(Band, StaticOrderingBandAdmin)
# should be ordered by rank (defined by the ModelAdmin)
self.check_ordering_of_field_choices([self.b1, self.b2])
def test_custom_queryset_still_wins(self):
"""Test that custom queryset has still precedence (#21405)"""
class SongAdmin(admin.ModelAdmin):
# Exclude one of the two Bands from the querysets
def formfield_for_foreignkey(self, db_field, **kwargs):
if db_field.name == 'band':
kwargs["queryset"] = Band.objects.filter(rank__gt=2)
return super(SongAdmin, self).formfield_for_foreignkey(db_field, **kwargs)
def formfield_for_manytomany(self, db_field, **kwargs):
if db_field.name == 'other_interpreters':
kwargs["queryset"] = Band.objects.filter(rank__gt=2)
                return super(SongAdmin, self).formfield_for_manytomany(db_field, **kwargs)
class StaticOrderingBandAdmin(admin.ModelAdmin):
ordering = ('rank',)
admin.site.unregister(Song)
admin.site.register(Song, SongAdmin)
admin.site.register(Band, StaticOrderingBandAdmin)
self.check_ordering_of_field_choices([self.b2])
def tearDown(self):
admin.site.unregister(Song)
if Band in admin.site._registry:
admin.site.unregister(Band)
|
gpg/gpgme | refs/heads/master | lang/python/src/constants/event.py | 1 | # Copyright (C) 2004 Igor Belyi <[email protected]>
# Copyright (C) 2002 John Goerzen <[email protected]>
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
from __future__ import absolute_import, print_function, unicode_literals
from gpg import util
util.process_constants('GPGME_EVENT_', globals())
del absolute_import, print_function, unicode_literals, util
|
Reagankm/KnockKnock | refs/heads/master | venv/lib/python3.4/site-packages/numpy/matrixlib/defmatrix.py | 38 | from __future__ import division, absolute_import, print_function
__all__ = ['matrix', 'bmat', 'mat', 'asmatrix']
import sys
import numpy.core.numeric as N
from numpy.core.numeric import concatenate, isscalar, binary_repr, identity, asanyarray
from numpy.core.numerictypes import issubdtype
# make translation table
_numchars = '0123456789.-+jeEL'
if sys.version_info[0] >= 3:
class _NumCharTable:
def __getitem__(self, i):
if chr(i) in _numchars:
return chr(i)
else:
return None
_table = _NumCharTable()
def _eval(astr):
str_ = astr.translate(_table)
if not str_:
raise TypeError("Invalid data string supplied: " + astr)
else:
return eval(str_)
else:
_table = [None]*256
for k in range(256):
_table[k] = chr(k)
_table = ''.join(_table)
_todelete = []
for k in _table:
if k not in _numchars:
_todelete.append(k)
_todelete = ''.join(_todelete)
del k
def _eval(astr):
str_ = astr.translate(_table, _todelete)
if not str_:
raise TypeError("Invalid data string supplied: " + astr)
else:
return eval(str_)
def _convert_from_string(data):
rows = data.split(';')
newdata = []
count = 0
for row in rows:
trow = row.split(',')
newrow = []
for col in trow:
temp = col.split()
newrow.extend(map(_eval, temp))
if count == 0:
Ncols = len(newrow)
elif len(newrow) != Ncols:
raise ValueError("Rows not the same size.")
count += 1
newdata.append(newrow)
return newdata
def asmatrix(data, dtype=None):
"""
Interpret the input as a matrix.
Unlike `matrix`, `asmatrix` does not make a copy if the input is already
a matrix or an ndarray. Equivalent to ``matrix(data, copy=False)``.
Parameters
----------
data : array_like
Input data.
Returns
-------
mat : matrix
`data` interpreted as a matrix.
Examples
--------
>>> x = np.array([[1, 2], [3, 4]])
>>> m = np.asmatrix(x)
>>> x[0,0] = 5
>>> m
matrix([[5, 2],
[3, 4]])
"""
return matrix(data, dtype=dtype, copy=False)
def matrix_power(M, n):
"""
Raise a square matrix to the (integer) power `n`.
For positive integers `n`, the power is computed by repeated matrix
squarings and matrix multiplications. If ``n == 0``, the identity matrix
of the same shape as M is returned. If ``n < 0``, the inverse
is computed and then raised to the ``abs(n)``.
Parameters
----------
M : ndarray or matrix object
Matrix to be "powered." Must be square, i.e. ``M.shape == (m, m)``,
with `m` a positive integer.
n : int
The exponent can be any integer or long integer, positive,
negative, or zero.
Returns
-------
M**n : ndarray or matrix object
The return value is the same shape and type as `M`;
if the exponent is positive or zero then the type of the
elements is the same as those of `M`. If the exponent is
negative the elements are floating-point.
Raises
------
LinAlgError
If the matrix is not numerically invertible.
See Also
--------
matrix
Provides an equivalent function as the exponentiation operator
(``**``, not ``^``).
Examples
--------
>>> from numpy import linalg as LA
>>> i = np.array([[0, 1], [-1, 0]]) # matrix equiv. of the imaginary unit
>>> LA.matrix_power(i, 3) # should = -i
array([[ 0, -1],
[ 1, 0]])
>>> LA.matrix_power(np.matrix(i), 3) # matrix arg returns matrix
matrix([[ 0, -1],
[ 1, 0]])
>>> LA.matrix_power(i, 0)
array([[1, 0],
[0, 1]])
>>> LA.matrix_power(i, -3) # should = 1/(-i) = i, but w/ f.p. elements
array([[ 0., 1.],
[-1., 0.]])
Somewhat more sophisticated example
>>> q = np.zeros((4, 4))
>>> q[0:2, 0:2] = -i
>>> q[2:4, 2:4] = i
    >>> q # one of the three quaternion units not equal to 1
array([[ 0., -1., 0., 0.],
[ 1., 0., 0., 0.],
[ 0., 0., 0., 1.],
[ 0., 0., -1., 0.]])
>>> LA.matrix_power(q, 2) # = -np.eye(4)
array([[-1., 0., 0., 0.],
[ 0., -1., 0., 0.],
[ 0., 0., -1., 0.],
[ 0., 0., 0., -1.]])
"""
M = asanyarray(M)
if len(M.shape) != 2 or M.shape[0] != M.shape[1]:
raise ValueError("input must be a square array")
if not issubdtype(type(n), int):
raise TypeError("exponent must be an integer")
from numpy.linalg import inv
if n==0:
M = M.copy()
M[:] = identity(M.shape[0])
return M
elif n<0:
M = inv(M)
n *= -1
result = M
if n <= 3:
for _ in range(n-1):
result=N.dot(result, M)
return result
# binary decomposition to reduce the number of Matrix
# multiplications for n > 3.
beta = binary_repr(n)
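    # e.g. for n = 6, beta == '110': the loop below squares past the trailing
    # zero (Z = M**2, q = 1), sets result = M**2, then squares to M**4 and
    # multiplies it in for the leading '1', giving M**6.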
Z, q, t = M, 0, len(beta)
while beta[t-q-1] == '0':
Z = N.dot(Z, Z)
q += 1
result = Z
for k in range(q+1, t):
Z = N.dot(Z, Z)
if beta[t-k-1] == '1':
result = N.dot(result, Z)
return result
class matrix(N.ndarray):
"""
matrix(data, dtype=None, copy=True)
Returns a matrix from an array-like object, or from a string of data.
A matrix is a specialized 2-D array that retains its 2-D nature
through operations. It has certain special operators, such as ``*``
(matrix multiplication) and ``**`` (matrix power).
Parameters
----------
data : array_like or string
If `data` is a string, it is interpreted as a matrix with commas
or spaces separating columns, and semicolons separating rows.
dtype : data-type
Data-type of the output matrix.
copy : bool
If `data` is already an `ndarray`, then this flag determines
whether the data is copied (the default), or whether a view is
constructed.
See Also
--------
array
Examples
--------
>>> a = np.matrix('1 2; 3 4')
>>> print a
[[1 2]
[3 4]]
>>> np.matrix([[1, 2], [3, 4]])
matrix([[1, 2],
[3, 4]])
"""
__array_priority__ = 10.0
def __new__(subtype, data, dtype=None, copy=True):
if isinstance(data, matrix):
dtype2 = data.dtype
if (dtype is None):
dtype = dtype2
if (dtype2 == dtype) and (not copy):
return data
return data.astype(dtype)
if isinstance(data, N.ndarray):
if dtype is None:
intype = data.dtype
else:
intype = N.dtype(dtype)
new = data.view(subtype)
if intype != data.dtype:
return new.astype(intype)
if copy: return new.copy()
else: return new
if isinstance(data, str):
data = _convert_from_string(data)
# now convert data to an array
arr = N.array(data, dtype=dtype, copy=copy)
ndim = arr.ndim
shape = arr.shape
if (ndim > 2):
raise ValueError("matrix must be 2-dimensional")
elif ndim == 0:
shape = (1, 1)
elif ndim == 1:
shape = (1, shape[0])
order = False
if (ndim == 2) and arr.flags.fortran:
order = True
if not (order or arr.flags.contiguous):
arr = arr.copy()
ret = N.ndarray.__new__(subtype, shape, arr.dtype,
buffer=arr,
order=order)
return ret
def __array_finalize__(self, obj):
self._getitem = False
if (isinstance(obj, matrix) and obj._getitem): return
ndim = self.ndim
if (ndim == 2):
return
if (ndim > 2):
newshape = tuple([x for x in self.shape if x > 1])
ndim = len(newshape)
if ndim == 2:
self.shape = newshape
return
elif (ndim > 2):
raise ValueError("shape too large to be a matrix.")
else:
newshape = self.shape
if ndim == 0:
self.shape = (1, 1)
elif ndim == 1:
self.shape = (1, newshape[0])
return
def __getitem__(self, index):
self._getitem = True
try:
out = N.ndarray.__getitem__(self, index)
finally:
self._getitem = False
if not isinstance(out, N.ndarray):
return out
if out.ndim == 0:
return out[()]
if out.ndim == 1:
sh = out.shape[0]
# Determine when we should have a column array
try:
n = len(index)
except:
n = 0
if n > 1 and isscalar(index[1]):
out.shape = (sh, 1)
else:
out.shape = (1, sh)
return out
def __mul__(self, other):
if isinstance(other, (N.ndarray, list, tuple)) :
# This promotes 1-D vectors to row vectors
return N.dot(self, asmatrix(other))
if isscalar(other) or not hasattr(other, '__rmul__') :
return N.dot(self, other)
return NotImplemented
def __rmul__(self, other):
return N.dot(other, self)
def __imul__(self, other):
self[:] = self * other
return self
def __pow__(self, other):
return matrix_power(self, other)
def __ipow__(self, other):
self[:] = self ** other
return self
def __rpow__(self, other):
return NotImplemented
def __repr__(self):
s = repr(self.__array__()).replace('array', 'matrix')
# now, 'matrix' has 6 letters, and 'array' 5, so the columns don't
# line up anymore. We need to add a space.
l = s.splitlines()
for i in range(1, len(l)):
if l[i]:
l[i] = ' ' + l[i]
return '\n'.join(l)
def __str__(self):
return str(self.__array__())
def _align(self, axis):
"""A convenience function for operations that need to preserve axis
orientation.
"""
if axis is None:
return self[0, 0]
elif axis==0:
return self
elif axis==1:
return self.transpose()
else:
raise ValueError("unsupported axis")
def _collapse(self, axis):
"""A convenience function for operations that want to collapse
to a scalar like _align, but are using keepdims=True
"""
if axis is None:
return self[0, 0]
else:
return self
# Necessary because base-class tolist expects dimension
# reduction by x[0]
def tolist(self):
"""
Return the matrix as a (possibly nested) list.
See `ndarray.tolist` for full documentation.
See Also
--------
ndarray.tolist
Examples
--------
>>> x = np.matrix(np.arange(12).reshape((3,4))); x
matrix([[ 0, 1, 2, 3],
[ 4, 5, 6, 7],
[ 8, 9, 10, 11]])
>>> x.tolist()
[[0, 1, 2, 3], [4, 5, 6, 7], [8, 9, 10, 11]]
"""
return self.__array__().tolist()
# To preserve orientation of result...
def sum(self, axis=None, dtype=None, out=None):
"""
Returns the sum of the matrix elements, along the given axis.
Refer to `numpy.sum` for full documentation.
See Also
--------
numpy.sum
Notes
-----
This is the same as `ndarray.sum`, except that where an `ndarray` would
be returned, a `matrix` object is returned instead.
Examples
--------
>>> x = np.matrix([[1, 2], [4, 3]])
>>> x.sum()
10
>>> x.sum(axis=1)
matrix([[3],
[7]])
>>> x.sum(axis=1, dtype='float')
matrix([[ 3.],
[ 7.]])
>>> out = np.zeros((1, 2), dtype='float')
>>> x.sum(axis=1, dtype='float', out=out)
matrix([[ 3.],
[ 7.]])
"""
return N.ndarray.sum(self, axis, dtype, out, keepdims=True)._collapse(axis)
def mean(self, axis=None, dtype=None, out=None):
"""
Returns the average of the matrix elements along the given axis.
Refer to `numpy.mean` for full documentation.
See Also
--------
numpy.mean
Notes
-----
Same as `ndarray.mean` except that, where that returns an `ndarray`,
this returns a `matrix` object.
Examples
--------
>>> x = np.matrix(np.arange(12).reshape((3, 4)))
>>> x
matrix([[ 0, 1, 2, 3],
[ 4, 5, 6, 7],
[ 8, 9, 10, 11]])
>>> x.mean()
5.5
>>> x.mean(0)
matrix([[ 4., 5., 6., 7.]])
>>> x.mean(1)
matrix([[ 1.5],
[ 5.5],
[ 9.5]])
"""
return N.ndarray.mean(self, axis, dtype, out, keepdims=True)._collapse(axis)
def std(self, axis=None, dtype=None, out=None, ddof=0):
"""
Return the standard deviation of the array elements along the given axis.
Refer to `numpy.std` for full documentation.
See Also
--------
numpy.std
Notes
-----
This is the same as `ndarray.std`, except that where an `ndarray` would
be returned, a `matrix` object is returned instead.
Examples
--------
>>> x = np.matrix(np.arange(12).reshape((3, 4)))
>>> x
matrix([[ 0, 1, 2, 3],
[ 4, 5, 6, 7],
[ 8, 9, 10, 11]])
>>> x.std()
3.4520525295346629
>>> x.std(0)
matrix([[ 3.26598632, 3.26598632, 3.26598632, 3.26598632]])
>>> x.std(1)
matrix([[ 1.11803399],
[ 1.11803399],
[ 1.11803399]])
"""
return N.ndarray.std(self, axis, dtype, out, ddof, keepdims=True)._collapse(axis)
def var(self, axis=None, dtype=None, out=None, ddof=0):
"""
Returns the variance of the matrix elements, along the given axis.
Refer to `numpy.var` for full documentation.
See Also
--------
numpy.var
Notes
-----
This is the same as `ndarray.var`, except that where an `ndarray` would
be returned, a `matrix` object is returned instead.
Examples
--------
>>> x = np.matrix(np.arange(12).reshape((3, 4)))
>>> x
matrix([[ 0, 1, 2, 3],
[ 4, 5, 6, 7],
[ 8, 9, 10, 11]])
>>> x.var()
11.916666666666666
>>> x.var(0)
matrix([[ 10.66666667, 10.66666667, 10.66666667, 10.66666667]])
>>> x.var(1)
matrix([[ 1.25],
[ 1.25],
[ 1.25]])
"""
return N.ndarray.var(self, axis, dtype, out, ddof, keepdims=True)._collapse(axis)
def prod(self, axis=None, dtype=None, out=None):
"""
Return the product of the array elements over the given axis.
Refer to `prod` for full documentation.
See Also
--------
prod, ndarray.prod
Notes
-----
Same as `ndarray.prod`, except, where that returns an `ndarray`, this
returns a `matrix` object instead.
Examples
--------
>>> x = np.matrix(np.arange(12).reshape((3,4))); x
matrix([[ 0, 1, 2, 3],
[ 4, 5, 6, 7],
[ 8, 9, 10, 11]])
>>> x.prod()
0
>>> x.prod(0)
matrix([[ 0, 45, 120, 231]])
>>> x.prod(1)
matrix([[ 0],
[ 840],
[7920]])
"""
return N.ndarray.prod(self, axis, dtype, out, keepdims=True)._collapse(axis)
def any(self, axis=None, out=None):
"""
Test whether any array element along a given axis evaluates to True.
Refer to `numpy.any` for full documentation.
Parameters
----------
axis : int, optional
Axis along which logical OR is performed
out : ndarray, optional
Output to existing array instead of creating new one, must have
same shape as expected output
Returns
-------
        any : bool, matrix
            Returns a single bool if `axis` is ``None``; otherwise,
            returns a `matrix` of bools
"""
return N.ndarray.any(self, axis, out, keepdims=True)._collapse(axis)
def all(self, axis=None, out=None):
"""
Test whether all matrix elements along a given axis evaluate to True.
Parameters
----------
See `numpy.all` for complete descriptions
See Also
--------
numpy.all
Notes
-----
This is the same as `ndarray.all`, but it returns a `matrix` object.
Examples
--------
>>> x = np.matrix(np.arange(12).reshape((3,4))); x
matrix([[ 0, 1, 2, 3],
[ 4, 5, 6, 7],
[ 8, 9, 10, 11]])
>>> y = x[0]; y
matrix([[0, 1, 2, 3]])
>>> (x == y)
matrix([[ True, True, True, True],
[False, False, False, False],
[False, False, False, False]], dtype=bool)
>>> (x == y).all()
False
>>> (x == y).all(0)
matrix([[False, False, False, False]], dtype=bool)
>>> (x == y).all(1)
matrix([[ True],
[False],
[False]], dtype=bool)
"""
return N.ndarray.all(self, axis, out, keepdims=True)._collapse(axis)
def max(self, axis=None, out=None):
"""
Return the maximum value along an axis.
Parameters
----------
See `amax` for complete descriptions
See Also
--------
amax, ndarray.max
Notes
-----
This is the same as `ndarray.max`, but returns a `matrix` object
where `ndarray.max` would return an ndarray.
Examples
--------
>>> x = np.matrix(np.arange(12).reshape((3,4))); x
matrix([[ 0, 1, 2, 3],
[ 4, 5, 6, 7],
[ 8, 9, 10, 11]])
>>> x.max()
11
>>> x.max(0)
matrix([[ 8, 9, 10, 11]])
>>> x.max(1)
matrix([[ 3],
[ 7],
[11]])
"""
return N.ndarray.max(self, axis, out, keepdims=True)._collapse(axis)
def argmax(self, axis=None, out=None):
"""
Indices of the maximum values along an axis.
Parameters
----------
See `numpy.argmax` for complete descriptions
See Also
--------
numpy.argmax
Notes
-----
This is the same as `ndarray.argmax`, but returns a `matrix` object
where `ndarray.argmax` would return an `ndarray`.
Examples
--------
>>> x = np.matrix(np.arange(12).reshape((3,4))); x
matrix([[ 0, 1, 2, 3],
[ 4, 5, 6, 7],
[ 8, 9, 10, 11]])
>>> x.argmax()
11
>>> x.argmax(0)
matrix([[2, 2, 2, 2]])
>>> x.argmax(1)
matrix([[3],
[3],
[3]])
"""
return N.ndarray.argmax(self, axis, out)._align(axis)
def min(self, axis=None, out=None):
"""
Return the minimum value along an axis.
Parameters
----------
See `amin` for complete descriptions.
See Also
--------
amin, ndarray.min
Notes
-----
This is the same as `ndarray.min`, but returns a `matrix` object
where `ndarray.min` would return an ndarray.
Examples
--------
>>> x = -np.matrix(np.arange(12).reshape((3,4))); x
matrix([[ 0, -1, -2, -3],
[ -4, -5, -6, -7],
[ -8, -9, -10, -11]])
>>> x.min()
-11
>>> x.min(0)
matrix([[ -8, -9, -10, -11]])
>>> x.min(1)
matrix([[ -3],
[ -7],
[-11]])
"""
return N.ndarray.min(self, axis, out, keepdims=True)._collapse(axis)
def argmin(self, axis=None, out=None):
"""
Return the indices of the minimum values along an axis.
Parameters
----------
See `numpy.argmin` for complete descriptions.
See Also
--------
numpy.argmin
Notes
-----
This is the same as `ndarray.argmin`, but returns a `matrix` object
where `ndarray.argmin` would return an `ndarray`.
Examples
--------
>>> x = -np.matrix(np.arange(12).reshape((3,4))); x
matrix([[ 0, -1, -2, -3],
[ -4, -5, -6, -7],
[ -8, -9, -10, -11]])
>>> x.argmin()
11
>>> x.argmin(0)
matrix([[2, 2, 2, 2]])
>>> x.argmin(1)
matrix([[3],
[3],
[3]])
"""
return N.ndarray.argmin(self, axis, out)._align(axis)
def ptp(self, axis=None, out=None):
"""
Peak-to-peak (maximum - minimum) value along the given axis.
Refer to `numpy.ptp` for full documentation.
See Also
--------
numpy.ptp
Notes
-----
Same as `ndarray.ptp`, except, where that would return an `ndarray` object,
this returns a `matrix` object.
Examples
--------
>>> x = np.matrix(np.arange(12).reshape((3,4))); x
matrix([[ 0, 1, 2, 3],
[ 4, 5, 6, 7],
[ 8, 9, 10, 11]])
>>> x.ptp()
11
>>> x.ptp(0)
matrix([[8, 8, 8, 8]])
>>> x.ptp(1)
matrix([[3],
[3],
[3]])
"""
return N.ndarray.ptp(self, axis, out)._align(axis)
def getI(self):
"""
Returns the (multiplicative) inverse of invertible `self`.
Parameters
----------
None
Returns
-------
ret : matrix object
If `self` is non-singular, `ret` is such that ``ret * self`` ==
            ``self * ret`` == ``np.matrix(np.eye(self[0,:].size))`` all return
``True``.
Raises
------
numpy.linalg.LinAlgError: Singular matrix
If `self` is singular.
See Also
--------
linalg.inv
Examples
--------
>>> m = np.matrix('[1, 2; 3, 4]'); m
matrix([[1, 2],
[3, 4]])
>>> m.getI()
matrix([[-2. , 1. ],
[ 1.5, -0.5]])
>>> m.getI() * m
matrix([[ 1., 0.],
[ 0., 1.]])
"""
M, N = self.shape
if M == N:
from numpy.dual import inv as func
else:
from numpy.dual import pinv as func
return asmatrix(func(self))
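    # Illustrative note (not part of the original source): for a non-square
    # matrix the Moore-Penrose pseudo-inverse is returned instead, e.g.
    # m = np.matrix('1 2 3; 4 5 6'); m.getI() has shape (3, 2) and
    # m * m.getI() is approximately np.eye(2).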
def getA(self):
"""
Return `self` as an `ndarray` object.
Equivalent to ``np.asarray(self)``.
Parameters
----------
None
Returns
-------
ret : ndarray
`self` as an `ndarray`
Examples
--------
>>> x = np.matrix(np.arange(12).reshape((3,4))); x
matrix([[ 0, 1, 2, 3],
[ 4, 5, 6, 7],
[ 8, 9, 10, 11]])
>>> x.getA()
array([[ 0, 1, 2, 3],
[ 4, 5, 6, 7],
[ 8, 9, 10, 11]])
"""
return self.__array__()
def getA1(self):
"""
Return `self` as a flattened `ndarray`.
Equivalent to ``np.asarray(x).ravel()``
Parameters
----------
None
Returns
-------
ret : ndarray
`self`, 1-D, as an `ndarray`
Examples
--------
>>> x = np.matrix(np.arange(12).reshape((3,4))); x
matrix([[ 0, 1, 2, 3],
[ 4, 5, 6, 7],
[ 8, 9, 10, 11]])
>>> x.getA1()
array([ 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11])
"""
return self.__array__().ravel()
def getT(self):
"""
Returns the transpose of the matrix.
Does *not* conjugate! For the complex conjugate transpose, use ``.H``.
Parameters
----------
None
Returns
-------
ret : matrix object
The (non-conjugated) transpose of the matrix.
See Also
--------
transpose, getH
Examples
--------
>>> m = np.matrix('[1, 2; 3, 4]')
>>> m
matrix([[1, 2],
[3, 4]])
>>> m.getT()
matrix([[1, 3],
[2, 4]])
"""
return self.transpose()
def getH(self):
"""
Returns the (complex) conjugate transpose of `self`.
Equivalent to ``np.transpose(self)`` if `self` is real-valued.
Parameters
----------
None
Returns
-------
ret : matrix object
complex conjugate transpose of `self`
Examples
--------
>>> x = np.matrix(np.arange(12).reshape((3,4)))
>>> z = x - 1j*x; z
matrix([[ 0. +0.j, 1. -1.j, 2. -2.j, 3. -3.j],
[ 4. -4.j, 5. -5.j, 6. -6.j, 7. -7.j],
[ 8. -8.j, 9. -9.j, 10.-10.j, 11.-11.j]])
>>> z.getH()
matrix([[ 0. +0.j, 4. +4.j, 8. +8.j],
[ 1. +1.j, 5. +5.j, 9. +9.j],
[ 2. +2.j, 6. +6.j, 10.+10.j],
[ 3. +3.j, 7. +7.j, 11.+11.j]])
"""
if issubclass(self.dtype.type, N.complexfloating):
return self.transpose().conjugate()
else:
return self.transpose()
T = property(getT, None)
A = property(getA, None)
A1 = property(getA1, None)
H = property(getH, None)
I = property(getI, None)
def _from_string(str, gdict, ldict):
rows = str.split(';')
rowtup = []
for row in rows:
trow = row.split(',')
newrow = []
for x in trow:
newrow.extend(x.split())
trow = newrow
coltup = []
for col in trow:
col = col.strip()
try:
thismat = ldict[col]
except KeyError:
try:
thismat = gdict[col]
except KeyError:
raise KeyError("%s not found" % (col,))
coltup.append(thismat)
rowtup.append(concatenate(coltup, axis=-1))
return concatenate(rowtup, axis=0)
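# Illustrative note (not part of the original source): given matrices A and B
# already in scope, _from_string('A, B; A, B', globals(), locals()) splits
# rows on ';' and columns on ',' or whitespace, resolves each name first in
# ldict and then in gdict, and concatenates the resulting blocks.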
def bmat(obj, ldict=None, gdict=None):
"""
Build a matrix object from a string, nested sequence, or array.
Parameters
----------
obj : str or array_like
Input data. Names of variables in the current scope may be
referenced, even if `obj` is a string.
Returns
-------
out : matrix
Returns a matrix object, which is a specialized 2-D array.
See Also
--------
matrix
Examples
--------
>>> A = np.mat('1 1; 1 1')
>>> B = np.mat('2 2; 2 2')
>>> C = np.mat('3 4; 5 6')
>>> D = np.mat('7 8; 9 0')
All the following expressions construct the same block matrix:
>>> np.bmat([[A, B], [C, D]])
matrix([[1, 1, 2, 2],
[1, 1, 2, 2],
[3, 4, 7, 8],
[5, 6, 9, 0]])
>>> np.bmat(np.r_[np.c_[A, B], np.c_[C, D]])
matrix([[1, 1, 2, 2],
[1, 1, 2, 2],
[3, 4, 7, 8],
[5, 6, 9, 0]])
>>> np.bmat('A,B; C,D')
matrix([[1, 1, 2, 2],
[1, 1, 2, 2],
[3, 4, 7, 8],
[5, 6, 9, 0]])
"""
if isinstance(obj, str):
if gdict is None:
# get previous frame
frame = sys._getframe().f_back
glob_dict = frame.f_globals
loc_dict = frame.f_locals
else:
glob_dict = gdict
loc_dict = ldict
return matrix(_from_string(obj, glob_dict, loc_dict))
if isinstance(obj, (tuple, list)):
# [[A,B],[C,D]]
arr_rows = []
for row in obj:
if isinstance(row, N.ndarray): # not 2-d
return matrix(concatenate(obj, axis=-1))
else:
arr_rows.append(concatenate(row, axis=-1))
return matrix(concatenate(arr_rows, axis=0))
if isinstance(obj, N.ndarray):
return matrix(obj)
mat = asmatrix
|
MaxGuevara/quark | refs/heads/master | qa/rpc-tests/forknotify.py | 161 | #!/usr/bin/env python2
# Copyright (c) 2014 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
#
# Test -alertnotify
#
from test_framework import BitcoinTestFramework
from bitcoinrpc.authproxy import AuthServiceProxy, JSONRPCException
from util import *
import os
import shutil
class ForkNotifyTest(BitcoinTestFramework):
alert_filename = None # Set by setup_network
def setup_network(self):
self.nodes = []
self.alert_filename = os.path.join(self.options.tmpdir, "alert.txt")
with open(self.alert_filename, 'w') as f:
pass # Just open then close to create zero-length file
self.nodes.append(start_node(0, self.options.tmpdir,
["-blockversion=2", "-alertnotify=echo %s >> \"" + self.alert_filename + "\""]))
# Node1 mines block.version=211 blocks
self.nodes.append(start_node(1, self.options.tmpdir,
["-blockversion=211"]))
connect_nodes(self.nodes[1], 0)
self.is_network_split = False
self.sync_all()
def run_test(self):
# Mine 51 up-version blocks
self.nodes[1].setgenerate(True, 51)
self.sync_all()
        # -alertnotify should trigger on the 51st block,
# but mine and sync another to give
# -alertnotify time to write
self.nodes[1].setgenerate(True, 1)
self.sync_all()
with open(self.alert_filename, 'r') as f:
alert_text = f.read()
if len(alert_text) == 0:
raise AssertionError("-alertnotify did not warn of up-version blocks")
# Mine more up-version blocks, should not get more alerts:
self.nodes[1].setgenerate(True, 1)
self.sync_all()
self.nodes[1].setgenerate(True, 1)
self.sync_all()
with open(self.alert_filename, 'r') as f:
alert_text2 = f.read()
if alert_text != alert_text2:
raise AssertionError("-alertnotify excessive warning of up-version blocks")
if __name__ == '__main__':
ForkNotifyTest().main()
|
nzurbrugg/ShyreDb | refs/heads/master | pedigree/migrations/0002_auto_20151005_0203.py | 1 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('pedigree', '0001_initial'),
]
operations = [
migrations.AlterField(
model_name='dog',
name='gender',
field=models.SmallIntegerField(choices=[(1, 'Female'), (2, 'Male')], null=True, default=1, blank=True),
),
]
|
gibxxi/nzbToMedia | refs/heads/master | libs/unidecode/x00c.py | 252 | data = (
'[?]', # 0x00
'N', # 0x01
'N', # 0x02
'H', # 0x03
'[?]', # 0x04
'a', # 0x05
'aa', # 0x06
'i', # 0x07
'ii', # 0x08
'u', # 0x09
'uu', # 0x0a
'R', # 0x0b
'L', # 0x0c
'[?]', # 0x0d
'e', # 0x0e
'ee', # 0x0f
'ai', # 0x10
'[?]', # 0x11
'o', # 0x12
'oo', # 0x13
'au', # 0x14
'k', # 0x15
'kh', # 0x16
'g', # 0x17
'gh', # 0x18
'ng', # 0x19
'c', # 0x1a
'ch', # 0x1b
'j', # 0x1c
'jh', # 0x1d
'ny', # 0x1e
'tt', # 0x1f
'tth', # 0x20
'dd', # 0x21
'ddh', # 0x22
'nn', # 0x23
't', # 0x24
'th', # 0x25
'd', # 0x26
'dh', # 0x27
'n', # 0x28
'[?]', # 0x29
'p', # 0x2a
'ph', # 0x2b
'b', # 0x2c
'bh', # 0x2d
'm', # 0x2e
'y', # 0x2f
'r', # 0x30
'rr', # 0x31
'l', # 0x32
'll', # 0x33
'[?]', # 0x34
'v', # 0x35
'sh', # 0x36
'ss', # 0x37
's', # 0x38
'h', # 0x39
'[?]', # 0x3a
'[?]', # 0x3b
'[?]', # 0x3c
'[?]', # 0x3d
'aa', # 0x3e
'i', # 0x3f
'ii', # 0x40
'u', # 0x41
'uu', # 0x42
'R', # 0x43
'RR', # 0x44
'[?]', # 0x45
'e', # 0x46
'ee', # 0x47
'ai', # 0x48
'[?]', # 0x49
'o', # 0x4a
'oo', # 0x4b
'au', # 0x4c
'', # 0x4d
'[?]', # 0x4e
'[?]', # 0x4f
'[?]', # 0x50
'[?]', # 0x51
'[?]', # 0x52
'[?]', # 0x53
'[?]', # 0x54
'+', # 0x55
'+', # 0x56
'[?]', # 0x57
'[?]', # 0x58
'[?]', # 0x59
'[?]', # 0x5a
'[?]', # 0x5b
'[?]', # 0x5c
'[?]', # 0x5d
'[?]', # 0x5e
'[?]', # 0x5f
'RR', # 0x60
'LL', # 0x61
'[?]', # 0x62
'[?]', # 0x63
'[?]', # 0x64
'[?]', # 0x65
'0', # 0x66
'1', # 0x67
'2', # 0x68
'3', # 0x69
'4', # 0x6a
'5', # 0x6b
'6', # 0x6c
'7', # 0x6d
'8', # 0x6e
'9', # 0x6f
'[?]', # 0x70
'[?]', # 0x71
'[?]', # 0x72
'[?]', # 0x73
'[?]', # 0x74
'[?]', # 0x75
'[?]', # 0x76
'[?]', # 0x77
'[?]', # 0x78
'[?]', # 0x79
'[?]', # 0x7a
'[?]', # 0x7b
'[?]', # 0x7c
'[?]', # 0x7d
'[?]', # 0x7e
'[?]', # 0x7f
'[?]', # 0x80
'[?]', # 0x81
'N', # 0x82
'H', # 0x83
'[?]', # 0x84
'a', # 0x85
'aa', # 0x86
'i', # 0x87
'ii', # 0x88
'u', # 0x89
'uu', # 0x8a
'R', # 0x8b
'L', # 0x8c
'[?]', # 0x8d
'e', # 0x8e
'ee', # 0x8f
'ai', # 0x90
'[?]', # 0x91
'o', # 0x92
'oo', # 0x93
'au', # 0x94
'k', # 0x95
'kh', # 0x96
'g', # 0x97
'gh', # 0x98
'ng', # 0x99
'c', # 0x9a
'ch', # 0x9b
'j', # 0x9c
'jh', # 0x9d
'ny', # 0x9e
'tt', # 0x9f
'tth', # 0xa0
'dd', # 0xa1
'ddh', # 0xa2
'nn', # 0xa3
't', # 0xa4
'th', # 0xa5
'd', # 0xa6
'dh', # 0xa7
'n', # 0xa8
'[?]', # 0xa9
'p', # 0xaa
'ph', # 0xab
'b', # 0xac
'bh', # 0xad
'm', # 0xae
'y', # 0xaf
'r', # 0xb0
'rr', # 0xb1
'l', # 0xb2
'll', # 0xb3
'[?]', # 0xb4
'v', # 0xb5
'sh', # 0xb6
'ss', # 0xb7
's', # 0xb8
'h', # 0xb9
'[?]', # 0xba
'[?]', # 0xbb
'[?]', # 0xbc
'[?]', # 0xbd
'aa', # 0xbe
'i', # 0xbf
'ii', # 0xc0
'u', # 0xc1
'uu', # 0xc2
'R', # 0xc3
'RR', # 0xc4
'[?]', # 0xc5
'e', # 0xc6
'ee', # 0xc7
'ai', # 0xc8
'[?]', # 0xc9
'o', # 0xca
'oo', # 0xcb
'au', # 0xcc
'', # 0xcd
'[?]', # 0xce
'[?]', # 0xcf
'[?]', # 0xd0
'[?]', # 0xd1
'[?]', # 0xd2
'[?]', # 0xd3
'[?]', # 0xd4
'+', # 0xd5
'+', # 0xd6
'[?]', # 0xd7
'[?]', # 0xd8
'[?]', # 0xd9
'[?]', # 0xda
'[?]', # 0xdb
'[?]', # 0xdc
'[?]', # 0xdd
'lll', # 0xde
'[?]', # 0xdf
'RR', # 0xe0
'LL', # 0xe1
'[?]', # 0xe2
'[?]', # 0xe3
'[?]', # 0xe4
'[?]', # 0xe5
'0', # 0xe6
'1', # 0xe7
'2', # 0xe8
'3', # 0xe9
'4', # 0xea
'5', # 0xeb
'6', # 0xec
'7', # 0xed
'8', # 0xee
'9', # 0xef
'[?]', # 0xf0
'[?]', # 0xf1
'[?]', # 0xf2
'[?]', # 0xf3
'[?]', # 0xf4
'[?]', # 0xf5
'[?]', # 0xf6
'[?]', # 0xf7
'[?]', # 0xf8
'[?]', # 0xf9
'[?]', # 0xfa
'[?]', # 0xfb
'[?]', # 0xfc
'[?]', # 0xfd
'[?]', # 0xfe
)
|
aequitas/home-assistant | refs/heads/dev | homeassistant/components/plex/__init__.py | 9 | """The plex component."""
|
imvu/bluesteel | refs/heads/master | app/logic/benchmark/migrations/0017_benchmarkfluctuationwaiverentry.py | 1 | # -*- coding: utf-8 -*-
# Generated by Django 1.11.3 on 2018-05-05 18:49
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('gitrepo', '0004_auto_20160723_1739'),
('benchmark', '0016_benchmarkdefinitionentry_priority'),
]
operations = [
migrations.CreateModel(
name='BenchmarkFluctuationWaiverEntry',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('notification_allowed', models.BooleanField(default=True)),
('created_at', models.DateTimeField(auto_now_add=True)),
('updated_at', models.DateTimeField(auto_now=True)),
('git_project', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='fluctuation_waiver_git_project', to='gitrepo.GitProjectEntry')),
('git_user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='fluctuation_waiver_git_project', to='gitrepo.GitUserEntry')),
],
),
]
|
carlory/kubernetes | refs/heads/master | hack/boilerplate/boilerplate.py | 92 | #!/usr/bin/env python
# Copyright 2015 The Kubernetes Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import print_function
import argparse
import datetime
import difflib
import glob
import os
import re
import sys
parser = argparse.ArgumentParser()
parser.add_argument(
"filenames",
help="list of files to check, all files if unspecified",
nargs='*')
rootdir = os.path.dirname(__file__) + "/../../"
rootdir = os.path.abspath(rootdir)
parser.add_argument(
"--rootdir", default=rootdir, help="root directory to examine")
default_boilerplate_dir = os.path.join(rootdir, "hack/boilerplate")
parser.add_argument(
"--boilerplate-dir", default=default_boilerplate_dir)
parser.add_argument(
"-v", "--verbose",
help="give verbose output regarding why a file does not pass",
action="store_true")
args = parser.parse_args()
verbose_out = sys.stderr if args.verbose else open("/dev/null", "w")
def get_refs():
refs = {}
for path in glob.glob(os.path.join(args.boilerplate_dir, "boilerplate.*.txt")):
extension = os.path.basename(path).split(".")[1]
ref_file = open(path, 'r')
ref = ref_file.read().splitlines()
ref_file.close()
refs[extension] = ref
return refs
def is_generated_file(filename, data, regexs):
for d in skipped_ungenerated_files:
if d in filename:
return False
p = regexs["generated"]
return p.search(data)
def file_passes(filename, refs, regexs):
try:
f = open(filename, 'r')
except Exception as exc:
print("Unable to open %s: %s" % (filename, exc), file=verbose_out)
return False
data = f.read()
f.close()
# determine if the file is automatically generated
generated = is_generated_file(filename, data, regexs)
basename = os.path.basename(filename)
extension = file_extension(filename)
if generated:
if extension == "go":
extension = "generatego"
elif extension == "bzl":
extension = "generatebzl"
if extension != "":
ref = refs[extension]
else:
ref = refs[basename]
# remove extra content from the top of files
if extension == "go" or extension == "generatego":
p = regexs["go_build_constraints"]
(data, found) = p.subn("", data, 1)
elif extension in ["sh", "py"]:
p = regexs["shebang"]
(data, found) = p.subn("", data, 1)
data = data.splitlines()
# if our test file is smaller than the reference it surely fails!
if len(ref) > len(data):
print('File %s smaller than reference (%d < %d)' %
(filename, len(data), len(ref)),
file=verbose_out)
return False
# trim our file to the same number of lines as the reference file
data = data[:len(ref)]
p = regexs["year"]
for d in data:
if p.search(d):
if generated:
print('File %s has the YEAR field, but it should not be in generated file' %
filename, file=verbose_out)
else:
print('File %s has the YEAR field, but missing the year of date' %
filename, file=verbose_out)
return False
if not generated:
        # Replace the years matched by the date regex (2014 through the
        # current year) with "YEAR" in the first line that contains one
p = regexs["date"]
for i, d in enumerate(data):
(data[i], found) = p.subn('YEAR', d)
if found != 0:
break
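    # Illustrative note (an assumption, not in the original file): this turns
    # a header line such as "Copyright 2016 The Kubernetes Authors." into
    # "Copyright YEAR The Kubernetes Authors." so it can be compared verbatim
    # against the reference boilerplate text.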
# if we don't match the reference at this point, fail
if ref != data:
print("Header in %s does not match reference, diff:" %
filename, file=verbose_out)
if args.verbose:
print(file=verbose_out)
for line in difflib.unified_diff(ref, data, 'reference', filename, lineterm=''):
print(line, file=verbose_out)
print(file=verbose_out)
return False
return True
def file_extension(filename):
return os.path.splitext(filename)[1].split(".")[-1].lower()
skipped_dirs = ['third_party', '_gopath', '_output', '.git', 'cluster/env.sh',
"vendor", "test/e2e/generated/bindata.go", "hack/boilerplate/test",
"staging/src/k8s.io/kubectl/pkg/generated/bindata.go"]
# list all the files contain 'DO NOT EDIT', but are not generated
skipped_ungenerated_files = [
'hack/lib/swagger.sh', 'hack/boilerplate/boilerplate.py']
def normalize_files(files):
newfiles = []
for pathname in files:
if any(x in pathname for x in skipped_dirs):
continue
newfiles.append(pathname)
for i, pathname in enumerate(newfiles):
if not os.path.isabs(pathname):
newfiles[i] = os.path.join(args.rootdir, pathname)
return newfiles
def get_files(extensions):
files = []
if len(args.filenames) > 0:
files = args.filenames
else:
for root, dirs, walkfiles in os.walk(args.rootdir):
# don't visit certain dirs. This is just a performance improvement
# as we would prune these later in normalize_files(). But doing it
# cuts down the amount of filesystem walking we do and cuts down
# the size of the file list
for d in skipped_dirs:
if d in dirs:
dirs.remove(d)
for name in walkfiles:
pathname = os.path.join(root, name)
files.append(pathname)
files = normalize_files(files)
outfiles = []
for pathname in files:
basename = os.path.basename(pathname)
extension = file_extension(pathname)
if extension in extensions or basename in extensions:
outfiles.append(pathname)
return outfiles
def get_dates():
years = datetime.datetime.now().year
return '(%s)' % '|'.join((str(year) for year in range(2014, years+1)))
def get_regexs():
regexs = {}
# Search for "YEAR" which exists in the boilerplate, but shouldn't in the real thing
regexs["year"] = re.compile('YEAR')
    # get_dates returns a regex matching any year from 2014 through the current year, like "(2014|2015|2016|2017|2018)";
# company holder names can be anything
regexs["date"] = re.compile(get_dates())
# strip // +build \n\n build constraints
regexs["go_build_constraints"] = re.compile(
r"^(// \+build.*\n)+\n", re.MULTILINE)
# strip #!.* from scripts
regexs["shebang"] = re.compile(r"^(#!.*\n)\n*", re.MULTILINE)
# Search for generated files
regexs["generated"] = re.compile('DO NOT EDIT')
return regexs
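# Illustrative sketch (not in the original file) of the "shebang" regex built
# in get_regexs(): it strips an interpreter line plus any following blank
# lines before the header comparison, e.g.
#   re.compile(r"^(#!.*\n)\n*", re.MULTILINE).subn(
#       "", "#!/usr/bin/env python\n\n# Copyright ...\n", 1)
#   -> ('# Copyright ...\n', 1)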
def main():
regexs = get_regexs()
refs = get_refs()
filenames = get_files(refs.keys())
for filename in filenames:
if not file_passes(filename, refs, regexs):
print(filename, file=sys.stdout)
return 0
if __name__ == "__main__":
sys.exit(main())
|
twobob/buildroot-kindle | refs/heads/master | output/build/libglib2-2.30.3/gio/gdbus-2.0/codegen/codegen.py | 2 | # -*- Mode: Python -*-
# GDBus - GLib D-Bus Library
#
# Copyright (C) 2008-2011 Red Hat, Inc.
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General
# Public License along with this library; if not, write to the
# Free Software Foundation, Inc., 59 Temple Place, Suite 330,
# Boston, MA 02111-1307, USA.
#
# Author: David Zeuthen <[email protected]>
import sys
from . import config
from . import utils
from . import dbustypes
# ----------------------------------------------------------------------------------------------------
class CodeGenerator:
def __init__(self, ifaces, namespace, interface_prefix, generate_objmanager, docbook_gen, h, c):
self.docbook_gen = docbook_gen
self.generate_objmanager = generate_objmanager
self.ifaces = ifaces
self.h = h
self.c = c
self.namespace = namespace
if len(namespace) > 0:
if utils.is_ugly_case(namespace):
self.namespace = namespace.replace('_', '')
self.ns_upper = namespace.upper() + '_'
self.ns_lower = namespace.lower() + '_'
else:
self.ns_upper = utils.camel_case_to_uscore(namespace).upper() + '_'
self.ns_lower = utils.camel_case_to_uscore(namespace).lower() + '_'
else:
self.ns_upper = ''
self.ns_lower = ''
self.interface_prefix = interface_prefix
self.header_guard = self.h.name.upper().replace('.', '_').replace('-', '_').replace('/', '_')
# ----------------------------------------------------------------------------------------------------
def generate_intro(self):
self.c.write('/*\n'
' * Generated by gdbus-codegen %s. DO NOT EDIT.\n'
' *\n'
' * The license of this code is the same as for the source it was derived from.\n'
' */\n'
'\n'
%(config.VERSION))
self.c.write('#ifdef HAVE_CONFIG_H\n'
'# include "config.h"\n'
'#endif\n'
'\n'
'#include "%s"\n'
'\n'%(self.h.name))
self.c.write('#ifdef G_OS_UNIX\n'
'# include <gio/gunixfdlist.h>\n'
'#endif\n'
'\n')
self.c.write('typedef struct\n'
'{\n'
' GDBusArgInfo parent_struct;\n'
' gboolean use_gvariant;\n'
'} _ExtendedGDBusArgInfo;\n'
'\n')
self.c.write('typedef struct\n'
'{\n'
' GDBusMethodInfo parent_struct;\n'
' const gchar *signal_name;\n'
' gboolean pass_fdlist;\n'
'} _ExtendedGDBusMethodInfo;\n'
'\n')
self.c.write('typedef struct\n'
'{\n'
' GDBusSignalInfo parent_struct;\n'
' const gchar *signal_name;\n'
'} _ExtendedGDBusSignalInfo;\n'
'\n')
self.c.write('typedef struct\n'
'{\n'
' GDBusPropertyInfo parent_struct;\n'
' const gchar *hyphen_name;\n'
' gboolean use_gvariant;\n'
'} _ExtendedGDBusPropertyInfo;\n'
'\n')
self.c.write('typedef struct\n'
'{\n'
' GDBusInterfaceInfo parent_struct;\n'
' const gchar *hyphen_name;\n'
'} _ExtendedGDBusInterfaceInfo;\n'
'\n')
self.c.write('typedef struct\n'
'{\n'
' const _ExtendedGDBusPropertyInfo *info;\n'
' guint prop_id;\n'
' GValue orig_value; /* the value before the change */\n'
'} ChangedProperty;\n'
'\n'
'static void\n'
'_changed_property_free (ChangedProperty *data)\n'
'{\n'
' g_value_unset (&data->orig_value);\n'
' g_free (data);\n'
'}\n'
'\n')
self.c.write('static gboolean\n'
'_g_strv_equal0 (gchar **a, gchar **b)\n'
'{\n'
' gboolean ret = FALSE;\n'
' guint n;\n'
' if (a == NULL && b == NULL)\n'
' {\n'
' ret = TRUE;\n'
' goto out;\n'
' }\n'
' if (a == NULL || b == NULL)\n'
' goto out;\n'
' if (g_strv_length (a) != g_strv_length (b))\n'
' goto out;\n'
' for (n = 0; a[n] != NULL; n++)\n'
' if (g_strcmp0 (a[n], b[n]) != 0)\n'
' goto out;\n'
' ret = TRUE;\n'
'out:\n'
' return ret;\n'
'}\n'
'\n')
self.c.write('static gboolean\n'
'_g_variant_equal0 (GVariant *a, GVariant *b)\n'
'{\n'
' gboolean ret = FALSE;\n'
' if (a == NULL && b == NULL)\n'
' {\n'
' ret = TRUE;\n'
' goto out;\n'
' }\n'
' if (a == NULL || b == NULL)\n'
' goto out;\n'
' ret = g_variant_equal (a, b);\n'
'out:\n'
' return ret;\n'
'}\n'
'\n')
# simplified - only supports the types we use
self.c.write('G_GNUC_UNUSED static gboolean\n'
'_g_value_equal (const GValue *a, const GValue *b)\n'
'{\n'
' gboolean ret = FALSE;\n'
' g_assert (G_VALUE_TYPE (a) == G_VALUE_TYPE (b));\n'
' switch (G_VALUE_TYPE (a))\n'
' {\n'
' case G_TYPE_BOOLEAN:\n'
' ret = (g_value_get_boolean (a) == g_value_get_boolean (b));\n'
' break;\n'
' case G_TYPE_UCHAR:\n'
' ret = (g_value_get_uchar (a) == g_value_get_uchar (b));\n'
' break;\n'
' case G_TYPE_INT:\n'
' ret = (g_value_get_int (a) == g_value_get_int (b));\n'
' break;\n'
' case G_TYPE_UINT:\n'
' ret = (g_value_get_uint (a) == g_value_get_uint (b));\n'
' break;\n'
' case G_TYPE_INT64:\n'
' ret = (g_value_get_int64 (a) == g_value_get_int64 (b));\n'
' break;\n'
' case G_TYPE_UINT64:\n'
' ret = (g_value_get_uint64 (a) == g_value_get_uint64 (b));\n'
' break;\n'
' case G_TYPE_DOUBLE:\n'
' ret = (g_value_get_double (a) == g_value_get_double (b));\n'
' break;\n'
' case G_TYPE_STRING:\n'
' ret = (g_strcmp0 (g_value_get_string (a), g_value_get_string (b)) == 0);\n'
' break;\n'
' case G_TYPE_VARIANT:\n'
' ret = _g_variant_equal0 (g_value_get_variant (a), g_value_get_variant (b));\n'
' break;\n'
' default:\n'
' if (G_VALUE_TYPE (a) == G_TYPE_STRV)\n'
' ret = _g_strv_equal0 (g_value_get_boxed (a), g_value_get_boxed (b));\n'
' else\n'
' g_critical ("_g_value_equal() does not handle type %s", g_type_name (G_VALUE_TYPE (a)));\n'
' break;\n'
' }\n'
' return ret;\n'
'}\n'
'\n')
self.h.write('/*\n'
' * Generated by gdbus-codegen %s. DO NOT EDIT.\n'
' *\n'
' * The license of this code is the same as for the source it was derived from.\n'
' */\n'
'\n'
'#ifndef __%s__\n'
'#define __%s__\n'
'\n'%(config.VERSION, self.header_guard, self.header_guard))
self.h.write('#include <gio/gio.h>\n'
'\n'
'G_BEGIN_DECLS\n'
'\n')
# ----------------------------------------------------------------------------------------------------
def declare_types(self):
for i in self.ifaces:
self.h.write('\n')
self.h.write('/* ------------------------------------------------------------------------ */\n')
self.h.write('/* Declarations for %s */\n'%i.name)
self.h.write('\n')
# First the GInterface
self.h.write('#define %sTYPE_%s (%s_get_type ())\n'%(i.ns_upper, i.name_upper, i.name_lower))
self.h.write('#define %s%s(o) (G_TYPE_CHECK_INSTANCE_CAST ((o), %sTYPE_%s, %s))\n'%(i.ns_upper, i.name_upper, i.ns_upper, i.name_upper, i.camel_name))
self.h.write('#define %sIS_%s(o) (G_TYPE_CHECK_INSTANCE_TYPE ((o), %sTYPE_%s))\n'%(i.ns_upper, i.name_upper, i.ns_upper, i.name_upper))
self.h.write('#define %s%s_GET_IFACE(o) (G_TYPE_INSTANCE_GET_INTERFACE ((o), %sTYPE_%s, %sIface))\n'%(i.ns_upper, i.name_upper, i.ns_upper, i.name_upper, i.camel_name))
self.h.write('\n')
self.h.write('struct _%s;\n'%(i.camel_name))
self.h.write('typedef struct _%s %s;\n'%(i.camel_name, i.camel_name))
self.h.write('typedef struct _%sIface %sIface;\n'%(i.camel_name, i.camel_name))
self.h.write('\n')
self.h.write('struct _%sIface\n'%(i.camel_name))
self.h.write('{\n')
self.h.write(' GTypeInterface parent_iface;\n')
function_pointers = {}
# vfuncs for methods
if len(i.methods) > 0:
self.h.write('\n')
for m in i.methods:
unix_fd = False
if utils.lookup_annotation(m.annotations, 'org.gtk.GDBus.C.UnixFD'):
unix_fd = True
key = (m.since, '_method_%s'%m.name_lower)
value = ' gboolean (*handle_%s) (\n'%(m.name_lower)
value += ' %s *object,\n'%(i.camel_name)
value += ' GDBusMethodInvocation *invocation'%()
if unix_fd:
value += ',\n GUnixFDList *fd_list'
for a in m.in_args:
value += ',\n %sarg_%s'%(a.ctype_in, a.name)
value += ');\n\n'
function_pointers[key] = value
# vfuncs for signals
if len(i.signals) > 0:
self.h.write('\n')
for s in i.signals:
key = (s.since, '_signal_%s'%s.name_lower)
value = ' void (*%s) (\n'%(s.name_lower)
value += ' %s *object'%(i.camel_name)
for a in s.args:
value += ',\n %sarg_%s'%(a.ctype_in, a.name)
value += ');\n\n'
function_pointers[key] = value
# vfuncs for properties
if len(i.properties) > 0:
self.h.write('\n')
for p in i.properties:
key = (p.since, '_prop_get_%s'%p.name_lower)
value = ' %s (*get_%s) (%s *object);\n\n'%(p.arg.ctype_in, p.name_lower, i.camel_name)
function_pointers[key] = value
# Sort according to @since tag, then name.. this ensures
# that the function pointers don't change order assuming
# judicious use of @since
#
# Also use a proper version comparison function so e.g.
# 10.0 comes after 2.0.
#
# See https://bugzilla.gnome.org/show_bug.cgi?id=647577#c5
# for discussion
for key in sorted(function_pointers.keys(), key=utils.version_cmp_key):
self.h.write('%s'%function_pointers[key])
self.h.write('};\n')
self.h.write('\n')
self.h.write('GType %s_get_type (void) G_GNUC_CONST;\n'%(i.name_lower))
self.h.write('\n')
self.h.write('GDBusInterfaceInfo *%s_interface_info (void);\n'%(i.name_lower))
self.h.write('guint %s_override_properties (GObjectClass *klass, guint property_id_begin);\n'%(i.name_lower))
self.h.write('\n')
# Then method call completion functions
if len(i.methods) > 0:
self.h.write('\n')
self.h.write('/* D-Bus method call completion functions: */\n')
for m in i.methods:
unix_fd = False
if utils.lookup_annotation(m.annotations, 'org.gtk.GDBus.C.UnixFD'):
unix_fd = True
if m.deprecated:
self.h.write('G_GNUC_DEPRECATED ')
self.h.write('void %s_complete_%s (\n'
' %s *object,\n'
' GDBusMethodInvocation *invocation'%(i.name_lower, m.name_lower, i.camel_name))
if unix_fd:
self.h.write(',\n GUnixFDList *fd_list')
for a in m.out_args:
self.h.write(',\n %s%s'%(a.ctype_in, a.name))
self.h.write(');\n')
self.h.write('\n')
self.h.write('\n')
# Then signal emission functions
if len(i.signals) > 0:
self.h.write('\n')
self.h.write('/* D-Bus signal emissions functions: */\n')
for s in i.signals:
if s.deprecated:
self.h.write('G_GNUC_DEPRECATED ')
self.h.write('void %s_emit_%s (\n'
' %s *object'%(i.name_lower, s.name_lower, i.camel_name))
for a in s.args:
self.h.write(',\n %sarg_%s'%(a.ctype_in, a.name))
self.h.write(');\n')
self.h.write('\n')
self.h.write('\n')
# Then method call declarations
if len(i.methods) > 0:
self.h.write('\n')
self.h.write('/* D-Bus method calls: */\n')
for m in i.methods:
unix_fd = False
if utils.lookup_annotation(m.annotations, 'org.gtk.GDBus.C.UnixFD'):
unix_fd = True
# async begin
if m.deprecated:
self.h.write('G_GNUC_DEPRECATED ')
self.h.write('void %s_call_%s (\n'
' %s *proxy'%(i.name_lower, m.name_lower, i.camel_name))
for a in m.in_args:
self.h.write(',\n %sarg_%s'%(a.ctype_in, a.name))
if unix_fd:
self.h.write(',\n GUnixFDList *fd_list')
self.h.write(',\n'
' GCancellable *cancellable,\n'
' GAsyncReadyCallback callback,\n'
' gpointer user_data);\n')
self.h.write('\n')
# async finish
if m.deprecated:
self.h.write('G_GNUC_DEPRECATED ')
self.h.write('gboolean %s_call_%s_finish (\n'
' %s *proxy'%(i.name_lower, m.name_lower, i.camel_name))
for a in m.out_args:
self.h.write(',\n %sout_%s'%(a.ctype_out, a.name))
if unix_fd:
self.h.write(',\n GUnixFDList **out_fd_list')
self.h.write(',\n'
' GAsyncResult *res,\n'
' GError **error);\n')
self.h.write('\n')
# sync
if m.deprecated:
self.h.write('G_GNUC_DEPRECATED ')
self.h.write('gboolean %s_call_%s_sync (\n'
' %s *proxy'%(i.name_lower, m.name_lower, i.camel_name))
for a in m.in_args:
self.h.write(',\n %sarg_%s'%(a.ctype_in, a.name))
if unix_fd:
self.h.write(',\n GUnixFDList *fd_list')
for a in m.out_args:
self.h.write(',\n %sout_%s'%(a.ctype_out, a.name))
if unix_fd:
self.h.write(',\n GUnixFDList **out_fd_list')
self.h.write(',\n'
' GCancellable *cancellable,\n'
' GError **error);\n')
self.h.write('\n')
self.h.write('\n')
# Then the property accessor declarations
if len(i.properties) > 0:
self.h.write('\n')
self.h.write('/* D-Bus property accessors: */\n')
for p in i.properties:
# getter
if p.deprecated:
self.h.write('G_GNUC_DEPRECATED ')
self.h.write('%s%s_get_%s (%s *object);\n'%(p.arg.ctype_in, i.name_lower, p.name_lower, i.camel_name))
                if p.arg.free_func is not None:
if p.deprecated:
self.h.write('G_GNUC_DEPRECATED ')
self.h.write('%s%s_dup_%s (%s *object);\n'%(p.arg.ctype_in_dup, i.name_lower, p.name_lower, i.camel_name))
# setter
if p.deprecated:
self.h.write('G_GNUC_DEPRECATED ')
self.h.write('void %s_set_%s (%s *object, %svalue);\n'%(i.name_lower, p.name_lower, i.camel_name, p.arg.ctype_in, ))
self.h.write('\n')
# Then the proxy
self.h.write('\n')
self.h.write('/* ---- */\n')
self.h.write('\n')
self.h.write('#define %sTYPE_%s_PROXY (%s_proxy_get_type ())\n'%(i.ns_upper, i.name_upper, i.name_lower))
self.h.write('#define %s%s_PROXY(o) (G_TYPE_CHECK_INSTANCE_CAST ((o), %sTYPE_%s_PROXY, %sProxy))\n'%(i.ns_upper, i.name_upper, i.ns_upper, i.name_upper, i.camel_name))
self.h.write('#define %s%s_PROXY_CLASS(k) (G_TYPE_CHECK_CLASS_CAST ((k), %sTYPE_%s_PROXY, %sProxyClass))\n'%(i.ns_upper, i.name_upper, i.ns_upper, i.name_upper, i.camel_name))
self.h.write('#define %s%s_PROXY_GET_CLASS(o) (G_TYPE_INSTANCE_GET_CLASS ((o), %sTYPE_%s_PROXY, %sProxyClass))\n'%(i.ns_upper, i.name_upper, i.ns_upper, i.name_upper, i.camel_name))
self.h.write('#define %sIS_%s_PROXY(o) (G_TYPE_CHECK_INSTANCE_TYPE ((o), %sTYPE_%s_PROXY))\n'%(i.ns_upper, i.name_upper, i.ns_upper, i.name_upper))
self.h.write('#define %sIS_%s_PROXY_CLASS(k) (G_TYPE_CHECK_CLASS_TYPE ((k), %sTYPE_%s_PROXY))\n'%(i.ns_upper, i.name_upper, i.ns_upper, i.name_upper))
self.h.write('\n')
self.h.write('typedef struct _%sProxy %sProxy;\n'%(i.camel_name, i.camel_name))
self.h.write('typedef struct _%sProxyClass %sProxyClass;\n'%(i.camel_name, i.camel_name))
self.h.write('typedef struct _%sProxyPrivate %sProxyPrivate;\n'%(i.camel_name, i.camel_name))
self.h.write('\n')
self.h.write('struct _%sProxy\n'%(i.camel_name))
self.h.write('{\n')
self.h.write(' /*< private >*/\n')
self.h.write(' GDBusProxy parent_instance;\n')
self.h.write(' %sProxyPrivate *priv;\n'%(i.camel_name))
self.h.write('};\n')
self.h.write('\n')
self.h.write('struct _%sProxyClass\n'%(i.camel_name))
self.h.write('{\n')
self.h.write(' GDBusProxyClass parent_class;\n')
self.h.write('};\n')
self.h.write('\n')
self.h.write('GType %s_proxy_get_type (void) G_GNUC_CONST;\n'%(i.name_lower))
self.h.write('\n')
if i.deprecated:
self.h.write('G_GNUC_DEPRECATED ')
self.h.write('void %s_proxy_new (\n'
' GDBusConnection *connection,\n'
' GDBusProxyFlags flags,\n'
' const gchar *name,\n'
' const gchar *object_path,\n'
' GCancellable *cancellable,\n'
' GAsyncReadyCallback callback,\n'
' gpointer user_data);\n'
%(i.name_lower))
if i.deprecated:
self.h.write('G_GNUC_DEPRECATED ')
self.h.write('%s *%s_proxy_new_finish (\n'
' GAsyncResult *res,\n'
' GError **error);\n'
%(i.camel_name, i.name_lower))
if i.deprecated:
self.h.write('G_GNUC_DEPRECATED ')
self.h.write('%s *%s_proxy_new_sync (\n'
' GDBusConnection *connection,\n'
' GDBusProxyFlags flags,\n'
' const gchar *name,\n'
' const gchar *object_path,\n'
' GCancellable *cancellable,\n'
' GError **error);\n'
%(i.camel_name, i.name_lower))
self.h.write('\n')
if i.deprecated:
self.h.write('G_GNUC_DEPRECATED ')
self.h.write('void %s_proxy_new_for_bus (\n'
' GBusType bus_type,\n'
' GDBusProxyFlags flags,\n'
' const gchar *name,\n'
' const gchar *object_path,\n'
' GCancellable *cancellable,\n'
' GAsyncReadyCallback callback,\n'
' gpointer user_data);\n'
%(i.name_lower))
if i.deprecated:
self.h.write('G_GNUC_DEPRECATED ')
self.h.write('%s *%s_proxy_new_for_bus_finish (\n'
' GAsyncResult *res,\n'
' GError **error);\n'
%(i.camel_name, i.name_lower))
if i.deprecated:
self.h.write('G_GNUC_DEPRECATED ')
self.h.write('%s *%s_proxy_new_for_bus_sync (\n'
' GBusType bus_type,\n'
' GDBusProxyFlags flags,\n'
' const gchar *name,\n'
' const gchar *object_path,\n'
' GCancellable *cancellable,\n'
' GError **error);\n'
%(i.camel_name, i.name_lower))
self.h.write('\n')
# Then the skeleton
self.h.write('\n')
self.h.write('/* ---- */\n')
self.h.write('\n')
self.h.write('#define %sTYPE_%s_SKELETON (%s_skeleton_get_type ())\n'%(i.ns_upper, i.name_upper, i.name_lower))
self.h.write('#define %s%s_SKELETON(o) (G_TYPE_CHECK_INSTANCE_CAST ((o), %sTYPE_%s_SKELETON, %sSkeleton))\n'%(i.ns_upper, i.name_upper, i.ns_upper, i.name_upper, i.camel_name))
self.h.write('#define %s%s_SKELETON_CLASS(k) (G_TYPE_CHECK_CLASS_CAST ((k), %sTYPE_%s_SKELETON, %sSkeletonClass))\n'%(i.ns_upper, i.name_upper, i.ns_upper, i.name_upper, i.camel_name))
self.h.write('#define %s%s_SKELETON_GET_CLASS(o) (G_TYPE_INSTANCE_GET_CLASS ((o), %sTYPE_%s_SKELETON, %sSkeletonClass))\n'%(i.ns_upper, i.name_upper, i.ns_upper, i.name_upper, i.camel_name))
self.h.write('#define %sIS_%s_SKELETON(o) (G_TYPE_CHECK_INSTANCE_TYPE ((o), %sTYPE_%s_SKELETON))\n'%(i.ns_upper, i.name_upper, i.ns_upper, i.name_upper))
self.h.write('#define %sIS_%s_SKELETON_CLASS(k) (G_TYPE_CHECK_CLASS_TYPE ((k), %sTYPE_%s_SKELETON))\n'%(i.ns_upper, i.name_upper, i.ns_upper, i.name_upper))
self.h.write('\n')
self.h.write('typedef struct _%sSkeleton %sSkeleton;\n'%(i.camel_name, i.camel_name))
self.h.write('typedef struct _%sSkeletonClass %sSkeletonClass;\n'%(i.camel_name, i.camel_name))
self.h.write('typedef struct _%sSkeletonPrivate %sSkeletonPrivate;\n'%(i.camel_name, i.camel_name))
self.h.write('\n')
self.h.write('struct _%sSkeleton\n'%(i.camel_name))
self.h.write('{\n')
self.h.write(' /*< private >*/\n')
self.h.write(' GDBusInterfaceSkeleton parent_instance;\n')
self.h.write(' %sSkeletonPrivate *priv;\n'%(i.camel_name))
self.h.write('};\n')
self.h.write('\n')
self.h.write('struct _%sSkeletonClass\n'%(i.camel_name))
self.h.write('{\n')
self.h.write(' GDBusInterfaceSkeletonClass parent_class;\n')
self.h.write('};\n')
self.h.write('\n')
self.h.write('GType %s_skeleton_get_type (void) G_GNUC_CONST;\n'%(i.name_lower))
self.h.write('\n')
if i.deprecated:
self.h.write('G_GNUC_DEPRECATED ')
self.h.write('%s *%s_skeleton_new (void);\n'%(i.camel_name, i.name_lower))
self.h.write('\n')
# Finally, the Object, ObjectProxy, ObjectSkeleton and ObjectManagerClient
if self.generate_objmanager:
self.h.write('\n')
self.h.write('/* ---- */\n')
self.h.write('\n')
self.h.write('#define %sTYPE_OBJECT (%sobject_get_type ())\n'%(self.ns_upper, self.ns_lower))
self.h.write('#define %sOBJECT(o) (G_TYPE_CHECK_INSTANCE_CAST ((o), %sTYPE_OBJECT, %sObject))\n'%(self.ns_upper, self.ns_upper, self.namespace))
self.h.write('#define %sIS_OBJECT(o) (G_TYPE_CHECK_INSTANCE_TYPE ((o), %sTYPE_OBJECT))\n'%(self.ns_upper, self.ns_upper))
self.h.write('#define %sOBJECT_GET_IFACE(o) (G_TYPE_INSTANCE_GET_INTERFACE ((o), %sTYPE_OBJECT, %sObject))\n'%(self.ns_upper, self.ns_upper, self.namespace))
self.h.write('\n')
self.h.write('struct _%sObject;\n'%(self.namespace))
self.h.write('typedef struct _%sObject %sObject;\n'%(self.namespace, self.namespace))
self.h.write('typedef struct _%sObjectIface %sObjectIface;\n'%(self.namespace, self.namespace))
self.h.write('\n')
self.h.write('struct _%sObjectIface\n'%(self.namespace))
self.h.write('{\n'
' GTypeInterface parent_iface;\n'
'};\n'
'\n')
self.h.write('GType %sobject_get_type (void) G_GNUC_CONST;\n'
'\n'
%(self.ns_lower))
for i in self.ifaces:
if i.deprecated:
self.h.write('G_GNUC_DEPRECATED ')
self.h.write ('%s *%sobject_get_%s (%sObject *object);\n'
%(i.camel_name, self.ns_lower, i.name_upper.lower(), self.namespace))
for i in self.ifaces:
if i.deprecated:
self.h.write('G_GNUC_DEPRECATED ')
self.h.write ('%s *%sobject_peek_%s (%sObject *object);\n'
%(i.camel_name, self.ns_lower, i.name_upper.lower(), self.namespace))
self.h.write('\n')
self.h.write('#define %sTYPE_OBJECT_PROXY (%sobject_proxy_get_type ())\n'%(self.ns_upper, self.ns_lower))
self.h.write('#define %sOBJECT_PROXY(o) (G_TYPE_CHECK_INSTANCE_CAST ((o), %sTYPE_OBJECT_PROXY, %sObjectProxy))\n'%(self.ns_upper, self.ns_upper, self.namespace))
self.h.write('#define %sOBJECT_PROXY_CLASS(k) (G_TYPE_CHECK_CLASS_CAST ((k), %sTYPE_OBJECT_PROXY, %sObjectProxyClass))\n'%(self.ns_upper, self.ns_upper, self.namespace))
self.h.write('#define %sOBJECT_PROXY_GET_CLASS(o) (G_TYPE_INSTANCE_GET_CLASS ((o), %sTYPE_OBJECT_PROXY, %sObjectProxyClass))\n'%(self.ns_upper, self.ns_upper, self.namespace))
self.h.write('#define %sIS_OBJECT_PROXY(o) (G_TYPE_CHECK_INSTANCE_TYPE ((o), %sTYPE_OBJECT_PROXY))\n'%(self.ns_upper, self.ns_upper))
self.h.write('#define %sIS_OBJECT_PROXY_CLASS(k) (G_TYPE_CHECK_CLASS_TYPE ((k), %sTYPE_OBJECT_PROXY))\n'%(self.ns_upper, self.ns_upper))
self.h.write('\n')
self.h.write('typedef struct _%sObjectProxy %sObjectProxy;\n'%(self.namespace, self.namespace))
self.h.write('typedef struct _%sObjectProxyClass %sObjectProxyClass;\n'%(self.namespace, self.namespace))
self.h.write('typedef struct _%sObjectProxyPrivate %sObjectProxyPrivate;\n'%(self.namespace, self.namespace))
self.h.write('\n')
self.h.write('struct _%sObjectProxy\n'%(self.namespace))
self.h.write('{\n')
self.h.write(' /*< private >*/\n')
self.h.write(' GDBusObjectProxy parent_instance;\n')
self.h.write(' %sObjectProxyPrivate *priv;\n'%(self.namespace))
self.h.write('};\n')
self.h.write('\n')
self.h.write('struct _%sObjectProxyClass\n'%(self.namespace))
self.h.write('{\n')
self.h.write(' GDBusObjectProxyClass parent_class;\n')
self.h.write('};\n')
self.h.write('\n')
self.h.write('GType %sobject_proxy_get_type (void) G_GNUC_CONST;\n'%(self.ns_lower))
self.h.write('%sObjectProxy *%sobject_proxy_new (GDBusConnection *connection, const gchar *object_path);\n'%(self.namespace, self.ns_lower))
self.h.write('\n')
self.h.write('#define %sTYPE_OBJECT_SKELETON (%sobject_skeleton_get_type ())\n'%(self.ns_upper, self.ns_lower))
self.h.write('#define %sOBJECT_SKELETON(o) (G_TYPE_CHECK_INSTANCE_CAST ((o), %sTYPE_OBJECT_SKELETON, %sObjectSkeleton))\n'%(self.ns_upper, self.ns_upper, self.namespace))
self.h.write('#define %sOBJECT_SKELETON_CLASS(k) (G_TYPE_CHECK_CLASS_CAST ((k), %sTYPE_OBJECT_SKELETON, %sObjectSkeletonClass))\n'%(self.ns_upper, self.ns_upper, self.namespace))
self.h.write('#define %sOBJECT_SKELETON_GET_CLASS(o) (G_TYPE_INSTANCE_GET_CLASS ((o), %sTYPE_OBJECT_SKELETON, %sObjectSkeletonClass))\n'%(self.ns_upper, self.ns_upper, self.namespace))
self.h.write('#define %sIS_OBJECT_SKELETON(o) (G_TYPE_CHECK_INSTANCE_TYPE ((o), %sTYPE_OBJECT_SKELETON))\n'%(self.ns_upper, self.ns_upper))
self.h.write('#define %sIS_OBJECT_SKELETON_CLASS(k) (G_TYPE_CHECK_CLASS_TYPE ((k), %sTYPE_OBJECT_SKELETON))\n'%(self.ns_upper, self.ns_upper))
self.h.write('\n')
self.h.write('typedef struct _%sObjectSkeleton %sObjectSkeleton;\n'%(self.namespace, self.namespace))
self.h.write('typedef struct _%sObjectSkeletonClass %sObjectSkeletonClass;\n'%(self.namespace, self.namespace))
self.h.write('typedef struct _%sObjectSkeletonPrivate %sObjectSkeletonPrivate;\n'%(self.namespace, self.namespace))
self.h.write('\n')
self.h.write('struct _%sObjectSkeleton\n'%(self.namespace))
self.h.write('{\n')
self.h.write(' /*< private >*/\n')
self.h.write(' GDBusObjectSkeleton parent_instance;\n')
self.h.write(' %sObjectSkeletonPrivate *priv;\n'%(self.namespace))
self.h.write('};\n')
self.h.write('\n')
self.h.write('struct _%sObjectSkeletonClass\n'%(self.namespace))
self.h.write('{\n')
self.h.write(' GDBusObjectSkeletonClass parent_class;\n')
self.h.write('};\n')
self.h.write('\n')
self.h.write('GType %sobject_skeleton_get_type (void) G_GNUC_CONST;\n'%(self.ns_lower))
self.h.write('%sObjectSkeleton *%sobject_skeleton_new (const gchar *object_path);\n'
%(self.namespace, self.ns_lower))
for i in self.ifaces:
if i.deprecated:
self.h.write('G_GNUC_DEPRECATED ')
self.h.write ('void %sobject_skeleton_set_%s (%sObjectSkeleton *object, %s *interface_);\n'
%(self.ns_lower, i.name_upper.lower(), self.namespace, i.camel_name))
self.h.write('\n')
self.h.write('/* ---- */\n')
self.h.write('\n')
self.h.write('#define %sTYPE_OBJECT_MANAGER_CLIENT (%sobject_manager_client_get_type ())\n'%(self.ns_upper, self.ns_lower))
self.h.write('#define %sOBJECT_MANAGER_CLIENT(o) (G_TYPE_CHECK_INSTANCE_CAST ((o), %sTYPE_OBJECT_MANAGER_CLIENT, %sObjectManagerClient))\n'%(self.ns_upper, self.ns_upper, self.namespace))
self.h.write('#define %sOBJECT_MANAGER_CLIENT_CLASS(k) (G_TYPE_CHECK_CLASS_CAST ((k), %sTYPE_OBJECT_MANAGER_CLIENT, %sObjectManagerClientClass))\n'%(self.ns_upper, self.ns_upper, self.namespace))
self.h.write('#define %sOBJECT_MANAGER_CLIENT_GET_CLASS(o) (G_TYPE_INSTANCE_GET_CLASS ((o), %sTYPE_OBJECT_MANAGER_CLIENT, %sObjectManagerClientClass))\n'%(self.ns_upper, self.ns_upper, self.namespace))
self.h.write('#define %sIS_OBJECT_MANAGER_CLIENT(o) (G_TYPE_CHECK_INSTANCE_TYPE ((o), %sTYPE_OBJECT_MANAGER_CLIENT))\n'%(self.ns_upper, self.ns_upper))
self.h.write('#define %sIS_OBJECT_MANAGER_CLIENT_CLASS(k) (G_TYPE_CHECK_CLASS_TYPE ((k), %sTYPE_OBJECT_MANAGER_CLIENT))\n'%(self.ns_upper, self.ns_upper))
self.h.write('\n')
self.h.write('typedef struct _%sObjectManagerClient %sObjectManagerClient;\n'%(self.namespace, self.namespace))
self.h.write('typedef struct _%sObjectManagerClientClass %sObjectManagerClientClass;\n'%(self.namespace, self.namespace))
self.h.write('typedef struct _%sObjectManagerClientPrivate %sObjectManagerClientPrivate;\n'%(self.namespace, self.namespace))
self.h.write('\n')
self.h.write('struct _%sObjectManagerClient\n'%(self.namespace))
self.h.write('{\n')
self.h.write(' /*< private >*/\n')
self.h.write(' GDBusObjectManagerClient parent_instance;\n')
self.h.write(' %sObjectManagerClientPrivate *priv;\n'%(self.namespace))
self.h.write('};\n')
self.h.write('\n')
self.h.write('struct _%sObjectManagerClientClass\n'%(self.namespace))
self.h.write('{\n')
self.h.write(' GDBusObjectManagerClientClass parent_class;\n')
self.h.write('};\n')
self.h.write('\n')
self.h.write('GType %sobject_manager_client_get_type (void) G_GNUC_CONST;\n'%(self.ns_lower))
self.h.write('\n')
self.h.write('GType %sobject_manager_client_get_proxy_type (GDBusObjectManagerClient *manager, const gchar *object_path, const gchar *interface_name, gpointer user_data);\n'%(self.ns_lower))
self.h.write('\n')
self.h.write('void %sobject_manager_client_new (\n'
' GDBusConnection *connection,\n'
' GDBusObjectManagerClientFlags flags,\n'
' const gchar *name,\n'
' const gchar *object_path,\n'
' GCancellable *cancellable,\n'
' GAsyncReadyCallback callback,\n'
' gpointer user_data);\n'
%(self.ns_lower))
self.h.write('GDBusObjectManager *%sobject_manager_client_new_finish (\n'
' GAsyncResult *res,\n'
' GError **error);\n'
%(self.ns_lower))
self.h.write('GDBusObjectManager *%sobject_manager_client_new_sync (\n'
' GDBusConnection *connection,\n'
' GDBusObjectManagerClientFlags flags,\n'
' const gchar *name,\n'
' const gchar *object_path,\n'
' GCancellable *cancellable,\n'
' GError **error);\n'
%(self.ns_lower))
self.h.write('\n')
self.h.write('void %sobject_manager_client_new_for_bus (\n'
' GBusType bus_type,\n'
' GDBusObjectManagerClientFlags flags,\n'
' const gchar *name,\n'
' const gchar *object_path,\n'
' GCancellable *cancellable,\n'
' GAsyncReadyCallback callback,\n'
' gpointer user_data);\n'
%(self.ns_lower))
self.h.write('GDBusObjectManager *%sobject_manager_client_new_for_bus_finish (\n'
' GAsyncResult *res,\n'
' GError **error);\n'
%(self.ns_lower))
self.h.write('GDBusObjectManager *%sobject_manager_client_new_for_bus_sync (\n'
' GBusType bus_type,\n'
' GDBusObjectManagerClientFlags flags,\n'
' const gchar *name,\n'
' const gchar *object_path,\n'
' GCancellable *cancellable,\n'
' GError **error);\n'
%(self.ns_lower))
self.h.write('\n')
# ----------------------------------------------------------------------------------------------------
def generate_outro(self):
self.h.write('\n'
'G_END_DECLS\n'
'\n'
'#endif /* __%s__ */\n'%(self.header_guard))
# ----------------------------------------------------------------------------------------------------
def generate_annotations(self, prefix, annotations):
        if annotations is None:
return
n = 0
for a in annotations:
#self.generate_annotations('%s_%d'%(prefix, n), a.get_annotations())
# skip internal annotations
if a.key.startswith('org.gtk.GDBus'):
continue
self.c.write('static const GDBusAnnotationInfo %s_%d =\n'
'{\n'
' -1,\n'
' "%s",\n'
' "%s",\n'%(prefix, n, a.key, a.value))
if len(a.annotations) == 0:
self.c.write(' NULL\n')
else:
self.c.write(' (GDBusAnnotationInfo **) &%s_%d_pointers\n'%(prefix, n))
self.c.write('};\n'
'\n')
n += 1
if n > 0:
self.c.write('static const GDBusAnnotationInfo * const %s_pointers[] =\n'
'{\n'%(prefix))
            m = 0
for a in annotations:
if a.key.startswith('org.gtk.GDBus'):
continue
self.c.write(' &%s_%d,\n'%(prefix, m))
m += 1
self.c.write(' NULL\n'
'};\n'
'\n')
return n
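    # Illustrative note (not part of the original source): for a single
    # annotation Key=Value this emits roughly
    #   static const GDBusAnnotationInfo prefix_0 = { -1, "Key", "Value", NULL };
    #   static const GDBusAnnotationInfo * const prefix_pointers[] = { &prefix_0, NULL };
    # and returns the number of non-internal annotations written.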
def generate_args(self, prefix, args):
for a in args:
num_anno = self.generate_annotations('%s_arg_%s_annotation_info'%(prefix, a.name), a.annotations)
self.c.write('static const _ExtendedGDBusArgInfo %s_%s =\n'
'{\n'
' {\n'
' -1,\n'
' "%s",\n'
' "%s",\n'%(prefix, a.name, a.name, a.signature))
if num_anno == 0:
self.c.write(' NULL\n')
else:
self.c.write(' (GDBusAnnotationInfo **) &%s_arg_%s_annotation_info_pointers\n'%(prefix, a.name))
self.c.write(' },\n')
if not utils.lookup_annotation(a.annotations, 'org.gtk.GDBus.C.ForceGVariant'):
self.c.write(' FALSE\n')
else:
self.c.write(' TRUE\n')
self.c.write('};\n'
'\n')
if len(args) > 0:
self.c.write('static const _ExtendedGDBusArgInfo * const %s_pointers[] =\n'
'{\n'%(prefix))
for a in args:
self.c.write(' &%s_%s,\n'%(prefix, a.name))
self.c.write(' NULL\n'
'};\n'
'\n')
def generate_introspection_for_interface(self, i):
self.c.write('/* ---- Introspection data for %s ---- */\n'
'\n'%(i.name))
if len(i.methods) > 0:
for m in i.methods:
unix_fd = False
if utils.lookup_annotation(m.annotations, 'org.gtk.GDBus.C.UnixFD'):
unix_fd = True
self.generate_args('_%s_method_info_%s_IN_ARG'%(i.name_lower, m.name_lower), m.in_args)
self.generate_args('_%s_method_info_%s_OUT_ARG'%(i.name_lower, m.name_lower), m.out_args)
num_anno = self.generate_annotations('_%s_method_%s_annotation_info'%(i.name_lower, m.name_lower), m.annotations)
self.c.write('static const _ExtendedGDBusMethodInfo _%s_method_info_%s =\n'
'{\n'
' {\n'
' -1,\n'
' "%s",\n'%(i.name_lower, m.name_lower, m.name))
if len(m.in_args) == 0:
self.c.write(' NULL,\n')
else:
self.c.write(' (GDBusArgInfo **) &_%s_method_info_%s_IN_ARG_pointers,\n'%(i.name_lower, m.name_lower))
if len(m.out_args) == 0:
self.c.write(' NULL,\n')
else:
self.c.write(' (GDBusArgInfo **) &_%s_method_info_%s_OUT_ARG_pointers,\n'%(i.name_lower, m.name_lower))
if num_anno == 0:
self.c.write(' NULL\n')
else:
self.c.write(' (GDBusAnnotationInfo **) &_%s_method_%s_annotation_info_pointers\n'%(i.name_lower, m.name_lower))
self.c.write(' },\n'
' "handle-%s",\n'
' %s\n'
%(m.name_hyphen, 'TRUE' if unix_fd else 'FALSE'))
self.c.write('};\n'
'\n')
self.c.write('static const _ExtendedGDBusMethodInfo * const _%s_method_info_pointers[] =\n'
'{\n'%(i.name_lower))
for m in i.methods:
self.c.write(' &_%s_method_info_%s,\n'%(i.name_lower, m.name_lower))
self.c.write(' NULL\n'
'};\n'
'\n')
# ---
if len(i.signals) > 0:
for s in i.signals:
self.generate_args('_%s_signal_info_%s_ARG'%(i.name_lower, s.name_lower), s.args)
num_anno = self.generate_annotations('_%s_signal_%s_annotation_info'%(i.name_lower, s.name_lower), s.annotations)
self.c.write('static const _ExtendedGDBusSignalInfo _%s_signal_info_%s =\n'
'{\n'
' {\n'
' -1,\n'
' "%s",\n'%(i.name_lower, s.name_lower, s.name))
if len(s.args) == 0:
self.c.write(' NULL,\n')
else:
self.c.write(' (GDBusArgInfo **) &_%s_signal_info_%s_ARG_pointers,\n'%(i.name_lower, s.name_lower))
if num_anno == 0:
self.c.write(' NULL\n')
else:
self.c.write(' (GDBusAnnotationInfo **) &_%s_signal_%s_annotation_info_pointers\n'%(i.name_lower, s.name_lower))
self.c.write(' },\n'
' "%s"\n'
%(s.name_hyphen))
self.c.write('};\n'
'\n')
self.c.write('static const _ExtendedGDBusSignalInfo * const _%s_signal_info_pointers[] =\n'
'{\n'%(i.name_lower))
for s in i.signals:
self.c.write(' &_%s_signal_info_%s,\n'%(i.name_lower, s.name_lower))
self.c.write(' NULL\n'
'};\n'
'\n')
# ---
if len(i.properties) > 0:
for p in i.properties:
if p.readable and p.writable:
access = 'G_DBUS_PROPERTY_INFO_FLAGS_READABLE | G_DBUS_PROPERTY_INFO_FLAGS_WRITABLE'
elif p.readable:
access = 'G_DBUS_PROPERTY_INFO_FLAGS_READABLE'
elif p.writable:
access = 'G_DBUS_PROPERTY_INFO_FLAGS_WRITABLE'
else:
access = 'G_DBUS_PROPERTY_INFO_FLAGS_NONE'
num_anno = self.generate_annotations('_%s_property_%s_annotation_info'%(i.name_lower, p.name_lower), p.annotations)
self.c.write('static const _ExtendedGDBusPropertyInfo _%s_property_info_%s =\n'
'{\n'
' {\n'
' -1,\n'
' "%s",\n'
' "%s",\n'
' %s,\n'%(i.name_lower, p.name_lower, p.name, p.arg.signature, access))
if num_anno == 0:
self.c.write(' NULL\n')
else:
self.c.write(' (GDBusAnnotationInfo **) &_%s_property_%s_annotation_info_pointers\n'%(i.name_lower, p.name_lower))
self.c.write(' },\n'
' "%s",\n'
%(p.name_hyphen))
if not utils.lookup_annotation(p.annotations, 'org.gtk.GDBus.C.ForceGVariant'):
self.c.write(' FALSE\n')
else:
self.c.write(' TRUE\n')
self.c.write('};\n'
'\n')
self.c.write('static const _ExtendedGDBusPropertyInfo * const _%s_property_info_pointers[] =\n'
'{\n'%(i.name_lower))
for p in i.properties:
self.c.write(' &_%s_property_info_%s,\n'%(i.name_lower, p.name_lower))
self.c.write(' NULL\n'
'};\n'
'\n')
num_anno = self.generate_annotations('_%s_annotation_info'%(i.name_lower), i.annotations)
self.c.write('static const _ExtendedGDBusInterfaceInfo _%s_interface_info =\n'
'{\n'
' {\n'
' -1,\n'
' "%s",\n'%(i.name_lower, i.name))
if len(i.methods) == 0:
self.c.write(' NULL,\n')
else:
self.c.write(' (GDBusMethodInfo **) &_%s_method_info_pointers,\n'%(i.name_lower))
if len(i.signals) == 0:
self.c.write(' NULL,\n')
else:
self.c.write(' (GDBusSignalInfo **) &_%s_signal_info_pointers,\n'%(i.name_lower))
if len(i.properties) == 0:
self.c.write(' NULL,\n')
else:
self.c.write(' (GDBusPropertyInfo **) &_%s_property_info_pointers,\n'%(i.name_lower))
if num_anno == 0:
self.c.write(' NULL\n')
else:
self.c.write(' (GDBusAnnotationInfo **) &_%s_annotation_info_pointers\n'%(i.name_lower))
self.c.write(' },\n'
' "%s",\n'
'};\n'
'\n'
%(i.name_hyphen))
self.c.write('\n')
self.c.write(self.docbook_gen.expand(
'/**\n'
' * %s_interface_info:\n'
' *\n'
' * Gets a machine-readable description of the #%s D-Bus interface.\n'
' *\n'
' * Returns: (transfer none): A #GDBusInterfaceInfo. Do not free.\n'
%(i.name_lower, i.name), False))
self.write_gtkdoc_deprecated_and_since_and_close(i, self.c, 0)
self.c.write('GDBusInterfaceInfo *\n'
'%s_interface_info (void)\n'
'{\n'
' return (GDBusInterfaceInfo *) &_%s_interface_info;\n'
'}\n'
'\n'%(i.name_lower, i.name_lower))
self.c.write(self.docbook_gen.expand(
'/**\n'
' * %s_override_properties:\n'
' * @klass: The class structure for a #GObject<!-- -->-derived class.\n'
' * @property_id_begin: The property id to assign to the first overridden property.\n'
' *\n'
' * Overrides all #GObject properties in the #%s interface for a concrete class.\n'
' * The properties are overridden in the order they are defined.\n'
' *\n'
' * Returns: The last property id.\n'
%(i.name_lower, i.camel_name), False))
self.write_gtkdoc_deprecated_and_since_and_close(i, self.c, 0)
self.c.write('guint\n'
'%s_override_properties (GObjectClass *klass, guint property_id_begin)\n'
'{\n'%(i.name_lower))
for p in i.properties:
self.c.write (' g_object_class_override_property (klass, property_id_begin++, "%s");\n'%(p.name_hyphen))
self.c.write(' return property_id_begin - 1;\n'
'}\n'
'\n')
self.c.write('\n')
# ----------------------------------------------------------------------------------------------------
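    # Emits the abstract GInterface for the D-Bus interface: the gtk-doc for the
    # <Name>Iface vtable, the default_init() function that creates one
    # "handle-<method>" GObject signal per D-Bus method, one GObject signal per
    # D-Bus signal and one GObject property per D-Bus property, followed by the
    # G_DEFINE_INTERFACE boilerplate.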
def generate_interface(self, i):
self.c.write('\n')
self.c.write(self.docbook_gen.expand(
'/**\n'
' * %s:\n'
' *\n'
' * Abstract interface type for the D-Bus interface #%s.\n'
%(i.camel_name, i.name), False))
self.write_gtkdoc_deprecated_and_since_and_close(i, self.c, 0)
self.c.write('\n')
self.c.write(self.docbook_gen.expand(
'/**\n'
' * %sIface:\n'
' * @parent_iface: The parent interface.\n'
%(i.camel_name), False))
doc_bits = {}
if len(i.methods) > 0:
for m in i.methods:
key = (m.since, '_method_%s'%m.name_lower)
value = '@handle_%s: '%(m.name_lower)
value += 'Handler for the #%s::handle-%s signal.'%(i.camel_name, m.name_hyphen)
doc_bits[key] = value
if len(i.signals) > 0:
for s in i.signals:
key = (s.since, '_signal_%s'%s.name_lower)
value = '@%s: '%(s.name_lower)
value += 'Handler for the #%s::%s signal.'%(i.camel_name, s.name_hyphen)
doc_bits[key] = value
if len(i.properties) > 0:
for p in i.properties:
key = (p.since, '_prop_get_%s'%p.name_lower)
value = '@get_%s: '%(p.name_lower)
value += 'Getter for the #%s:%s property.'%(i.camel_name, p.name_hyphen)
doc_bits[key] = value
for key in sorted(doc_bits.keys(), key=utils.version_cmp_key):
self.c.write(' * %s\n'%doc_bits[key])
self.c.write(self.docbook_gen.expand(
' *\n'
' * Virtual table for the D-Bus interface #%s.\n'
%(i.name), False))
self.write_gtkdoc_deprecated_and_since_and_close(i, self.c, 0)
self.c.write('\n')
self.c.write('static void\n'
'%s_default_init (%sIface *iface)\n'
                     '{\n'%(i.name_lower, i.camel_name))
if len(i.methods) > 0:
self.c.write(' /* GObject signals for incoming D-Bus method calls: */\n')
for m in i.methods:
unix_fd = False
if utils.lookup_annotation(m.annotations, 'org.gtk.GDBus.C.UnixFD'):
unix_fd = True
self.c.write(self.docbook_gen.expand(
' /**\n'
' * %s::handle-%s:\n'
' * @object: A #%s.\n'
' * @invocation: A #GDBusMethodInvocation.\n'
%(i.camel_name, m.name_hyphen, i.camel_name), False))
if unix_fd:
self.c.write (' * @fd_list: (allow-none): A #GUnixFDList or %NULL.\n')
for a in m.in_args:
self.c.write (' * @arg_%s: Argument passed by remote caller.\n'%(a.name))
self.c.write(self.docbook_gen.expand(
' *\n'
' * Signal emitted when a remote caller is invoking the %s.%s() D-Bus method.\n'
' *\n'
                     ' * If a signal handler returns %%TRUE, it means the signal handler will handle the invocation (e.g. take a reference to @invocation and eventually call %s_complete_%s() or e.g. g_dbus_method_invocation_return_error() on it) and no other signal handlers will run. If no signal handler handles the invocation, the %%G_DBUS_ERROR_UNKNOWN_METHOD error is returned.\n'
' *\n'
' * Returns: %%TRUE if the invocation was handled, %%FALSE to let other signal handlers run.\n'
%(i.name, m.name, i.name_lower, m.name_lower), False))
self.write_gtkdoc_deprecated_and_since_and_close(m, self.c, 2)
if unix_fd:
extra_args = 2
else:
extra_args = 1
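                # The "handle-<method>" signal returns a gboolean and uses the
                # true-handled accumulator, so emission stops at the first handler
                # that returns TRUE; its parameters are the invocation (plus the
                # GUnixFDList when the UnixFD annotation is set), followed by the
                # method's in-arguments.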
self.c.write(' g_signal_new ("handle-%s",\n'
' G_TYPE_FROM_INTERFACE (iface),\n'
' G_SIGNAL_RUN_LAST,\n'
' G_STRUCT_OFFSET (%sIface, handle_%s),\n'
' g_signal_accumulator_true_handled,\n'
' NULL,\n' # accu_data
' g_cclosure_marshal_generic,\n'
' G_TYPE_BOOLEAN,\n'
' %d,\n'
' G_TYPE_DBUS_METHOD_INVOCATION'
%(m.name_hyphen, i.camel_name, m.name_lower, len(m.in_args) + extra_args))
if unix_fd:
self.c.write(', G_TYPE_UNIX_FD_LIST')
for a in m.in_args:
self.c.write (', %s'%(a.gtype))
self.c.write(');\n')
self.c.write('\n')
if len(i.signals) > 0:
self.c.write(' /* GObject signals for received D-Bus signals: */\n')
for s in i.signals:
self.c.write(self.docbook_gen.expand(
' /**\n'
' * %s::%s:\n'
' * @object: A #%s.\n'
%(i.camel_name, s.name_hyphen, i.camel_name), False))
for a in s.args:
self.c.write (' * @arg_%s: Argument.\n'%(a.name))
self.c.write(self.docbook_gen.expand(
' *\n'
' * On the client-side, this signal is emitted whenever the D-Bus signal #%s::%s is received.\n'
' *\n'
' * On the service-side, this signal can be used with e.g. g_signal_emit_by_name() to make the object emit the D-Bus signal.\n'
%(i.name, s.name), False))
self.write_gtkdoc_deprecated_and_since_and_close(s, self.c, 2)
self.c.write(' g_signal_new ("%s",\n'
' G_TYPE_FROM_INTERFACE (iface),\n'
' G_SIGNAL_RUN_LAST,\n'
' G_STRUCT_OFFSET (%sIface, %s),\n'
' NULL,\n' # accumulator
' NULL,\n' # accu_data
' g_cclosure_marshal_generic,\n'
' G_TYPE_NONE,\n'
' %d'
%(s.name_hyphen, i.camel_name, s.name_lower, len(s.args)))
for a in s.args:
self.c.write (', %s'%(a.gtype))
self.c.write(');\n')
self.c.write('\n')
if len(i.properties) > 0:
self.c.write(' /* GObject properties for D-Bus properties: */\n')
for p in i.properties:
if p.readable and p.writable:
hint = 'Since the D-Bus property for this #GObject property is both readable and writable, it is meaningful to both read from it and write to it on both the service- and client-side.'
elif p.readable:
hint = 'Since the D-Bus property for this #GObject property is readable but not writable, it is meaningful to read from it on both the client- and service-side. It is only meaningful, however, to write to it on the service-side.'
elif p.writable:
hint = 'Since the D-Bus property for this #GObject property is writable but not readable, it is meaningful to write to it on both the client- and service-side. It is only meaningful, however, to read from it on the service-side.'
else:
                    raise RuntimeError('Cannot handle property %s that is neither readable nor writable'%(p.name))
self.c.write(self.docbook_gen.expand(
' /**\n'
' * %s:%s:\n'
' *\n'
' * Represents the D-Bus property #%s:%s.\n'
' *\n'
' * %s\n'
%(i.camel_name, p.name_hyphen, i.name, p.name, hint), False))
self.write_gtkdoc_deprecated_and_since_and_close(p, self.c, 2)
self.c.write(' g_object_interface_install_property (iface,\n')
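                # Map the D-Bus signature to a GParamSpec constructor; note that
                # 'o', 'g' and 'ay' are exposed as plain strings and that 'as',
                # 'ao' and 'aay' are exposed as G_TYPE_STRV boxed values.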
if p.arg.gtype == 'G_TYPE_VARIANT':
s = 'g_param_spec_variant ("%s", "%s", "%s", G_VARIANT_TYPE ("%s"), NULL'%(p.name_hyphen, p.name, p.name, p.arg.signature)
elif p.arg.signature == 'b':
s = 'g_param_spec_boolean ("%s", "%s", "%s", FALSE'%(p.name_hyphen, p.name, p.name)
elif p.arg.signature == 'y':
s = 'g_param_spec_uchar ("%s", "%s", "%s", 0, 255, 0'%(p.name_hyphen, p.name, p.name)
elif p.arg.signature == 'n':
s = 'g_param_spec_int ("%s", "%s", "%s", G_MININT16, G_MAXINT16, 0'%(p.name_hyphen, p.name, p.name)
elif p.arg.signature == 'q':
s = 'g_param_spec_uint ("%s", "%s", "%s", 0, G_MAXUINT16, 0'%(p.name_hyphen, p.name, p.name)
elif p.arg.signature == 'i':
s = 'g_param_spec_int ("%s", "%s", "%s", G_MININT32, G_MAXINT32, 0'%(p.name_hyphen, p.name, p.name)
elif p.arg.signature == 'u':
s = 'g_param_spec_uint ("%s", "%s", "%s", 0, G_MAXUINT32, 0'%(p.name_hyphen, p.name, p.name)
elif p.arg.signature == 'x':
s = 'g_param_spec_int64 ("%s", "%s", "%s", G_MININT64, G_MAXINT64, 0'%(p.name_hyphen, p.name, p.name)
elif p.arg.signature == 't':
s = 'g_param_spec_uint64 ("%s", "%s", "%s", 0, G_MAXUINT64, 0'%(p.name_hyphen, p.name, p.name)
elif p.arg.signature == 'd':
s = 'g_param_spec_double ("%s", "%s", "%s", -G_MAXDOUBLE, G_MAXDOUBLE, 0.0'%(p.name_hyphen, p.name, p.name)
elif p.arg.signature == 's':
s = 'g_param_spec_string ("%s", "%s", "%s", NULL'%(p.name_hyphen, p.name, p.name)
elif p.arg.signature == 'o':
s = 'g_param_spec_string ("%s", "%s", "%s", NULL'%(p.name_hyphen, p.name, p.name)
elif p.arg.signature == 'g':
s = 'g_param_spec_string ("%s", "%s", "%s", NULL'%(p.name_hyphen, p.name, p.name)
elif p.arg.signature == 'ay':
s = 'g_param_spec_string ("%s", "%s", "%s", NULL'%(p.name_hyphen, p.name, p.name)
elif p.arg.signature == 'as':
s = 'g_param_spec_boxed ("%s", "%s", "%s", G_TYPE_STRV'%(p.name_hyphen, p.name, p.name)
elif p.arg.signature == 'ao':
s = 'g_param_spec_boxed ("%s", "%s", "%s", G_TYPE_STRV'%(p.name_hyphen, p.name, p.name)
elif p.arg.signature == 'aay':
s = 'g_param_spec_boxed ("%s", "%s", "%s", G_TYPE_STRV'%(p.name_hyphen, p.name, p.name)
else:
raise RuntimeError('Unsupported gtype %s for GParamSpec'%(p.arg.gtype))
                self.c.write('    %s, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));'%s)
self.c.write('\n')
self.c.write('}\n'
'\n')
self.c.write('typedef %sIface %sInterface;\n'%(i.camel_name, i.camel_name))
self.c.write('G_DEFINE_INTERFACE (%s, %s, G_TYPE_OBJECT);\n'%(i.camel_name, i.name_lower))
self.c.write('\n')
# ----------------------------------------------------------------------------------------------------
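    # Emits the C-level property accessors. As a rough sketch, for a hypothetical
    # readable and writable string property "Name" on an interface with C prefix
    # "example_foo" (names are illustrative only), the generated functions are:
    #
    #   const gchar *example_foo_get_name (ExampleFoo *object);
    #   gchar       *example_foo_dup_name (ExampleFoo *object);
    #   void         example_foo_set_name (ExampleFoo *object, const gchar *value);
    #
    # The _dup_ variant is only emitted for property types that need freeing.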
def generate_property_accessors(self, i):
for p in i.properties:
# getter
if p.readable and p.writable:
hint = 'Since this D-Bus property is both readable and writable, it is meaningful to use this function on both the client- and service-side.'
elif p.readable:
hint = 'Since this D-Bus property is readable, it is meaningful to use this function on both the client- and service-side.'
elif p.writable:
hint = 'Since this D-Bus property is not readable, it is only meaningful to use this function on the service-side.'
else:
                raise RuntimeError('Cannot handle property %s that is neither readable nor writable'%(p.name))
self.c.write(self.docbook_gen.expand(
'/**\n'
' * %s_get_%s: (skip)\n'
' * @object: A #%s.\n'
' *\n'
' * Gets the value of the #%s:%s D-Bus property.\n'
' *\n'
' * %s\n'
' *\n'
%(i.name_lower, p.name_lower, i.camel_name, i.name, p.name, hint), False))
if p.arg.free_func != None:
                self.c.write(' * <warning>The returned value is only valid until the property changes, so on the client-side it is only safe to use this function on the thread where @object was constructed. Use %s_dup_%s() if on another thread.</warning>\n'
' *\n'
' * Returns: (transfer none): The property value or %%NULL if the property is not set. Do not free the returned value, it belongs to @object.\n'
%(i.name_lower, p.name_lower))
else:
self.c.write(' * Returns: The property value.\n')
self.write_gtkdoc_deprecated_and_since_and_close(p, self.c, 0)
self.c.write('%s\n'
'%s_get_%s (%s *object)\n'
'{\n'%(p.arg.ctype_in, i.name_lower, p.name_lower, i.camel_name))
self.c.write(' return %s%s_GET_IFACE (object)->get_%s (object);\n'%(i.ns_upper, i.name_upper, p.name_lower))
self.c.write('}\n')
self.c.write('\n')
if p.arg.free_func != None:
self.c.write(self.docbook_gen.expand(
'/**\n'
' * %s_dup_%s: (skip)\n'
' * @object: A #%s.\n'
' *\n'
' * Gets a copy of the #%s:%s D-Bus property.\n'
' *\n'
' * %s\n'
' *\n'
' * Returns: (transfer full): The property value or %%NULL if the property is not set. The returned value should be freed with %s().\n'
%(i.name_lower, p.name_lower, i.camel_name, i.name, p.name, hint, p.arg.free_func), False))
self.write_gtkdoc_deprecated_and_since_and_close(p, self.c, 0)
self.c.write('%s\n'
'%s_dup_%s (%s *object)\n'
'{\n'
' %svalue;\n'%(p.arg.ctype_in_dup, i.name_lower, p.name_lower, i.camel_name, p.arg.ctype_in_dup))
self.c.write(' g_object_get (G_OBJECT (object), "%s", &value, NULL);\n'%(p.name_hyphen))
self.c.write(' return value;\n')
self.c.write('}\n')
self.c.write('\n')
# setter
if p.readable and p.writable:
hint = 'Since this D-Bus property is both readable and writable, it is meaningful to use this function on both the client- and service-side.'
elif p.readable:
hint = 'Since this D-Bus property is not writable, it is only meaningful to use this function on the service-side.'
elif p.writable:
hint = 'Since this D-Bus property is writable, it is meaningful to use this function on both the client- and service-side.'
else:
                raise RuntimeError('Cannot handle property %s that is neither readable nor writable'%(p.name))
self.c.write(self.docbook_gen.expand(
'/**\n'
' * %s_set_%s: (skip)\n'
' * @object: A #%s.\n'
' * @value: The value to set.\n'
' *\n'
' * Sets the #%s:%s D-Bus property to @value.\n'
' *\n'
' * %s\n'
%(i.name_lower, p.name_lower, i.camel_name, i.name, p.name, hint), False))
self.write_gtkdoc_deprecated_and_since_and_close(p, self.c, 0)
self.c.write('void\n'
'%s_set_%s (%s *object, %svalue)\n'
'{\n'%(i.name_lower, p.name_lower, i.camel_name, p.arg.ctype_in, ))
self.c.write(' g_object_set (G_OBJECT (object), "%s", value, NULL);\n'%(p.name_hyphen))
self.c.write('}\n')
self.c.write('\n')
# ---------------------------------------------------------------------------------------------------
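    # Emits one thin emitter per D-Bus signal; for a hypothetical signal
    # "Changed(s key)" (illustrative name) this is roughly
    #
    #   void example_foo_emit_changed (ExampleFoo *object, const gchar *arg_key);
    #
    # which simply forwards to g_signal_emit_by_name().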
def generate_signal_emitters(self, i):
for s in i.signals:
self.c.write(self.docbook_gen.expand(
'/**\n'
' * %s_emit_%s:\n'
' * @object: A #%s.\n'
%(i.name_lower, s.name_lower, i.camel_name), False))
for a in s.args:
self.c.write(' * @arg_%s: Argument to pass with the signal.\n'%(a.name))
self.c.write(self.docbook_gen.expand(
' *\n'
' * Emits the #%s::%s D-Bus signal.\n'
%(i.name, s.name), False))
self.write_gtkdoc_deprecated_and_since_and_close(s, self.c, 0)
self.c.write('void\n'
'%s_emit_%s (\n'
' %s *object'%(i.name_lower, s.name_lower, i.camel_name))
for a in s.args:
self.c.write(',\n %sarg_%s'%(a.ctype_in, a.name))
self.c.write(')\n'
'{\n'
' g_signal_emit_by_name (object, "%s"'%(s.name_hyphen))
for a in s.args:
self.c.write(', arg_%s'%a.name)
self.c.write(');\n')
self.c.write('}\n'
'\n')
# ---------------------------------------------------------------------------------------------------
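    # Emits the client-side method wrappers. Each D-Bus method gets the usual
    # GIO async/finish/sync triple; as a rough sketch for a hypothetical method
    # "Frobnicate(in s name, out i result)" (illustrative names only):
    #
    #   void     example_foo_call_frobnicate        (ExampleFoo *proxy,
    #                                                const gchar *arg_name,
    #                                                GCancellable *cancellable,
    #                                                GAsyncReadyCallback callback,
    #                                                gpointer user_data);
    #   gboolean example_foo_call_frobnicate_finish (ExampleFoo *proxy,
    #                                                gint *out_result,
    #                                                GAsyncResult *res,
    #                                                GError **error);
    #   gboolean example_foo_call_frobnicate_sync   (ExampleFoo *proxy, ...);
    #
    # Methods annotated with org.gtk.GDBus.C.UnixFD additionally take a
    # GUnixFDList (in) / GUnixFDList ** (out) parameter.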
def generate_method_calls(self, i):
for m in i.methods:
unix_fd = False
if utils.lookup_annotation(m.annotations, 'org.gtk.GDBus.C.UnixFD'):
unix_fd = True
# async begin
self.c.write('/**\n'
' * %s_call_%s:\n'
' * @proxy: A #%sProxy.\n'
%(i.name_lower, m.name_lower, i.camel_name))
for a in m.in_args:
self.c.write(' * @arg_%s: Argument to pass with the method invocation.\n'%(a.name))
if unix_fd:
self.c.write(' * @fd_list: (allow-none): A #GUnixFDList or %NULL.\n')
self.c.write(self.docbook_gen.expand(
' * @cancellable: (allow-none): A #GCancellable or %%NULL.\n'
' * @callback: A #GAsyncReadyCallback to call when the request is satisfied or %%NULL.\n'
' * @user_data: User data to pass to @callback.\n'
' *\n'
' * Asynchronously invokes the %s.%s() D-Bus method on @proxy.\n'
' * When the operation is finished, @callback will be invoked in the <link linkend="g-main-context-push-thread-default">thread-default main loop</link> of the thread you are calling this method from.\n'
' * You can then call %s_call_%s_finish() to get the result of the operation.\n'
' *\n'
' * See %s_call_%s_sync() for the synchronous, blocking version of this method.\n'
%(i.name, m.name, i.name_lower, m.name_lower, i.name_lower, m.name_lower), False))
self.write_gtkdoc_deprecated_and_since_and_close(m, self.c, 0)
self.c.write('void\n'
'%s_call_%s (\n'
' %s *proxy'%(i.name_lower, m.name_lower, i.camel_name))
for a in m.in_args:
self.c.write(',\n %sarg_%s'%(a.ctype_in, a.name))
if unix_fd:
self.c.write(',\n GUnixFDList *fd_list')
self.c.write(',\n'
' GCancellable *cancellable,\n'
' GAsyncReadyCallback callback,\n'
' gpointer user_data)\n'
'{\n')
if unix_fd:
self.c.write(' g_dbus_proxy_call_with_unix_fd_list (G_DBUS_PROXY (proxy),\n')
else:
self.c.write(' g_dbus_proxy_call (G_DBUS_PROXY (proxy),\n')
self.c.write(' "%s",\n'
' g_variant_new ("('%(m.name))
for a in m.in_args:
self.c.write('%s'%(a.format_in))
self.c.write(')"')
for a in m.in_args:
self.c.write(',\n arg_%s'%(a.name))
self.c.write('),\n'
' G_DBUS_CALL_FLAGS_NONE,\n'
' -1,\n')
if unix_fd:
self.c.write(' fd_list,\n')
self.c.write(' cancellable,\n'
' callback,\n'
' user_data);\n')
self.c.write('}\n'
'\n')
# async finish
self.c.write('/**\n'
' * %s_call_%s_finish:\n'
' * @proxy: A #%sProxy.\n'
%(i.name_lower, m.name_lower, i.camel_name))
for a in m.out_args:
self.c.write(' * @out_%s: (out): Return location for return parameter or %%NULL to ignore.\n'%(a.name))
if unix_fd:
self.c.write(' * @out_fd_list: (out): Return location for a #GUnixFDList or %NULL.\n')
self.c.write(self.docbook_gen.expand(
' * @res: The #GAsyncResult obtained from the #GAsyncReadyCallback passed to %s_call_%s().\n'
' * @error: Return location for error or %%NULL.\n'
' *\n'
' * Finishes an operation started with %s_call_%s().\n'
' *\n'
                     ' * Returns: (skip): %%TRUE if the call succeeded, %%FALSE if @error is set.\n'
%(i.name_lower, m.name_lower, i.name_lower, m.name_lower), False))
self.write_gtkdoc_deprecated_and_since_and_close(m, self.c, 0)
self.c.write('gboolean\n'
'%s_call_%s_finish (\n'
' %s *proxy'%(i.name_lower, m.name_lower, i.camel_name))
for a in m.out_args:
self.c.write(',\n %sout_%s'%(a.ctype_out, a.name))
if unix_fd:
self.c.write(',\n GUnixFDList **out_fd_list')
self.c.write(',\n'
' GAsyncResult *res,\n'
' GError **error)\n'
'{\n'
' GVariant *_ret;\n')
if unix_fd:
self.c.write(' _ret = g_dbus_proxy_call_with_unix_fd_list_finish (G_DBUS_PROXY (proxy), out_fd_list, res, error);\n')
else:
self.c.write(' _ret = g_dbus_proxy_call_finish (G_DBUS_PROXY (proxy), res, error);\n')
self.c.write(' if (_ret == NULL)\n'
' goto _out;\n')
self.c.write(' g_variant_get (_ret,\n'
' \"(')
for a in m.out_args:
self.c.write('%s'%(a.format_out))
self.c.write(')"')
for a in m.out_args:
self.c.write(',\n out_%s'%(a.name))
self.c.write(');\n'
' g_variant_unref (_ret);\n')
self.c.write('_out:\n'
' return _ret != NULL;\n'
'}\n'
'\n')
# sync
self.c.write('/**\n'
' * %s_call_%s_sync:\n'
' * @proxy: A #%sProxy.\n'
%(i.name_lower, m.name_lower, i.camel_name))
for a in m.in_args:
self.c.write(' * @arg_%s: Argument to pass with the method invocation.\n'%(a.name))
if unix_fd:
self.c.write(' * @fd_list: (allow-none): A #GUnixFDList or %NULL.\n')
for a in m.out_args:
self.c.write(' * @out_%s: (out): Return location for return parameter or %%NULL to ignore.\n'%(a.name))
if unix_fd:
self.c.write(' * @out_fd_list: (out): Return location for a #GUnixFDList or %NULL.\n')
self.c.write(self.docbook_gen.expand(
' * @cancellable: (allow-none): A #GCancellable or %%NULL.\n'
' * @error: Return location for error or %%NULL.\n'
' *\n'
' * Synchronously invokes the %s.%s() D-Bus method on @proxy. The calling thread is blocked until a reply is received.\n'
' *\n'
' * See %s_call_%s() for the asynchronous version of this method.\n'
' *\n'
                     ' * Returns: (skip): %%TRUE if the call succeeded, %%FALSE if @error is set.\n'
%(i.name, m.name, i.name_lower, m.name_lower), False))
self.write_gtkdoc_deprecated_and_since_and_close(m, self.c, 0)
self.c.write('gboolean\n'
'%s_call_%s_sync (\n'
' %s *proxy'%(i.name_lower, m.name_lower, i.camel_name))
for a in m.in_args:
self.c.write(',\n %sarg_%s'%(a.ctype_in, a.name))
if unix_fd:
self.c.write(',\n GUnixFDList *fd_list')
for a in m.out_args:
self.c.write(',\n %sout_%s'%(a.ctype_out, a.name))
if unix_fd:
self.c.write(',\n GUnixFDList **out_fd_list')
self.c.write(',\n'
' GCancellable *cancellable,\n'
' GError **error)\n'
'{\n'
' GVariant *_ret;\n')
if unix_fd:
self.c.write(' _ret = g_dbus_proxy_call_with_unix_fd_list_sync (G_DBUS_PROXY (proxy),\n')
else:
self.c.write(' _ret = g_dbus_proxy_call_sync (G_DBUS_PROXY (proxy),\n')
self.c.write(' "%s",\n'
' g_variant_new ("('%(m.name))
for a in m.in_args:
self.c.write('%s'%(a.format_in))
self.c.write(')"')
for a in m.in_args:
self.c.write(',\n arg_%s'%(a.name))
self.c.write('),\n'
' G_DBUS_CALL_FLAGS_NONE,\n'
' -1,\n')
if unix_fd:
self.c.write(' fd_list,\n'
' out_fd_list,\n')
self.c.write(' cancellable,\n'
' error);\n'
' if (_ret == NULL)\n'
' goto _out;\n')
self.c.write(' g_variant_get (_ret,\n'
' \"(')
for a in m.out_args:
self.c.write('%s'%(a.format_out))
self.c.write(')"')
for a in m.out_args:
self.c.write(',\n out_%s'%(a.name))
self.c.write(');\n'
' g_variant_unref (_ret);\n')
self.c.write('_out:\n'
' return _ret != NULL;\n'
'}\n'
'\n')
# ---------------------------------------------------------------------------------------------------
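    # Emits one complete_<method>() helper per D-Bus method; it packs the
    # out-arguments into a tuple and finishes the invocation with
    # g_dbus_method_invocation_return_value() (or the _with_unix_fd_list
    # variant), consuming @invocation.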
def generate_method_completers(self, i):
for m in i.methods:
unix_fd = False
if utils.lookup_annotation(m.annotations, 'org.gtk.GDBus.C.UnixFD'):
unix_fd = True
self.c.write('/**\n'
' * %s_complete_%s:\n'
' * @object: A #%s.\n'
' * @invocation: (transfer full): A #GDBusMethodInvocation.\n'
%(i.name_lower, m.name_lower, i.camel_name))
if unix_fd:
self.c.write (' * @fd_list: (allow-none): A #GUnixFDList or %NULL.\n')
for a in m.out_args:
self.c.write(' * @%s: Parameter to return.\n'%(a.name))
self.c.write(self.docbook_gen.expand(
' *\n'
' * Helper function used in service implementations to finish handling invocations of the %s.%s() D-Bus method. If you instead want to finish handling an invocation by returning an error, use g_dbus_method_invocation_return_error() or similar.\n'
' *\n'
                     ' * This method will free @invocation; you cannot use it afterwards.\n'
%(i.name, m.name), False))
self.write_gtkdoc_deprecated_and_since_and_close(m, self.c, 0)
self.c.write('void\n'
'%s_complete_%s (\n'
' %s *object,\n'
' GDBusMethodInvocation *invocation'%(i.name_lower, m.name_lower, i.camel_name))
if unix_fd:
self.c.write(',\n GUnixFDList *fd_list')
for a in m.out_args:
self.c.write(',\n %s%s'%(a.ctype_in, a.name))
self.c.write(')\n'
'{\n')
if unix_fd:
self.c.write(' g_dbus_method_invocation_return_value_with_unix_fd_list (invocation,\n'
' g_variant_new ("(')
else:
self.c.write(' g_dbus_method_invocation_return_value (invocation,\n'
' g_variant_new ("(')
for a in m.out_args:
self.c.write('%s'%(a.format_in))
self.c.write(')"')
for a in m.out_args:
self.c.write(',\n %s'%(a.name))
if unix_fd:
self.c.write('),\n fd_list);\n')
else:
self.c.write('));\n')
self.c.write('}\n'
'\n')
# ---------------------------------------------------------------------------------------------------
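    # Emits the client-side GDBusProxy subclass: property reads are served from
    # the proxy's property cache, property writes go through
    # org.freedesktop.DBus.Properties.Set, and the g_signal/g_properties_changed
    # vfuncs are translated into the GObject signals and ::notify emissions
    # declared on the interface.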
def generate_proxy(self, i):
# class boilerplate
self.c.write('/* ------------------------------------------------------------------------ */\n'
'\n')
self.c.write(self.docbook_gen.expand(
'/**\n'
' * %sProxy:\n'
' *\n'
' * The #%sProxy structure contains only private data and should only be accessed using the provided API.\n'
%(i.camel_name, i.camel_name), False))
self.write_gtkdoc_deprecated_and_since_and_close(i, self.c, 0)
self.c.write('\n')
self.c.write(self.docbook_gen.expand(
'/**\n'
' * %sProxyClass:\n'
' * @parent_class: The parent class.\n'
' *\n'
' * Class structure for #%sProxy.\n'
%(i.camel_name, i.camel_name), False))
self.write_gtkdoc_deprecated_and_since_and_close(i, self.c, 0)
self.c.write('\n')
self.c.write('struct _%sProxyPrivate\n'
'{\n'
' GData *qdata;\n'
'};\n'
'\n'%i.camel_name)
self.c.write('static void %s_proxy_iface_init (%sIface *iface);\n'
'\n'%(i.name_lower, i.camel_name))
self.c.write('G_DEFINE_TYPE_WITH_CODE (%sProxy, %s_proxy, G_TYPE_DBUS_PROXY,\n'%(i.camel_name, i.name_lower))
self.c.write(' G_IMPLEMENT_INTERFACE (%sTYPE_%s, %s_proxy_iface_init));\n\n'%(i.ns_upper, i.name_upper, i.name_lower))
# finalize
self.c.write('static void\n'
'%s_proxy_finalize (GObject *object)\n'
'{\n'%(i.name_lower))
self.c.write(' %sProxy *proxy = %s%s_PROXY (object);\n'%(i.camel_name, i.ns_upper, i.name_upper))
self.c.write(' g_datalist_clear (&proxy->priv->qdata);\n')
self.c.write(' G_OBJECT_CLASS (%s_proxy_parent_class)->finalize (object);\n'
'}\n'
'\n'%(i.name_lower))
# property accessors
#
        # Note that we are guaranteed that prop_id starts at 1 and that the
        # properties are laid out in the same order as the introspection data pointers
#
self.c.write('static void\n'
'%s_proxy_get_property (GObject *object,\n'
' guint prop_id,\n'
' GValue *value,\n'
' GParamSpec *pspec)\n'
'{\n'%(i.name_lower))
if len(i.properties) > 0:
self.c.write(' const _ExtendedGDBusPropertyInfo *info;\n'
' GVariant *variant;\n'
' g_assert (prop_id != 0 && prop_id - 1 < %d);\n'
' info = _%s_property_info_pointers[prop_id - 1];\n'
' variant = g_dbus_proxy_get_cached_property (G_DBUS_PROXY (object), info->parent_struct.name);\n'
' if (info->use_gvariant)\n'
' {\n'
' g_value_set_variant (value, variant);\n'
' }\n'
' else\n'
' {\n'
                     # it could be that we don't have the value in the cache; in that
                     # case we do nothing and the user gets the default value for the GType
' if (variant != NULL)\n'
' g_dbus_gvariant_to_gvalue (variant, value);\n'
' }\n'
' if (variant != NULL)\n'
' g_variant_unref (variant);\n'
%(len(i.properties), i.name_lower))
self.c.write('}\n'
'\n')
if len(i.properties) > 0:
self.c.write('static void\n'
'%s_proxy_set_property_cb (GDBusProxy *proxy,\n'
' GAsyncResult *res,\n'
' gpointer user_data)\n'
'{\n'%(i.name_lower))
self.c.write(' const _ExtendedGDBusPropertyInfo *info = user_data;\n'
' GError *error;\n'
' error = NULL;\n'
' if (!g_dbus_proxy_call_finish (proxy, res, &error))\n'
' {\n'
' g_warning ("Error setting property `%%s\' on interface %s: %%s (%%s, %%d)",\n'
' info->parent_struct.name, \n'
' error->message, g_quark_to_string (error->domain), error->code);\n'
' g_error_free (error);\n'
' }\n'
%(i.name))
self.c.write('}\n'
'\n')
self.c.write('static void\n'
'%s_proxy_set_property (GObject *object,\n'
' guint prop_id,\n'
' const GValue *value,\n'
' GParamSpec *pspec)\n'
'{\n'%(i.name_lower))
if len(i.properties) > 0:
self.c.write(' const _ExtendedGDBusPropertyInfo *info;\n'
' GVariant *variant;\n'
' g_assert (prop_id != 0 && prop_id - 1 < %d);\n'
' info = _%s_property_info_pointers[prop_id - 1];\n'
' variant = g_dbus_gvalue_to_gvariant (value, G_VARIANT_TYPE (info->parent_struct.signature));\n'
' g_dbus_proxy_call (G_DBUS_PROXY (object),\n'
' "org.freedesktop.DBus.Properties.Set",\n'
' g_variant_new ("(ssv)", "%s", info->parent_struct.name, variant),\n'
' G_DBUS_CALL_FLAGS_NONE,\n'
' -1,\n'
' NULL, (GAsyncReadyCallback) %s_proxy_set_property_cb, (gpointer) info);\n'
' g_variant_unref (variant);\n'
%(len(i.properties), i.name_lower, i.name, i.name_lower))
self.c.write('}\n'
'\n')
# signal received
self.c.write('static void\n'
'%s_proxy_g_signal (GDBusProxy *proxy,\n'
' const gchar *sender_name,\n'
' const gchar *signal_name,\n'
' GVariant *parameters)\n'
'{\n'%(i.name_lower))
self.c.write(' _ExtendedGDBusSignalInfo *info;\n'
' GVariantIter iter;\n'
' GVariant *child;\n'
' GValue *paramv;\n'
' guint num_params;\n'
' guint n;\n'
                     '  guint signal_id;\n')
# Note: info could be NULL if we are talking to a newer version of the interface
self.c.write(' info = (_ExtendedGDBusSignalInfo *) g_dbus_interface_info_lookup_signal ((GDBusInterfaceInfo *) &_%s_interface_info, signal_name);\n'
' if (info == NULL)\n'
' return;\n'
%(i.name_lower))
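        # Unpack the signal's GVariant parameters into a GValue array (slot 0
        # holds the proxy itself) and dispatch it with g_signal_emitv().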
        self.c.write ('  num_params = g_variant_n_children (parameters);\n'
                      '  paramv = g_new0 (GValue, num_params + 1);\n'
                      '  g_value_init (&paramv[0], %sTYPE_%s);\n'
                      '  g_value_set_object (&paramv[0], proxy);\n'
                      %(i.ns_upper, i.name_upper))
        self.c.write('  g_variant_iter_init (&iter, parameters);\n'
                     '  n = 1;\n'
                     '  while ((child = g_variant_iter_next_value (&iter)) != NULL)\n'
                     '    {\n'
                     '      _ExtendedGDBusArgInfo *arg_info = (_ExtendedGDBusArgInfo *) info->parent_struct.args[n - 1];\n'
                     '      if (arg_info->use_gvariant)\n'
                     '        {\n'
                     '          g_value_init (&paramv[n], G_TYPE_VARIANT);\n'
                     '          g_value_set_variant (&paramv[n], child);\n'
                     '          n++;\n'
                     '        }\n'
                     '      else\n'
                     '        g_dbus_gvariant_to_gvalue (child, &paramv[n++]);\n'
                     '      g_variant_unref (child);\n'
                     '    }\n'
                     )
self.c.write(' signal_id = g_signal_lookup (info->signal_name, %sTYPE_%s);\n'
%(i.ns_upper, i.name_upper))
self.c.write(' g_signal_emitv (paramv, signal_id, 0, NULL);\n')
        self.c.write('  for (n = 0; n < num_params + 1; n++)\n'
                     '    g_value_unset (&paramv[n]);\n'
                     '  g_free (paramv);\n')
self.c.write('}\n'
'\n')
# property changed
self.c.write('static void\n'
'%s_proxy_g_properties_changed (GDBusProxy *_proxy,\n'
' GVariant *changed_properties,\n'
' const gchar *const *invalidated_properties)\n'
'{\n'%(i.name_lower))
# Note: info could be NULL if we are talking to a newer version of the interface
self.c.write(' %sProxy *proxy = %s%s_PROXY (_proxy);\n'
' guint n;\n'
' const gchar *key;\n'
' GVariantIter *iter;\n'
' _ExtendedGDBusPropertyInfo *info;\n'
' g_variant_get (changed_properties, "a{sv}", &iter);\n'
' while (g_variant_iter_next (iter, "{&sv}", &key, NULL))\n'
' {\n'
' info = (_ExtendedGDBusPropertyInfo *) g_dbus_interface_info_lookup_property ((GDBusInterfaceInfo *) &_%s_interface_info, key);\n'
' g_datalist_remove_data (&proxy->priv->qdata, key);\n'
' if (info != NULL)\n'
' g_object_notify (G_OBJECT (proxy), info->hyphen_name);\n'
' }\n'
' g_variant_iter_free (iter);\n'
' for (n = 0; invalidated_properties[n] != NULL; n++)\n'
' {\n'
' info = (_ExtendedGDBusPropertyInfo *) g_dbus_interface_info_lookup_property ((GDBusInterfaceInfo *) &_%s_interface_info, invalidated_properties[n]);\n'
' g_datalist_remove_data (&proxy->priv->qdata, invalidated_properties[n]);\n'
' if (info != NULL)\n'
' g_object_notify (G_OBJECT (proxy), info->hyphen_name);\n'
' }\n'
'}\n'
'\n'
%(i.camel_name, i.ns_upper, i.name_upper,
i.name_lower, i.name_lower))
# property vfuncs
for p in i.properties:
nul_value = '0'
if p.arg.free_func != None:
nul_value = 'NULL'
self.c.write('static %s\n'
'%s_proxy_get_%s (%s *object)\n'
'{\n'
' %sProxy *proxy = %s%s_PROXY (object);\n'
' GVariant *variant;\n'
' %svalue = %s;\n'%(p.arg.ctype_in, i.name_lower, p.name_lower, i.camel_name,
i.camel_name, i.ns_upper, i.name_upper,
p.arg.ctype_in, nul_value))
# For some property types, we have to free the returned
# value (or part of it, e.g. the container) because of how
            # GVariant works; see https://bugzilla.gnome.org/show_bug.cgi?id=657100
            # for details.
#
            free_container = False
            if p.arg.gvariant_get == 'g_variant_get_strv' or p.arg.gvariant_get == 'g_variant_get_objv' or p.arg.gvariant_get == 'g_variant_get_bytestring_array':
                free_container = True
            # If we are already holding a previously returned value for strv, objv or
            # bytestring_array (see below), just return that: this way the results from
            # multiple consecutive calls to the getter stay valid until the property changes.
#
if free_container:
self.c.write(' value = g_datalist_get_data (&proxy->priv->qdata, \"%s\");\n'
' if (value != NULL)\n'
' return value;\n'
%(p.name))
self.c.write(' variant = g_dbus_proxy_get_cached_property (G_DBUS_PROXY (proxy), \"%s\");\n'%(p.name))
if p.arg.gtype == 'G_TYPE_VARIANT':
self.c.write(' value = variant;\n')
self.c.write(' if (variant != NULL)\n')
self.c.write(' g_variant_unref (variant);\n')
else:
self.c.write(' if (variant != NULL)\n'
' {\n')
extra_len = ''
if p.arg.gvariant_get == 'g_variant_get_string' or p.arg.gvariant_get == 'g_variant_get_strv' or p.arg.gvariant_get == 'g_variant_get_objv' or p.arg.gvariant_get == 'g_variant_get_bytestring_array':
extra_len = ', NULL'
self.c.write(' value = %s (variant%s);\n'%(p.arg.gvariant_get, extra_len))
if free_container:
self.c.write(' g_datalist_set_data_full (&proxy->priv->qdata, \"%s\", (gpointer) value, g_free);\n'
%(p.name))
self.c.write(' g_variant_unref (variant);\n')
self.c.write(' }\n')
self.c.write(' return value;\n')
self.c.write('}\n')
self.c.write('\n')
# class boilerplate
self.c.write('static void\n'
'%s_proxy_init (%sProxy *proxy)\n'
'{\n'
' proxy->priv = G_TYPE_INSTANCE_GET_PRIVATE (proxy, %sTYPE_%s_PROXY, %sProxyPrivate);\n'
' g_dbus_proxy_set_interface_info (G_DBUS_PROXY (proxy), %s_interface_info ());\n'
'}\n'
'\n'
%(i.name_lower, i.camel_name,
i.ns_upper, i.name_upper, i.camel_name,
i.name_lower))
self.c.write('static void\n'
'%s_proxy_class_init (%sProxyClass *klass)\n'
'{\n'
' GObjectClass *gobject_class;\n'
' GDBusProxyClass *proxy_class;\n'
'\n'
' g_type_class_add_private (klass, sizeof (%sProxyPrivate));\n'
'\n'
' gobject_class = G_OBJECT_CLASS (klass);\n'
' gobject_class->finalize = %s_proxy_finalize;\n'
' gobject_class->get_property = %s_proxy_get_property;\n'
' gobject_class->set_property = %s_proxy_set_property;\n'
'\n'
' proxy_class = G_DBUS_PROXY_CLASS (klass);\n'
' proxy_class->g_signal = %s_proxy_g_signal;\n'
' proxy_class->g_properties_changed = %s_proxy_g_properties_changed;\n'
'\n'%(i.name_lower, i.camel_name,
i.camel_name,
i.name_lower, i.name_lower, i.name_lower, i.name_lower, i.name_lower))
if len(i.properties) > 0:
self.c.write('\n'
' %s_override_properties (gobject_class, 1);\n'%(i.name_lower))
self.c.write('}\n'
'\n')
self.c.write('static void\n'
'%s_proxy_iface_init (%sIface *iface)\n'
'{\n'%(i.name_lower, i.camel_name))
for p in i.properties:
self.c.write(' iface->get_%s = %s_proxy_get_%s;\n'%(p.name_lower, i.name_lower, p.name_lower))
self.c.write('}\n'
'\n')
# constructors
self.c.write(self.docbook_gen.expand(
'/**\n'
' * %s_proxy_new:\n'
' * @connection: A #GDBusConnection.\n'
' * @flags: Flags from the #GDBusProxyFlags enumeration.\n'
' * @name: (allow-none): A bus name (well-known or unique) or %%NULL if @connection is not a message bus connection.\n'
' * @object_path: An object path.\n'
' * @cancellable: (allow-none): A #GCancellable or %%NULL.\n'
' * @callback: A #GAsyncReadyCallback to call when the request is satisfied.\n'
' * @user_data: User data to pass to @callback.\n'
' *\n'
' * Asynchronously creates a proxy for the D-Bus interface #%s. See g_dbus_proxy_new() for more details.\n'
' *\n'
' * When the operation is finished, @callback will be invoked in the <link linkend="g-main-context-push-thread-default">thread-default main loop</link> of the thread you are calling this method from.\n'
' * You can then call %s_proxy_new_finish() to get the result of the operation.\n'
' *\n'
' * See %s_proxy_new_sync() for the synchronous, blocking version of this constructor.\n'
%(i.name_lower, i.name, i.name_lower, i.name_lower), False))
self.write_gtkdoc_deprecated_and_since_and_close(i, self.c, 0)
self.c.write('void\n'
'%s_proxy_new (\n'
' GDBusConnection *connection,\n'
' GDBusProxyFlags flags,\n'
' const gchar *name,\n'
' const gchar *object_path,\n'
' GCancellable *cancellable,\n'
' GAsyncReadyCallback callback,\n'
' gpointer user_data)\n'
'{\n'
' g_async_initable_new_async (%sTYPE_%s_PROXY, G_PRIORITY_DEFAULT, cancellable, callback, user_data, "g-flags", flags, "g-name", name, "g-connection", connection, "g-object-path", object_path, "g-interface-name", "%s", NULL);\n'
'}\n'
'\n'
%(i.name_lower, i.ns_upper, i.name_upper, i.name))
self.c.write('/**\n'
' * %s_proxy_new_finish:\n'
' * @res: The #GAsyncResult obtained from the #GAsyncReadyCallback passed to %s_proxy_new().\n'
' * @error: Return location for error or %%NULL\n'
' *\n'
' * Finishes an operation started with %s_proxy_new().\n'
' *\n'
' * Returns: (transfer full) (type %sProxy): The constructed proxy object or %%NULL if @error is set.\n'
%(i.name_lower, i.name_lower, i.name_lower, i.camel_name))
self.write_gtkdoc_deprecated_and_since_and_close(i, self.c, 0)
self.c.write('%s *\n'
'%s_proxy_new_finish (\n'
' GAsyncResult *res,\n'
' GError **error)\n'
'{\n'
' GObject *ret;\n'
' GObject *source_object;\n'
' source_object = g_async_result_get_source_object (res);\n'
' ret = g_async_initable_new_finish (G_ASYNC_INITABLE (source_object), res, error);\n'
' g_object_unref (source_object);\n'
' if (ret != NULL)\n'
' return %s%s (ret);\n'
' else\n'
' return NULL;\n'
'}\n'
'\n'
%(i.camel_name, i.name_lower, i.ns_upper, i.name_upper))
self.c.write(self.docbook_gen.expand(
'/**\n'
' * %s_proxy_new_sync:\n'
' * @connection: A #GDBusConnection.\n'
' * @flags: Flags from the #GDBusProxyFlags enumeration.\n'
' * @name: (allow-none): A bus name (well-known or unique) or %%NULL if @connection is not a message bus connection.\n'
' * @object_path: An object path.\n'
' * @cancellable: (allow-none): A #GCancellable or %%NULL.\n'
' * @error: Return location for error or %%NULL\n'
' *\n'
' * Synchronously creates a proxy for the D-Bus interface #%s. See g_dbus_proxy_new_sync() for more details.\n'
' *\n'
' * The calling thread is blocked until a reply is received.\n'
' *\n'
' * See %s_proxy_new() for the asynchronous version of this constructor.\n'
' *\n'
' * Returns: (transfer full) (type %sProxy): The constructed proxy object or %%NULL if @error is set.\n'
%(i.name_lower, i.name, i.name_lower, i.camel_name), False))
self.write_gtkdoc_deprecated_and_since_and_close(i, self.c, 0)
self.c.write('%s *\n'
'%s_proxy_new_sync (\n'
' GDBusConnection *connection,\n'
' GDBusProxyFlags flags,\n'
' const gchar *name,\n'
' const gchar *object_path,\n'
' GCancellable *cancellable,\n'
' GError **error)\n'
'{\n'
' GInitable *ret;\n'
' ret = g_initable_new (%sTYPE_%s_PROXY, cancellable, error, "g-flags", flags, "g-name", name, "g-connection", connection, "g-object-path", object_path, "g-interface-name", "%s", NULL);\n'
' if (ret != NULL)\n'
' return %s%s (ret);\n'
' else\n'
' return NULL;\n'
'}\n'
'\n'
%(i.camel_name, i.name_lower, i.ns_upper, i.name_upper, i.name, i.ns_upper, i.name_upper))
self.c.write('\n')
self.c.write(self.docbook_gen.expand(
'/**\n'
' * %s_proxy_new_for_bus:\n'
' * @bus_type: A #GBusType.\n'
' * @flags: Flags from the #GDBusProxyFlags enumeration.\n'
' * @name: A bus name (well-known or unique).\n'
' * @object_path: An object path.\n'
' * @cancellable: (allow-none): A #GCancellable or %%NULL.\n'
' * @callback: A #GAsyncReadyCallback to call when the request is satisfied.\n'
' * @user_data: User data to pass to @callback.\n'
' *\n'
' * Like %s_proxy_new() but takes a #GBusType instead of a #GDBusConnection.\n'
' *\n'
' * When the operation is finished, @callback will be invoked in the <link linkend="g-main-context-push-thread-default">thread-default main loop</link> of the thread you are calling this method from.\n'
' * You can then call %s_proxy_new_for_bus_finish() to get the result of the operation.\n'
' *\n'
' * See %s_proxy_new_for_bus_sync() for the synchronous, blocking version of this constructor.\n'
%(i.name_lower, i.name_lower, i.name_lower, i.name_lower), False))
self.write_gtkdoc_deprecated_and_since_and_close(i, self.c, 0)
self.c.write('void\n'
'%s_proxy_new_for_bus (\n'
' GBusType bus_type,\n'
' GDBusProxyFlags flags,\n'
' const gchar *name,\n'
' const gchar *object_path,\n'
' GCancellable *cancellable,\n'
' GAsyncReadyCallback callback,\n'
' gpointer user_data)\n'
'{\n'
' g_async_initable_new_async (%sTYPE_%s_PROXY, G_PRIORITY_DEFAULT, cancellable, callback, user_data, "g-flags", flags, "g-name", name, "g-bus-type", bus_type, "g-object-path", object_path, "g-interface-name", "%s", NULL);\n'
'}\n'
'\n'
%(i.name_lower, i.ns_upper, i.name_upper, i.name))
self.c.write('/**\n'
' * %s_proxy_new_for_bus_finish:\n'
' * @res: The #GAsyncResult obtained from the #GAsyncReadyCallback passed to %s_proxy_new_for_bus().\n'
' * @error: Return location for error or %%NULL\n'
' *\n'
' * Finishes an operation started with %s_proxy_new_for_bus().\n'
' *\n'
' * Returns: (transfer full) (type %sProxy): The constructed proxy object or %%NULL if @error is set.\n'
%(i.name_lower, i.name_lower, i.name_lower, i.camel_name))
self.write_gtkdoc_deprecated_and_since_and_close(i, self.c, 0)
self.c.write('%s *\n'
'%s_proxy_new_for_bus_finish (\n'
' GAsyncResult *res,\n'
' GError **error)\n'
'{\n'
' GObject *ret;\n'
' GObject *source_object;\n'
' source_object = g_async_result_get_source_object (res);\n'
' ret = g_async_initable_new_finish (G_ASYNC_INITABLE (source_object), res, error);\n'
' g_object_unref (source_object);\n'
' if (ret != NULL)\n'
' return %s%s (ret);\n'
' else\n'
' return NULL;\n'
'}\n'
'\n'
%(i.camel_name, i.name_lower, i.ns_upper, i.name_upper))
self.c.write(self.docbook_gen.expand(
'/**\n'
' * %s_proxy_new_for_bus_sync:\n'
' * @bus_type: A #GBusType.\n'
' * @flags: Flags from the #GDBusProxyFlags enumeration.\n'
' * @name: A bus name (well-known or unique).\n'
' * @object_path: An object path.\n'
' * @cancellable: (allow-none): A #GCancellable or %%NULL.\n'
' * @error: Return location for error or %%NULL\n'
' *\n'
' * Like %s_proxy_new_sync() but takes a #GBusType instead of a #GDBusConnection.\n'
' *\n'
' * The calling thread is blocked until a reply is received.\n'
' *\n'
' * See %s_proxy_new_for_bus() for the asynchronous version of this constructor.\n'
' *\n'
' * Returns: (transfer full) (type %sProxy): The constructed proxy object or %%NULL if @error is set.\n'
%(i.name_lower, i.name_lower, i.name_lower, i.camel_name), False))
self.write_gtkdoc_deprecated_and_since_and_close(i, self.c, 0)
self.c.write('%s *\n'
'%s_proxy_new_for_bus_sync (\n'
' GBusType bus_type,\n'
' GDBusProxyFlags flags,\n'
' const gchar *name,\n'
' const gchar *object_path,\n'
' GCancellable *cancellable,\n'
' GError **error)\n'
'{\n'
' GInitable *ret;\n'
' ret = g_initable_new (%sTYPE_%s_PROXY, cancellable, error, "g-flags", flags, "g-name", name, "g-bus-type", bus_type, "g-object-path", object_path, "g-interface-name", "%s", NULL);\n'
' if (ret != NULL)\n'
' return %s%s (ret);\n'
' else\n'
' return NULL;\n'
'}\n'
'\n'
%(i.camel_name, i.name_lower, i.ns_upper, i.name_upper, i.name, i.ns_upper, i.name_upper))
self.c.write('\n')
# ---------------------------------------------------------------------------------------------------
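    # Emits the service-side GDBusInterfaceSkeleton subclass: incoming method
    # calls are dispatched as "handle-<method>" GObject signals, property values
    # live in a mutex-protected GValueArray, and property changes are coalesced
    # into a single PropertiesChanged emission scheduled from an idle source.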
def generate_skeleton(self, i):
# class boilerplate
self.c.write('/* ------------------------------------------------------------------------ */\n'
'\n')
self.c.write(self.docbook_gen.expand(
'/**\n'
' * %sSkeleton:\n'
' *\n'
' * The #%sSkeleton structure contains only private data and should only be accessed using the provided API.\n'
%(i.camel_name, i.camel_name), False))
self.write_gtkdoc_deprecated_and_since_and_close(i, self.c, 0)
self.c.write('\n')
self.c.write(self.docbook_gen.expand(
'/**\n'
' * %sSkeletonClass:\n'
' * @parent_class: The parent class.\n'
' *\n'
' * Class structure for #%sSkeleton.\n'
%(i.camel_name, i.camel_name), False))
self.write_gtkdoc_deprecated_and_since_and_close(i, self.c, 0)
self.c.write('\n')
self.c.write('struct _%sSkeletonPrivate\n'
'{\n'
' GValueArray *properties;\n'
' GList *changed_properties;\n'
' GSource *changed_properties_idle_source;\n'
' GMainContext *context;\n'
' GMutex *lock;\n'
'};\n'
'\n'%i.camel_name)
self.c.write('static void\n'
'_%s_skeleton_handle_method_call (\n'
' GDBusConnection *connection,\n'
' const gchar *sender,\n'
' const gchar *object_path,\n'
' const gchar *interface_name,\n'
' const gchar *method_name,\n'
' GVariant *parameters,\n'
' GDBusMethodInvocation *invocation,\n'
' gpointer user_data)\n'
'{\n'
' %sSkeleton *skeleton = %s%s_SKELETON (user_data);\n'
' _ExtendedGDBusMethodInfo *info;\n'
' GVariantIter iter;\n'
' GVariant *child;\n'
' GValue *paramv;\n'
' guint num_params;\n'
' guint num_extra;\n'
' guint n;\n'
' guint signal_id;\n'
' GValue return_value = {0};\n'
%(i.name_lower, i.camel_name, i.ns_upper, i.name_upper))
self.c.write(' info = (_ExtendedGDBusMethodInfo *) g_dbus_method_invocation_get_method_info (invocation);\n'
' g_assert (info != NULL);\n'
%())
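        # num_extra accounts for the leading parameters that precede the D-Bus
        # in-arguments: the skeleton object and the invocation, plus the
        # GUnixFDList when the method passes file descriptors.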
        self.c.write ('  num_params = g_variant_n_children (parameters);\n'
                      '  num_extra = info->pass_fdlist ? 3 : 2;\n'
                      '  paramv = g_new0 (GValue, num_params + num_extra);\n'
                      '  n = 0;\n'
                      '  g_value_init (&paramv[n], %sTYPE_%s);\n'
                      '  g_value_set_object (&paramv[n++], skeleton);\n'
                      '  g_value_init (&paramv[n], G_TYPE_DBUS_METHOD_INVOCATION);\n'
                      '  g_value_set_object (&paramv[n++], invocation);\n'
                      '  if (info->pass_fdlist)\n'
                      '    {\n'
                      '#ifdef G_OS_UNIX\n'
                      '      g_value_init (&paramv[n], G_TYPE_UNIX_FD_LIST);\n'
                      '      g_value_set_object (&paramv[n++], g_dbus_message_get_unix_fd_list (g_dbus_method_invocation_get_message (invocation)));\n'
                      '#else\n'
                      '      g_assert_not_reached ();\n'
                      '#endif\n'
                      '    }\n'
                      %(i.ns_upper, i.name_upper))
        self.c.write('  g_variant_iter_init (&iter, parameters);\n'
                     '  while ((child = g_variant_iter_next_value (&iter)) != NULL)\n'
                     '    {\n'
                     '      _ExtendedGDBusArgInfo *arg_info = (_ExtendedGDBusArgInfo *) info->parent_struct.in_args[n - num_extra];\n'
                     '      if (arg_info->use_gvariant)\n'
                     '        {\n'
                     '          g_value_init (&paramv[n], G_TYPE_VARIANT);\n'
                     '          g_value_set_variant (&paramv[n], child);\n'
                     '          n++;\n'
                     '        }\n'
                     '      else\n'
                     '        g_dbus_gvariant_to_gvalue (child, &paramv[n++]);\n'
                     '      g_variant_unref (child);\n'
                     '    }\n'
                     )
self.c.write(' signal_id = g_signal_lookup (info->signal_name, %sTYPE_%s);\n'
%(i.ns_upper, i.name_upper))
self.c.write(' g_value_init (&return_value, G_TYPE_BOOLEAN);\n'
' g_signal_emitv (paramv, signal_id, 0, &return_value);\n'
' if (!g_value_get_boolean (&return_value))\n'
' g_dbus_method_invocation_return_error (invocation, G_DBUS_ERROR, G_DBUS_ERROR_UNKNOWN_METHOD, "Method %s is not implemented on interface %s", method_name, interface_name);\n'
' g_value_unset (&return_value);\n'
)
        self.c.write('  for (n = 0; n < num_params + num_extra; n++)\n'
                     '    g_value_unset (&paramv[n]);\n'
                     '  g_free (paramv);\n')
self.c.write('}\n'
'\n')
self.c.write('static GVariant *\n'
'_%s_skeleton_handle_get_property (\n'
' GDBusConnection *connection,\n'
' const gchar *sender,\n'
' const gchar *object_path,\n'
' const gchar *interface_name,\n'
' const gchar *property_name,\n'
' GError **error,\n'
' gpointer user_data)\n'
'{\n'
' %sSkeleton *skeleton = %s%s_SKELETON (user_data);\n'
' GValue value = {0};\n'
' GParamSpec *pspec;\n'
' _ExtendedGDBusPropertyInfo *info;\n'
' GVariant *ret;\n'
%(i.name_lower, i.camel_name, i.ns_upper, i.name_upper))
self.c.write(' ret = NULL;\n'
' info = (_ExtendedGDBusPropertyInfo *) g_dbus_interface_info_lookup_property ((GDBusInterfaceInfo *) &_%s_interface_info, property_name);\n'
' g_assert (info != NULL);\n'
' pspec = g_object_class_find_property (G_OBJECT_GET_CLASS (skeleton), info->hyphen_name);\n'
' if (pspec == NULL)\n'
' {\n'
' g_set_error (error, G_DBUS_ERROR, G_DBUS_ERROR_INVALID_ARGS, "No property with name %%s", property_name);\n'
' }\n'
' else\n'
' {\n'
' g_value_init (&value, pspec->value_type);\n'
' g_object_get_property (G_OBJECT (skeleton), info->hyphen_name, &value);\n'
' ret = g_dbus_gvalue_to_gvariant (&value, G_VARIANT_TYPE (info->parent_struct.signature));\n'
' g_value_unset (&value);\n'
' }\n'
' return ret;\n'
'}\n'
'\n'
%(i.name_lower))
self.c.write('static gboolean\n'
'_%s_skeleton_handle_set_property (\n'
' GDBusConnection *connection,\n'
' const gchar *sender,\n'
' const gchar *object_path,\n'
' const gchar *interface_name,\n'
' const gchar *property_name,\n'
' GVariant *variant,\n'
' GError **error,\n'
' gpointer user_data)\n'
'{\n'
' %sSkeleton *skeleton = %s%s_SKELETON (user_data);\n'
' GValue value = {0};\n'
' GParamSpec *pspec;\n'
' _ExtendedGDBusPropertyInfo *info;\n'
' gboolean ret;\n'
%(i.name_lower, i.camel_name, i.ns_upper, i.name_upper))
self.c.write(' ret = FALSE;\n'
' info = (_ExtendedGDBusPropertyInfo *) g_dbus_interface_info_lookup_property ((GDBusInterfaceInfo *) &_%s_interface_info, property_name);\n'
' g_assert (info != NULL);\n'
' pspec = g_object_class_find_property (G_OBJECT_GET_CLASS (skeleton), info->hyphen_name);\n'
' if (pspec == NULL)\n'
' {\n'
' g_set_error (error, G_DBUS_ERROR, G_DBUS_ERROR_INVALID_ARGS, "No property with name %%s", property_name);\n'
' }\n'
' else\n'
' {\n'
' if (info->use_gvariant)\n'
' g_value_set_variant (&value, variant);\n'
' else\n'
' g_dbus_gvariant_to_gvalue (variant, &value);\n'
' g_object_set_property (G_OBJECT (skeleton), info->hyphen_name, &value);\n'
' g_value_unset (&value);\n'
' ret = TRUE;\n'
' }\n'
' return ret;\n'
'}\n'
'\n'
%(i.name_lower))
self.c.write('static const GDBusInterfaceVTable _%s_skeleton_vtable =\n'
'{\n'
' _%s_skeleton_handle_method_call,\n'
' _%s_skeleton_handle_get_property,\n'
' _%s_skeleton_handle_set_property\n'
'};\n'
'\n'%(i.name_lower, i.name_lower, i.name_lower, i.name_lower))
self.c.write('static GDBusInterfaceInfo *\n'
'%s_skeleton_dbus_interface_get_info (GDBusInterfaceSkeleton *skeleton)\n'
'{\n'
' return %s_interface_info ();\n'
%(i.name_lower, i.name_lower))
self.c.write('}\n'
'\n')
self.c.write('static GDBusInterfaceVTable *\n'
'%s_skeleton_dbus_interface_get_vtable (GDBusInterfaceSkeleton *skeleton)\n'
'{\n'
' return (GDBusInterfaceVTable *) &_%s_skeleton_vtable;\n'
%(i.name_lower, i.name_lower))
self.c.write('}\n'
'\n')
self.c.write('static GVariant *\n'
'%s_skeleton_dbus_interface_get_properties (GDBusInterfaceSkeleton *_skeleton)\n'
'{\n'
' %sSkeleton *skeleton = %s%s_SKELETON (_skeleton);\n'
%(i.name_lower, i.camel_name, i.ns_upper, i.name_upper))
self.c.write('\n'
' GVariantBuilder builder;\n'
' guint n;\n'
' g_variant_builder_init (&builder, G_VARIANT_TYPE ("a{sv}"));\n'
' if (_%s_interface_info.parent_struct.properties == NULL)\n'
' goto out;\n'
' for (n = 0; _%s_interface_info.parent_struct.properties[n] != NULL; n++)\n'
' {\n'
' GDBusPropertyInfo *info = _%s_interface_info.parent_struct.properties[n];\n'
' if (info->flags & G_DBUS_PROPERTY_INFO_FLAGS_READABLE)\n'
' {\n'
' GVariant *value;\n'
' value = _%s_skeleton_handle_get_property (g_dbus_interface_skeleton_get_connection (G_DBUS_INTERFACE_SKELETON (skeleton)), NULL, g_dbus_interface_skeleton_get_object_path (G_DBUS_INTERFACE_SKELETON (skeleton)), "%s", info->name, NULL, skeleton);\n'
' if (value != NULL)\n'
' {\n'
' g_variant_take_ref (value);\n'
' g_variant_builder_add (&builder, "{sv}", info->name, value);\n'
' g_variant_unref (value);\n'
' }\n'
' }\n'
' }\n'
'out:\n'
' return g_variant_builder_end (&builder);\n'
'}\n'
'\n'
%(i.name_lower, i.name_lower, i.name_lower, i.name_lower, i.name))
if len(i.properties) > 0:
self.c.write('static gboolean _%s_emit_changed (gpointer user_data);\n'
'\n'
%(i.name_lower))
self.c.write('static void\n'
'%s_skeleton_dbus_interface_flush (GDBusInterfaceSkeleton *_skeleton)\n'
'{\n'
%(i.name_lower))
if len(i.properties) > 0:
self.c.write(' %sSkeleton *skeleton = %s%s_SKELETON (_skeleton);\n'
' gboolean emit_changed = FALSE;\n'
'\n'
' g_mutex_lock (skeleton->priv->lock);\n'
' if (skeleton->priv->changed_properties_idle_source != NULL)\n'
' {\n'
' g_source_destroy (skeleton->priv->changed_properties_idle_source);\n'
' skeleton->priv->changed_properties_idle_source = NULL;\n'
' emit_changed = TRUE;\n'
' }\n'
' g_mutex_unlock (skeleton->priv->lock);\n'
'\n'
' if (emit_changed)\n'
' _%s_emit_changed (skeleton);\n'
%(i.camel_name, i.ns_upper, i.name_upper, i.name_lower))
self.c.write('}\n'
'\n')
for s in i.signals:
self.c.write('static void\n'
'_%s_on_signal_%s (\n'
' %s *object'%(i.name_lower, s.name_lower, i.camel_name))
for a in s.args:
self.c.write(',\n %sarg_%s'%(a.ctype_in, a.name))
self.c.write(')\n'
'{\n'
' %sSkeleton *skeleton = %s%s_SKELETON (object);\n'
' GDBusConnection *connection = g_dbus_interface_skeleton_get_connection (G_DBUS_INTERFACE_SKELETON (skeleton));\n'
%(i.camel_name, i.ns_upper, i.name_upper))
self.c.write(' if (connection == NULL)\n'
' return;\n'
' g_dbus_connection_emit_signal (connection,\n'
' NULL, g_dbus_interface_skeleton_get_object_path (G_DBUS_INTERFACE_SKELETON (skeleton)), "%s", "%s",\n'
' g_variant_new ("('
%(i.name, s.name))
for a in s.args:
self.c.write('%s'%(a.format_in))
self.c.write(')"')
for a in s.args:
self.c.write(',\n arg_%s'%(a.name))
self.c.write('), NULL);\n')
self.c.write('}\n'
'\n')
self.c.write('static void %s_skeleton_iface_init (%sIface *iface);\n'
%(i.name_lower, i.camel_name))
self.c.write('G_DEFINE_TYPE_WITH_CODE (%sSkeleton, %s_skeleton, G_TYPE_DBUS_INTERFACE_SKELETON,\n'%(i.camel_name, i.name_lower))
self.c.write(' G_IMPLEMENT_INTERFACE (%sTYPE_%s, %s_skeleton_iface_init));\n\n'%(i.ns_upper, i.name_upper, i.name_lower))
# finalize
self.c.write('static void\n'
'%s_skeleton_finalize (GObject *object)\n'
'{\n'%(i.name_lower))
self.c.write(' %sSkeleton *skeleton = %s%s_SKELETON (object);\n'%(i.camel_name, i.ns_upper, i.name_upper))
if len(i.properties) > 0:
self.c.write(' g_value_array_free (skeleton->priv->properties);\n')
self.c.write(' g_list_foreach (skeleton->priv->changed_properties, (GFunc) _changed_property_free, NULL);\n')
self.c.write(' g_list_free (skeleton->priv->changed_properties);\n')
self.c.write(' if (skeleton->priv->changed_properties_idle_source != NULL)\n')
self.c.write(' g_source_destroy (skeleton->priv->changed_properties_idle_source);\n')
self.c.write(' if (skeleton->priv->context != NULL)\n')
self.c.write(' g_main_context_unref (skeleton->priv->context);\n')
self.c.write(' g_mutex_free (skeleton->priv->lock);\n')
self.c.write(' G_OBJECT_CLASS (%s_skeleton_parent_class)->finalize (object);\n'
'}\n'
'\n'%(i.name_lower))
# property accessors (TODO: generate PropertiesChanged signals in setter)
if len(i.properties) > 0:
self.c.write('static void\n'
'%s_skeleton_get_property (GObject *object,\n'
' guint prop_id,\n'
' GValue *value,\n'
' GParamSpec *pspec)\n'
'{\n'%(i.name_lower))
self.c.write(' %sSkeleton *skeleton = %s%s_SKELETON (object);\n'
' g_assert (prop_id != 0 && prop_id - 1 < %d);\n'
' g_mutex_lock (skeleton->priv->lock);\n'
' g_value_copy (&skeleton->priv->properties->values[prop_id - 1], value);\n'
' g_mutex_unlock (skeleton->priv->lock);\n'
%(i.camel_name, i.ns_upper, i.name_upper, len(i.properties)))
self.c.write('}\n'
'\n')
            # If the property is already scheduled, re-use the entry. Note, though,
            # that the user may have done
            #
            #  foo_set_prop_bar (object, "");
            #  foo_set_prop_bar (object, "blah");
            #
            # on every update. In this case, where nothing effectively changes, we
            # obviously don't want a PropertiesChanged() event. We can easily check
            # for this by comparing against the _original value_ recorded before the
            # first change event: if the latest value is not different from the
            # original one, we simply ignore the ChangedProperty.
            #
self.c.write('static gboolean\n'
'_%s_emit_changed (gpointer user_data)\n'
'{\n'
' %sSkeleton *skeleton = %s%s_SKELETON (user_data);\n'
%(i.name_lower, i.camel_name, i.ns_upper, i.name_upper))
self.c.write(' GList *l;\n'
' GVariantBuilder builder;\n'
' GVariantBuilder invalidated_builder;\n'
' guint num_changes;\n'
'\n'
' g_mutex_lock (skeleton->priv->lock);\n'
' g_variant_builder_init (&builder, G_VARIANT_TYPE ("a{sv}"));\n'
' g_variant_builder_init (&invalidated_builder, G_VARIANT_TYPE ("as"));\n'
' for (l = skeleton->priv->changed_properties, num_changes = 0; l != NULL; l = l->next)\n'
' {\n'
' ChangedProperty *cp = l->data;\n'
' GVariant *variant;\n'
' const GValue *cur_value;\n'
'\n'
' cur_value = &skeleton->priv->properties->values[cp->prop_id - 1];\n'
' if (!_g_value_equal (cur_value, &cp->orig_value))\n'
' {\n'
' variant = g_dbus_gvalue_to_gvariant (cur_value, G_VARIANT_TYPE (cp->info->parent_struct.signature));\n'
' g_variant_builder_add (&builder, "{sv}", cp->info->parent_struct.name, variant);\n'
' g_variant_unref (variant);\n'
' num_changes++;\n'
' }\n'
' }\n'
' if (num_changes > 0)\n'
' {\n'
' g_dbus_connection_emit_signal (g_dbus_interface_skeleton_get_connection (G_DBUS_INTERFACE_SKELETON (skeleton)),\n'
' NULL, g_dbus_interface_skeleton_get_object_path (G_DBUS_INTERFACE_SKELETON (skeleton)),\n'
' "org.freedesktop.DBus.Properties",\n'
' "PropertiesChanged",\n'
' g_variant_new ("(sa{sv}as)",\n'
' "%s",\n'
' &builder, &invalidated_builder),\n'
' NULL);\n'
' }\n'
' else\n'
' {\n'
' g_variant_builder_clear (&builder);\n'
' g_variant_builder_clear (&invalidated_builder);\n'
' }\n'
%(i.name))
self.c.write(' g_list_foreach (skeleton->priv->changed_properties, (GFunc) _changed_property_free, NULL);\n')
self.c.write(' g_list_free (skeleton->priv->changed_properties);\n')
self.c.write(' skeleton->priv->changed_properties = NULL;\n')
self.c.write(' skeleton->priv->changed_properties_idle_source = NULL;\n')
self.c.write(' g_mutex_unlock (skeleton->priv->lock);\n')
self.c.write(' return FALSE;\n'
'}\n'
'\n')
# holding lock while being called
self.c.write('static void\n'
'_%s_schedule_emit_changed (%sSkeleton *skeleton, const _ExtendedGDBusPropertyInfo *info, guint prop_id, const GValue *orig_value)\n'
'{\n'
' ChangedProperty *cp;\n'
' GList *l;\n'
' cp = NULL;\n'
' for (l = skeleton->priv->changed_properties; l != NULL; l = l->next)\n'
' {\n'
' ChangedProperty *i_cp = l->data;\n'
' if (i_cp->info == info)\n'
' {\n'
' cp = i_cp;\n'
' break;\n'
' }\n'
' }\n'
%(i.name_lower, i.camel_name))
self.c.write(' if (cp == NULL)\n'
' {\n'
' cp = g_new0 (ChangedProperty, 1);\n'
' cp->prop_id = prop_id;\n'
' cp->info = info;\n'
' skeleton->priv->changed_properties = g_list_prepend (skeleton->priv->changed_properties, cp);\n'
' g_value_init (&cp->orig_value, G_VALUE_TYPE (orig_value));\n'
' g_value_copy (orig_value, &cp->orig_value);\n'
' }\n'
'}\n'
'\n'
%())
        # Postpone setting up the refresh source until the ::notify signal is
        # emitted; this allows use of g_object_freeze_notify()/g_object_thaw_notify().
        # This is useful when updating several properties from a thread other
        # than the one the idle source will be dispatched from.
self.c.write('static void\n'
'%s_skeleton_notify (GObject *object,\n'
' GParamSpec *pspec)\n'
'{\n'
' %sSkeleton *skeleton = %s%s_SKELETON (object);\n'
' g_mutex_lock (skeleton->priv->lock);\n'
' if (skeleton->priv->changed_properties != NULL &&\n'
' skeleton->priv->changed_properties_idle_source == NULL)\n'
' {\n'
' skeleton->priv->changed_properties_idle_source = g_idle_source_new ();\n'
' g_source_set_priority (skeleton->priv->changed_properties_idle_source, G_PRIORITY_DEFAULT);\n'
' g_source_set_callback (skeleton->priv->changed_properties_idle_source, _%s_emit_changed, g_object_ref (skeleton), (GDestroyNotify) g_object_unref);\n'
' g_source_attach (skeleton->priv->changed_properties_idle_source, skeleton->priv->context);\n'
' g_source_unref (skeleton->priv->changed_properties_idle_source);\n'
' }\n'
' g_mutex_unlock (skeleton->priv->lock);\n'
'}\n'
'\n'
%(i.name_lower, i.camel_name, i.ns_upper, i.name_upper, i.name_lower))
self.c.write('static void\n'
'%s_skeleton_set_property (GObject *object,\n'
' guint prop_id,\n'
' const GValue *value,\n'
' GParamSpec *pspec)\n'
'{\n'%(i.name_lower))
self.c.write(' %sSkeleton *skeleton = %s%s_SKELETON (object);\n'
' g_assert (prop_id != 0 && prop_id - 1 < %d);\n'
' g_mutex_lock (skeleton->priv->lock);\n'
' g_object_freeze_notify (object);\n'
' if (!_g_value_equal (value, &skeleton->priv->properties->values[prop_id - 1]))\n'
' {\n'
' if (g_dbus_interface_skeleton_get_connection (G_DBUS_INTERFACE_SKELETON (skeleton)) != NULL)\n'
' _%s_schedule_emit_changed (skeleton, _%s_property_info_pointers[prop_id - 1], prop_id, &skeleton->priv->properties->values[prop_id - 1]);\n'
' g_value_copy (value, &skeleton->priv->properties->values[prop_id - 1]);\n'
' g_object_notify_by_pspec (object, pspec);\n'
' }\n'
' g_mutex_unlock (skeleton->priv->lock);\n'
' g_object_thaw_notify (object);\n'
%(i.camel_name, i.ns_upper, i.name_upper, len(i.properties), i.name_lower, i.name_lower))
self.c.write('}\n'
'\n')
self.c.write('static void\n'
'%s_skeleton_init (%sSkeleton *skeleton)\n'
'{\n'
' skeleton->priv = G_TYPE_INSTANCE_GET_PRIVATE (skeleton, %sTYPE_%s_SKELETON, %sSkeletonPrivate);\n'
%(i.name_lower, i.camel_name, i.ns_upper, i.name_upper, i.camel_name))
self.c.write(' skeleton->priv->lock = g_mutex_new ();\n')
self.c.write(' skeleton->priv->context = g_main_context_get_thread_default ();\n')
self.c.write(' if (skeleton->priv->context != NULL)\n')
self.c.write(' g_main_context_ref (skeleton->priv->context);\n')
if len(i.properties) > 0:
self.c.write(' skeleton->priv->properties = g_value_array_new (%d);\n'%(len(i.properties)))
n = 0
for p in i.properties:
self.c.write(' g_value_array_append (skeleton->priv->properties, NULL);\n')
self.c.write(' g_value_init (&skeleton->priv->properties->values[%d], %s);\n'%(n, p.arg.gtype))
n += 1
self.c.write('}\n'
'\n')
# property vfuncs
n = 0
for p in i.properties:
self.c.write('static %s\n'
'%s_skeleton_get_%s (%s *object)\n'
'{\n'
%(p.arg.ctype_in, i.name_lower, p.name_lower, i.camel_name))
self.c.write(' %sSkeleton *skeleton = %s%s_SKELETON (object);\n'%(i.camel_name, i.ns_upper, i.name_upper))
self.c.write(' %svalue;\n'
' g_mutex_lock (skeleton->priv->lock);\n'
' value = %s (&(skeleton->priv->properties->values[%d]));\n'
' g_mutex_unlock (skeleton->priv->lock);\n'
%(p.arg.ctype_in_g, p.arg.gvalue_get, n))
self.c.write(' return value;\n')
self.c.write('}\n')
self.c.write('\n')
n += 1
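        # For a hypothetical property "bar" of D-Bus type 'i', the loop above
        # emits roughly this C getter (a sketch, not verbatim generator output):
        #
        #   static gint
        #   foo_skeleton_get_bar (Foo *object)
        #   {
        #     FooSkeleton *skeleton = FOO_SKELETON (object);
        #     gint value;
        #     g_mutex_lock (skeleton->priv->lock);
        #     value = g_value_get_int (&(skeleton->priv->properties->values[0]));
        #     g_mutex_unlock (skeleton->priv->lock);
        #     return value;
        #   }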
self.c.write('static void\n'
'%s_skeleton_class_init (%sSkeletonClass *klass)\n'
'{\n'
' GObjectClass *gobject_class;\n'
' GDBusInterfaceSkeletonClass *skeleton_class;\n'
'\n'
' g_type_class_add_private (klass, sizeof (%sSkeletonPrivate));\n'
'\n'
' gobject_class = G_OBJECT_CLASS (klass);\n'
' gobject_class->finalize = %s_skeleton_finalize;\n'
%(i.name_lower, i.camel_name, i.camel_name, i.name_lower))
if len(i.properties) > 0:
self.c.write(' gobject_class->get_property = %s_skeleton_get_property;\n'
' gobject_class->set_property = %s_skeleton_set_property;\n'
' gobject_class->notify = %s_skeleton_notify;\n'
'\n'%(i.name_lower, i.name_lower, i.name_lower))
self.c.write('\n'
' %s_override_properties (gobject_class, 1);\n'%(i.name_lower))
self.c.write('\n'
                     '  skeleton_class = G_DBUS_INTERFACE_SKELETON_CLASS (klass);\n')
self.c.write(' skeleton_class->get_info = %s_skeleton_dbus_interface_get_info;\n'%(i.name_lower))
self.c.write(' skeleton_class->get_properties = %s_skeleton_dbus_interface_get_properties;\n'%(i.name_lower))
self.c.write(' skeleton_class->flush = %s_skeleton_dbus_interface_flush;\n'%(i.name_lower))
self.c.write(' skeleton_class->get_vtable = %s_skeleton_dbus_interface_get_vtable;\n'%(i.name_lower))
self.c.write('}\n'
'\n')
self.c.write('static void\n'
'%s_skeleton_iface_init (%sIface *iface)\n'
'{\n'
%(i.name_lower, i.camel_name))
for s in i.signals:
self.c.write(' iface->%s = _%s_on_signal_%s;\n'
%(s.name_lower, i.name_lower, s.name_lower))
for p in i.properties:
self.c.write(' iface->get_%s = %s_skeleton_get_%s;\n'%(p.name_lower, i.name_lower, p.name_lower))
self.c.write('}\n'
'\n')
# constructors
self.c.write(self.docbook_gen.expand(
'/**\n'
' * %s_skeleton_new:\n'
' *\n'
' * Creates a skeleton object for the D-Bus interface #%s.\n'
' *\n'
' * Returns: (transfer full) (type %sSkeleton): The skeleton object.\n'
%(i.name_lower, i.name, i.camel_name), False))
self.write_gtkdoc_deprecated_and_since_and_close(i, self.c, 0)
self.c.write('%s *\n'
'%s_skeleton_new (void)\n'
'{\n'
' return %s%s (g_object_new (%sTYPE_%s_SKELETON, NULL));\n'
'}\n'
'\n'%(i.camel_name, i.name_lower, i.ns_upper, i.name_upper, i.ns_upper, i.name_upper))
# ---------------------------------------------------------------------------------------------------
def generate_object(self):
self.c.write('/* ------------------------------------------------------------------------\n'
' * Code for Object, ObjectProxy and ObjectSkeleton\n'
' * ------------------------------------------------------------------------\n'
' */\n'
'\n')
self.c.write(self.docbook_gen.expand(
'/**\n'
' * SECTION:%sObject\n'
' * @title: %sObject\n'
' * @short_description: Specialized GDBusObject types\n'
' *\n'
' * This section contains the #%sObject, #%sObjectProxy, and #%sObjectSkeleton types which make it easier to work with objects implementing generated types for D-Bus interfaces.\n'
' */\n'
%(self.namespace, self.namespace, self.namespace, self.namespace, self.namespace), False))
self.c.write('\n')
self.c.write(self.docbook_gen.expand(
'/**\n'
' * %sObject:\n'
' *\n'
' * The #%sObject type is a specialized container of interfaces.\n'
' */\n'
%(self.namespace, self.namespace), False))
self.c.write('\n')
self.c.write(self.docbook_gen.expand(
'/**\n'
' * %sObjectIface:\n'
' * @parent_iface: The parent interface.\n'
' *\n'
' * Virtual table for the #%sObject interface.\n'
' */\n'
%(self.namespace, self.namespace), False))
self.c.write('\n')
self.c.write('static void\n'
'%sobject_default_init (%sObjectIface *iface)\n'
'{\n'
                     %(self.ns_lower, self.namespace))
for i in self.ifaces:
self.c.write(self.docbook_gen.expand(
' /**\n'
' * %sObject:%s:\n'
' *\n'
' * The #%s instance corresponding to the D-Bus interface #%s, if any.\n'
' *\n'
' * Connect to the #GObject::notify signal to get informed of property changes.\n'
%(self.namespace, i.name_hyphen, i.camel_name, i.name), False))
self.write_gtkdoc_deprecated_and_since_and_close(i, self.c, 2)
self.c.write(' g_object_interface_install_property (iface, g_param_spec_object ("%s", "%s", "%s", %sTYPE_%s, G_PARAM_READWRITE|G_PARAM_STATIC_STRINGS));\n'
'\n'
%(i.name_hyphen, i.name_hyphen, i.name_hyphen, self.ns_upper, i.name_upper))
self.c.write('}\n'
'\n')
self.c.write('typedef %sObjectIface %sObjectInterface;\n'%(self.namespace, self.namespace))
self.c.write('G_DEFINE_INTERFACE_WITH_CODE (%sObject, %sobject, G_TYPE_OBJECT, g_type_interface_add_prerequisite (g_define_type_id, G_TYPE_DBUS_OBJECT));\n'%(self.namespace, self.ns_lower))
self.c.write('\n')
for i in self.ifaces:
self.c.write(self.docbook_gen.expand(
'/**\n'
' * %sobject_get_%s:\n'
' * @object: A #%sObject.\n'
' *\n'
' * Gets the #%s instance for the D-Bus interface #%s on @object, if any.\n'
' *\n'
' * Returns: (transfer full): A #%s that must be freed with g_object_unref() or %%NULL if @object does not implement the interface.\n'
%(self.ns_lower, i.name_upper.lower(), self.namespace, i.camel_name, i.name, i.camel_name), False))
self.write_gtkdoc_deprecated_and_since_and_close(i, self.c, 0)
self.c.write ('%s *%sobject_get_%s (%sObject *object)\n'
%(i.camel_name, self.ns_lower, i.name_upper.lower(), self.namespace))
self.c.write('{\n'
' GDBusInterface *ret;\n'
' ret = g_dbus_object_get_interface (G_DBUS_OBJECT (object), "%s");\n'
' if (ret == NULL)\n'
' return NULL;\n'
' return %s%s (ret);\n'
'}\n'
'\n'
%(i.name, self.ns_upper, i.name_upper))
self.c.write('\n')
for i in self.ifaces:
self.c.write(self.docbook_gen.expand(
'/**\n'
' * %sobject_peek_%s: (skip)\n'
' * @object: A #%sObject.\n'
' *\n'
' * Like %sobject_get_%s() but doesn\'t increase the reference count on the returned object.\n'
' *\n'
' * <warning>It is not safe to use the returned object if you are on another thread than the one where the #GDBusObjectManagerClient or #GDBusObjectManagerServer for @object is running.</warning>\n'
' *\n'
' * Returns: (transfer none): A #%s or %%NULL if @object does not implement the interface. Do not free the returned object, it is owned by @object.\n'
%(self.ns_lower, i.name_upper.lower(), self.namespace, self.ns_lower, i.name_upper.lower(), i.camel_name), False))
self.write_gtkdoc_deprecated_and_since_and_close(i, self.c, 0)
self.c.write ('%s *%sobject_peek_%s (%sObject *object)\n'
%(i.camel_name, self.ns_lower, i.name_upper.lower(), self.namespace))
self.c.write('{\n'
' GDBusInterface *ret;\n'
' ret = g_dbus_object_get_interface (G_DBUS_OBJECT (object), "%s");\n'
' if (ret == NULL)\n'
' return NULL;\n'
' g_object_unref (ret);\n'
' return %s%s (ret);\n'
'}\n'
'\n'
%(i.name, self.ns_upper, i.name_upper))
self.c.write('\n')
# shared by ObjectProxy and ObjectSkeleton classes
self.c.write('static void\n'
'%sobject_notify (GDBusObject *object, GDBusInterface *interface)\n'
'{\n'
' g_object_notify (G_OBJECT (object), ((_ExtendedGDBusInterfaceInfo *) g_dbus_interface_get_info (interface))->hyphen_name);\n'
'}\n'
'\n'
%(self.ns_lower))
self.c.write(self.docbook_gen.expand(
'/**\n'
' * %sObjectProxy:\n'
' *\n'
' * The #%sObjectProxy structure contains only private data and should only be accessed using the provided API.\n'
%(self.namespace, self.namespace), False))
self.c.write(' */\n')
self.c.write('\n')
self.c.write(self.docbook_gen.expand(
'/**\n'
' * %sObjectProxyClass:\n'
' * @parent_class: The parent class.\n'
' *\n'
' * Class structure for #%sObjectProxy.\n'
%(self.namespace, self.namespace), False))
self.c.write(' */\n')
self.c.write('\n')
# class boilerplate
self.c.write('static void\n'
'%sobject_proxy__%sobject_iface_init (%sObjectIface *iface)\n'
'{\n'
'}\n'
'\n'
%(self.ns_lower, self.ns_lower, self.namespace))
self.c.write('static void\n'
'%sobject_proxy__g_dbus_object_iface_init (GDBusObjectIface *iface)\n'
'{\n'
' iface->interface_added = %sobject_notify;\n'
' iface->interface_removed = %sobject_notify;\n'
'}\n'
'\n'
%(self.ns_lower, self.ns_lower, self.ns_lower))
self.c.write('\n')
self.c.write('G_DEFINE_TYPE_WITH_CODE (%sObjectProxy, %sobject_proxy, G_TYPE_DBUS_OBJECT_PROXY,\n'
' G_IMPLEMENT_INTERFACE (%sTYPE_OBJECT, %sobject_proxy__%sobject_iface_init)\n'
' G_IMPLEMENT_INTERFACE (G_TYPE_DBUS_OBJECT, %sobject_proxy__g_dbus_object_iface_init));\n'
'\n'
%(self.namespace, self.ns_lower, self.ns_upper, self.ns_lower, self.ns_lower, self.ns_lower))
# class boilerplate
self.c.write('static void\n'
'%sobject_proxy_init (%sObjectProxy *object)\n'
'{\n'
'}\n'
'\n'%(self.ns_lower, self.namespace))
self.c.write('static void\n'
'%sobject_proxy_set_property (GObject *gobject,\n'
' guint prop_id,\n'
' const GValue *value,\n'
' GParamSpec *pspec)\n'
'{\n'
' G_OBJECT_WARN_INVALID_PROPERTY_ID (gobject, prop_id, pspec);\n'
%(self.ns_lower))
self.c.write('}\n'
'\n'%())
self.c.write('static void\n'
'%sobject_proxy_get_property (GObject *gobject,\n'
' guint prop_id,\n'
' GValue *value,\n'
' GParamSpec *pspec)\n'
'{\n'
' %sObjectProxy *object = %sOBJECT_PROXY (gobject);\n'
' GDBusInterface *interface;\n'
'\n'
' switch (prop_id)\n'
' {\n'
%(self.ns_lower, self.namespace, self.ns_upper))
n = 1
for i in self.ifaces:
self.c.write(' case %d:\n'
' interface = g_dbus_object_get_interface (G_DBUS_OBJECT (object), "%s");\n'
' g_value_take_object (value, interface);\n'
' break;\n'
'\n'
%(n, i.name))
n += 1
self.c.write(' default:\n'
' G_OBJECT_WARN_INVALID_PROPERTY_ID (gobject, prop_id, pspec);\n'
' break;\n'
' }\n'
'}\n'
'\n'%())
self.c.write('static void\n'
'%sobject_proxy_class_init (%sObjectProxyClass *klass)\n'
'{\n'
' GObjectClass *gobject_class = G_OBJECT_CLASS (klass);\n'
'\n'
' gobject_class->set_property = %sobject_proxy_set_property;\n'
' gobject_class->get_property = %sobject_proxy_get_property;\n'
'\n'
%(self.ns_lower, self.namespace, self.ns_lower, self.ns_lower))
n = 1
for i in self.ifaces:
self.c.write(' g_object_class_override_property (gobject_class, %d, "%s");'
'\n'
%(n, i.name_hyphen))
n += 1
self.c.write('}\n'
'\n')
self.c.write(self.docbook_gen.expand(
'/**\n'
' * %sobject_proxy_new:\n'
' * @connection: A #GDBusConnection.\n'
' * @object_path: An object path.\n'
' *\n'
' * Creates a new proxy object.\n'
' *\n'
' * Returns: (transfer full): The proxy object.\n'
' */\n'
%(self.ns_lower), False))
self.c.write('%sObjectProxy *\n'
'%sobject_proxy_new (GDBusConnection *connection,\n'
' const gchar *object_path)\n'
'{\n'
' g_return_val_if_fail (G_IS_DBUS_CONNECTION (connection), NULL);\n'
' g_return_val_if_fail (g_variant_is_object_path (object_path), NULL);\n'
' return %sOBJECT_PROXY (g_object_new (%sTYPE_OBJECT_PROXY, "g-connection", connection, "g-object-path", object_path, NULL));\n'
'}\n'
'\n'%(self.namespace, self.ns_lower, self.ns_upper, self.ns_upper))
self.c.write(self.docbook_gen.expand(
'/**\n'
' * %sObjectSkeleton:\n'
' *\n'
' * The #%sObjectSkeleton structure contains only private data and should only be accessed using the provided API.\n'
%(self.namespace, self.namespace), False))
self.c.write(' */\n')
self.c.write('\n')
self.c.write(self.docbook_gen.expand(
'/**\n'
' * %sObjectSkeletonClass:\n'
' * @parent_class: The parent class.\n'
' *\n'
' * Class structure for #%sObjectSkeleton.\n'
%(self.namespace, self.namespace), False))
self.c.write(' */\n')
self.c.write('\n')
# class boilerplate
self.c.write('static void\n'
'%sobject_skeleton__%sobject_iface_init (%sObjectIface *iface)\n'
'{\n'
'}\n'
'\n'
%(self.ns_lower, self.ns_lower, self.namespace))
self.c.write('\n')
self.c.write('static void\n'
'%sobject_skeleton__g_dbus_object_iface_init (GDBusObjectIface *iface)\n'
'{\n'
' iface->interface_added = %sobject_notify;\n'
' iface->interface_removed = %sobject_notify;\n'
'}\n'
'\n'
%(self.ns_lower, self.ns_lower, self.ns_lower))
self.c.write('G_DEFINE_TYPE_WITH_CODE (%sObjectSkeleton, %sobject_skeleton, G_TYPE_DBUS_OBJECT_SKELETON,\n'
' G_IMPLEMENT_INTERFACE (%sTYPE_OBJECT, %sobject_skeleton__%sobject_iface_init)\n'
' G_IMPLEMENT_INTERFACE (G_TYPE_DBUS_OBJECT, %sobject_skeleton__g_dbus_object_iface_init));\n'
'\n'
%(self.namespace, self.ns_lower, self.ns_upper, self.ns_lower, self.ns_lower, self.ns_lower))
# class boilerplate
self.c.write('static void\n'
'%sobject_skeleton_init (%sObjectSkeleton *object)\n'
'{\n'
'}\n'
'\n'%(self.ns_lower, self.namespace))
self.c.write('static void\n'
'%sobject_skeleton_set_property (GObject *gobject,\n'
' guint prop_id,\n'
' const GValue *value,\n'
' GParamSpec *pspec)\n'
'{\n'
' %sObjectSkeleton *object = %sOBJECT_SKELETON (gobject);\n'
' GDBusInterfaceSkeleton *interface;\n'
'\n'
' switch (prop_id)\n'
' {\n'
%(self.ns_lower, self.namespace, self.ns_upper))
n = 1
for i in self.ifaces:
self.c.write(' case %d:\n'
' interface = g_value_get_object (value);\n'
' if (interface != NULL)\n'
' {\n'
' g_warn_if_fail (%sIS_%s (interface));\n'
' g_dbus_object_skeleton_add_interface (G_DBUS_OBJECT_SKELETON (object), interface);\n'
' }\n'
' else\n'
' {\n'
' g_dbus_object_skeleton_remove_interface_by_name (G_DBUS_OBJECT_SKELETON (object), "%s");\n'
' }\n'
' break;\n'
'\n'
%(n, self.ns_upper, i.name_upper, i.name))
n += 1
self.c.write(' default:\n'
' G_OBJECT_WARN_INVALID_PROPERTY_ID (gobject, prop_id, pspec);\n'
' break;\n'
' }\n'
'}\n'
'\n'%())
self.c.write('static void\n'
'%sobject_skeleton_get_property (GObject *gobject,\n'
' guint prop_id,\n'
' GValue *value,\n'
' GParamSpec *pspec)\n'
'{\n'
' %sObjectSkeleton *object = %sOBJECT_SKELETON (gobject);\n'
' GDBusInterface *interface;\n'
'\n'
' switch (prop_id)\n'
' {\n'
%(self.ns_lower, self.namespace, self.ns_upper))
n = 1
for i in self.ifaces:
self.c.write(' case %d:\n'
' interface = g_dbus_object_get_interface (G_DBUS_OBJECT (object), "%s");\n'
' g_value_take_object (value, interface);\n'
' break;\n'
'\n'
%(n, i.name))
n += 1
self.c.write(' default:\n'
' G_OBJECT_WARN_INVALID_PROPERTY_ID (gobject, prop_id, pspec);\n'
' break;\n'
' }\n'
'}\n'
'\n'%())
self.c.write('static void\n'
'%sobject_skeleton_class_init (%sObjectSkeletonClass *klass)\n'
'{\n'
' GObjectClass *gobject_class = G_OBJECT_CLASS (klass);\n'
'\n'
' gobject_class->set_property = %sobject_skeleton_set_property;\n'
' gobject_class->get_property = %sobject_skeleton_get_property;\n'
'\n'
%(self.ns_lower, self.namespace, self.ns_lower, self.ns_lower))
n = 1
for i in self.ifaces:
self.c.write(' g_object_class_override_property (gobject_class, %d, "%s");'
'\n'
%(n, i.name_hyphen))
n += 1
self.c.write('}\n'
'\n')
self.c.write(self.docbook_gen.expand(
'/**\n'
' * %sobject_skeleton_new:\n'
' * @object_path: An object path.\n'
' *\n'
' * Creates a new skeleton object.\n'
' *\n'
' * Returns: (transfer full): The skeleton object.\n'
' */\n'
%(self.ns_lower), False))
self.c.write('%sObjectSkeleton *\n'
'%sobject_skeleton_new (const gchar *object_path)\n'
'{\n'
' g_return_val_if_fail (g_variant_is_object_path (object_path), NULL);\n'
' return %sOBJECT_SKELETON (g_object_new (%sTYPE_OBJECT_SKELETON, "g-object-path", object_path, NULL));\n'
'}\n'
'\n'%(self.namespace, self.ns_lower, self.ns_upper, self.ns_upper))
for i in self.ifaces:
self.c.write(self.docbook_gen.expand(
'/**\n'
' * %sobject_skeleton_set_%s:\n'
' * @object: A #%sObjectSkeleton.\n'
' * @interface_: (allow-none): A #%s or %%NULL to clear the interface.\n'
' *\n'
' * Sets the #%s instance for the D-Bus interface #%s on @object.\n'
%(self.ns_lower, i.name_upper.lower(), self.namespace, i.camel_name, i.camel_name, i.name), False))
self.write_gtkdoc_deprecated_and_since_and_close(i, self.c, 0)
self.c.write ('void %sobject_skeleton_set_%s (%sObjectSkeleton *object, %s *interface_)\n'
%(self.ns_lower, i.name_upper.lower(), self.namespace, i.camel_name))
self.c.write('{\n'
' g_object_set (G_OBJECT (object), "%s", interface_, NULL);\n'
'}\n'
'\n'
%(i.name_hyphen))
self.c.write('\n')
def generate_object_manager_client(self):
self.c.write('/* ------------------------------------------------------------------------\n'
' * Code for ObjectManager client\n'
' * ------------------------------------------------------------------------\n'
' */\n'
'\n')
self.c.write(self.docbook_gen.expand(
'/**\n'
' * SECTION:%sObjectManagerClient\n'
' * @title: %sObjectManagerClient\n'
' * @short_description: Generated GDBusObjectManagerClient type\n'
' *\n'
' * This section contains a #GDBusObjectManagerClient that uses %sobject_manager_client_get_proxy_type() as the #GDBusProxyTypeFunc.\n'
' */\n'
%(self.namespace, self.namespace, self.ns_lower), False))
self.c.write('\n')
self.c.write(self.docbook_gen.expand(
'/**\n'
' * %sObjectManagerClient:\n'
' *\n'
' * The #%sObjectManagerClient structure contains only private data and should only be accessed using the provided API.\n'
%(self.namespace, self.namespace), False))
self.c.write(' */\n')
self.c.write('\n')
self.c.write(self.docbook_gen.expand(
'/**\n'
' * %sObjectManagerClientClass:\n'
' * @parent_class: The parent class.\n'
' *\n'
' * Class structure for #%sObjectManagerClient.\n'
%(self.namespace, self.namespace), False))
self.c.write(' */\n')
self.c.write('\n')
# class boilerplate
self.c.write('G_DEFINE_TYPE (%sObjectManagerClient, %sobject_manager_client, G_TYPE_DBUS_OBJECT_MANAGER_CLIENT);\n'
'\n'
%(self.namespace, self.ns_lower))
# class boilerplate
self.c.write('static void\n'
'%sobject_manager_client_init (%sObjectManagerClient *manager)\n'
'{\n'
'}\n'
'\n'%(self.ns_lower, self.namespace))
self.c.write('static void\n'
'%sobject_manager_client_class_init (%sObjectManagerClientClass *klass)\n'
'{\n'
'}\n'
'\n'%(self.ns_lower, self.namespace))
self.c.write(self.docbook_gen.expand(
'/**\n'
' * %sobject_manager_client_get_proxy_type:\n'
' * @manager: A #GDBusObjectManagerClient.\n'
' * @object_path: The object path of the remote object (unused).\n'
' * @interface_name: (allow-none): Interface name of the remote object or %%NULL to get the object proxy #GType.\n'
' * @user_data: User data (unused).\n'
' *\n'
' * A #GDBusProxyTypeFunc that maps @interface_name to the generated #GDBusObjectProxy<!-- -->- and #GDBusProxy<!-- -->-derived types.\n'
' *\n'
' * Returns: A #GDBusProxy<!-- -->-derived #GType if @interface_name is not %%NULL, otherwise the #GType for #%sObjectProxy.\n'
%(self.ns_lower, self.namespace), False))
self.c.write(' */\n')
self.c.write('GType\n'
'%sobject_manager_client_get_proxy_type (GDBusObjectManagerClient *manager, const gchar *object_path, const gchar *interface_name, gpointer user_data)\n'
'{\n'
%(self.ns_lower))
self.c.write(' static gsize once_init_value = 0;\n'
' static GHashTable *lookup_hash;\n'
' GType ret;\n'
'\n'
' if (interface_name == NULL)\n'
' return %sTYPE_OBJECT_PROXY;\n'
' if (g_once_init_enter (&once_init_value))\n'
' {\n'
' lookup_hash = g_hash_table_new (g_str_hash, g_str_equal);\n'
%(self.ns_upper))
for i in self.ifaces:
self.c.write(' g_hash_table_insert (lookup_hash, "%s", GSIZE_TO_POINTER (%sTYPE_%s_PROXY));\n'
%(i.name, i.ns_upper, i.name_upper))
self.c.write(' g_once_init_leave (&once_init_value, 1);\n'
' }\n')
self.c.write(' ret = (GType) GPOINTER_TO_SIZE (g_hash_table_lookup (lookup_hash, interface_name));\n'
' if (ret == (GType) 0)\n'
' ret = G_TYPE_DBUS_PROXY;\n')
self.c.write(' return ret;\n'
'}\n'
'\n')
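        # The generated C function maps each known interface name to its
        # generated proxy GType via the hash table, falls back to
        # G_TYPE_DBUS_PROXY for unknown interfaces, and returns the
        # ObjectProxy GType when interface_name is NULL.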
# constructors
self.c.write(self.docbook_gen.expand(
'/**\n'
' * %sobject_manager_client_new:\n'
' * @connection: A #GDBusConnection.\n'
' * @flags: Flags from the #GDBusObjectManagerClientFlags enumeration.\n'
' * @name: (allow-none): A bus name (well-known or unique) or %%NULL if @connection is not a message bus connection.\n'
' * @object_path: An object path.\n'
' * @cancellable: (allow-none): A #GCancellable or %%NULL.\n'
' * @callback: A #GAsyncReadyCallback to call when the request is satisfied.\n'
' * @user_data: User data to pass to @callback.\n'
' *\n'
' * Asynchronously creates #GDBusObjectManagerClient using %sobject_manager_client_get_proxy_type() as the #GDBusProxyTypeFunc. See g_dbus_object_manager_client_new() for more details.\n'
' *\n'
' * When the operation is finished, @callback will be invoked in the <link linkend="g-main-context-push-thread-default">thread-default main loop</link> of the thread you are calling this method from.\n'
' * You can then call %sobject_manager_client_new_finish() to get the result of the operation.\n'
' *\n'
' * See %sobject_manager_client_new_sync() for the synchronous, blocking version of this constructor.\n'
%(self.ns_lower, self.ns_lower, self.ns_lower, self.ns_lower), False))
self.c.write(' */\n')
self.c.write('void\n'
'%sobject_manager_client_new (\n'
' GDBusConnection *connection,\n'
' GDBusObjectManagerClientFlags flags,\n'
' const gchar *name,\n'
' const gchar *object_path,\n'
' GCancellable *cancellable,\n'
' GAsyncReadyCallback callback,\n'
' gpointer user_data)\n'
'{\n'
' g_async_initable_new_async (%sTYPE_OBJECT_MANAGER_CLIENT, G_PRIORITY_DEFAULT, cancellable, callback, user_data, "flags", flags, "name", name, "connection", connection, "object-path", object_path, "get-proxy-type-func", %sobject_manager_client_get_proxy_type, NULL);\n'
'}\n'
'\n'
%(self.ns_lower, self.ns_upper, self.ns_lower))
self.c.write('/**\n'
' * %sobject_manager_client_new_finish:\n'
' * @res: The #GAsyncResult obtained from the #GAsyncReadyCallback passed to %sobject_manager_client_new().\n'
' * @error: Return location for error or %%NULL\n'
' *\n'
' * Finishes an operation started with %sobject_manager_client_new().\n'
' *\n'
' * Returns: (transfer full) (type %sObjectManagerClient): The constructed object manager client or %%NULL if @error is set.\n'
%(self.ns_lower, self.ns_lower, self.ns_lower, self.namespace))
self.c.write(' */\n')
self.c.write('GDBusObjectManager *\n'
'%sobject_manager_client_new_finish (\n'
' GAsyncResult *res,\n'
' GError **error)\n'
'{\n'
' GObject *ret;\n'
' GObject *source_object;\n'
' source_object = g_async_result_get_source_object (res);\n'
' ret = g_async_initable_new_finish (G_ASYNC_INITABLE (source_object), res, error);\n'
' g_object_unref (source_object);\n'
' if (ret != NULL)\n'
' return G_DBUS_OBJECT_MANAGER (ret);\n'
' else\n'
' return NULL;\n'
'}\n'
'\n'
%(self.ns_lower))
self.c.write(self.docbook_gen.expand(
'/**\n'
' * %sobject_manager_client_new_sync:\n'
' * @connection: A #GDBusConnection.\n'
' * @flags: Flags from the #GDBusObjectManagerClientFlags enumeration.\n'
' * @name: (allow-none): A bus name (well-known or unique) or %%NULL if @connection is not a message bus connection.\n'
' * @object_path: An object path.\n'
' * @cancellable: (allow-none): A #GCancellable or %%NULL.\n'
' * @error: Return location for error or %%NULL\n'
' *\n'
' * Synchronously creates #GDBusObjectManagerClient using %sobject_manager_client_get_proxy_type() as the #GDBusProxyTypeFunc. See g_dbus_object_manager_client_new_sync() for more details.\n'
' *\n'
' * The calling thread is blocked until a reply is received.\n'
' *\n'
' * See %sobject_manager_client_new() for the asynchronous version of this constructor.\n'
' *\n'
' * Returns: (transfer full) (type %sObjectManagerClient): The constructed object manager client or %%NULL if @error is set.\n'
%(self.ns_lower, self.ns_lower, self.ns_lower, self.namespace), False))
self.c.write(' */\n')
self.c.write('GDBusObjectManager *\n'
'%sobject_manager_client_new_sync (\n'
' GDBusConnection *connection,\n'
' GDBusObjectManagerClientFlags flags,\n'
' const gchar *name,\n'
' const gchar *object_path,\n'
' GCancellable *cancellable,\n'
' GError **error)\n'
'{\n'
' GInitable *ret;\n'
' ret = g_initable_new (%sTYPE_OBJECT_MANAGER_CLIENT, cancellable, error, "flags", flags, "name", name, "connection", connection, "object-path", object_path, "get-proxy-type-func", %sobject_manager_client_get_proxy_type, NULL);\n'
' if (ret != NULL)\n'
' return G_DBUS_OBJECT_MANAGER (ret);\n'
' else\n'
' return NULL;\n'
'}\n'
'\n'
%(self.ns_lower, self.ns_upper, self.ns_lower))
self.c.write('\n')
self.c.write(self.docbook_gen.expand(
'/**\n'
' * %sobject_manager_client_new_for_bus:\n'
' * @bus_type: A #GBusType.\n'
' * @flags: Flags from the #GDBusObjectManagerClientFlags enumeration.\n'
' * @name: A bus name (well-known or unique).\n'
' * @object_path: An object path.\n'
' * @cancellable: (allow-none): A #GCancellable or %%NULL.\n'
' * @callback: A #GAsyncReadyCallback to call when the request is satisfied.\n'
' * @user_data: User data to pass to @callback.\n'
' *\n'
' * Like %sobject_manager_client_new() but takes a #GBusType instead of a #GDBusConnection.\n'
' *\n'
' * When the operation is finished, @callback will be invoked in the <link linkend="g-main-context-push-thread-default">thread-default main loop</link> of the thread you are calling this method from.\n'
' * You can then call %sobject_manager_client_new_for_bus_finish() to get the result of the operation.\n'
' *\n'
' * See %sobject_manager_client_new_for_bus_sync() for the synchronous, blocking version of this constructor.\n'
%(self.ns_lower, self.ns_lower, self.ns_lower, self.ns_lower), False))
self.c.write(' */\n')
self.c.write('void\n'
'%sobject_manager_client_new_for_bus (\n'
' GBusType bus_type,\n'
' GDBusObjectManagerClientFlags flags,\n'
' const gchar *name,\n'
' const gchar *object_path,\n'
' GCancellable *cancellable,\n'
' GAsyncReadyCallback callback,\n'
' gpointer user_data)\n'
'{\n'
' g_async_initable_new_async (%sTYPE_OBJECT_MANAGER_CLIENT, G_PRIORITY_DEFAULT, cancellable, callback, user_data, "flags", flags, "name", name, "bus-type", bus_type, "object-path", object_path, "get-proxy-type-func", %sobject_manager_client_get_proxy_type, NULL);\n'
'}\n'
'\n'
%(self.ns_lower, self.ns_upper, self.ns_lower))
self.c.write('/**\n'
' * %sobject_manager_client_new_for_bus_finish:\n'
' * @res: The #GAsyncResult obtained from the #GAsyncReadyCallback passed to %sobject_manager_client_new_for_bus().\n'
' * @error: Return location for error or %%NULL\n'
' *\n'
' * Finishes an operation started with %sobject_manager_client_new_for_bus().\n'
' *\n'
' * Returns: (transfer full) (type %sObjectManagerClient): The constructed object manager client or %%NULL if @error is set.\n'
%(self.ns_lower, self.ns_lower, self.ns_lower, self.namespace))
self.c.write(' */\n')
self.c.write('GDBusObjectManager *\n'
'%sobject_manager_client_new_for_bus_finish (\n'
' GAsyncResult *res,\n'
' GError **error)\n'
'{\n'
' GObject *ret;\n'
' GObject *source_object;\n'
' source_object = g_async_result_get_source_object (res);\n'
' ret = g_async_initable_new_finish (G_ASYNC_INITABLE (source_object), res, error);\n'
' g_object_unref (source_object);\n'
' if (ret != NULL)\n'
' return G_DBUS_OBJECT_MANAGER (ret);\n'
' else\n'
' return NULL;\n'
'}\n'
'\n'
%(self.ns_lower))
self.c.write(self.docbook_gen.expand(
'/**\n'
' * %sobject_manager_client_new_for_bus_sync:\n'
' * @bus_type: A #GBusType.\n'
' * @flags: Flags from the #GDBusObjectManagerClientFlags enumeration.\n'
' * @name: A bus name (well-known or unique).\n'
' * @object_path: An object path.\n'
' * @cancellable: (allow-none): A #GCancellable or %%NULL.\n'
' * @error: Return location for error or %%NULL\n'
' *\n'
' * Like %sobject_manager_client_new_sync() but takes a #GBusType instead of a #GDBusConnection.\n'
' *\n'
' * The calling thread is blocked until a reply is received.\n'
' *\n'
' * See %sobject_manager_client_new_for_bus() for the asynchronous version of this constructor.\n'
' *\n'
' * Returns: (transfer full) (type %sObjectManagerClient): The constructed object manager client or %%NULL if @error is set.\n'
%(self.ns_lower, self.ns_lower, self.ns_lower, self.namespace), False))
self.c.write(' */\n')
self.c.write('GDBusObjectManager *\n'
'%sobject_manager_client_new_for_bus_sync (\n'
' GBusType bus_type,\n'
' GDBusObjectManagerClientFlags flags,\n'
' const gchar *name,\n'
' const gchar *object_path,\n'
' GCancellable *cancellable,\n'
' GError **error)\n'
'{\n'
' GInitable *ret;\n'
' ret = g_initable_new (%sTYPE_OBJECT_MANAGER_CLIENT, cancellable, error, "flags", flags, "name", name, "bus-type", bus_type, "object-path", object_path, "get-proxy-type-func", %sobject_manager_client_get_proxy_type, NULL);\n'
' if (ret != NULL)\n'
' return G_DBUS_OBJECT_MANAGER (ret);\n'
' else\n'
' return NULL;\n'
'}\n'
'\n'
%(self.ns_lower, self.ns_upper, self.ns_lower))
self.c.write('\n')
# ---------------------------------------------------------------------------------------------------
def write_gtkdoc_deprecated_and_since_and_close(self, obj, f, indent):
if len(obj.since) > 0:
f.write('%*s *\n'
'%*s * Since: %s\n'
%(indent, '', indent, '', obj.since))
if obj.deprecated:
if isinstance(obj, dbustypes.Interface):
thing = 'The D-Bus interface'
elif isinstance(obj, dbustypes.Method):
thing = 'The D-Bus method'
elif isinstance(obj, dbustypes.Signal):
thing = 'The D-Bus signal'
elif isinstance(obj, dbustypes.Property):
thing = 'The D-Bus property'
else:
                raise RuntimeError('Cannot handle object %s' % obj)
f.write(self.docbook_gen.expand(
'%*s *\n'
'%*s * Deprecated: %s has been deprecated.\n'
%(indent, '', indent, '', thing), False))
f.write('%*s */\n'%(indent, ''))
# ---------------------------------------------------------------------------------------------------
def generate_interface_intro(self, i):
self.c.write('/* ------------------------------------------------------------------------\n'
' * Code for interface %s\n'
' * ------------------------------------------------------------------------\n'
' */\n'
'\n'%(i.name))
self.c.write(self.docbook_gen.expand(
'/**\n'
' * SECTION:%s\n'
' * @title: %s\n'
' * @short_description: Generated C code for the %s D-Bus interface\n'
' *\n'
' * This section contains code for working with the #%s D-Bus interface in C.\n'
' */\n'
%(i.camel_name, i.camel_name, i.name, i.name), False))
self.c.write('\n')
def generate(self):
self.generate_intro()
self.declare_types()
for i in self.ifaces:
self.generate_interface_intro(i)
self.generate_introspection_for_interface(i)
self.generate_interface(i)
self.generate_property_accessors(i)
self.generate_signal_emitters(i)
self.generate_method_calls(i)
self.generate_method_completers(i)
self.generate_proxy(i)
self.generate_skeleton(i)
if self.generate_objmanager:
self.generate_object()
self.generate_object_manager_client()
self.generate_outro()
|
helldorado/ansible | refs/heads/devel | lib/ansible/module_utils/gcp.py | 77 | # This code is part of Ansible, but is an independent component.
# This particular file snippet, and this file snippet only, is BSD licensed.
# Modules you write using this snippet, which is embedded dynamically by Ansible
# still belong to the author of the module, and may assign their own license
# to the complete work.
#
# Copyright (c), Franck Cuny <[email protected]>, 2014
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without modification,
# are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
# IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE
# USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
import json
import os
import time
import traceback
from distutils.version import LooseVersion
# libcloud
try:
import libcloud
HAS_LIBCLOUD_BASE = True
except ImportError:
HAS_LIBCLOUD_BASE = False
# google-auth
try:
import google.auth
from google.oauth2 import service_account
HAS_GOOGLE_AUTH = True
except ImportError:
HAS_GOOGLE_AUTH = False
# google-python-api
try:
import google_auth_httplib2
from httplib2 import Http
from googleapiclient.http import set_user_agent
from googleapiclient.errors import HttpError
from apiclient.discovery import build
HAS_GOOGLE_API_LIB = True
except ImportError:
HAS_GOOGLE_API_LIB = False
import ansible.module_utils.six.moves.urllib.parse as urlparse
GCP_DEFAULT_SCOPES = ['https://www.googleapis.com/auth/cloud-platform']
def _get_gcp_ansible_credentials(module):
"""Helper to fetch creds from AnsibleModule object."""
service_account_email = module.params.get('service_account_email', None)
# Note: pem_file is discouraged and will be deprecated
credentials_file = module.params.get('pem_file', None) or module.params.get(
'credentials_file', None)
project_id = module.params.get('project_id', None)
return (service_account_email, credentials_file, project_id)
def _get_gcp_environ_var(var_name, default_value):
"""Wrapper around os.environ.get call."""
return os.environ.get(
var_name, default_value)
def _get_gcp_environment_credentials(service_account_email, credentials_file, project_id):
"""Helper to look in environment variables for credentials."""
# If any of the values are not given as parameters, check the appropriate
# environment variables.
if not service_account_email:
service_account_email = _get_gcp_environ_var('GCE_EMAIL', None)
if not credentials_file:
credentials_file = _get_gcp_environ_var(
'GCE_CREDENTIALS_FILE_PATH', None) or _get_gcp_environ_var(
'GOOGLE_APPLICATION_CREDENTIALS', None) or _get_gcp_environ_var(
'GCE_PEM_FILE_PATH', None)
if not project_id:
project_id = _get_gcp_environ_var('GCE_PROJECT', None) or _get_gcp_environ_var(
'GOOGLE_CLOUD_PROJECT', None)
return (service_account_email, credentials_file, project_id)
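# Illustrative environment-variable fallback (the values are hypothetical):
#
#   export GOOGLE_CLOUD_PROJECT=my-project
#   export GOOGLE_APPLICATION_CREDENTIALS=/path/to/key.json
#
# With no module params set, _get_gcp_environment_credentials(None, None, None)
# would then return (None, '/path/to/key.json', 'my-project').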
def _get_gcp_credentials(module, require_valid_json=True, check_libcloud=False):
"""
Obtain GCP credentials by trying various methods.
There are 3 ways to specify GCP credentials:
1. Specify via Ansible module parameters (recommended).
2. Specify via environment variables. Two sets of env vars are available:
       a) GOOGLE_CLOUD_PROJECT, GOOGLE_APPLICATION_CREDENTIALS (preferred)
       b) GCE_PROJECT, GCE_CREDENTIALS_FILE_PATH, GCE_EMAIL (legacy, not recommended; req'd if
using p12 key)
3. Specify via libcloud secrets.py file (deprecated).
There are 3 helper functions to assist in the above.
Regardless of method, the user also has the option of specifying a JSON
file or a p12 file as the credentials file. JSON is strongly recommended and
p12 will be removed in the future.
Additionally, flags may be set to require valid json and check the libcloud
version.
AnsibleModule.fail_json is called only if the project_id cannot be found.
:param module: initialized Ansible module object
:type module: `class AnsibleModule`
:param require_valid_json: If true, require credentials to be valid JSON. Default is True.
:type require_valid_json: ``bool``
:params check_libcloud: If true, check the libcloud version available to see if
JSON creds are supported.
:type check_libcloud: ``bool``
:return: {'service_account_email': service_account_email,
'credentials_file': credentials_file,
'project_id': project_id}
:rtype: ``dict``
"""
(service_account_email,
credentials_file,
project_id) = _get_gcp_ansible_credentials(module)
# If any of the values are not given as parameters, check the appropriate
# environment variables.
(service_account_email,
credentials_file,
project_id) = _get_gcp_environment_credentials(service_account_email,
credentials_file, project_id)
if credentials_file is None or project_id is None or service_account_email is None:
if check_libcloud is True:
if project_id is None:
# TODO(supertom): this message is legacy and integration tests
# depend on it.
module.fail_json(msg='Missing GCE connection parameters in libcloud '
'secrets file.')
else:
if project_id is None:
module.fail_json(msg=('GCP connection error: unable to determine project (%s) or '
'credentials file (%s)' % (project_id, credentials_file)))
# Set these fields to empty strings if they are None
# consumers of this will make the distinction between an empty string
# and None.
if credentials_file is None:
credentials_file = ''
if service_account_email is None:
service_account_email = ''
# ensure the credentials file is found and is in the proper format.
if credentials_file:
_validate_credentials_file(module, credentials_file,
require_valid_json=require_valid_json,
check_libcloud=check_libcloud)
return {'service_account_email': service_account_email,
'credentials_file': credentials_file,
'project_id': project_id}
def _validate_credentials_file(module, credentials_file, require_valid_json=True, check_libcloud=False):
"""
Check for valid credentials file.
Optionally check for JSON format and if libcloud supports JSON.
:param module: initialized Ansible module object
:type module: `class AnsibleModule`
:param credentials_file: path to file on disk
:type credentials_file: ``str``. Complete path to file on disk.
:param require_valid_json: This argument is ignored as of Ansible 2.7.
:type require_valid_json: ``bool``
:params check_libcloud: If true, check the libcloud version available to see if
JSON creds are supported.
:type check_libcloud: ``bool``
:returns: True
:rtype: ``bool``
"""
try:
# Try to read credentials as JSON
with open(credentials_file) as credentials:
json.loads(credentials.read())
# If the credentials are proper JSON and we do not have the minimum
# required libcloud version, bail out and return a descriptive
# error
if check_libcloud and LooseVersion(libcloud.__version__) < '0.17.0':
module.fail_json(msg='Using JSON credentials but libcloud minimum version not met. '
'Upgrade to libcloud>=0.17.0.')
return True
    except IOError:
module.fail_json(msg='GCP Credentials File %s not found.' %
credentials_file, changed=False)
return False
    except ValueError:
module.fail_json(
msg='Non-JSON credentials file provided. Please generate a new JSON key from the Google Cloud console',
changed=False)
def gcp_connect(module, provider, get_driver, user_agent_product, user_agent_version):
"""Return a Google libcloud driver connection."""
if not HAS_LIBCLOUD_BASE:
module.fail_json(msg='libcloud must be installed to use this module')
creds = _get_gcp_credentials(module,
require_valid_json=False,
check_libcloud=True)
try:
gcp = get_driver(provider)(creds['service_account_email'], creds['credentials_file'],
datacenter=module.params.get('zone', None),
project=creds['project_id'])
gcp.connection.user_agent_append("%s/%s" % (
user_agent_product, user_agent_version))
except (RuntimeError, ValueError) as e:
module.fail_json(msg=str(e), changed=False)
except Exception as e:
module.fail_json(msg=unexpected_error_msg(e), changed=False)
return gcp
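# Hypothetical usage sketch for gcp_connect (Provider/get_driver are the real
# libcloud entry points; the user agent values are made up for illustration):
#
#   from libcloud.compute.types import Provider
#   from libcloud.compute.providers import get_driver
#   driver = gcp_connect(module, Provider.GCE, get_driver,
#                        'ansible-gce-example', '0.1')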
def get_google_cloud_credentials(module, scopes=None):
"""
Get credentials object for use with Google Cloud client.
Attempts to obtain credentials by calling _get_gcp_credentials. If those are
not present will attempt to connect via Application Default Credentials.
To connect via libcloud, don't use this function, use gcp_connect instead. For
Google Python API Client, see get_google_api_auth for how to connect.
For more information on Google's client library options for Python, see:
U(https://cloud.google.com/apis/docs/client-libraries-explained#google_api_client_libraries)
Google Cloud example:
creds, params = get_google_cloud_credentials(module, scopes, user_agent_product, user_agent_version)
pubsub_client = pubsub.Client(project=params['project_id'], credentials=creds)
pubsub_client.user_agent = 'ansible-pubsub-0.1'
...
:param module: initialized Ansible module object
:type module: `class AnsibleModule`
:param scopes: list of scopes
:type module: ``list`` of URIs
:returns: A tuple containing (google authorized) credentials object and
params dict {'service_account_email': '...', 'credentials_file': '...', 'project_id': ...}
:rtype: ``tuple``
"""
scopes = [] if scopes is None else scopes
if not HAS_GOOGLE_AUTH:
module.fail_json(msg='Please install google-auth.')
conn_params = _get_gcp_credentials(module,
require_valid_json=True,
check_libcloud=False)
try:
if conn_params['credentials_file']:
credentials = service_account.Credentials.from_service_account_file(
conn_params['credentials_file'])
if scopes:
credentials = credentials.with_scopes(scopes)
else:
(credentials, project_id) = google.auth.default(
scopes=scopes)
if project_id is not None:
conn_params['project_id'] = project_id
return (credentials, conn_params)
except Exception as e:
module.fail_json(msg=unexpected_error_msg(e), changed=False)
return (None, None)
def get_google_api_auth(module, scopes=None, user_agent_product='ansible-python-api', user_agent_version='NA'):
"""
Authentication for use with google-python-api-client.
Function calls get_google_cloud_credentials, which attempts to assemble the credentials
from various locations. Next it attempts to authenticate with Google.
This function returns an httplib2 (compatible) object that can be provided to the Google Python API client.
For libcloud, don't use this function, use gcp_connect instead. For Google Cloud, See
get_google_cloud_credentials for how to connect.
For more information on Google's client library options for Python, see:
U(https://cloud.google.com/apis/docs/client-libraries-explained#google_api_client_libraries)
Google API example:
http_auth, conn_params = get_google_api_auth(module, scopes, user_agent_product, user_agent_version)
service = build('myservice', 'v1', http=http_auth)
...
:param module: initialized Ansible module object
:type module: `class AnsibleModule`
:param scopes: list of scopes
:type scopes: ``list`` of URIs
:param user_agent_product: User agent product. eg: 'ansible-python-api'
:type user_agent_product: ``str``
:param user_agent_version: Version string to append to product. eg: 'NA' or '0.1'
:type user_agent_version: ``str``
:returns: A tuple containing (google authorized) httplib2 request object and a
params dict {'service_account_email': '...', 'credentials_file': '...', 'project_id': ...}
:rtype: ``tuple``
"""
scopes = [] if scopes is None else scopes
if not HAS_GOOGLE_API_LIB:
module.fail_json(msg="Please install google-api-python-client library")
if not scopes:
scopes = GCP_DEFAULT_SCOPES
try:
(credentials, conn_params) = get_google_cloud_credentials(module, scopes)
http = set_user_agent(Http(), '%s-%s' %
(user_agent_product, user_agent_version))
http_auth = google_auth_httplib2.AuthorizedHttp(credentials, http=http)
return (http_auth, conn_params)
except Exception as e:
module.fail_json(msg=unexpected_error_msg(e), changed=False)
return (None, None)
def get_google_api_client(module, service, user_agent_product, user_agent_version,
scopes=None, api_version='v1'):
"""
Get the discovery-based python client. Use when a cloud client is not available.
client = get_google_api_client(module, 'compute', user_agent_product=USER_AGENT_PRODUCT,
user_agent_version=USER_AGENT_VERSION)
:returns: A tuple containing the authorized client to the specified service and a
params dict {'service_account_email': '...', 'credentials_file': '...', 'project_id': ...}
:rtype: ``tuple``
"""
if not scopes:
scopes = GCP_DEFAULT_SCOPES
http_auth, conn_params = get_google_api_auth(module, scopes=scopes,
user_agent_product=user_agent_product,
user_agent_version=user_agent_version)
client = build(service, api_version, http=http_auth)
return (client, conn_params)
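# Usage sketch for the discovery-based client (the 'compute' service and the
# user agent values are illustrative assumptions):
#
#   client, conn_params = get_google_api_client(
#       module, 'compute', user_agent_product='ansible-example',
#       user_agent_version='0.1')
#   req = client.instances().list(project=conn_params['project_id'],
#                                 zone='us-central1-a')
#   resp = GCPUtils.execute_api_client_req(req, client=client, raw=False)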
def check_min_pkg_version(pkg_name, minimum_version):
"""Minimum required version is >= installed version."""
from pkg_resources import get_distribution
try:
installed_version = get_distribution(pkg_name).version
return LooseVersion(installed_version) >= minimum_version
except Exception as e:
return False
def unexpected_error_msg(error):
"""Create an error string based on passed in error."""
return 'Unexpected response: (%s). Detail: %s' % (str(error), traceback.format_exc())
def get_valid_location(module, driver, location, location_type='zone'):
    if location_type == 'zone':
        loc = driver.ex_get_zone(location)
    else:
        loc = driver.ex_get_region(location)
    if loc is None:
        link = 'https://cloud.google.com/compute/docs/regions-zones/regions-zones#available'
        module.fail_json(msg=('%s %s is invalid. Please see the list of '
                              'available %s at %s' % (
                                  location_type, location, location_type, link)),
                         changed=False)
    return loc
def check_params(params, field_list):
"""
Helper to validate params.
Use this in function definitions if they require specific fields
to be present.
:param params: structure that contains the fields
:type params: ``dict``
    :param field_list: list of dicts describing the fields
                       [{'name': str, 'required': True/False, 'type': cls}]
    :type field_list: ``list`` of ``dict``
    :return: True or raises ValueError
:rtype: ``bool`` or `class:ValueError`
"""
for d in field_list:
if not d['name'] in params:
if 'required' in d and d['required'] is True:
raise ValueError(("%s is required and must be of type: %s" %
(d['name'], str(d['type']))))
else:
if not isinstance(params[d['name']], d['type']):
raise ValueError(("%s must be of type: %s. %s (%s) provided." % (
d['name'], str(d['type']), params[d['name']],
type(params[d['name']]))))
if 'values' in d:
if params[d['name']] not in d['values']:
raise ValueError(("%s must be one of: %s" % (
d['name'], ','.join(d['values']))))
if isinstance(params[d['name']], int):
if 'min' in d:
if params[d['name']] < d['min']:
raise ValueError(("%s must be greater than or equal to: %s" % (
d['name'], d['min'])))
if 'max' in d:
if params[d['name']] > d['max']:
raise ValueError("%s must be less than or equal to: %s" % (
d['name'], d['max']))
return True
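# Example field spec for check_params (a sketch): require a 'name' string and
# allow an optional bounded integer 'size'.
#
#   fields = [
#       {'name': 'name', 'required': True, 'type': str},
#       {'name': 'size', 'required': False, 'type': int, 'min': 0, 'max': 10},
#   ]
#   check_params({'name': 'web', 'size': 3}, fields)  # -> True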
class GCPUtils(object):
"""
Helper utilities for GCP.
"""
@staticmethod
def underscore_to_camel(txt):
return txt.split('_')[0] + ''.join(x.capitalize() or '_' for x in txt.split('_')[1:])
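        # e.g. underscore_to_camel('default_service') -> 'defaultService'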
@staticmethod
def remove_non_gcp_params(params):
"""
        Remove Ansible-specific params (currently just 'state') that have no
        GCP API counterpart.
"""
params_to_remove = ['state']
for p in params_to_remove:
if p in params:
del params[p]
return params
@staticmethod
def params_to_gcp_dict(params, resource_name=None):
"""
Recursively convert ansible params to GCP Params.
Keys are converted from snake to camelCase
ex: default_service to defaultService
Handles lists, dicts and strings
special provision for the resource name
"""
if not isinstance(params, dict):
return params
gcp_dict = {}
params = GCPUtils.remove_non_gcp_params(params)
for k, v in params.items():
gcp_key = GCPUtils.underscore_to_camel(k)
if isinstance(v, dict):
retval = GCPUtils.params_to_gcp_dict(v)
gcp_dict[gcp_key] = retval
elif isinstance(v, list):
gcp_dict[gcp_key] = [GCPUtils.params_to_gcp_dict(x) for x in v]
else:
if resource_name and k == resource_name:
gcp_dict['name'] = v
else:
gcp_dict[gcp_key] = v
return gcp_dict
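    # Example (sketch):
    #   params_to_gcp_dict({'default_service': 'svc', 'name_field': 'x',
    #                       'state': 'present'}, resource_name='name_field')
    #   -> {'defaultService': 'svc', 'name': 'x'}   # 'state' is dropped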
@staticmethod
def execute_api_client_req(req, client=None, raw=True,
operation_timeout=180, poll_interval=5,
raise_404=True):
"""
General python api client interaction function.
For use with google-api-python-client, or clients created
with get_google_api_client function
Not for use with Google Cloud client libraries
For long-running operations, we make an immediate query and then
sleep poll_interval before re-querying. After the request is done
we rebuild the request with a get method and return the result.
"""
try:
resp = req.execute()
if not resp:
return None
if raw:
return resp
if resp['kind'] == 'compute#operation':
resp = GCPUtils.execute_api_client_operation_req(req, resp,
client,
operation_timeout,
poll_interval)
if 'items' in resp:
return resp['items']
return resp
except HttpError as h:
# Note: 404s can be generated (incorrectly) for dependent
# resources not existing. We let the caller determine if
# they want 404s raised for their invocation.
if h.resp.status == 404 and not raise_404:
return None
else:
raise
except Exception:
raise
@staticmethod
def execute_api_client_operation_req(orig_req, op_resp, client,
operation_timeout=180, poll_interval=5):
"""
        Poll a long-running GCP operation until it is DONE or operation_timeout
        elapses, then return the final resource (or True for deletes).
"""
parsed_url = GCPUtils.parse_gcp_url(orig_req.uri)
project_id = parsed_url['project']
resource_name = GCPUtils.get_gcp_resource_from_methodId(
orig_req.methodId)
resource = GCPUtils.build_resource_from_name(client, resource_name)
start_time = time.time()
complete = False
attempts = 1
while not complete:
if start_time + operation_timeout >= time.time():
op_req = client.globalOperations().get(
project=project_id, operation=op_resp['name'])
op_resp = op_req.execute()
if op_resp['status'] != 'DONE':
time.sleep(poll_interval)
attempts += 1
else:
complete = True
if op_resp['operationType'] == 'delete':
# don't wait for the delete
return True
elif op_resp['operationType'] in ['insert', 'update', 'patch']:
# TODO(supertom): Isolate 'build-new-request' stuff.
resource_name_singular = GCPUtils.get_entity_name_from_resource_name(
resource_name)
if op_resp['operationType'] == 'insert' or 'entity_name' not in parsed_url:
parsed_url['entity_name'] = GCPUtils.parse_gcp_url(op_resp['targetLink'])[
'entity_name']
args = {'project': project_id,
resource_name_singular: parsed_url['entity_name']}
new_req = resource.get(**args)
resp = new_req.execute()
return resp
else:
# assuming multiple entities, do a list call.
new_req = resource.list(project=project_id)
resp = new_req.execute()
return resp
else:
# operation didn't complete on time.
raise GCPOperationTimeoutError("Operation timed out: %s" % (
op_resp['targetLink']))
@staticmethod
def build_resource_from_name(client, resource_name):
try:
method = getattr(client, resource_name)
return method()
except AttributeError:
raise NotImplementedError('%s is not an attribute of %s' % (resource_name,
client))
@staticmethod
def get_gcp_resource_from_methodId(methodId):
try:
parts = methodId.split('.')
if len(parts) != 3:
return None
else:
return parts[1]
except AttributeError:
return None
@staticmethod
def get_entity_name_from_resource_name(resource_name):
if not resource_name:
return None
try:
# Chop off global or region prefixes
if resource_name.startswith('global'):
resource_name = resource_name.replace('global', '')
            elif resource_name.startswith('region'):
resource_name = resource_name.replace('region', '')
# ensure we have a lower case first letter
resource_name = resource_name[0].lower() + resource_name[1:]
            if resource_name[-3:] == 'ies':
                # use slicing rather than replace() so only the suffix changes
                return resource_name[:-3] + 'y'
if resource_name[-1] == 's':
return resource_name[:-1]
return resource_name
except AttributeError:
return None
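    # Examples (sketch): 'globalForwardingRules' -> 'forwardingRule',
    # 'backendServices' -> 'backendService', 'urlMaps' -> 'urlMap'.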
@staticmethod
def parse_gcp_url(url):
"""
Parse GCP urls and return dict of parts.
Supported URL structures:
/SERVICE/VERSION/'projects'/PROJECT_ID/RESOURCE
/SERVICE/VERSION/'projects'/PROJECT_ID/RESOURCE/ENTITY_NAME
/SERVICE/VERSION/'projects'/PROJECT_ID/RESOURCE/ENTITY_NAME/METHOD_NAME
/SERVICE/VERSION/'projects'/PROJECT_ID/'global'/RESOURCE
/SERVICE/VERSION/'projects'/PROJECT_ID/'global'/RESOURCE/ENTITY_NAME
/SERVICE/VERSION/'projects'/PROJECT_ID/'global'/RESOURCE/ENTITY_NAME/METHOD_NAME
/SERVICE/VERSION/'projects'/PROJECT_ID/LOCATION_TYPE/LOCATION/RESOURCE
/SERVICE/VERSION/'projects'/PROJECT_ID/LOCATION_TYPE/LOCATION/RESOURCE/ENTITY_NAME
/SERVICE/VERSION/'projects'/PROJECT_ID/LOCATION_TYPE/LOCATION/RESOURCE/ENTITY_NAME/METHOD_NAME
:param url: GCP-generated URL, such as a selflink or resource location.
:type url: ``str``
    :return: dictionary of parts. Includes standard components of urlparse, plus
GCP-specific 'service', 'api_version', 'project' and
'resource_name' keys. Optionally, 'zone', 'region', 'entity_name'
and 'method_name', if applicable.
:rtype: ``dict``
"""
p = urlparse.urlparse(url)
if not p:
return None
else:
# we add extra items such as
# zone, region and resource_name
url_parts = {}
url_parts['scheme'] = p.scheme
url_parts['host'] = p.netloc
url_parts['path'] = p.path
if p.path.find('/') == 0:
url_parts['path'] = p.path[1:]
url_parts['params'] = p.params
url_parts['fragment'] = p.fragment
url_parts['query'] = p.query
url_parts['project'] = None
url_parts['service'] = None
url_parts['api_version'] = None
path_parts = url_parts['path'].split('/')
url_parts['service'] = path_parts[0]
url_parts['api_version'] = path_parts[1]
if path_parts[2] == 'projects':
url_parts['project'] = path_parts[3]
else:
# invalid URL
raise GCPInvalidURLError('unable to parse: %s' % url)
if 'global' in path_parts:
url_parts['global'] = True
idx = path_parts.index('global')
if len(path_parts) - idx == 4:
# we have a resource, entity and method_name
url_parts['resource_name'] = path_parts[idx + 1]
url_parts['entity_name'] = path_parts[idx + 2]
url_parts['method_name'] = path_parts[idx + 3]
if len(path_parts) - idx == 3:
# we have a resource and entity
url_parts['resource_name'] = path_parts[idx + 1]
url_parts['entity_name'] = path_parts[idx + 2]
if len(path_parts) - idx == 2:
url_parts['resource_name'] = path_parts[idx + 1]
if len(path_parts) - idx < 2:
# invalid URL
raise GCPInvalidURLError('unable to parse: %s' % url)
elif 'regions' in path_parts or 'zones' in path_parts:
idx = -1
if 'regions' in path_parts:
idx = path_parts.index('regions')
url_parts['region'] = path_parts[idx + 1]
else:
idx = path_parts.index('zones')
url_parts['zone'] = path_parts[idx + 1]
if len(path_parts) - idx == 5:
# we have a resource, entity and method_name
url_parts['resource_name'] = path_parts[idx + 2]
url_parts['entity_name'] = path_parts[idx + 3]
url_parts['method_name'] = path_parts[idx + 4]
if len(path_parts) - idx == 4:
# we have a resource and entity
url_parts['resource_name'] = path_parts[idx + 2]
url_parts['entity_name'] = path_parts[idx + 3]
if len(path_parts) - idx == 3:
url_parts['resource_name'] = path_parts[idx + 2]
if len(path_parts) - idx < 3:
# invalid URL
raise GCPInvalidURLError('unable to parse: %s' % url)
else:
# no location in URL.
idx = path_parts.index('projects')
if len(path_parts) - idx == 5:
# we have a resource, entity and method_name
url_parts['resource_name'] = path_parts[idx + 2]
url_parts['entity_name'] = path_parts[idx + 3]
url_parts['method_name'] = path_parts[idx + 4]
if len(path_parts) - idx == 4:
# we have a resource and entity
url_parts['resource_name'] = path_parts[idx + 2]
url_parts['entity_name'] = path_parts[idx + 3]
if len(path_parts) - idx == 3:
url_parts['resource_name'] = path_parts[idx + 2]
if len(path_parts) - idx < 3:
# invalid URL
raise GCPInvalidURLError('unable to parse: %s' % url)
return url_parts
@staticmethod
def build_googleapi_url(project, api_version='v1', service='compute'):
return 'https://www.googleapis.com/%s/%s/projects/%s' % (service, api_version, project)
@staticmethod
def filter_gcp_fields(params, excluded_fields=None):
new_params = {}
if not excluded_fields:
excluded_fields = ['creationTimestamp', 'id', 'kind',
'selfLink', 'fingerprint', 'description']
if isinstance(params, list):
new_params = [GCPUtils.filter_gcp_fields(
x, excluded_fields) for x in params]
elif isinstance(params, dict):
for k in params.keys():
if k not in excluded_fields:
new_params[k] = GCPUtils.filter_gcp_fields(
params[k], excluded_fields)
else:
new_params = params
return new_params
@staticmethod
def are_params_equal(p1, p2):
"""
Check if two params dicts are equal.
TODO(supertom): need a way to filter out URLs, or they need to be built
"""
filtered_p1 = GCPUtils.filter_gcp_fields(p1)
filtered_p2 = GCPUtils.filter_gcp_fields(p2)
if filtered_p1 != filtered_p2:
return False
return True
class GCPError(Exception):
pass
class GCPOperationTimeoutError(GCPError):
pass
class GCPInvalidURLError(GCPError):
pass
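# A minimal usage sketch, added for illustration only; the URL below is
# hypothetical and simply follows the zonal structure documented in
# GCPUtils.parse_gcp_url above.
if __name__ == '__main__':
    example_url = ('https://www.googleapis.com/compute/v1/projects/my-project'
                   '/zones/us-central1-a/instances/my-instance')
    parts = GCPUtils.parse_gcp_url(example_url)
    # Expected per the docstring: standard urlparse components plus
    # 'service', 'api_version', 'project', 'zone', 'resource_name'
    # and 'entity_name'.
    print(parts['service'], parts['api_version'], parts['project'])
    print(parts['zone'], parts['resource_name'], parts['entity_name'])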
|
ifduyue/django | refs/heads/master | tests/field_deconstruction/tests.py | 11 | from django.apps import apps
from django.db import models
from django.test import SimpleTestCase, override_settings
from django.test.utils import isolate_lru_cache
class FieldDeconstructionTests(SimpleTestCase):
"""
Tests the deconstruct() method on all core fields.
"""
def test_name(self):
"""
Tests the outputting of the correct name if assigned one.
"""
# First try using a "normal" field
field = models.CharField(max_length=65)
name, path, args, kwargs = field.deconstruct()
self.assertIsNone(name)
field.set_attributes_from_name("is_awesome_test")
name, path, args, kwargs = field.deconstruct()
self.assertEqual(name, "is_awesome_test")
# Now try with a ForeignKey
field = models.ForeignKey("some_fake.ModelName", models.CASCADE)
name, path, args, kwargs = field.deconstruct()
self.assertIsNone(name)
field.set_attributes_from_name("author")
name, path, args, kwargs = field.deconstruct()
self.assertEqual(name, "author")
def test_db_tablespace(self):
field = models.Field()
_, _, args, kwargs = field.deconstruct()
self.assertEqual(args, [])
self.assertEqual(kwargs, {})
# With a DEFAULT_DB_TABLESPACE.
with self.settings(DEFAULT_DB_TABLESPACE='foo'):
_, _, args, kwargs = field.deconstruct()
self.assertEqual(args, [])
self.assertEqual(kwargs, {})
# With a db_tablespace.
field = models.Field(db_tablespace='foo')
_, _, args, kwargs = field.deconstruct()
self.assertEqual(args, [])
self.assertEqual(kwargs, {'db_tablespace': 'foo'})
# With a db_tablespace equal to DEFAULT_DB_TABLESPACE.
with self.settings(DEFAULT_DB_TABLESPACE='foo'):
_, _, args, kwargs = field.deconstruct()
self.assertEqual(args, [])
self.assertEqual(kwargs, {'db_tablespace': 'foo'})
def test_auto_field(self):
field = models.AutoField(primary_key=True)
field.set_attributes_from_name("id")
name, path, args, kwargs = field.deconstruct()
self.assertEqual(path, "django.db.models.AutoField")
self.assertEqual(args, [])
self.assertEqual(kwargs, {"primary_key": True})
def test_big_integer_field(self):
field = models.BigIntegerField()
name, path, args, kwargs = field.deconstruct()
self.assertEqual(path, "django.db.models.BigIntegerField")
self.assertEqual(args, [])
self.assertEqual(kwargs, {})
def test_boolean_field(self):
field = models.BooleanField()
name, path, args, kwargs = field.deconstruct()
self.assertEqual(path, "django.db.models.BooleanField")
self.assertEqual(args, [])
self.assertEqual(kwargs, {})
field = models.BooleanField(default=True)
name, path, args, kwargs = field.deconstruct()
self.assertEqual(path, "django.db.models.BooleanField")
self.assertEqual(args, [])
self.assertEqual(kwargs, {"default": True})
def test_char_field(self):
field = models.CharField(max_length=65)
name, path, args, kwargs = field.deconstruct()
self.assertEqual(path, "django.db.models.CharField")
self.assertEqual(args, [])
self.assertEqual(kwargs, {"max_length": 65})
field = models.CharField(max_length=65, null=True, blank=True)
name, path, args, kwargs = field.deconstruct()
self.assertEqual(path, "django.db.models.CharField")
self.assertEqual(args, [])
self.assertEqual(kwargs, {"max_length": 65, "null": True, "blank": True})
def test_char_field_choices(self):
field = models.CharField(max_length=1, choices=(("A", "One"), ("B", "Two")))
name, path, args, kwargs = field.deconstruct()
self.assertEqual(path, "django.db.models.CharField")
self.assertEqual(args, [])
self.assertEqual(kwargs, {"choices": [("A", "One"), ("B", "Two")], "max_length": 1})
def test_csi_field(self):
field = models.CommaSeparatedIntegerField(max_length=100)
name, path, args, kwargs = field.deconstruct()
self.assertEqual(path, "django.db.models.CommaSeparatedIntegerField")
self.assertEqual(args, [])
self.assertEqual(kwargs, {"max_length": 100})
def test_date_field(self):
field = models.DateField()
name, path, args, kwargs = field.deconstruct()
self.assertEqual(path, "django.db.models.DateField")
self.assertEqual(args, [])
self.assertEqual(kwargs, {})
field = models.DateField(auto_now=True)
name, path, args, kwargs = field.deconstruct()
self.assertEqual(path, "django.db.models.DateField")
self.assertEqual(args, [])
self.assertEqual(kwargs, {"auto_now": True})
def test_datetime_field(self):
field = models.DateTimeField()
name, path, args, kwargs = field.deconstruct()
self.assertEqual(path, "django.db.models.DateTimeField")
self.assertEqual(args, [])
self.assertEqual(kwargs, {})
field = models.DateTimeField(auto_now_add=True)
name, path, args, kwargs = field.deconstruct()
self.assertEqual(path, "django.db.models.DateTimeField")
self.assertEqual(args, [])
self.assertEqual(kwargs, {"auto_now_add": True})
# Bug #21785
field = models.DateTimeField(auto_now=True, auto_now_add=True)
name, path, args, kwargs = field.deconstruct()
self.assertEqual(path, "django.db.models.DateTimeField")
self.assertEqual(args, [])
self.assertEqual(kwargs, {"auto_now_add": True, "auto_now": True})
def test_decimal_field(self):
field = models.DecimalField(max_digits=5, decimal_places=2)
name, path, args, kwargs = field.deconstruct()
self.assertEqual(path, "django.db.models.DecimalField")
self.assertEqual(args, [])
self.assertEqual(kwargs, {"max_digits": 5, "decimal_places": 2})
def test_decimal_field_0_decimal_places(self):
"""
A DecimalField with decimal_places=0 should work (#22272).
"""
field = models.DecimalField(max_digits=5, decimal_places=0)
name, path, args, kwargs = field.deconstruct()
self.assertEqual(path, "django.db.models.DecimalField")
self.assertEqual(args, [])
self.assertEqual(kwargs, {"max_digits": 5, "decimal_places": 0})
def test_email_field(self):
field = models.EmailField()
name, path, args, kwargs = field.deconstruct()
self.assertEqual(path, "django.db.models.EmailField")
self.assertEqual(args, [])
self.assertEqual(kwargs, {"max_length": 254})
field = models.EmailField(max_length=255)
name, path, args, kwargs = field.deconstruct()
self.assertEqual(path, "django.db.models.EmailField")
self.assertEqual(args, [])
self.assertEqual(kwargs, {"max_length": 255})
def test_file_field(self):
field = models.FileField(upload_to="foo/bar")
name, path, args, kwargs = field.deconstruct()
self.assertEqual(path, "django.db.models.FileField")
self.assertEqual(args, [])
self.assertEqual(kwargs, {"upload_to": "foo/bar"})
# Test max_length
field = models.FileField(upload_to="foo/bar", max_length=200)
name, path, args, kwargs = field.deconstruct()
self.assertEqual(path, "django.db.models.FileField")
self.assertEqual(args, [])
self.assertEqual(kwargs, {"upload_to": "foo/bar", "max_length": 200})
def test_file_path_field(self):
field = models.FilePathField(match=r".*\.txt$")
name, path, args, kwargs = field.deconstruct()
self.assertEqual(path, "django.db.models.FilePathField")
self.assertEqual(args, [])
self.assertEqual(kwargs, {"match": r".*\.txt$"})
field = models.FilePathField(recursive=True, allow_folders=True, max_length=123)
name, path, args, kwargs = field.deconstruct()
self.assertEqual(path, "django.db.models.FilePathField")
self.assertEqual(args, [])
self.assertEqual(kwargs, {"recursive": True, "allow_folders": True, "max_length": 123})
def test_float_field(self):
field = models.FloatField()
name, path, args, kwargs = field.deconstruct()
self.assertEqual(path, "django.db.models.FloatField")
self.assertEqual(args, [])
self.assertEqual(kwargs, {})
def test_foreign_key(self):
# Test basic pointing
from django.contrib.auth.models import Permission
field = models.ForeignKey("auth.Permission", models.CASCADE)
field.remote_field.model = Permission
field.remote_field.field_name = "id"
name, path, args, kwargs = field.deconstruct()
self.assertEqual(path, "django.db.models.ForeignKey")
self.assertEqual(args, [])
self.assertEqual(kwargs, {"to": "auth.Permission", "on_delete": models.CASCADE})
self.assertFalse(hasattr(kwargs['to'], "setting_name"))
# Test swap detection for swappable model
field = models.ForeignKey("auth.User", models.CASCADE)
name, path, args, kwargs = field.deconstruct()
self.assertEqual(path, "django.db.models.ForeignKey")
self.assertEqual(args, [])
self.assertEqual(kwargs, {"to": "auth.User", "on_delete": models.CASCADE})
self.assertEqual(kwargs['to'].setting_name, "AUTH_USER_MODEL")
# Test nonexistent (for now) model
field = models.ForeignKey("something.Else", models.CASCADE)
name, path, args, kwargs = field.deconstruct()
self.assertEqual(path, "django.db.models.ForeignKey")
self.assertEqual(args, [])
self.assertEqual(kwargs, {"to": "something.Else", "on_delete": models.CASCADE})
# Test on_delete
field = models.ForeignKey("auth.User", models.SET_NULL)
name, path, args, kwargs = field.deconstruct()
self.assertEqual(path, "django.db.models.ForeignKey")
self.assertEqual(args, [])
self.assertEqual(kwargs, {"to": "auth.User", "on_delete": models.SET_NULL})
# Test to_field preservation
field = models.ForeignKey("auth.Permission", models.CASCADE, to_field="foobar")
name, path, args, kwargs = field.deconstruct()
self.assertEqual(path, "django.db.models.ForeignKey")
self.assertEqual(args, [])
self.assertEqual(kwargs, {"to": "auth.Permission", "to_field": "foobar", "on_delete": models.CASCADE})
# Test related_name preservation
field = models.ForeignKey("auth.Permission", models.CASCADE, related_name="foobar")
name, path, args, kwargs = field.deconstruct()
self.assertEqual(path, "django.db.models.ForeignKey")
self.assertEqual(args, [])
self.assertEqual(kwargs, {"to": "auth.Permission", "related_name": "foobar", "on_delete": models.CASCADE})
# Test related_query_name
field = models.ForeignKey("auth.Permission", models.CASCADE, related_query_name="foobar")
name, path, args, kwargs = field.deconstruct()
self.assertEqual(path, "django.db.models.ForeignKey")
self.assertEqual(args, [])
self.assertEqual(
kwargs,
{"to": "auth.Permission", "related_query_name": "foobar", "on_delete": models.CASCADE}
)
# Test limit_choices_to
field = models.ForeignKey("auth.Permission", models.CASCADE, limit_choices_to={'foo': 'bar'})
name, path, args, kwargs = field.deconstruct()
self.assertEqual(path, "django.db.models.ForeignKey")
self.assertEqual(args, [])
self.assertEqual(
kwargs,
{"to": "auth.Permission", "limit_choices_to": {'foo': 'bar'}, "on_delete": models.CASCADE}
)
# Test unique
field = models.ForeignKey("auth.Permission", models.CASCADE, unique=True)
name, path, args, kwargs = field.deconstruct()
self.assertEqual(path, "django.db.models.ForeignKey")
self.assertEqual(args, [])
self.assertEqual(kwargs, {"to": "auth.Permission", "unique": True, "on_delete": models.CASCADE})
@override_settings(AUTH_USER_MODEL="auth.Permission")
def test_foreign_key_swapped(self):
with isolate_lru_cache(apps.get_swappable_settings_name):
# It doesn't matter that we swapped out user for permission;
# there's no validation. We just want to check the setting stuff works.
field = models.ForeignKey("auth.Permission", models.CASCADE)
name, path, args, kwargs = field.deconstruct()
self.assertEqual(path, "django.db.models.ForeignKey")
self.assertEqual(args, [])
self.assertEqual(kwargs, {"to": "auth.Permission", "on_delete": models.CASCADE})
self.assertEqual(kwargs['to'].setting_name, "AUTH_USER_MODEL")
def test_one_to_one(self):
# Test basic pointing
from django.contrib.auth.models import Permission
field = models.OneToOneField("auth.Permission", models.CASCADE)
field.remote_field.model = Permission
field.remote_field.field_name = "id"
name, path, args, kwargs = field.deconstruct()
self.assertEqual(path, "django.db.models.OneToOneField")
self.assertEqual(args, [])
self.assertEqual(kwargs, {"to": "auth.Permission", "on_delete": models.CASCADE})
self.assertFalse(hasattr(kwargs['to'], "setting_name"))
# Test swap detection for swappable model
field = models.OneToOneField("auth.User", models.CASCADE)
name, path, args, kwargs = field.deconstruct()
self.assertEqual(path, "django.db.models.OneToOneField")
self.assertEqual(args, [])
self.assertEqual(kwargs, {"to": "auth.User", "on_delete": models.CASCADE})
self.assertEqual(kwargs['to'].setting_name, "AUTH_USER_MODEL")
# Test nonexistent (for now) model
field = models.OneToOneField("something.Else", models.CASCADE)
name, path, args, kwargs = field.deconstruct()
self.assertEqual(path, "django.db.models.OneToOneField")
self.assertEqual(args, [])
self.assertEqual(kwargs, {"to": "something.Else", "on_delete": models.CASCADE})
# Test on_delete
field = models.OneToOneField("auth.User", models.SET_NULL)
name, path, args, kwargs = field.deconstruct()
self.assertEqual(path, "django.db.models.OneToOneField")
self.assertEqual(args, [])
self.assertEqual(kwargs, {"to": "auth.User", "on_delete": models.SET_NULL})
# Test to_field
field = models.OneToOneField("auth.Permission", models.CASCADE, to_field="foobar")
name, path, args, kwargs = field.deconstruct()
self.assertEqual(path, "django.db.models.OneToOneField")
self.assertEqual(args, [])
self.assertEqual(kwargs, {"to": "auth.Permission", "to_field": "foobar", "on_delete": models.CASCADE})
# Test related_name
field = models.OneToOneField("auth.Permission", models.CASCADE, related_name="foobar")
name, path, args, kwargs = field.deconstruct()
self.assertEqual(path, "django.db.models.OneToOneField")
self.assertEqual(args, [])
self.assertEqual(kwargs, {"to": "auth.Permission", "related_name": "foobar", "on_delete": models.CASCADE})
# Test related_query_name
field = models.OneToOneField("auth.Permission", models.CASCADE, related_query_name="foobar")
name, path, args, kwargs = field.deconstruct()
self.assertEqual(path, "django.db.models.OneToOneField")
self.assertEqual(args, [])
self.assertEqual(
kwargs,
{"to": "auth.Permission", "related_query_name": "foobar", "on_delete": models.CASCADE}
)
# Test limit_choices_to
field = models.OneToOneField("auth.Permission", models.CASCADE, limit_choices_to={'foo': 'bar'})
name, path, args, kwargs = field.deconstruct()
self.assertEqual(path, "django.db.models.OneToOneField")
self.assertEqual(args, [])
self.assertEqual(
kwargs,
{"to": "auth.Permission", "limit_choices_to": {'foo': 'bar'}, "on_delete": models.CASCADE}
)
# Test unique
field = models.OneToOneField("auth.Permission", models.CASCADE, unique=True)
name, path, args, kwargs = field.deconstruct()
self.assertEqual(path, "django.db.models.OneToOneField")
self.assertEqual(args, [])
self.assertEqual(kwargs, {"to": "auth.Permission", "on_delete": models.CASCADE})
def test_image_field(self):
field = models.ImageField(upload_to="foo/barness", width_field="width", height_field="height")
name, path, args, kwargs = field.deconstruct()
self.assertEqual(path, "django.db.models.ImageField")
self.assertEqual(args, [])
self.assertEqual(kwargs, {"upload_to": "foo/barness", "width_field": "width", "height_field": "height"})
def test_integer_field(self):
field = models.IntegerField()
name, path, args, kwargs = field.deconstruct()
self.assertEqual(path, "django.db.models.IntegerField")
self.assertEqual(args, [])
self.assertEqual(kwargs, {})
def test_ip_address_field(self):
field = models.IPAddressField()
name, path, args, kwargs = field.deconstruct()
self.assertEqual(path, "django.db.models.IPAddressField")
self.assertEqual(args, [])
self.assertEqual(kwargs, {})
def test_generic_ip_address_field(self):
field = models.GenericIPAddressField()
name, path, args, kwargs = field.deconstruct()
self.assertEqual(path, "django.db.models.GenericIPAddressField")
self.assertEqual(args, [])
self.assertEqual(kwargs, {})
field = models.GenericIPAddressField(protocol="IPv6")
name, path, args, kwargs = field.deconstruct()
self.assertEqual(path, "django.db.models.GenericIPAddressField")
self.assertEqual(args, [])
self.assertEqual(kwargs, {"protocol": "IPv6"})
def test_many_to_many_field(self):
# Test normal
field = models.ManyToManyField("auth.Permission")
name, path, args, kwargs = field.deconstruct()
self.assertEqual(path, "django.db.models.ManyToManyField")
self.assertEqual(args, [])
self.assertEqual(kwargs, {"to": "auth.Permission"})
self.assertFalse(hasattr(kwargs['to'], "setting_name"))
# Test swappable
field = models.ManyToManyField("auth.User")
name, path, args, kwargs = field.deconstruct()
self.assertEqual(path, "django.db.models.ManyToManyField")
self.assertEqual(args, [])
self.assertEqual(kwargs, {"to": "auth.User"})
self.assertEqual(kwargs['to'].setting_name, "AUTH_USER_MODEL")
# Test through
field = models.ManyToManyField("auth.Permission", through="auth.Group")
name, path, args, kwargs = field.deconstruct()
self.assertEqual(path, "django.db.models.ManyToManyField")
self.assertEqual(args, [])
self.assertEqual(kwargs, {"to": "auth.Permission", "through": "auth.Group"})
# Test custom db_table
field = models.ManyToManyField("auth.Permission", db_table="custom_table")
name, path, args, kwargs = field.deconstruct()
self.assertEqual(path, "django.db.models.ManyToManyField")
self.assertEqual(args, [])
self.assertEqual(kwargs, {"to": "auth.Permission", "db_table": "custom_table"})
# Test related_name
field = models.ManyToManyField("auth.Permission", related_name="custom_table")
name, path, args, kwargs = field.deconstruct()
self.assertEqual(path, "django.db.models.ManyToManyField")
self.assertEqual(args, [])
self.assertEqual(kwargs, {"to": "auth.Permission", "related_name": "custom_table"})
# Test related_query_name
field = models.ManyToManyField("auth.Permission", related_query_name="foobar")
name, path, args, kwargs = field.deconstruct()
self.assertEqual(path, "django.db.models.ManyToManyField")
self.assertEqual(args, [])
self.assertEqual(kwargs, {"to": "auth.Permission", "related_query_name": "foobar"})
# Test limit_choices_to
field = models.ManyToManyField("auth.Permission", limit_choices_to={'foo': 'bar'})
name, path, args, kwargs = field.deconstruct()
self.assertEqual(path, "django.db.models.ManyToManyField")
self.assertEqual(args, [])
self.assertEqual(kwargs, {"to": "auth.Permission", "limit_choices_to": {'foo': 'bar'}})
@override_settings(AUTH_USER_MODEL="auth.Permission")
def test_many_to_many_field_swapped(self):
with isolate_lru_cache(apps.get_swappable_settings_name):
# It doesn't matter that we swapped out user for permission;
# there's no validation. We just want to check the setting stuff works.
field = models.ManyToManyField("auth.Permission")
name, path, args, kwargs = field.deconstruct()
self.assertEqual(path, "django.db.models.ManyToManyField")
self.assertEqual(args, [])
self.assertEqual(kwargs, {"to": "auth.Permission"})
self.assertEqual(kwargs['to'].setting_name, "AUTH_USER_MODEL")
def test_null_boolean_field(self):
field = models.NullBooleanField()
name, path, args, kwargs = field.deconstruct()
self.assertEqual(path, "django.db.models.NullBooleanField")
self.assertEqual(args, [])
self.assertEqual(kwargs, {})
def test_positive_integer_field(self):
field = models.PositiveIntegerField()
name, path, args, kwargs = field.deconstruct()
self.assertEqual(path, "django.db.models.PositiveIntegerField")
self.assertEqual(args, [])
self.assertEqual(kwargs, {})
def test_positive_small_integer_field(self):
field = models.PositiveSmallIntegerField()
name, path, args, kwargs = field.deconstruct()
self.assertEqual(path, "django.db.models.PositiveSmallIntegerField")
self.assertEqual(args, [])
self.assertEqual(kwargs, {})
def test_slug_field(self):
field = models.SlugField()
name, path, args, kwargs = field.deconstruct()
self.assertEqual(path, "django.db.models.SlugField")
self.assertEqual(args, [])
self.assertEqual(kwargs, {})
field = models.SlugField(db_index=False, max_length=231)
name, path, args, kwargs = field.deconstruct()
self.assertEqual(path, "django.db.models.SlugField")
self.assertEqual(args, [])
self.assertEqual(kwargs, {"db_index": False, "max_length": 231})
def test_small_integer_field(self):
field = models.SmallIntegerField()
name, path, args, kwargs = field.deconstruct()
self.assertEqual(path, "django.db.models.SmallIntegerField")
self.assertEqual(args, [])
self.assertEqual(kwargs, {})
def test_text_field(self):
field = models.TextField()
name, path, args, kwargs = field.deconstruct()
self.assertEqual(path, "django.db.models.TextField")
self.assertEqual(args, [])
self.assertEqual(kwargs, {})
def test_time_field(self):
field = models.TimeField()
name, path, args, kwargs = field.deconstruct()
self.assertEqual(path, "django.db.models.TimeField")
self.assertEqual(args, [])
self.assertEqual(kwargs, {})
field = models.TimeField(auto_now=True)
name, path, args, kwargs = field.deconstruct()
self.assertEqual(args, [])
self.assertEqual(kwargs, {'auto_now': True})
field = models.TimeField(auto_now_add=True)
name, path, args, kwargs = field.deconstruct()
self.assertEqual(args, [])
self.assertEqual(kwargs, {'auto_now_add': True})
def test_url_field(self):
field = models.URLField()
name, path, args, kwargs = field.deconstruct()
self.assertEqual(path, "django.db.models.URLField")
self.assertEqual(args, [])
self.assertEqual(kwargs, {})
field = models.URLField(max_length=231)
name, path, args, kwargs = field.deconstruct()
self.assertEqual(path, "django.db.models.URLField")
self.assertEqual(args, [])
self.assertEqual(kwargs, {"max_length": 231})
def test_binary_field(self):
field = models.BinaryField()
name, path, args, kwargs = field.deconstruct()
self.assertEqual(path, "django.db.models.BinaryField")
self.assertEqual(args, [])
self.assertEqual(kwargs, {})
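    def test_deconstruct_roundtrip_sketch(self):
        # A minimal sketch added for illustration (not part of the original
        # suite): the (name, path, args, kwargs) returned by deconstruct()
        # is enough to rebuild an equivalent field, which is how the
        # migrations framework serializes fields.
        field = models.CharField(max_length=65)
        name, path, args, kwargs = field.deconstruct()
        new_field = models.CharField(*args, **kwargs)
        self.assertEqual(field.max_length, new_field.max_length)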
|
mancoast/CPythonPyc_test | refs/heads/master | cpython/220_test_coercion.py | 8 | import copy
import sys
# Fake a number that implements numeric methods through __coerce__
class CoerceNumber:
def __init__(self, arg):
self.arg = arg
def __repr__(self):
return '<CoerceNumber %s>' % repr(self.arg)
def __coerce__(self, other):
if isinstance(other, CoerceNumber):
return self.arg, other.arg
else:
return (self.arg, other)
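# For example (illustrative): CoerceNumber(2) + 4 calls __coerce__, which
# returns (2, 4), so the operation proceeds on the coerced plain ints.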
# Fake a number that implements numeric ops through methods.
class MethodNumber:
def __init__(self,arg):
self.arg = arg
def __repr__(self):
return '<MethodNumber %s>' % repr(self.arg)
def __add__(self,other):
return self.arg + other
def __radd__(self,other):
return other + self.arg
def __sub__(self,other):
return self.arg - other
def __rsub__(self,other):
return other - self.arg
def __mul__(self,other):
return self.arg * other
def __rmul__(self,other):
return other * self.arg
def __div__(self,other):
return self.arg / other
def __rdiv__(self,other):
return other / self.arg
def __pow__(self,other):
return self.arg ** other
def __rpow__(self,other):
return other ** self.arg
def __mod__(self,other):
return self.arg % other
def __rmod__(self,other):
return other % self.arg
def __cmp__(self, other):
return cmp(self.arg, other)
candidates = [ 2, 4.0, 2L, 2+0j, [1], (2,), None,
MethodNumber(1), CoerceNumber(2)]
infix_binops = [ '+', '-', '*', '/', '**', '%' ]
prefix_binops = [ 'divmod' ]
def do_infix_binops():
for a in candidates:
for b in candidates:
for op in infix_binops:
print '%s %s %s' % (a, op, b),
try:
x = eval('a %s b' % op)
except:
error = sys.exc_info()[:2]
print '... %s' % error[0]
else:
print '=', x
try:
z = copy.copy(a)
except copy.Error:
z = a # assume it has no inplace ops
print '%s %s= %s' % (a, op, b),
try:
exec('z %s= b' % op)
except:
error = sys.exc_info()[:2]
print '... %s' % error[0]
else:
print '=>', z
def do_prefix_binops():
for a in candidates:
for b in candidates:
for op in prefix_binops:
print '%s(%s, %s)' % (op, a, b),
try:
x = eval('%s(a, b)' % op)
except:
error = sys.exc_info()[:2]
print '... %s' % error[0]
else:
print '=', x
do_infix_binops()
do_prefix_binops()
|
valkjsaaa/sl4a | refs/heads/master | python/src/Lib/test/test_symtable.py | 53 | """
Test the API of the symtable module.
"""
import symtable
import unittest
import warnings
from test import test_support
TEST_CODE = """
import sys
glob = 42
class Mine:
instance_var = 24
def a_method(p1, p2):
pass
def spam(a, b, *var, **kw):
global bar
bar = 47
x = 23
glob
def internal():
return x
return internal
def foo():
exec 'm'
from sys import *
def namespace_test(): pass
def namespace_test(): pass
"""
def find_block(block, name):
for ch in block.get_children():
if ch.get_name() == name:
return ch
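# e.g. (illustrative): find_block(symtable.symtable(TEST_CODE, "?", "exec"),
# "Mine") returns the nested symbol table for the class Mine above.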
class SymtableTest(unittest.TestCase):
with warnings.catch_warnings():
# Ignore warnings about "from blank import *"
warnings.simplefilter("ignore", SyntaxWarning)
top = symtable.symtable(TEST_CODE, "?", "exec")
# These correspond to scopes in TEST_CODE
Mine = find_block(top, "Mine")
a_method = find_block(Mine, "a_method")
spam = find_block(top, "spam")
internal = find_block(spam, "internal")
foo = find_block(top, "foo")
def test_noops(self):
# Check methods that don't work. They should warn and return False.
def check(w, msg):
self.assertEqual(str(w.message), msg)
sym = self.top.lookup("glob")
with test_support.check_warnings() as w:
warnings.simplefilter("always", DeprecationWarning)
self.assertFalse(sym.is_vararg())
check(w, "is_vararg() is obsolete and will be removed")
w.reset()
self.assertFalse(sym.is_keywordarg())
check(w, "is_keywordarg() is obsolete and will be removed")
w.reset()
self.assertFalse(sym.is_in_tuple())
check(w, "is_in_tuple() is obsolete and will be removed")
def test_type(self):
self.assertEqual(self.top.get_type(), "module")
self.assertEqual(self.Mine.get_type(), "class")
self.assertEqual(self.a_method.get_type(), "function")
self.assertEqual(self.spam.get_type(), "function")
self.assertEqual(self.internal.get_type(), "function")
def test_optimized(self):
self.assertFalse(self.top.is_optimized())
self.assertFalse(self.top.has_exec())
self.assertFalse(self.top.has_import_star())
self.assertTrue(self.spam.is_optimized())
self.assertFalse(self.foo.is_optimized())
self.assertTrue(self.foo.has_exec())
self.assertTrue(self.foo.has_import_star())
def test_nested(self):
self.assertFalse(self.top.is_nested())
self.assertFalse(self.Mine.is_nested())
self.assertFalse(self.spam.is_nested())
self.assertTrue(self.internal.is_nested())
def test_children(self):
self.assertTrue(self.top.has_children())
self.assertTrue(self.Mine.has_children())
self.assertFalse(self.foo.has_children())
def test_lineno(self):
self.assertEqual(self.top.get_lineno(), 0)
self.assertEqual(self.spam.get_lineno(), 11)
def test_function_info(self):
func = self.spam
self.assertEqual(func.get_parameters(), ("a", "b", "kw", "var"))
self.assertEqual(func.get_locals(),
("a", "b", "bar", "internal", "kw", "var", "x"))
self.assertEqual(func.get_globals(), ("bar", "glob"))
self.assertEqual(self.internal.get_frees(), ("x",))
def test_globals(self):
self.assertTrue(self.spam.lookup("glob").is_global())
self.assertFalse(self.spam.lookup("glob").is_declared_global())
self.assertTrue(self.spam.lookup("bar").is_global())
self.assertTrue(self.spam.lookup("bar").is_declared_global())
self.assertFalse(self.internal.lookup("x").is_global())
self.assertFalse(self.Mine.lookup("instance_var").is_global())
def test_local(self):
self.assertTrue(self.spam.lookup("x").is_local())
self.assertFalse(self.internal.lookup("x").is_local())
def test_referenced(self):
self.assertTrue(self.internal.lookup("x").is_referenced())
self.assertTrue(self.spam.lookup("internal").is_referenced())
self.assertFalse(self.spam.lookup("x").is_referenced())
def test_parameters(self):
for sym in ("a", "var", "kw"):
self.assertTrue(self.spam.lookup(sym).is_parameter())
self.assertFalse(self.spam.lookup("x").is_parameter())
def test_symbol_lookup(self):
self.assertEqual(len(self.top.get_identifiers()),
len(self.top.get_symbols()))
self.assertRaises(KeyError, self.top.lookup, "not_here")
def test_namespaces(self):
self.assertTrue(self.top.lookup("Mine").is_namespace())
self.assertTrue(self.Mine.lookup("a_method").is_namespace())
self.assertTrue(self.top.lookup("spam").is_namespace())
self.assertTrue(self.spam.lookup("internal").is_namespace())
self.assertTrue(self.top.lookup("namespace_test").is_namespace())
self.assertFalse(self.spam.lookup("x").is_namespace())
self.assert_(self.top.lookup("spam").get_namespace() is self.spam)
ns_test = self.top.lookup("namespace_test")
self.assertEqual(len(ns_test.get_namespaces()), 2)
self.assertRaises(ValueError, ns_test.get_namespace)
def test_assigned(self):
self.assertTrue(self.spam.lookup("x").is_assigned())
self.assertTrue(self.spam.lookup("bar").is_assigned())
self.assertTrue(self.top.lookup("spam").is_assigned())
self.assertTrue(self.Mine.lookup("a_method").is_assigned())
self.assertFalse(self.internal.lookup("x").is_assigned())
def test_imported(self):
self.assertTrue(self.top.lookup("sys").is_imported())
def test_name(self):
self.assertEqual(self.top.get_name(), "top")
self.assertEqual(self.spam.get_name(), "spam")
self.assertEqual(self.spam.lookup("x").get_name(), "x")
self.assertEqual(self.Mine.get_name(), "Mine")
def test_class_info(self):
self.assertEqual(self.Mine.get_methods(), ('a_method',))
def test_filename_correct(self):
### Bug tickler: SyntaxError file name correct whether error raised
### while parsing or building symbol table.
def checkfilename(brokencode):
try:
symtable.symtable(brokencode, "spam", "exec")
except SyntaxError as e:
self.assertEqual(e.filename, "spam")
else:
self.fail("no SyntaxError for %r" % (brokencode,))
checkfilename("def f(x): foo)(") # parse-time
checkfilename("def f(x): global x") # symtable-build-time
def test_eval(self):
symbols = symtable.symtable("42", "?", "eval")
def test_single(self):
symbols = symtable.symtable("42", "?", "single")
def test_exec(self):
symbols = symtable.symtable("def f(x): return x", "?", "exec")
def test_main():
test_support.run_unittest(SymtableTest)
if __name__ == '__main__':
test_main()
|
ojii/sandlib | refs/heads/master | lib/lib-python/2.7/lib2to3/pgen2/driver.py | 98 | # Copyright 2004-2005 Elemental Security, Inc. All Rights Reserved.
# Licensed to PSF under a Contributor Agreement.
# Modifications:
# Copyright 2006 Google, Inc. All Rights Reserved.
# Licensed to PSF under a Contributor Agreement.
"""Parser driver.
This provides a high-level interface to parse a file into a syntax tree.
"""
__author__ = "Guido van Rossum <[email protected]>"
__all__ = ["Driver", "load_grammar"]
# Python imports
import codecs
import os
import logging
import StringIO
import sys
# Pgen imports
from . import grammar, parse, token, tokenize, pgen
class Driver(object):
def __init__(self, grammar, convert=None, logger=None):
self.grammar = grammar
if logger is None:
logger = logging.getLogger()
self.logger = logger
self.convert = convert
def parse_tokens(self, tokens, debug=False):
"""Parse a series of tokens and return the syntax tree."""
# XXX Move the prefix computation into a wrapper around tokenize.
p = parse.Parser(self.grammar, self.convert)
p.setup()
lineno = 1
column = 0
type = value = start = end = line_text = None
prefix = u""
for quintuple in tokens:
type, value, start, end, line_text = quintuple
if start != (lineno, column):
assert (lineno, column) <= start, ((lineno, column), start)
s_lineno, s_column = start
if lineno < s_lineno:
prefix += "\n" * (s_lineno - lineno)
lineno = s_lineno
column = 0
if column < s_column:
prefix += line_text[column:s_column]
column = s_column
if type in (tokenize.COMMENT, tokenize.NL):
prefix += value
lineno, column = end
if value.endswith("\n"):
lineno += 1
column = 0
continue
if type == token.OP:
type = grammar.opmap[value]
if debug:
self.logger.debug("%s %r (prefix=%r)",
token.tok_name[type], value, prefix)
if p.addtoken(type, value, (prefix, start)):
if debug:
self.logger.debug("Stop.")
break
prefix = ""
lineno, column = end
if value.endswith("\n"):
lineno += 1
column = 0
else:
# We never broke out -- EOF is too soon (how can this happen???)
raise parse.ParseError("incomplete input",
type, value, (prefix, start))
return p.rootnode
def parse_stream_raw(self, stream, debug=False):
"""Parse a stream and return the syntax tree."""
tokens = tokenize.generate_tokens(stream.readline)
return self.parse_tokens(tokens, debug)
def parse_stream(self, stream, debug=False):
"""Parse a stream and return the syntax tree."""
return self.parse_stream_raw(stream, debug)
def parse_file(self, filename, encoding=None, debug=False):
"""Parse a file and return the syntax tree."""
stream = codecs.open(filename, "r", encoding)
try:
return self.parse_stream(stream, debug)
finally:
stream.close()
def parse_string(self, text, debug=False):
"""Parse a string and return the syntax tree."""
tokens = tokenize.generate_tokens(StringIO.StringIO(text).readline)
return self.parse_tokens(tokens, debug)
def load_grammar(gt="Grammar.txt", gp=None,
save=True, force=False, logger=None):
"""Load the grammar (maybe from a pickle)."""
if logger is None:
logger = logging.getLogger()
if gp is None:
head, tail = os.path.splitext(gt)
if tail == ".txt":
tail = ""
gp = head + tail + ".".join(map(str, sys.version_info)) + ".pickle"
if force or not _newer(gp, gt):
logger.info("Generating grammar tables from %s", gt)
g = pgen.generate_grammar(gt)
if save:
logger.info("Writing grammar tables to %s", gp)
try:
g.dump(gp)
except IOError, e:
logger.info("Writing failed:"+str(e))
else:
g = grammar.Grammar()
g.load(gp)
return g
def _newer(a, b):
"""Inquire whether file a was written since file b."""
if not os.path.exists(a):
return False
if not os.path.exists(b):
return True
return os.path.getmtime(a) >= os.path.getmtime(b)
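# A minimal usage sketch, for illustration only (assumes the stock lib2to3
# Grammar file can be found by load_grammar's defaults):
#
#   drv = Driver(load_grammar())
#   tree = drv.parse_string(u"x = 1\n")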
|
saullocastro/pyNastran | refs/heads/master | pyNastran/gui/gui_interface/modify_label_properties/interface.py | 1 | from pyNastran.gui.gui_interface.modify_label_properties.modify_label_properties import ModifyLabelPropertiesMenu
def on_set_labelsize_color_menu(self):
"""
Opens a dialog box to set:
+--------+----------+
| Name | String |
+--------+----------+
| Min | Float |
+--------+----------+
| Max | Float |
+--------+----------+
| Format | pyString |
+--------+----------+
"""
if not hasattr(self, 'case_keys'):
self.log_error('No model has been loaded.')
return
data = {
'size' : self.label_text_size,
'color' : self.label_color,
'dim_max' : self.dim_max,
#'clicked_ok' : False,
#'clicked_cancel' : False,
#'close' : False,
}
#print(data)
if not self._label_window_shown:
self._label_window = ModifyLabelPropertiesMenu(data, win_parent=self)
self._label_window.show()
self._label_window_shown = True
self._label_window.exec_()
else:
self._label_window.activateWindow()
if 'close' not in data:
self._label_window.activateWindow()
return
if data['close']:
self._label_window_shown = False
del self._label_window
else:
self._label_window.activateWindow() |
AutorestCI/azure-sdk-for-python | refs/heads/master | azure-mgmt-eventgrid/azure/mgmt/eventgrid/models/event_subscription_filter.py | 2 | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from msrest.serialization import Model
class EventSubscriptionFilter(Model):
"""Filter for the Event Subscription.
:param subject_begins_with: An optional string to filter events for an
event subscription based on a resource path prefix.
The format of this depends on the publisher of the events.
Wildcard characters are not supported in this path.
:type subject_begins_with: str
:param subject_ends_with: An optional string to filter events for an event
subscription based on a resource path suffix.
Wildcard characters are not supported in this path.
:type subject_ends_with: str
:param included_event_types: A list of applicable event types that need to
be part of the event subscription.
If it is desired to subscribe to all event types, the string "all" needs
to be specified as an element in this list.
:type included_event_types: list[str]
:param is_subject_case_sensitive: Specifies if the SubjectBeginsWith and
SubjectEndsWith properties of the filter
should be compared in a case sensitive manner. Default value: False .
:type is_subject_case_sensitive: bool
"""
_attribute_map = {
'subject_begins_with': {'key': 'subjectBeginsWith', 'type': 'str'},
'subject_ends_with': {'key': 'subjectEndsWith', 'type': 'str'},
'included_event_types': {'key': 'includedEventTypes', 'type': '[str]'},
'is_subject_case_sensitive': {'key': 'isSubjectCaseSensitive', 'type': 'bool'},
}
def __init__(self, subject_begins_with=None, subject_ends_with=None, included_event_types=None, is_subject_case_sensitive=False):
super(EventSubscriptionFilter, self).__init__()
self.subject_begins_with = subject_begins_with
self.subject_ends_with = subject_ends_with
self.included_event_types = included_event_types
self.is_subject_case_sensitive = is_subject_case_sensitive
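# Illustrative construction (hypothetical values, mirroring the documented
# parameters above):
#
#   f = EventSubscriptionFilter(
#       subject_begins_with='/blobServices/default/containers/logs',
#       included_event_types=['All'])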
|
davek44/Kayak | refs/heads/master | tests/test_Identity.py | 3 | import numpy as np
import numpy.random as npr
import kayak
from . import *
def test_identity():
npr.seed(1)
np_A = npr.randn(6,7)
A = kayak.Parameter(np_A)
B = kayak.Identity(A)
assert np.all(close_float(B.value, np_A))
assert np.all(close_float(B.grad(A), np.ones((6,7))))
|
wvengen/ndg_oauth_server | refs/heads/master | ndg/oauth/server/lib/access_token/myproxy_cert_token_generator.py | 3 | """OAuth 2.0 WSGI server middleware providing MyProxy certificates as access tokens
"""
__author__ = "R B Wilkinson"
__date__ = "12/12/11"
__copyright__ = "(C) 2011 Science and Technology Facilities Council"
__license__ = "BSD - see LICENSE file in top-level directory"
__contact__ = "[email protected]"
__revision__ = "$Id$"
import base64
import logging
from ndg.oauth.server.lib.access_token.access_token_interface import AccessTokenInterface
from ndg.oauth.server.lib.register.access_token import AccessToken
log = logging.getLogger(__name__)
class MyProxyCertTokenGenerator(AccessTokenInterface):
"""Access token generator that returns MyProxy certificates as tokens.
"""
def __init__(self, lifetime, token_type, **kw):
"""
@type lifetime: int
@param lifetime: lifetimes of generated tokens in seconds
@type token_type: str
@param token_type: token type name
@type kw:dict
@param kw: additional keywords
"""
self.lifetime = lifetime
self.token_type = token_type
self.certificate_request_parameter = kw.get('certificate_request_parameter')
self.myproxy_client_env_key = kw.get('myproxy_client_env_key',
'myproxy.server.wsgi.middleware.MyProxyClientMiddleware.myProxyClient')
self.myproxy_global_password = kw.get('myproxy_global_password')
self.user_identifier_grant_data_key = kw.get('user_identifier_grant_data_key')
def get_access_token(self, token_request, grant, request):
"""
Gets an access token using MyProxyClient.
@type token_request: ndg.oauth.server.lib.access_token.AccessTokenRequest
@param token_request: access token request
@type grant: ndg.oauth.server.lib.register.authorization_grant.AuthorizationGrant
@param grant: authorization grant
@type request: webob.Request
@param request: HTTP request object
@rtype: ndg.oauth.server.lib.register.access_token.AccessToken
@return: access token or None if an error occurs
"""
myproxyclient = request.environ.get(self.myproxy_client_env_key)
if myproxyclient is None:
log.error('MyProxy client not found in environ')
return None
cert_req_enc = request.POST.get(self.certificate_request_parameter)
if cert_req_enc is None:
log.error('Certificate request not found in POST parameters')
return None
cert_req = base64.b64decode(cert_req_enc)
# Get the user identification as set by an authentication filter.
myproxy_id = grant.additional_data.get(
self.user_identifier_grant_data_key)
if not myproxy_id:
log.error('User identifier not stored with grant')
return None
# Attempt to obtain a certificate from MyProxy.
try:
creds = myproxyclient.logon(myproxy_id,
self.myproxy_global_password,
certReq=cert_req)
except Exception, exc:
log.error('MyProxy logon failed: %s', exc.__str__())
return None
token_id = creds[0]
return AccessToken(token_id, token_request, grant, self.token_type,
self.lifetime)
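# Illustrative wiring (hypothetical values): the generator is constructed
# from configuration, e.g.
#
#   generator = MyProxyCertTokenGenerator(
#       43200, 'myproxy',
#       certificate_request_parameter='certificate_request',
#       user_identifier_grant_data_key='user_identifier')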
|
szecsi/Gears | refs/heads/master | GearsPy/Project/Components/Figure/Spot.py | 1 | import Gears as gears
from .. import *
from .Base import *
class Spot(Base) :
def applyWithArgs(
self,
spass,
functionName,
*,
radius : 'Spot radius [um].'
= 200,
innerRadius : 'Annulus inner radius [um] (or negative for solid disc).'
= -1000,
filterRadius_um : 'Antialiasing filter size [um] (shape blur).'
= 0.1
) :
spass.setShaderVariable( name = functionName+'_spotRadius', value = radius )
spass.setShaderVariable( name = functionName+'_spotInnerRadius', value = innerRadius )
        spass.setShaderVariable( name = functionName+'_filterRadius', value = filterRadius_um )
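        # The shader below returns 1 inside the (annular) spot and 0 outside;
        # smoothstep over a +/- filterRadius band antialiases both edges.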
spass.setShaderFunction( name = functionName, src = self.glslEsc( '''
vec3 @<X>@ (vec2 x, float time){
float diff = length(x);
float inOrOut = (1-smoothstep( -`filterRadius, +`filterRadius, diff - `spotRadius ))
* (1-smoothstep( -`filterRadius, +`filterRadius, `spotInnerRadius - diff ));
return vec3(inOrOut, inOrOut, inOrOut);
}
''').format( X=functionName ) )
|
razzius/PyClassLessons | refs/heads/master | instructors/lessons/django-intro/examples/grocery_project/grocery_list/admin.py | 3 | from grocery_list.models import Post
from django.contrib import admin
admin.site.register(Post) |
bguillot/OpenUpgrade | refs/heads/master | addons/email_template/email_template.py | 16 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2009 Sharoon Thomas
# Copyright (C) 2010-Today OpenERP SA (<http://www.openerp.com>)
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>
#
##############################################################################
import base64
import datetime
import dateutil.relativedelta as relativedelta
import logging
import lxml
import urlparse
import openerp
from openerp import SUPERUSER_ID
from openerp.osv import osv, fields
from openerp import tools
from openerp.tools.translate import _
from urllib import urlencode, quote as quote
_logger = logging.getLogger(__name__)
try:
# We use a jinja2 sandboxed environment to render mako templates.
# Note that the rendering does not cover all the mako syntax, in particular
# arbitrary Python statements are not accepted, and not all expressions are
# allowed: only "public" attributes (not starting with '_') of objects may
# be accessed.
# This is done on purpose: it prevents incidental or malicious execution of
# Python code that may break the security of the server.
from jinja2.sandbox import SandboxedEnvironment
mako_template_env = SandboxedEnvironment(
block_start_string="<%",
block_end_string="%>",
variable_start_string="${",
variable_end_string="}",
comment_start_string="<%doc>",
comment_end_string="</%doc>",
line_statement_prefix="%",
line_comment_prefix="##",
trim_blocks=True, # do not output newline after blocks
autoescape=True, # XML/HTML automatic escaping
)
mako_template_env.globals.update({
'str': str,
'quote': quote,
'urlencode': urlencode,
'datetime': datetime,
'len': len,
'abs': abs,
'min': min,
'max': max,
'sum': sum,
'filter': filter,
'reduce': reduce,
'map': map,
'round': round,
# dateutil.relativedelta is an old-style class and cannot be directly
        # instantiated within a jinja2 expression, so a lambda "proxy" is
        # needed.
'relativedelta': lambda *a, **kw : relativedelta.relativedelta(*a, **kw),
})
except ImportError:
_logger.warning("jinja2 not available, templating features will not work!")
class email_template(osv.osv):
"Templates for sending email"
_name = "email.template"
_description = 'Email Templates'
_order = 'name'
def default_get(self, cr, uid, fields, context=None):
res = super(email_template, self).default_get(cr, uid, fields, context)
if res.get('model'):
res['model_id'] = self.pool['ir.model'].search(cr, uid, [('model', '=', res.pop('model'))], context=context)[0]
return res
def _replace_local_links(self, cr, uid, html, context=None):
""" Post-processing of html content to replace local links to absolute
links, using web.base.url as base url. """
if not html:
return html
# form a tree
root = lxml.html.fromstring(html)
if not len(root) and root.text is None and root.tail is None:
html = '<div>%s</div>' % html
root = lxml.html.fromstring(html)
base_url = self.pool['ir.config_parameter'].get_param(cr, uid, 'web.base.url')
(base_scheme, base_netloc, bpath, bparams, bquery, bfragment) = urlparse.urlparse(base_url)
def _process_link(url):
new_url = url
(scheme, netloc, path, params, query, fragment) = urlparse.urlparse(url)
if not scheme and not netloc:
new_url = urlparse.urlunparse((base_scheme, base_netloc, path, params, query, fragment))
return new_url
# check all nodes, replace :
# - img src -> check URL
# - a href -> check URL
for node in root.iter():
if node.tag == 'a':
node.set('href', _process_link(node.get('href')))
elif node.tag == 'img' and not node.get('src', 'data').startswith('data'):
node.set('src', _process_link(node.get('src')))
html = lxml.html.tostring(root, pretty_print=False, method='html')
        # this is ugly, but lxml/etree tostring wants to put everything in a 'div' that breaks the editor -> remove that
if html.startswith('<div>') and html.endswith('</div>'):
html = html[5:-6]
return html
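    # e.g. (illustrative): with web.base.url set to 'http://example.com', a
    # relative link '<a href="/web">' is rewritten to
    # '<a href="http://example.com/web">'.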
def render_post_process(self, cr, uid, html, context=None):
html = self._replace_local_links(cr, uid, html, context=context)
return html
def render_template_batch(self, cr, uid, template, model, res_ids, context=None, post_process=False):
"""Render the given template text, replace mako expressions ``${expr}``
with the result of evaluating these expressions with
an evaluation context containing:
* ``user``: browse_record of the current user
* ``object``: browse_record of the document record this mail is
related to
* ``context``: the context passed to the mail composition wizard
:param str template: the template text to render
:param str model: model name of the document record this mail is related to.
        :param list res_ids: list of ids of document records the mails are related to.
"""
if context is None:
context = {}
results = dict.fromkeys(res_ids, u"")
# try to load the template
try:
template = mako_template_env.from_string(tools.ustr(template))
except Exception:
_logger.exception("Failed to load template %r", template)
return results
# prepare template variables
user = self.pool.get('res.users').browse(cr, uid, uid, context=context)
records = self.pool[model].browse(cr, uid, res_ids, context=context) or [None]
variables = {
'user': user,
'ctx': context, # context kw would clash with mako internals
}
for record in records:
res_id = record.id if record else None
variables['object'] = record
try:
render_result = template.render(variables)
except Exception:
_logger.exception("Failed to render template %r using values %r" % (template, variables))
render_result = u""
if render_result == u"False":
render_result = u""
results[res_id] = render_result
if post_process:
for res_id, result in results.iteritems():
results[res_id] = self.render_post_process(cr, uid, result, context=context)
return results
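    # Illustrative call (hypothetical ids): rendering u"${object.name}" for
    # model 'res.partner' and res_ids [1, 2] returns a dict mapping each id
    # to that partner's rendered name.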
def get_email_template_batch(self, cr, uid, template_id=False, res_ids=None, context=None):
if context is None:
context = {}
if res_ids is None:
res_ids = [None]
results = dict.fromkeys(res_ids, False)
if not template_id:
return results
template = self.browse(cr, uid, template_id, context)
langs = self.render_template_batch(cr, uid, template.lang, template.model, res_ids, context)
for res_id, lang in langs.iteritems():
if lang:
# Use translated template if necessary
ctx = context.copy()
ctx['lang'] = lang
template = self.browse(cr, uid, template.id, ctx)
else:
template = self.browse(cr, uid, int(template_id), context)
results[res_id] = template
return results
def onchange_model_id(self, cr, uid, ids, model_id, context=None):
mod_name = False
if model_id:
mod_name = self.pool.get('ir.model').browse(cr, uid, model_id, context).model
return {'value': {'model': mod_name}}
_columns = {
'name': fields.char('Name'),
        'model_id': fields.many2one('ir.model', 'Applies to', help="The kind of document with which this template can be used"),
'model': fields.related('model_id', 'model', type='char', string='Related Document Model',
size=128, select=True, store=True, readonly=True),
'lang': fields.char('Language',
help="Optional translation language (ISO code) to select when sending out an email. "
"If not set, the english version will be used. "
"This should usually be a placeholder expression "
"that provides the appropriate language code, e.g. "
"${object.partner_id.lang.code}.",
placeholder="${object.partner_id.lang.code}"),
'user_signature': fields.boolean('Add Signature',
help="If checked, the user's signature will be appended to the text version "
"of the message"),
'subject': fields.char('Subject', translate=True, help="Subject (placeholders may be used here)",),
'email_from': fields.char('From',
help="Sender address (placeholders may be used here). If not set, the default "
"value will be the author's email alias if configured, or email address."),
'use_default_to': fields.boolean(
'Default recipients',
help="Default recipients of the record:\n"
"- partner (using id on a partner or the partner_id field) OR\n"
"- email (using email_from or email field)"),
'email_to': fields.char('To (Emails)', help="Comma-separated recipient addresses (placeholders may be used here)"),
'partner_to': fields.char('To (Partners)',
help="Comma-separated ids of recipient partners (placeholders may be used here)",
oldname='email_recipients'),
'email_cc': fields.char('Cc', help="Carbon copy recipients (placeholders may be used here)"),
'reply_to': fields.char('Reply-To', help="Preferred response address (placeholders may be used here)"),
'mail_server_id': fields.many2one('ir.mail_server', 'Outgoing Mail Server', readonly=False,
help="Optional preferred server for outgoing mails. If not set, the highest "
"priority one will be used."),
'body_html': fields.html('Body', translate=True, sanitize=False, help="Rich-text/HTML version of the message (placeholders may be used here)"),
'report_name': fields.char('Report Filename', translate=True,
help="Name to use for the generated report file (may contain placeholders)\n"
"The extension can be omitted and will then come from the report type."),
'report_template': fields.many2one('ir.actions.report.xml', 'Optional report to print and attach'),
'ref_ir_act_window': fields.many2one('ir.actions.act_window', 'Sidebar action', readonly=True,
help="Sidebar action to make this template available on records "
"of the related document model"),
'ref_ir_value': fields.many2one('ir.values', 'Sidebar Button', readonly=True,
help="Sidebar button to open the sidebar action"),
'attachment_ids': fields.many2many('ir.attachment', 'email_template_attachment_rel', 'email_template_id',
'attachment_id', 'Attachments',
help="You may attach files to this template, to be added to all "
"emails created from this template"),
'auto_delete': fields.boolean('Auto Delete', help="Permanently delete this email after sending it, to save space"),
# Fake fields used to implement the placeholder assistant
'model_object_field': fields.many2one('ir.model.fields', string="Field",
help="Select target field from the related document model.\n"
"If it is a relationship field you will be able to select "
"a target field at the destination of the relationship."),
'sub_object': fields.many2one('ir.model', 'Sub-model', readonly=True,
help="When a relationship field is selected as first field, "
"this field shows the document model the relationship goes to."),
'sub_model_object_field': fields.many2one('ir.model.fields', 'Sub-field',
help="When a relationship field is selected as first field, "
"this field lets you select the target field within the "
"destination document model (sub-model)."),
'null_value': fields.char('Default Value', help="Optional value to use if the target field is empty"),
'copyvalue': fields.char('Placeholder Expression', help="Final placeholder expression, to be copy-pasted in the desired template field."),
}
_defaults = {
'auto_delete': True,
}
def create_action(self, cr, uid, ids, context=None):
action_obj = self.pool.get('ir.actions.act_window')
data_obj = self.pool.get('ir.model.data')
for template in self.browse(cr, uid, ids, context=context):
src_obj = template.model_id.model
model_data_id = data_obj._get_id(cr, uid, 'mail', 'email_compose_message_wizard_form')
res_id = data_obj.browse(cr, uid, model_data_id, context=context).res_id
button_name = _('Send Mail (%s)') % template.name
act_id = action_obj.create(cr, SUPERUSER_ID, {
'name': button_name,
'type': 'ir.actions.act_window',
'res_model': 'mail.compose.message',
'src_model': src_obj,
'view_type': 'form',
'context': "{'default_composition_mode': 'mass_mail', 'default_template_id' : %d, 'default_use_template': True}" % (template.id),
'view_mode':'form,tree',
'view_id': res_id,
'target': 'new',
'auto_refresh':1
}, context)
ir_values_id = self.pool.get('ir.values').create(cr, SUPERUSER_ID, {
'name': button_name,
'model': src_obj,
'key2': 'client_action_multi',
'value': "ir.actions.act_window,%s" % act_id,
'object': True,
}, context)
template.write({
'ref_ir_act_window': act_id,
'ref_ir_value': ir_values_id,
})
return True
def unlink_action(self, cr, uid, ids, context=None):
for template in self.browse(cr, uid, ids, context=context):
try:
if template.ref_ir_act_window:
self.pool.get('ir.actions.act_window').unlink(cr, SUPERUSER_ID, template.ref_ir_act_window.id, context)
if template.ref_ir_value:
ir_values_obj = self.pool.get('ir.values')
ir_values_obj.unlink(cr, SUPERUSER_ID, template.ref_ir_value.id, context)
except Exception:
raise osv.except_osv(_("Warning"), _("Deletion of the action record failed."))
return True
def unlink(self, cr, uid, ids, context=None):
self.unlink_action(cr, uid, ids, context=context)
return super(email_template, self).unlink(cr, uid, ids, context=context)
def copy(self, cr, uid, id, default=None, context=None):
template = self.browse(cr, uid, id, context=context)
if default is None:
default = {}
default = default.copy()
default.update(
name=_("%s (copy)") % (template.name),
ref_ir_act_window=False,
ref_ir_value=False)
return super(email_template, self).copy(cr, uid, id, default, context)
def build_expression(self, field_name, sub_field_name, null_value):
"""Returns a placeholder expression for use in a template field,
based on the values provided in the placeholder assistant.
:param field_name: main field name
:param sub_field_name: sub field name (M2O)
:param null_value: default value if the target value is empty
:return: final placeholder expression
"""
expression = ''
if field_name:
expression = "${object." + field_name
if sub_field_name:
expression += "." + sub_field_name
if null_value:
expression += " or '''%s'''" % null_value
expression += "}"
return expression
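    # Illustrative sketch (not part of the original module; the field names
    # are hypothetical): for field_name='partner_id', sub_field_name='name'
    # and null_value='Unknown', build_expression() returns
    # "${object.partner_id.name or '''Unknown'''}".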
def onchange_sub_model_object_value_field(self, cr, uid, ids, model_object_field, sub_model_object_field=False, null_value=None, context=None):
result = {
'sub_object': False,
'copyvalue': False,
'sub_model_object_field': False,
'null_value': False
}
if model_object_field:
fields_obj = self.pool.get('ir.model.fields')
field_value = fields_obj.browse(cr, uid, model_object_field, context)
if field_value.ttype in ['many2one', 'one2many', 'many2many']:
res_ids = self.pool.get('ir.model').search(cr, uid, [('model', '=', field_value.relation)], context=context)
sub_field_value = False
if sub_model_object_field:
sub_field_value = fields_obj.browse(cr, uid, sub_model_object_field, context)
if res_ids:
result.update({
'sub_object': res_ids[0],
'copyvalue': self.build_expression(field_value.name, sub_field_value and sub_field_value.name or False, null_value or False),
'sub_model_object_field': sub_model_object_field or False,
'null_value': null_value or False
})
else:
result.update({
'copyvalue': self.build_expression(field_value.name, False, null_value or False),
'null_value': null_value or False
})
return {'value': result}
def generate_recipients_batch(self, cr, uid, results, template_id, res_ids, context=None):
"""Generates the recipients of the template. Default values can ben generated
instead of the template values if requested by template or context.
Emails (email_to, email_cc) can be transformed into partners if requested
in the context. """
if context is None:
context = {}
template = self.browse(cr, uid, template_id, context=context)
if template.use_default_to or context.get('tpl_force_default_to'):
ctx = dict(context, thread_model=template.model)
default_recipients = self.pool['mail.thread'].message_get_default_recipients(cr, uid, res_ids, context=ctx)
for res_id, recipients in default_recipients.iteritems():
results[res_id].pop('partner_to', None)
results[res_id].update(recipients)
for res_id, values in results.iteritems():
partner_ids = values.get('partner_ids', list())
if context and context.get('tpl_partners_only'):
mails = tools.email_split(values.pop('email_to', '')) + tools.email_split(values.pop('email_cc', ''))
for mail in mails:
partner_id = self.pool.get('res.partner').find_or_create(cr, uid, mail, context=context)
partner_ids.append(partner_id)
partner_to = values.pop('partner_to', '')
if partner_to:
# placeholders could generate '', 3, 2 due to some empty field values
tpl_partner_ids = [int(pid) for pid in partner_to.split(',') if pid]
partner_ids += self.pool['res.partner'].exists(cr, SUPERUSER_ID, tpl_partner_ids, context=context)
results[res_id]['partner_ids'] = partner_ids
return results
def generate_email_batch(self, cr, uid, template_id, res_ids, context=None, fields=None):
"""Generates an email from the template for given the given model based on
records given by res_ids.
:param template_id: id of the template to render.
:param res_id: id of the record to use for rendering the template (model
is taken from template definition)
:returns: a dict containing all relevant fields for creating a new
mail.mail entry, with one extra key ``attachments``, in the
format [(report_name, data)] where data is base64 encoded.
"""
if context is None:
context = {}
if fields is None:
fields = ['subject', 'body_html', 'email_from', 'email_to', 'partner_to', 'email_cc', 'reply_to']
report_xml_pool = self.pool.get('ir.actions.report.xml')
res_ids_to_templates = self.get_email_template_batch(cr, uid, template_id, res_ids, context)
# templates: res_id -> template; template -> res_ids
templates_to_res_ids = {}
for res_id, template in res_ids_to_templates.iteritems():
templates_to_res_ids.setdefault(template, []).append(res_id)
results = dict()
for template, template_res_ids in templates_to_res_ids.iteritems():
# generate fields value for all res_ids linked to the current template
for field in fields:
generated_field_values = self.render_template_batch(
cr, uid, getattr(template, field), template.model, template_res_ids,
post_process=(field == 'body_html'),
context=context)
for res_id, field_value in generated_field_values.iteritems():
results.setdefault(res_id, dict())[field] = field_value
# compute recipients
results = self.generate_recipients_batch(cr, uid, results, template.id, template_res_ids, context=context)
# update values for all res_ids
for res_id in template_res_ids:
values = results[res_id]
# body: add user signature, sanitize
if 'body_html' in fields and template.user_signature:
signature = self.pool.get('res.users').browse(cr, uid, uid, context).signature
values['body_html'] = tools.append_content_to_html(values['body_html'], signature)
if values.get('body_html'):
values['body'] = tools.html_sanitize(values['body_html'])
# technical settings
values.update(
mail_server_id=template.mail_server_id.id or False,
auto_delete=template.auto_delete,
model=template.model,
res_id=res_id or False,
attachment_ids=[attach.id for attach in template.attachment_ids],
)
# Add report in attachments: generate once for all template_res_ids
if template.report_template:
for res_id in template_res_ids:
attachments = []
report_name = self.render_template(cr, uid, template.report_name, template.model, res_id, context=context)
report = report_xml_pool.browse(cr, uid, template.report_template.id, context)
report_service = report.report_name
# Ensure report is rendered using template's language
ctx = context.copy()
if template.lang:
ctx['lang'] = self.render_template_batch(cr, uid, template.lang, template.model, [res_id], context)[res_id] # take 0 ?
if report.report_type in ['qweb-html', 'qweb-pdf']:
result, format = self.pool['report'].get_pdf(cr, uid, [res_id], report_service, context=ctx), 'pdf'
else:
result, format = openerp.report.render_report(cr, uid, [res_id], report_service, {'model': template.model}, ctx)
# TODO in trunk, change return format to binary to match message_post expected format
result = base64.b64encode(result)
if not report_name:
report_name = 'report.' + report_service
ext = "." + format
if not report_name.endswith(ext):
report_name += ext
attachments.append((report_name, result))
results[res_id]['attachments'] = attachments
return results
def send_mail(self, cr, uid, template_id, res_id, force_send=False, raise_exception=False, context=None):
"""Generates a new mail message for the given template and record,
and schedules it for delivery through the ``mail`` module's scheduler.
:param int template_id: id of the template to render
:param int res_id: id of the record to render the template with
(model is taken from the template)
:param bool force_send: if True, the generated mail.message is
immediately sent after being created, as if the scheduler
was executed for this message only.
:returns: id of the mail.message that was created
"""
if context is None:
context = {}
mail_mail = self.pool.get('mail.mail')
ir_attachment = self.pool.get('ir.attachment')
# create a mail_mail based on values, without attachments
values = self.generate_email(cr, uid, template_id, res_id, context=context)
if not values.get('email_from'):
raise osv.except_osv(_('Warning!'), _("Sender email is missing or empty after template rendering. Specify one to deliver your message"))
values['recipient_ids'] = [(4, pid) for pid in values.get('partner_ids', list())]
attachment_ids = values.pop('attachment_ids', [])
attachments = values.pop('attachments', [])
msg_id = mail_mail.create(cr, uid, values, context=context)
mail = mail_mail.browse(cr, uid, msg_id, context=context)
# manage attachments
for attachment in attachments:
attachment_data = {
'name': attachment[0],
'datas_fname': attachment[0],
'datas': attachment[1],
'res_model': 'mail.message',
'res_id': mail.mail_message_id.id,
}
context.pop('default_type', None)
attachment_ids.append(ir_attachment.create(cr, uid, attachment_data, context=context))
if attachment_ids:
values['attachment_ids'] = [(6, 0, attachment_ids)]
mail_mail.write(cr, uid, msg_id, {'attachment_ids': [(6, 0, attachment_ids)]}, context=context)
if force_send:
mail_mail.send(cr, uid, [msg_id], raise_exception=raise_exception, context=context)
return msg_id
# Compatibility method
def render_template(self, cr, uid, template, model, res_id, context=None):
return self.render_template_batch(cr, uid, template, model, [res_id], context)[res_id]
def get_email_template(self, cr, uid, template_id=False, record_id=None, context=None):
return self.get_email_template_batch(cr, uid, template_id, [record_id], context)[record_id]
def generate_email(self, cr, uid, template_id, res_id, context=None):
return self.generate_email_batch(cr, uid, template_id, [res_id], context)[res_id]
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
rajsadho/django | refs/heads/master | django/db/backends/mysql/compiler.py | 691 | from django.db.models.sql import compiler
class SQLCompiler(compiler.SQLCompiler):
def as_subquery_condition(self, alias, columns, compiler):
qn = compiler.quote_name_unless_alias
qn2 = self.connection.ops.quote_name
sql, params = self.as_sql()
return '(%s) IN (%s)' % (', '.join('%s.%s' % (qn(alias), qn2(column)) for column in columns), sql), params
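    # Illustrative result (a sketch; alias and column names are hypothetical):
    # for alias 'U0' and columns ['id'], with as_sql() producing 'SELECT ...',
    # this yields the fragment "(`U0`.`id`) IN (SELECT ...)" together with the
    # subquery's params.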
class SQLInsertCompiler(compiler.SQLInsertCompiler, SQLCompiler):
pass
class SQLDeleteCompiler(compiler.SQLDeleteCompiler, SQLCompiler):
pass
class SQLUpdateCompiler(compiler.SQLUpdateCompiler, SQLCompiler):
pass
class SQLAggregateCompiler(compiler.SQLAggregateCompiler, SQLCompiler):
pass
|
Integral-Technology-Solutions/ConfigNOW-4.3 | refs/heads/master | Lib/pawt/swing.py | 5 | """
A hack to make pawt.swing point to the java swing library.
This allows code that imports pawt.swing to work on both JDK 1.1 and 1.2.
"""
swing = None
try:
import javax.swing.Icon
from javax import swing
except (ImportError, AttributeError):
try:
import java.awt.swing.Icon
from java.awt import swing
except (ImportError, AttributeError):
try:
import com.sun.java.swing.Icon
from com.sun.java import swing
except (ImportError, AttributeError):
raise ImportError, 'swing not defined in javax.swing or java.awt.swing or com.sun.java.swing'
import sys
def test(panel, size=None, name='Swing Tester'):
f = swing.JFrame(name, windowClosing=lambda event: sys.exit(0))
if hasattr(panel, 'init'):
panel.init()
f.contentPane.add(panel)
f.pack()
if size is not None:
from java import awt
f.setSize(apply(awt.Dimension, size))
f.setVisible(1)
return f
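# Illustrative usage (a sketch, not part of the original module; MyPanel is
# a hypothetical swing.JPanel subclass):
#
#   from pawt import swing
#   frame = swing.test(MyPanel(), size=(300, 200), name='Demo')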
if swing is not None:
import pawt, sys
pawt.swing = swing
sys.modules['pawt.swing'] = swing
swing.__dict__['test'] = test
    # These two lines help jpythonc figure out this very strange module
swing.__dict__['__file__'] = __file__
swing.__dict__['__jpythonc_name__'] = 'pawt.swing'
|
makielab/django-oscar | refs/heads/master | oscar/views/__init__.py | 2 | from django.shortcuts import render
def handler403(request):
return render(request, '403.html', status=403)
def handler404(request):
return render(request, '404.html', status=404)
def handler500(request):
return render(request, '500.html', status=500)
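# Illustrative wiring (a sketch, not part of this module): a project's root
# urls.py would typically point Django at these views, e.g.
#   handler404 = 'oscar.views.handler404'
#   handler500 = 'oscar.views.handler500'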
|
mikehulluk/morphforge | refs/heads/master | src/morphforge/componentlibraries/morphologylibrary.py | 1 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# ---------------------------------------------------------------------
# Copyright (c) 2012 Michael Hull.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# - Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# - Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in
# the documentation and/or other materials provided with the
# distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
# ----------------------------------------------------------------------
class MorphologyLibrary(object):
_morphology_functors = dict()
@classmethod
def register_morphology(cls, modelsrc, celltype, morph_functor):
key = (modelsrc, celltype)
assert not key in cls._morphology_functors
cls._morphology_functors[key] = morph_functor
@classmethod
def get_morphology_functor(cls, celltype, modelsrc=None):
return cls._morphology_functors[(modelsrc, celltype)]
@classmethod
def get_morphology(cls, celltype, modelsrc=None, **kwargs):
functor = cls._morphology_functors[(modelsrc, celltype)]
return functor(**kwargs)
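    # Illustrative usage (a sketch; the functor and its keyword arguments are
    # hypothetical):
    #   MorphologyLibrary.register_morphology('MyModel', 'MN', build_mn_morph)
    #   morph = MorphologyLibrary.get_morphology('MN', modelsrc='MyModel',
    #                                            axon_length=500)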
@classmethod
    def summary_table(cls):
        import mredoc
        summary_data = []
        for ((modelsrc, celltype), functor) in sorted(cls._morphology_functors.iteritems()):
            summary_data.append((modelsrc, celltype))
        summary_table = mredoc.VerticalColTable(('Model', 'CellType'), summary_data)
        return mredoc.Section('Cell Library Summary', summary_table)
|
atosatto/ansible | refs/heads/devel | lib/ansible/module_utils/known_hosts.py | 46 | # This code is part of Ansible, but is an independent component.
# This particular file snippet, and this file snippet only, is BSD licensed.
# Modules you write using this snippet, which is embedded dynamically by Ansible
# still belong to the author of the module, and may assign their own license
# to the complete work.
#
# Copyright (c), Michael DeHaan <[email protected]>, 2012-2013
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without modification,
# are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
# IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE
# USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import os
import hmac
import re
try:
import urlparse
except ImportError:
import urllib.parse as urlparse
try:
from hashlib import sha1
except ImportError:
import sha as sha1
HASHED_KEY_MAGIC = "|1|"
def add_git_host_key(module, url, accept_hostkey=True, create_dir=True):
""" idempotently add a git url hostkey """
if is_ssh_url(url):
fqdn, port = get_fqdn_and_port(url)
if fqdn:
known_host = check_hostkey(module, fqdn)
if not known_host:
if accept_hostkey:
rc, out, err = add_host_key(module, fqdn, port=port, create_dir=create_dir)
if rc != 0:
module.fail_json(msg="failed to add %s hostkey: %s" % (fqdn, out + err))
else:
module.fail_json(msg="%s has an unknown hostkey. Set accept_hostkey to True "
"or manually add the hostkey prior to running the git module" % fqdn)
def is_ssh_url(url):
""" check if url is ssh """
if "@" in url and "://" not in url:
return True
for scheme in "ssh://", "git+ssh://", "ssh+git://":
if url.startswith(scheme):
return True
return False
def get_fqdn_and_port(repo_url):
""" chop the hostname and port out of a url """
fqdn = None
port = None
ipv6_re = re.compile('(\[[^]]*\])(?::([0-9]+))?')
if "@" in repo_url and "://" not in repo_url:
# most likely an user@host:path or user@host/path type URL
repo_url = repo_url.split("@", 1)[1]
match = ipv6_re.match(repo_url)
# For this type of URL, colon specifies the path, not the port
if match:
fqdn, path = match.groups()
elif ":" in repo_url:
fqdn = repo_url.split(":")[0]
elif "/" in repo_url:
fqdn = repo_url.split("/")[0]
elif "://" in repo_url:
# this should be something we can parse with urlparse
parts = urlparse.urlparse(repo_url)
# parts[1] will be empty on python2.4 on ssh:// or git:// urls, so
# ensure we actually have a parts[1] before continuing.
if parts[1] != '':
fqdn = parts[1]
if "@" in fqdn:
fqdn = fqdn.split("@", 1)[1]
match = ipv6_re.match(fqdn)
if match:
fqdn, port = match.groups()
elif ":" in fqdn:
fqdn, port = fqdn.split(":")[0:2]
return fqdn, port
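# Illustrative behaviour of the parsing above (derived from the branches, not
# from the original file):
#   get_fqdn_and_port('[email protected]:repo.git') -> ('example.com', None)
#   get_fqdn_and_port('ssh://example.com:2222/repo.git') -> ('example.com', '2222')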
def check_hostkey(module, fqdn):
return not not_in_host_file(module, fqdn)
# this is a variant of code found in connection_plugins/paramiko.py and we should modify
# the paramiko code to import and use this.
def not_in_host_file(self, host):
if 'USER' in os.environ:
user_host_file = os.path.expandvars("~${USER}/.ssh/known_hosts")
else:
user_host_file = "~/.ssh/known_hosts"
user_host_file = os.path.expanduser(user_host_file)
host_file_list = []
host_file_list.append(user_host_file)
host_file_list.append("/etc/ssh/ssh_known_hosts")
host_file_list.append("/etc/ssh/ssh_known_hosts2")
host_file_list.append("/etc/openssh/ssh_known_hosts")
hfiles_not_found = 0
for hf in host_file_list:
if not os.path.exists(hf):
hfiles_not_found += 1
continue
try:
host_fh = open(hf)
except IOError:
hfiles_not_found += 1
continue
else:
data = host_fh.read()
host_fh.close()
for line in data.split("\n"):
if line is None or " " not in line:
continue
tokens = line.split()
if tokens[0].find(HASHED_KEY_MAGIC) == 0:
# this is a hashed known host entry
try:
(kn_salt,kn_host) = tokens[0][len(HASHED_KEY_MAGIC):].split("|",2)
hash = hmac.new(kn_salt.decode('base64'), digestmod=sha1)
hash.update(host)
if hash.digest() == kn_host.decode('base64'):
return False
except:
# invalid hashed host key, skip it
continue
else:
# standard host file entry
if host in tokens[0]:
return False
return True
def add_host_key(module, fqdn, port=22, key_type="rsa", create_dir=False):
""" use ssh-keyscan to add the hostkey """
keyscan_cmd = module.get_bin_path('ssh-keyscan', True)
if 'USER' in os.environ:
user_ssh_dir = os.path.expandvars("~${USER}/.ssh/")
user_host_file = os.path.expandvars("~${USER}/.ssh/known_hosts")
else:
user_ssh_dir = "~/.ssh/"
user_host_file = "~/.ssh/known_hosts"
user_ssh_dir = os.path.expanduser(user_ssh_dir)
if not os.path.exists(user_ssh_dir):
if create_dir:
try:
os.makedirs(user_ssh_dir, int('700', 8))
except:
module.fail_json(msg="failed to create host key directory: %s" % user_ssh_dir)
else:
module.fail_json(msg="%s does not exist" % user_ssh_dir)
elif not os.path.isdir(user_ssh_dir):
module.fail_json(msg="%s is not a directory" % user_ssh_dir)
if port:
this_cmd = "%s -t %s -p %s %s" % (keyscan_cmd, key_type, port, fqdn)
else:
this_cmd = "%s -t %s %s" % (keyscan_cmd, key_type, fqdn)
rc, out, err = module.run_command(this_cmd)
    # ssh-keyscan gives a 0 exit code and prints nothing on timeout
if rc != 0 or not out:
module.fail_json(msg='failed to get the hostkey for %s' % fqdn)
module.append_to_file(user_host_file, out)
return rc, out, err
|
glove747/liberty-neutron | refs/heads/master | neutron/tests/unit/agent/linux/test_ovsdb_monitor.py | 18 | # Copyright 2013 Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import mock
from neutron.agent.common import ovs_lib
from neutron.agent.linux import ovsdb_monitor
from neutron.tests import base
class TestOvsdbMonitor(base.BaseTestCase):
def test___init__(self):
ovsdb_monitor.OvsdbMonitor('Interface')
def test___init___with_columns(self):
columns = ['col1', 'col2']
with mock.patch(
'neutron.agent.linux.async_process.AsyncProcess.__init__') as init:
ovsdb_monitor.OvsdbMonitor('Interface', columns=columns)
cmd = init.call_args_list[0][0][0]
self.assertEqual('col1,col2', cmd[-1])
def test___init___with_format(self):
with mock.patch(
'neutron.agent.linux.async_process.AsyncProcess.__init__') as init:
ovsdb_monitor.OvsdbMonitor('Interface', format='blob')
cmd = init.call_args_list[0][0][0]
self.assertEqual('--format=blob', cmd[-1])
class TestSimpleInterfaceMonitor(base.BaseTestCase):
def setUp(self):
super(TestSimpleInterfaceMonitor, self).setUp()
self.monitor = ovsdb_monitor.SimpleInterfaceMonitor()
def test_has_updates_is_false_if_active_with_no_output(self):
target = ('neutron.agent.linux.ovsdb_monitor.SimpleInterfaceMonitor'
'.is_active')
with mock.patch(target, return_value=True):
self.assertFalse(self.monitor.has_updates)
def test_has_updates_after_calling_get_events_is_false(self):
with mock.patch.object(
self.monitor, 'process_events') as process_events:
self.monitor.new_events = {'added': ['foo'], 'removed': ['foo1']}
self.assertTrue(self.monitor.has_updates)
self.monitor.get_events()
self.assertTrue(process_events.called)
self.assertFalse(self.monitor.has_updates)
    def test_process_event_unassigned_of_port(self):
output = '{"data":[["e040fbec-0579-4990-8324-d338da33ae88","insert",'
output += '"m50",["set",[]],["map",[]]]],"headings":["row","action",'
output += '"name","ofport","external_ids"]}'
with mock.patch.object(
self.monitor, 'iter_stdout', return_value=[output]):
self.monitor.process_events()
self.assertEqual(self.monitor.new_events['added'][0]['ofport'],
ovs_lib.UNASSIGNED_OFPORT)
|
Mazecreator/tensorflow | refs/heads/master | tensorflow/python/kernel_tests/neon_depthwise_conv_op_test.py | 57 | # Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Functional tests for neon kernel for depthwise convolutional operations."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
from tensorflow.python.framework import constant_op
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import nn_impl
from tensorflow.python.ops import nn_ops
import tensorflow.python.ops.nn_grad # pylint: disable=unused-import
from tensorflow.python.platform import test
def ConfigsToTest():
"""Iterator for different convolution shapes, strides and paddings.
Yields:
Tuple (input_size, filter_size, out_size, stride, padding), the depthwise
convolution parameters.
"""
input_sizes = [[4, 5, 5, 48], [4, 8, 8, 84], [4, 17, 17, 48], [4, 35, 35, 2],
[4, 147, 147, 2], [3, 299, 299, 3], [5, 183, 183, 1]]
filter_sizes = [[1, 1, 48, 2], [1, 3, 84, 1], [3, 1, 48, 4], [5, 5, 2, 1],
[3, 3, 2, 8], [2, 2, 3, 8], [5, 5, 1, 2]]
out_sizes = [[4, 5, 5, 96], [4, 8, 8, 84], [4, 17, 17, 192], [4, 35, 35, 2],
[4, 49, 49, 16], [3, 150, 150, 24], [5, 92, 92, 2]]
strides = [1, 1, 1, 1, 3, 2, 2]
# pylint: disable=invalid-name
VALID = "VALID"
SAME = "SAME"
# pylint: enable=invalid-name
  paddings = [SAME, SAME, SAME, SAME, VALID, SAME, SAME]
for i, f, o, s, p in zip(input_sizes, filter_sizes, out_sizes, strides,
paddings):
yield i, f, o, s, p
def CheckGradConfigsToTest():
"""Iterator for different convolution shapes, strides and paddings.
compute_gradient_error() is very expensive. So the configs should be
relatively small.
Yields:
Tuple (input_size, filter_size, out_size, stride, padding), the depthwise
convolution parameters.
"""
input_sizes = [[2, 5, 8, 1], [4, 5, 5, 1], [2, 4, 4, 2], [1, 15, 15, 2],
[2, 15, 16, 1]]
filter_sizes = [[4, 4, 1, 2], [2, 2, 1, 2], [3, 1, 2, 2], [1, 3, 2, 1],
[3, 3, 1, 2]]
out_sizes = [[2, 5, 8, 2], [4, 2, 2, 2], [2, 4, 4, 4], [1, 15, 15, 2],
[2, 5, 5, 2]]
strides = [1, 2, 1, 1, 3]
# pylint: disable=invalid-name
VALID = "VALID"
SAME = "SAME"
# pylint: enable=invalid-name
paddings = [SAME, VALID, SAME, SAME, VALID]
for i, f, o, s, p in zip(input_sizes, filter_sizes, out_sizes, strides,
paddings):
yield i, f, o, s, p
class DepthwiseConv2DTest(test.TestCase):
  # This is testing that depthwise_conv2d and depthwise_conv2d_native
  # produce the same results. It also tests that the NCHW and NHWC
  # formats agree, by checking that depthwise_conv2d_native with the
  # 'NCHW' format (with transposition) matches the 'NHWC' format used
  # by the higher level interface.
def _VerifyValues(self,
tensor_in_sizes,
filter_in_sizes,
stride,
padding,
use_gpu,
data_format="NHWC"):
"""Verifies the output values of the convolution function.
Args:
tensor_in_sizes: Input tensor dimensions in
[batch, input_rows, input_cols, input_depth].
filter_in_sizes: Filter tensor dimensions in
[filter_rows, filter_cols, input_depth, depth_multiplier].
stride: Stride.
padding: Padding type.
use_gpu: Whether to use GPU.
data_format: The data_format of the input. "NHWC" or "NCHW".
"""
total_size_1 = 1
total_size_2 = 1
for s in tensor_in_sizes:
total_size_1 *= s
for s in filter_in_sizes:
total_size_2 *= s
# Initializes the input and filter tensor with numbers incrementing from 1.
x1 = [f * 1.0 for f in range(1, total_size_1 + 1)]
x2 = [f * 1.0 for f in range(1, total_size_2 + 1)]
with self.test_session(use_gpu=use_gpu) as sess:
with sess.graph._kernel_label_map({"DepthwiseConv2dNative": "neon"}):
t1 = constant_op.constant(x1, shape=tensor_in_sizes)
t1.set_shape(tensor_in_sizes)
t2 = constant_op.constant(x2, shape=filter_in_sizes)
native_t1 = t1
strides = [1, stride, stride, 1]
if data_format == "NCHW":
          # Transpose from NHWC input to NCHW
# Ex. [4, 5, 5, 48] to [4, 48, 5, 5]
native_t1 = array_ops.transpose(t1, [0, 3, 1, 2])
strides = [1, 1, stride, stride]
conv_native = nn_ops.depthwise_conv2d_native(
native_t1,
t2,
strides=strides,
data_format=data_format,
padding=padding)
if data_format == "NCHW":
# Transpose back from NCHW to NHWC
conv_native = array_ops.transpose(conv_native, [0, 2, 3, 1])
conv_interface = nn_impl.depthwise_conv2d(
t1, t2, strides=[1, stride, stride, 1], padding=padding)
native_result = sess.run(conv_native)
interface_result = sess.run(conv_interface)
print("depthwise conv_2d: ", tensor_in_sizes, "*", filter_in_sizes,
", stride:", stride, ", padding: ", padding, ", max diff: ",
np.amax(np.absolute(native_result - interface_result)))
self.assertArrayNear(
np.ravel(native_result), np.ravel(interface_result), 1e-5)
self.assertShapeEqual(native_result, conv_native)
self.assertShapeEqual(native_result, conv_interface)
def testDepthwiseConv2D(self):
for index, (input_size, filter_size, _, stride,
padding) in enumerate(ConfigsToTest()):
print("Processing ", index, "th config.")
if index == 2:
self._VerifyValues(
input_size, filter_size, stride, padding, use_gpu=True)
self._VerifyValues(
input_size, filter_size, stride, padding, use_gpu=False)
def testDepthwiseConv2DFormat(self):
if not test.is_gpu_available():
return
for index, (input_size, filter_size, _, stride,
padding) in enumerate(ConfigsToTest()):
print("Processing ", index, "th config.")
self._VerifyValues(
input_size,
filter_size,
stride,
padding,
use_gpu=True,
data_format="NCHW")
# This is testing against hand calculated results.
def _VerifyHandValues(self, tensor_in_sizes, filter_in_sizes, stride, padding,
expected, use_gpu):
"""Verifies the output values of the depthwise convolution function.
Args:
tensor_in_sizes: Input tensor dimensions in
[batch, input_rows, input_cols, input_depth].
filter_in_sizes: Filter tensor dimensions in
[filter_rows, filter_cols, input_depth, depth_multiplier].
stride: Stride.
padding: Padding type.
expected: An array containing the expected operation outputs.
use_gpu: Whether to use GPU.
"""
total_size_1 = 1
total_size_2 = 1
for s in tensor_in_sizes:
total_size_1 *= s
for s in filter_in_sizes:
total_size_2 *= s
# Initializes the input tensor with array containing incrementing
# numbers from 1.
x1 = [f * 1.0 for f in range(1, total_size_1 + 1)]
x2 = [f * 1.0 for f in range(1, total_size_2 + 1)]
with self.test_session(use_gpu=use_gpu) as sess:
with sess.graph._kernel_label_map({"DepthwiseConv2dNative": "neon"}):
t1 = constant_op.constant(x1, shape=tensor_in_sizes)
t1.set_shape(tensor_in_sizes)
t2 = constant_op.constant(x2, shape=filter_in_sizes)
conv = nn_ops.depthwise_conv2d_native(
t1, t2, strides=[1, stride, stride, 1], padding=padding)
value = sess.run(conv)
print("value = ", value)
self.assertArrayNear(expected, np.ravel(value), 1e-5)
self.assertShapeEqual(value, conv)
def testConv2D2x2Filter(self):
    # The inputs look like this (it's a 2 x 3 matrix, each of depth 2):
#
# [ (1.0, 2.0), (3.0, 4.0), ( 5.0, 6.0) ]
# [ (7.0, 8.0), (9.0, 10.0), (11.0, 12.0) ]
# We can view this as two inputs
#
# input depth 0:
#
# [ 1.0, 3.0, 5.0 ]
# [ 7.0, 9.0, 11.0 ]
#
# input depth 1:
#
# [ 2.0, 4.0, 6.0 ]
# [ 8.0, 10.0, 12.0 ]
#
# The filter looks like this (it has two 2 x 2 patches, each generating 2
# depths):
#
# filter #0:
#
# [ (1.0, 3.0), ( 5.0, 7.0)]
# [ (9.0, 11.0), (13.0, 15.0)]
#
# filter #1:
#
# [ ( 2.0, 4.0), ( 6.0, 8.0)]
# [ (10.0, 12.0), (14.0, 16.0)]
#
# So the outputs are:
#
# (position 0, 0: in_depth 0, output_depth 0 -- using filter #0)
# 1.0 * 1.0 + 7.0 * 9.0 + 3.0 * 5.0 + 9.0 * 13.0 = 196
# (position 0, 0: in_depth 0, output_depth 1 -- using filter #1)
# 1.0 * 2.0 + 7.0 * 10.0 + 3.0 * 6.0 + 9.0 * 14.0 = 216
# (position 0, 0: in_depth 1, output_depth 2 -- using filter #0)
# 2.0 * 3.0 + 8.0 * 11.0 + 4.0 * 7.0 + 10.0 * 15.0 = 272
# (position 0, 0: in_depth 1, output_depth 3 -- using filter #1)
# 2.0 * 4.0 + 8.0 * 12.0 + 4.0 * 8.0 + 10.0 * 16.0 = 296
#
# (position 1, 0: in_depth 0, output_depth 0 -- using filter #0)
# 3.0 * 1.0 + 9.0 * 9.0 + 5.0 * 5.0 + 11.0 * 13.0 = 252
# (position 1, 0: in_depth 0, output_depth 1 -- using filter #1)
# 3.0 * 2.0 + 9.0 * 10.0 + 5.0 * 6.0 + 11.0 * 14.0 = 280
# (position 1, 0: in_depth 1, output_depth 2 -- using filter #0)
# 4.0 * 3.0 + 10.0 * 11.0 + 6.0 * 7.0 + 12.0 * 15.0 = 344
# (position 1, 0: in_depth 1, output_depth 3 -- using filter #1)
# 4.0 * 4.0 + 10.0 * 12.0 + 6.0 * 8.0 + 12.0 * 16.0 = 376
expected_output = [196, 216, 272, 296, 252, 280, 344, 376]
self._VerifyHandValues(
tensor_in_sizes=[1, 2, 3, 2],
filter_in_sizes=[2, 2, 2, 2],
stride=1,
padding="VALID",
expected=expected_output,
use_gpu=False)
self._VerifyHandValues(
tensor_in_sizes=[1, 2, 3, 2],
filter_in_sizes=[2, 2, 2, 2],
stride=1,
padding="VALID",
expected=expected_output,
use_gpu=True)
if __name__ == "__main__":
test.main()
|
wojons/rethinkdb | refs/heads/next | test/rql_test/connections/http_support/flask/module.py | 850 | # -*- coding: utf-8 -*-
"""
flask.module
~~~~~~~~~~~~
Implements a class that represents module blueprints.
:copyright: (c) 2011 by Armin Ronacher.
:license: BSD, see LICENSE for more details.
"""
import os
from .blueprints import Blueprint
def blueprint_is_module(bp):
"""Used to figure out if something is actually a module"""
return isinstance(bp, Module)
class Module(Blueprint):
"""Deprecated module support. Until Flask 0.6 modules were a different
name of the concept now available as blueprints in Flask. They are
essentially doing the same but have some bad semantics for templates and
static files that were fixed with blueprints.
.. versionchanged:: 0.7
       Modules were deprecated in favor of blueprints.
"""
def __init__(self, import_name, name=None, url_prefix=None,
static_path=None, subdomain=None):
if name is None:
assert '.' in import_name, 'name required if package name ' \
'does not point to a submodule'
name = import_name.rsplit('.', 1)[1]
Blueprint.__init__(self, name, import_name, url_prefix=url_prefix,
subdomain=subdomain, template_folder='templates')
if os.path.isdir(os.path.join(self.root_path, 'static')):
self._static_folder = 'static'
|
fvpolpeta/devide | refs/heads/master | modules/vtk_basic/vtkSQLTableReader.py | 7 | # class generated by DeVIDE::createDeVIDEModuleFromVTKObject
from module_kits.vtk_kit.mixins import SimpleVTKClassModuleBase
import vtk
class vtkSQLTableReader(SimpleVTKClassModuleBase):
def __init__(self, module_manager):
SimpleVTKClassModuleBase.__init__(
self, module_manager,
vtk.vtkSQLTableReader(), 'Reading vtkSQLTable.',
(), ('vtkSQLTable',),
replaceDoc=True,
inputFunctions=None, outputFunctions=None)
|
ArtemZ/wal-e | refs/heads/master | setup.py | 3 | #!/usr/bin/env python
import os.path
import sys
# Version file management scheme and graceful degradation for
# setuptools borrowed and adapted from GitPython.
try:
from setuptools import setup, find_packages
# Silence pyflakes
assert setup
assert find_packages
except ImportError:
from ez_setup import use_setuptools
use_setuptools()
from setuptools import setup, find_packages
if sys.version_info < (2, 6):
raise RuntimeError('Python versions < 2.6 are not supported.')
# Utility function to read the contents of short files.
def read(fname):
with open(os.path.join(os.path.dirname(__file__), fname)) as f:
return f.read()
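# Note (illustrative): read() resolves paths relative to this setup.py, so
# calls like read('README.rst') below work regardless of the current working
# directory.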
VERSION = read(os.path.join('wal_e', 'VERSION')).strip()
install_requires = [
l for l in read('requirements.txt').split('\n')
if l and not l.startswith('#')]
if sys.version_info < (2, 7):
install_requires.append('argparse>=0.8')
setup(
name="wal-e",
version=VERSION,
packages=find_packages(),
install_requires=install_requires,
# metadata for upload to PyPI
author="The WAL-E Contributors",
author_email="[email protected]",
maintainer="Daniel Farina",
maintainer_email="[email protected]",
description="Continuous Archiving for Postgres",
long_description=read('README.rst'),
classifiers=['Topic :: Database',
'Topic :: System :: Archiving',
'Topic :: System :: Recovery Tools'],
platforms=['any'],
license="BSD",
keywords=("postgres postgresql database backup archive archiving s3 aws "
"openstack swift wabs azure wal shipping"),
url="https://github.com/wal-e/wal-e",
# Include the VERSION file
package_data={'wal_e': ['VERSION']},
    # install the wal-e command line entry point
entry_points={'console_scripts': ['wal-e=wal_e.cmd:main']})
|
goksie/newfies-dialer | refs/heads/master | newfies/agent/permission.py | 4 | #
# Newfies-Dialer License
# http://www.newfies-dialer.org
#
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this file,
# You can obtain one at http://mozilla.org/MPL/2.0/.
#
# Copyright (C) 2011-2014 Star2Billing S.L.
#
# The Initial Developer of the Original Code is
# Arezqui Belaid <[email protected]>
#
from rest_framework import permissions
class IsOwnerOrReadOnly(permissions.BasePermission):
"""
Custom permission to only allow owners of an object to edit it.
"""
def has_object_permission(self, request, view, obj):
# Read permissions are allowed to any request,
# so we'll always allow GET, HEAD or OPTIONS requests.
if request.method in permissions.SAFE_METHODS:
return True
        # Write permissions are only allowed to the agent who owns the
        # record, so an agent can only change their own password.
return obj.id == request.user.id
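    # Illustrative usage (a sketch; the viewset is hypothetical):
    #   class AgentViewSet(viewsets.ModelViewSet):
    #       permission_classes = (permissions.IsAuthenticated,
    #                             IsOwnerOrReadOnly)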
|
Nschanche/AstroHackWeek2015 | refs/heads/master | day3-machine-learning/solutions/validation_curve.py | 7 | import matplotlib.pyplot as plt
from sklearn.svm import LinearSVC
from sklearn.neighbors import KNeighborsClassifier
from sklearn.learning_curve import validation_curve
cs = [0.00001, 0.0001, 0.001, 0.01, 0.1, 1, 10]
training_scores, test_scores = validation_curve(LinearSVC(), X, y,
param_name="C", param_range=cs)
plt.figure()
plot_validation_curve(range(7), training_scores, test_scores)
ks = range(10)
training_scores, test_scores = validation_curve(KNeighborsClassifier(), X, y,
param_name="n_neighbors", param_range=ks)
plt.figure()
plot_validation_curve(ks, training_scores, test_scores)
|
hasadna/django | refs/heads/master | django/utils/timesince.py | 79 | from __future__ import unicode_literals
import datetime
from django.utils.timezone import is_aware, utc
from django.utils.translation import ungettext, ugettext
def timesince(d, now=None, reversed=False):
"""
Takes two datetime objects and returns the time between d and now
as a nicely formatted string, e.g. "10 minutes". If d occurs after now,
then "0 minutes" is returned.
Units used are years, months, weeks, days, hours, and minutes.
Seconds and microseconds are ignored. Up to two adjacent units will be
displayed. For example, "2 weeks, 3 days" and "1 year, 3 months" are
possible outputs, but "2 weeks, 3 hours" and "1 year, 5 days" are not.
Adapted from
http://web.archive.org/web/20060617175230/http://blog.natbat.co.uk/archive/2003/Jun/14/time_since
"""
chunks = (
(60 * 60 * 24 * 365, lambda n: ungettext('year', 'years', n)),
(60 * 60 * 24 * 30, lambda n: ungettext('month', 'months', n)),
        (60 * 60 * 24 * 7, lambda n: ungettext('week', 'weeks', n)),
        (60 * 60 * 24, lambda n: ungettext('day', 'days', n)),
(60 * 60, lambda n: ungettext('hour', 'hours', n)),
(60, lambda n: ungettext('minute', 'minutes', n))
)
# Convert datetime.date to datetime.datetime for comparison.
if not isinstance(d, datetime.datetime):
d = datetime.datetime(d.year, d.month, d.day)
if now and not isinstance(now, datetime.datetime):
now = datetime.datetime(now.year, now.month, now.day)
if not now:
now = datetime.datetime.now(utc if is_aware(d) else None)
delta = (d - now) if reversed else (now - d)
# ignore microseconds
since = delta.days * 24 * 60 * 60 + delta.seconds
if since <= 0:
# d is in the future compared to now, stop processing.
return '0 ' + ugettext('minutes')
for i, (seconds, name) in enumerate(chunks):
count = since // seconds
if count != 0:
break
s = ugettext('%(number)d %(type)s') % {'number': count, 'type': name(count)}
if i + 1 < len(chunks):
# Now get the second item
seconds2, name2 = chunks[i + 1]
count2 = (since - (seconds * count)) // seconds2
if count2 != 0:
s += ugettext(', %(number)d %(type)s') % {'number': count2, 'type': name2(count2)}
return s
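# Illustrative behaviour (worked from the chunks above): for d eight days
# before now, timesince(d) returns "1 week, 1 day"; any difference under a
# minute collapses to "0 minutes".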
def timeuntil(d, now=None):
"""
Like timesince, but returns a string measuring the time until
the given time.
"""
return timesince(d, now, reversed=True)
|
asmodehn/filefinder2 | refs/heads/master | filefinder2/machinery.py | 1 | from __future__ import absolute_import, print_function
import sys
# Simple module replicating importlib.machinery API of importlib in python3
from ._fileloader2 import ModuleSpec
# BuiltinImporter Not Implemented
# FrozenImporter Not implemented
# WindowsRegistryFinder
try:
from importlib.machinery import (
SOURCE_SUFFIXES, BYTECODE_SUFFIXES, EXTENSION_SUFFIXES
)
except ImportError:
from ._fileloader2 import (
SOURCE_SUFFIXES_2, BYTECODE_SUFFIXES_2, EXTENSION_SUFFIXES_2
        # Note: some of these will differ from a full-fledged python import implementation.
)
SOURCE_SUFFIXES = SOURCE_SUFFIXES_2
BYTECODE_SUFFIXES = BYTECODE_SUFFIXES_2
EXTENSION_SUFFIXES = EXTENSION_SUFFIXES_2
# Should manage multiple python version by itself
def get_supported_file_loaders():
from ._fileloader2 import get_supported_file_loaders_2
return get_supported_file_loaders_2()
def all_suffixes():
"""Returns a list of all recognized module suffixes for this process"""
return SOURCE_SUFFIXES + BYTECODE_SUFFIXES + EXTENSION_SUFFIXES
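# Illustrative check (a sketch; the exact list depends on the interpreter):
#   '.py' in all_suffixes()  # -> True on a standard CPython install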
try:
# Trying to import all at once (since the class hierarchy is similar)
# I am not aware of any python implementation where we have one but not the two others...
from importlib.machinery import SourceFileLoader, SourcelessFileLoader, ExtensionFileLoader
except ImportError:
from ._fileloader2 import SourceFileLoader2
from ._fileloader2 import ImpFileLoader2
# to be compatible with py3 importlib
SourceFileLoader = SourceFileLoader2
SourcelessFileLoader = ImpFileLoader2
ExtensionFileLoader = ImpFileLoader2
# Because we need to set our classes at import time
# hint : have a look at the "enforce" subpkg if you want to use
# the wrapping classes even in python3
try:
from importlib.machinery import PathFinder as lib_pf
PathFinder = lib_pf
except ImportError:
from ._filefinder2 import PathFinder2
PathFinder = PathFinder2
try:
from importlib.machinery import FileFinder as lib_ff
FileFinder = lib_ff
# at import time we find the instantiated filefinder hook (because we know the index)
    try:  # DANGER: valid on python3 only (and only if imports haven't been modified previously)
ff_path_hook = sys.path_hooks[1]
except IndexError:
ff_path_hook = None
except ImportError:
from ._filefinder2 import FileFinder2
FileFinder = FileFinder2
ff_path_hook = FileFinder2.path_hook(*get_supported_file_loaders())
# def get_pathfinder_index_in_meta_hooks():
# return sys.meta_path.index(PathFinder)
#
#
# def get_filefinder_index_in_path_hooks():
# # Note the python version distinction is made at import time on ff_path_hook
# if ff_path_hook is None: # if it was not detected at first (pypy case)
# # then the index is the last one, ie the length
# idx = len(sys.path_hooks)
# else:
# try:
# idx = sys.path_hooks.index(ff_path_hook)
# except ValueError: # if not in list it means filefinder2 was not activated.
# # we should return the index of the original python filefinder or raise (we dont want to risk breaking imports)
# idx = sys.path_hooks.index(ff_path_hook_original)
#
# return idx
|
maikito26/context.surveillanceroom.togglepreview | refs/heads/master | default.py | 1 | import xbmc
if __name__ == "__main__":
# Toggle Preview
xbmc.executebuiltin('RunPlugin(plugin://plugin.video.surveillanceroom?action=toggle_preview)')
|
tmpgit/intellij-community | refs/heads/master | python/testData/refactoring/move/importFirstWithSlash/before/src/tmp.py | 166 | from file1 import function_1,\
function_2
function_1()
function_2() |
fredericlepied/ansible | refs/heads/devel | lib/ansible/modules/cloud/google/gce_snapshot.py | 8 | #!/usr/bin/python
# -*- coding: utf-8 -*-
#
# Copyright: Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.0',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: gce_snapshot
version_added: "2.3"
short_description: Create or destroy snapshots for GCE storage volumes
description:
    - Manages snapshots for the storage volumes of a GCE compute instance.
      If there are multiple volumes, each snapshot name will be prefixed
      with the name of the disk it belongs to.
options:
instance_name:
description:
- The GCE instance to snapshot
required: True
snapshot_name:
description:
- The name of the snapshot to manage
disks:
description:
- A list of disks to create snapshots for. If none is provided,
all of the volumes will be snapshotted
default: all
required: False
state:
description:
- Whether a snapshot should be C(present) or C(absent)
required: false
default: present
choices: [present, absent]
service_account_email:
description:
- GCP service account email for the project where the instance resides
required: true
credentials_file:
description:
- The path to the credentials file associated with the service account
required: true
project_id:
description:
- The GCP project ID to use
required: true
requirements:
- "python >= 2.6"
- "apache-libcloud >= 0.19.0"
author: Rob Wagner (@robwagner33)
'''
EXAMPLES = '''
- name: Create gce snapshot
gce_snapshot:
instance_name: example-instance
snapshot_name: example-snapshot
state: present
service_account_email: [email protected]
credentials_file: /path/to/credentials
project_id: project_name
delegate_to: localhost
- name: Delete gce snapshot
gce_snapshot:
instance_name: example-instance
snapshot_name: example-snapshot
state: absent
service_account_email: [email protected]
credentials_file: /path/to/credentials
project_id: project_name
delegate_to: localhost
# This example creates snapshots for only two of the available disks as
# disk0-example-snapshot and disk1-example-snapshot
- name: Create snapshots of specific disks
gce_snapshot:
instance_name: example-instance
snapshot_name: example-snapshot
state: present
disks:
- disk0
- disk1
service_account_email: [email protected]
credentials_file: /path/to/credentials
project_id: project_name
delegate_to: localhost
'''
RETURN = '''
snapshots_created:
description: List of newly created snapshots
returned: When snapshots are created
type: list
sample: "[disk0-example-snapshot, disk1-example-snapshot]"
snapshots_deleted:
description: List of destroyed snapshots
returned: When snapshots are deleted
type: list
sample: "[disk0-example-snapshot, disk1-example-snapshot]"
snapshots_existing:
description: List of snapshots that already existed (no-op)
returned: When snapshots were already present
type: list
sample: "[disk0-example-snapshot, disk1-example-snapshot]"
snapshots_absent:
description: List of snapshots that were already absent (no-op)
returned: When snapshots were already absent
type: list
sample: "[disk0-example-snapshot, disk1-example-snapshot]"
'''
try:
from libcloud.compute.types import Provider
_ = Provider.GCE
HAS_LIBCLOUD = True
except ImportError:
HAS_LIBCLOUD = False
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.gce import gce_connect
def find_snapshot(volume, name):
'''
Check if there is a snapshot already created with the given name for
the passed in volume.
Args:
volume: A gce StorageVolume object to manage
name: The name of the snapshot to look for
Returns:
The VolumeSnapshot object if one is found
'''
found_snapshot = None
snapshots = volume.list_snapshots()
for snapshot in snapshots:
if name == snapshot.name:
found_snapshot = snapshot
return found_snapshot
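# Illustrative usage (a sketch; the volume object comes from libcloud and the
# snapshot name is hypothetical):
#   snap = find_snapshot(volume_obj, 'disk0-example-snapshot')
#   # snap is None when no snapshot with that name exists yet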
def main():
module = AnsibleModule(
argument_spec=dict(
instance_name=dict(required=True),
snapshot_name=dict(required=True),
state=dict(choices=['present', 'absent'], default='present'),
disks=dict(default=None, type='list'),
service_account_email=dict(type='str'),
credentials_file=dict(type='path'),
project_id=dict(type='str')
)
)
if not HAS_LIBCLOUD:
module.fail_json(msg='libcloud with GCE support (0.19.0+) is required for this module')
gce = gce_connect(module)
instance_name = module.params.get('instance_name')
snapshot_name = module.params.get('snapshot_name')
disks = module.params.get('disks')
state = module.params.get('state')
json_output = dict(
changed=False,
snapshots_created=[],
snapshots_deleted=[],
snapshots_existing=[],
snapshots_absent=[]
)
snapshot = None
instance = gce.ex_get_node(instance_name, 'all')
instance_disks = instance.extra['disks']
for instance_disk in instance_disks:
disk_snapshot_name = snapshot_name
device_name = instance_disk['deviceName']
if disks is None or device_name in disks:
volume_obj = gce.ex_get_volume(device_name)
# If we have more than one disk to snapshot, prepend the disk name
if len(instance_disks) > 1:
disk_snapshot_name = device_name + "-" + disk_snapshot_name
snapshot = find_snapshot(volume_obj, disk_snapshot_name)
if snapshot and state == 'present':
json_output['snapshots_existing'].append(disk_snapshot_name)
elif snapshot and state == 'absent':
snapshot.destroy()
json_output['changed'] = True
json_output['snapshots_deleted'].append(disk_snapshot_name)
elif not snapshot and state == 'present':
volume_obj.snapshot(disk_snapshot_name)
json_output['changed'] = True
json_output['snapshots_created'].append(disk_snapshot_name)
elif not snapshot and state == 'absent':
json_output['snapshots_absent'].append(disk_snapshot_name)
module.exit_json(**json_output)
if __name__ == '__main__':
main()
|
Akasurde/pytest | refs/heads/master | testing/test_helpconfig.py | 13 | import pytest
def test_version(testdir, pytestconfig):
result = testdir.runpytest("--version")
assert result.ret == 0
#p = py.path.local(py.__file__).dirpath()
result.stderr.fnmatch_lines([
'*pytest*%s*imported from*' % (pytest.__version__, )
])
if pytestconfig.pluginmanager.list_plugin_distinfo():
result.stderr.fnmatch_lines([
"*setuptools registered plugins:",
"*at*",
])
def test_help(testdir):
result = testdir.runpytest("--help")
assert result.ret == 0
result.stdout.fnmatch_lines("""
*-v*verbose*
*setup.cfg*
*minversion*
*to see*markers*py.test --markers*
*to see*fixtures*py.test --fixtures*
""")
def test_hookvalidation_unknown(testdir):
testdir.makeconftest("""
def pytest_hello(xyz):
pass
""")
result = testdir.runpytest()
assert result.ret != 0
result.stderr.fnmatch_lines([
'*unknown hook*pytest_hello*'
])
def test_hookvalidation_optional(testdir):
testdir.makeconftest("""
import pytest
@pytest.hookimpl(optionalhook=True)
def pytest_hello(xyz):
pass
""")
result = testdir.runpytest()
assert result.ret == 0
def test_traceconfig(testdir):
result = testdir.runpytest("--traceconfig")
result.stdout.fnmatch_lines([
"*using*pytest*py*",
"*active plugins*",
])
def test_debug(testdir, monkeypatch):
result = testdir.runpytest_subprocess("--debug")
assert result.ret == 0
p = testdir.tmpdir.join("pytestdebug.log")
assert "pytest_sessionstart" in p.read()
def test_PYTEST_DEBUG(testdir, monkeypatch):
monkeypatch.setenv("PYTEST_DEBUG", "1")
result = testdir.runpytest_subprocess()
assert result.ret == 0
result.stderr.fnmatch_lines([
"*pytest_plugin_registered*",
"*manager*PluginManager*"
])
|
axbaretto/beam | refs/heads/master | sdks/python/.tox/lint/lib/python2.7/site-packages/isort/isort.py | 7 | """isort.py.
Exposes a simple library to sort through imports within Python code
usage:
SortImports(file_name)
or:
sorted = SortImports(file_contents=file_contents).output
Copyright (C) 2013 Timothy Edmund Crosley
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated
documentation files (the "Software"), to deal in the Software without restriction, including without limitation
the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and
to permit persons to whom the Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all copies or
substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED
TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF
CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
OTHER DEALINGS IN THE SOFTWARE.
"""
from __future__ import absolute_import, division, print_function, unicode_literals
import copy
import io
import itertools
import os
import re
import sys
from collections import namedtuple
from datetime import datetime
from difflib import unified_diff
from fnmatch import fnmatch
from glob import glob
from sys import path as PYTHONPATH
from sys import stdout
from . import settings
from .natural import nsorted
from .pie_slice import *
KNOWN_SECTION_MAPPING = {
'STDLIB': 'STANDARD_LIBRARY',
'FUTURE': 'FUTURE_LIBRARY',
'FIRSTPARTY': 'FIRST_PARTY',
'THIRDPARTY': 'THIRD_PARTY',
}
class SortImports(object):
incorrectly_sorted = False
skipped = False
def __init__(self, file_path=None, file_contents=None, write_to_stdout=False, check=False,
show_diff=False, settings_path=None, ask_to_apply=False, **setting_overrides):
if not settings_path and file_path:
settings_path = os.path.dirname(os.path.abspath(file_path))
settings_path = settings_path or os.getcwd()
self.config = settings.from_path(settings_path).copy()
for key, value in itemsview(setting_overrides):
access_key = key.replace('not_', '').lower()
# The sections config needs to retain order and can't be converted to a set.
if access_key != 'sections' and type(self.config.get(access_key)) in (list, tuple):
if key.startswith('not_'):
self.config[access_key] = list(set(self.config[access_key]).difference(value))
else:
self.config[access_key] = list(set(self.config[access_key]).union(value))
else:
self.config[key] = value
if self.config.get('force_alphabetical_sort', False):
self.config.update({'force_alphabetical_sort_within_sections': True,
'no_sections': True,
'lines_between_types': 1,
'from_first': True})
indent = str(self.config['indent'])
if indent.isdigit():
indent = " " * int(indent)
else:
indent = indent.strip("'").strip('"')
if indent.lower() == "tab":
indent = "\t"
self.config['indent'] = indent
self.place_imports = {}
self.import_placements = {}
self.remove_imports = [self._format_simplified(removal) for removal in self.config.get('remove_imports', [])]
self.add_imports = [self._format_natural(addition) for addition in self.config.get('add_imports', [])]
self._section_comments = ["# " + value for key, value in itemsview(self.config) if
key.startswith('import_heading') and value]
self.file_encoding = 'utf-8'
file_name = file_path
self.file_path = file_path or ""
if file_path:
file_path = os.path.abspath(file_path)
if settings.should_skip(file_path, self.config):
self.skipped = True
if self.config['verbose']:
print("WARNING: {0} was skipped as it's listed in 'skip' setting"
" or matches a glob in 'skip_glob' setting".format(file_path))
file_contents = None
elif not file_contents:
self.file_path = file_path
self.file_encoding = coding_check(file_path)
with io.open(file_path, encoding=self.file_encoding) as file_to_import_sort:
file_contents = file_to_import_sort.read()
if file_contents is None or ("isort:" + "skip_file") in file_contents:
return
self.in_lines = file_contents.split("\n")
self.original_length = len(self.in_lines)
if (self.original_length > 1 or self.in_lines[:1] not in ([], [""])) or self.config.get('force_adds', False):
for add_import in self.add_imports:
self.in_lines.append(add_import)
self.number_of_lines = len(self.in_lines)
self.out_lines = []
self.comments = {'from': {}, 'straight': {}, 'nested': {}, 'above': {'straight': {}, 'from': {}}}
self.imports = {}
self.as_map = {}
section_names = self.config.get('sections')
self.sections = namedtuple('Sections', section_names)(*[name for name in section_names])
for section in itertools.chain(self.sections, self.config['forced_separate']):
self.imports[section] = {'straight': set(), 'from': {}}
self.index = 0
self.import_index = -1
self._first_comment_index_start = -1
self._first_comment_index_end = -1
self._parse()
if self.import_index != -1:
self._add_formatted_imports()
self.length_change = len(self.out_lines) - self.original_length
while self.out_lines and self.out_lines[-1].strip() == "":
self.out_lines.pop(-1)
self.out_lines.append("")
self.output = "\n".join(self.out_lines)
if self.config.get('atomic', False):
try:
compile(self._strip_top_comments(self.out_lines), self.file_path, 'exec', 0, 1)
except SyntaxError:
self.output = file_contents
self.incorrectly_sorted = True
try:
compile(self._strip_top_comments(self.in_lines), self.file_path, 'exec', 0, 1)
print("ERROR: {0} isort would have introduced syntax errors, please report to the project!". \
format(self.file_path))
except SyntaxError:
print("ERROR: {0} File contains syntax errors.".format(self.file_path))
return
if check:
if self.output.replace("\n", "").replace(" ", "") == file_contents.replace("\n", "").replace(" ", ""):
if self.config['verbose']:
print("SUCCESS: {0} Everything Looks Good!".format(self.file_path))
else:
print("ERROR: {0} Imports are incorrectly sorted.".format(self.file_path))
self.incorrectly_sorted = True
if show_diff or self.config.get('show_diff', False) is True:
self._show_diff(file_contents)
return
if show_diff or self.config.get('show_diff', False) is True:
self._show_diff(file_contents)
elif write_to_stdout:
stdout.write(self.output)
elif file_name:
if ask_to_apply:
if self.output == file_contents:
return
self._show_diff(file_contents)
answer = None
while answer not in ('yes', 'y', 'no', 'n', 'quit', 'q'):
answer = input("Apply suggested changes to '{0}' [y/n/q]?".format(self.file_path)).lower()
if answer in ('no', 'n'):
return
if answer in ('quit', 'q'):
sys.exit(1)
with io.open(self.file_path, encoding=self.file_encoding, mode='w') as output_file:
output_file.write(self.output)
def _show_diff(self, file_contents):
for line in unified_diff(
file_contents.splitlines(1),
self.output.splitlines(1),
fromfile=self.file_path + ':before',
tofile=self.file_path + ':after',
fromfiledate=str(datetime.fromtimestamp(os.path.getmtime(self.file_path))
if self.file_path else datetime.now()),
tofiledate=str(datetime.now())
):
stdout.write(line)
@staticmethod
def _strip_top_comments(lines):
"""Strips # comments that exist at the top of the given lines"""
lines = copy.copy(lines)
while lines and lines[0].startswith("#"):
lines = lines[1:]
return "\n".join(lines)
def place_module(self, module_name):
"""Tries to determine if a module is a python std import, third party import, or project code:
if it can't determine - it assumes it is project code
"""
for forced_separate in self.config['forced_separate']:
# Ensure all forced_separate patterns will match to end of string
path_glob = forced_separate
if not forced_separate.endswith('*'):
path_glob = '%s*' % forced_separate
if fnmatch(module_name, path_glob) or fnmatch(module_name, '.' + path_glob):
return forced_separate
if module_name.startswith("."):
return self.sections.LOCALFOLDER
# Try to find most specific placement instruction match (if any)
parts = module_name.split('.')
module_names_to_check = ['.'.join(parts[:first_k]) for first_k in range(len(parts), 0, -1)]
for module_name_to_check in module_names_to_check:
for placement in reversed(self.sections):
known_placement = KNOWN_SECTION_MAPPING.get(placement, placement)
config_key = 'known_{0}'.format(known_placement.lower())
if module_name_to_check in self.config.get(config_key, []):
return placement
paths = PYTHONPATH
virtual_env = self.config.get('virtual_env') or os.environ.get('VIRTUAL_ENV')
virtual_env_src = False
if virtual_env:
paths += [path for path in glob('{0}/lib/python*/site-packages'.format(virtual_env))
if path not in paths]
paths += [path for path in glob('{0}/src/*'.format(virtual_env)) if os.path.isdir(path)]
virtual_env_src = '{0}/src/'.format(virtual_env)
for prefix in paths:
module_path = "/".join((prefix, module_name.replace(".", "/")))
package_path = "/".join((prefix, module_name.split(".")[0]))
if (os.path.exists(module_path + ".py") or os.path.exists(module_path + ".so") or
(os.path.exists(package_path) and os.path.isdir(package_path))):
if ('site-packages' in prefix or 'dist-packages' in prefix or
(virtual_env and virtual_env_src in prefix)):
return self.sections.THIRDPARTY
elif 'python2' in prefix.lower() or 'python3' in prefix.lower():
return self.sections.STDLIB
else:
return self.config['default_section']
return self.config['default_section']
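    # Editor's note (illustrative, not in the original source): place_module
    # checks the most specific dotted prefix first, e.g. 'my_lib.submodule' is
    # tested as 'my_lib.submodule' and then 'my_lib' against each configured
    # known_<section> list, so a more specific setting wins over a broader one
    # before any filesystem-path lookup is attempted.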
def _get_line(self):
"""Returns the current line from the file while incrementing the index."""
line = self.in_lines[self.index]
self.index += 1
return line
@staticmethod
def _import_type(line):
"""If the current line is an import line it will return its type (from or straight)"""
if "isort:skip" in line:
return
elif line.startswith('import '):
return "straight"
elif line.startswith('from '):
return "from"
def _at_end(self):
"""returns True if we are at the end of the file."""
return self.index == self.number_of_lines
@staticmethod
def _module_key(module_name, config, sub_imports=False, ignore_case=False):
prefix = ""
if ignore_case:
module_name = str(module_name).lower()
else:
module_name = str(module_name)
if sub_imports and config['order_by_type']:
if module_name.isupper() and len(module_name) > 1:
prefix = "A"
elif module_name[0:1].isupper():
prefix = "B"
else:
prefix = "C"
module_name = module_name.lower()
return "{0}{1}{2}".format(module_name in config['force_to_top'] and "A" or "B", prefix,
config['length_sort'] and (str(len(module_name)) + ":" + module_name) or module_name)
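    # Editor's sketch (hypothetical values, assuming length_sort is off and the
    # name is not in force_to_top): with order_by_type enabled, the generated
    # keys sort constants before classes before plain names, e.g.
    #     _module_key('VERSION', config, sub_imports=True)  -> 'BAversion'
    #     _module_key('MyClass', config, sub_imports=True)  -> 'BBmyclass'
    #     _module_key('helper', config, sub_imports=True)   -> 'BChelper'
    # (the leading 'B' flips to 'A' for names listed in force_to_top).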
def _add_comments(self, comments, original_string=""):
"""
Returns a string with comments added
"""
return comments and "{0} # {1}".format(self._strip_comments(original_string)[0],
"; ".join(comments)) or original_string
def _wrap(self, line):
"""
Returns an import wrapped to the specified line-length, if possible.
"""
wrap_mode = self.config.get('multi_line_output', 0)
if len(line) > self.config['line_length'] and wrap_mode != settings.WrapModes.NOQA:
for splitter in ("import", "."):
exp = r"\b" + re.escape(splitter) + r"\b"
if re.search(exp, line) and not line.strip().startswith(splitter):
line_parts = re.split(exp, line)
next_line = []
while (len(line) + 2) > (self.config['wrap_length'] or self.config['line_length']) and line_parts:
next_line.append(line_parts.pop())
line = splitter.join(line_parts)
if not line:
line = next_line.pop()
cont_line = self._wrap(self.config['indent'] + splitter.join(next_line).lstrip())
if self.config['use_parentheses']:
return "{0}{1} (\n{2})".format(line, splitter, cont_line)
return "{0}{1} \\\n{2}".format(line, splitter, cont_line)
elif len(line) > self.config['line_length'] and wrap_mode == settings.WrapModes.NOQA:
if "# NOQA" not in line:
return "{0} # NOQA".format(line)
return line
def _add_straight_imports(self, straight_modules, section, section_output):
for module in straight_modules:
if module in self.remove_imports:
continue
if module in self.as_map:
import_definition = "import {0} as {1}".format(module, self.as_map[module])
else:
import_definition = "import {0}".format(module)
comments_above = self.comments['above']['straight'].pop(module, None)
if comments_above:
section_output.extend(comments_above)
section_output.append(self._add_comments(self.comments['straight'].get(module), import_definition))
def _add_from_imports(self, from_modules, section, section_output, ignore_case):
for module in from_modules:
if module in self.remove_imports:
continue
import_start = "from {0} import ".format(module)
from_imports = list(self.imports[section]['from'][module])
from_imports = nsorted(from_imports, key=lambda key: self._module_key(key, self.config, True, ignore_case))
if self.remove_imports:
from_imports = [line for line in from_imports if not "{0}.{1}".format(module, line) in
self.remove_imports]
for from_import in copy.copy(from_imports):
submodule = module + "." + from_import
import_as = self.as_map.get(submodule, False)
if import_as:
import_definition = "{0} as {1}".format(from_import, import_as)
if self.config['combine_as_imports'] and not ("*" in from_imports and
self.config['combine_star']):
from_imports[from_imports.index(from_import)] = import_definition
else:
import_statement = self._wrap(import_start + import_definition)
comments = self.comments['straight'].get(submodule)
import_statement = self._add_comments(comments, import_statement)
section_output.append(import_statement)
from_imports.remove(from_import)
if from_imports:
comments = self.comments['from'].pop(module, ())
if "*" in from_imports and self.config['combine_star']:
import_statement = self._wrap(self._add_comments(comments, "{0}*".format(import_start)))
elif self.config['force_single_line']:
import_statements = []
for from_import in from_imports:
single_import_line = self._add_comments(comments, import_start + from_import)
comment = self.comments['nested'].get(module, {}).pop(from_import, None)
if comment:
single_import_line += "{0} {1}".format(comments and ";" or " #", comment)
import_statements.append(self._wrap(single_import_line))
comments = None
import_statement = "\n".join(import_statements)
else:
star_import = False
if "*" in from_imports:
section_output.append(self._add_comments(comments, "{0}*".format(import_start)))
from_imports.remove('*')
star_import = True
comments = None
for from_import in copy.copy(from_imports):
comment = self.comments['nested'].get(module, {}).pop(from_import, None)
if comment:
single_import_line = self._add_comments(comments, import_start + from_import)
single_import_line += "{0} {1}".format(comments and ";" or " #", comment)
above_comments = self.comments['above']['from'].pop(module, None)
if above_comments:
section_output.extend(above_comments)
section_output.append(self._wrap(single_import_line))
from_imports.remove(from_import)
comments = None
if star_import:
import_statement = import_start + (", ").join(from_imports)
else:
import_statement = self._add_comments(comments, import_start + (", ").join(from_imports))
if not from_imports:
import_statement = ""
if len(from_imports) > 1 and (
len(import_statement) > self.config['line_length']
or self.config.get('force_grid_wrap')
):
output_mode = settings.WrapModes._fields[self.config.get('multi_line_output',
0)].lower()
formatter = getattr(self, "_output_" + output_mode, self._output_grid)
dynamic_indent = " " * (len(import_start) + 1)
indent = self.config['indent']
line_length = self.config['wrap_length'] or self.config['line_length']
import_statement = formatter(import_start, copy.copy(from_imports),
dynamic_indent, indent, line_length, comments)
if self.config['balanced_wrapping']:
lines = import_statement.split("\n")
line_count = len(lines)
if len(lines) > 1:
minimum_length = min([len(line) for line in lines[:-1]])
else:
minimum_length = 0
new_import_statement = import_statement
while (len(lines[-1]) < minimum_length and
len(lines) == line_count and line_length > 10):
import_statement = new_import_statement
line_length -= 1
new_import_statement = formatter(import_start, copy.copy(from_imports),
dynamic_indent, indent, line_length, comments)
lines = new_import_statement.split("\n")
elif len(import_statement) > self.config['line_length']:
import_statement = self._wrap(import_statement)
if import_statement:
above_comments = self.comments['above']['from'].pop(module, None)
if above_comments:
section_output.extend(above_comments)
section_output.append(import_statement)
def _add_formatted_imports(self):
"""Adds the imports back to the file.
(at the index of the first import) sorted alphabetically and split between groups
"""
sort_ignore_case = self.config.get('force_alphabetical_sort_within_sections', False)
sections = itertools.chain(self.sections, self.config['forced_separate'])
if self.config.get('no_sections', False):
self.imports['no_sections'] = {'straight': [], 'from': {}}
for section in sections:
self.imports['no_sections']['straight'].extend(self.imports[section].get('straight', []))
self.imports['no_sections']['from'].update(self.imports[section].get('from', {}))
sections = ('no_sections', )
output = []
for section in sections:
straight_modules = list(self.imports[section]['straight'])
straight_modules = nsorted(straight_modules, key=lambda key: self._module_key(key, self.config))
from_modules = sorted(list(self.imports[section]['from'].keys()))
            from_modules = nsorted(from_modules, key=lambda key: self._module_key(key, self.config))
section_output = []
if self.config.get('from_first', False):
self._add_from_imports(from_modules, section, section_output, sort_ignore_case)
if self.config.get('lines_between_types', 0) and from_modules and straight_modules:
section_output.extend([''] * self.config['lines_between_types'])
self._add_straight_imports(straight_modules, section, section_output)
else:
self._add_straight_imports(straight_modules, section, section_output)
if self.config.get('lines_between_types', 0) and from_modules and straight_modules:
section_output.extend([''] * self.config['lines_between_types'])
self._add_from_imports(from_modules, section, section_output, sort_ignore_case)
if self.config.get('force_sort_within_sections', False):
def by_module(line):
line = re.sub('^from ', '', line)
line = re.sub('^import ', '', line)
if not self.config['order_by_type']:
line = line.lower()
return line
section_output = nsorted(section_output, key=by_module)
if section_output:
section_name = section
if section_name in self.place_imports:
self.place_imports[section_name] = section_output
continue
section_title = self.config.get('import_heading_' + str(section_name).lower(), '')
if section_title:
section_comment = "# {0}".format(section_title)
if not section_comment in self.out_lines[0:1]:
section_output.insert(0, section_comment)
output += section_output + ([''] * self.config['lines_between_sections'])
while [character.strip() for character in output[-1:]] == [""]:
output.pop()
output_at = 0
if self.import_index < self.original_length:
output_at = self.import_index
elif self._first_comment_index_end != -1 and self._first_comment_index_start <= 2:
output_at = self._first_comment_index_end
self.out_lines[output_at:0] = output
imports_tail = output_at + len(output)
while [character.strip() for character in self.out_lines[imports_tail: imports_tail + 1]] == [""]:
self.out_lines.pop(imports_tail)
if len(self.out_lines) > imports_tail:
next_construct = ""
self._in_quote = False
for line in self.out_lines[imports_tail:]:
if not self._skip_line(line) and not line.strip().startswith("#") and line.strip():
next_construct = line
break
if self.config['lines_after_imports'] != -1:
self.out_lines[imports_tail:0] = ["" for line in range(self.config['lines_after_imports'])]
elif next_construct.startswith("def") or next_construct.startswith("class") or \
next_construct.startswith("@"):
self.out_lines[imports_tail:0] = ["", ""]
else:
self.out_lines[imports_tail:0] = [""]
if self.place_imports:
new_out_lines = []
for index, line in enumerate(self.out_lines):
new_out_lines.append(line)
if line in self.import_placements:
new_out_lines.extend(self.place_imports[self.import_placements[line]])
                    if index + 1 >= len(self.out_lines) or self.out_lines[index + 1].strip() != "":
new_out_lines.append("")
self.out_lines = new_out_lines
def _output_grid(self, statement, imports, white_space, indent, line_length, comments):
statement += "(" + imports.pop(0)
while imports:
next_import = imports.pop(0)
next_statement = self._add_comments(comments, statement + ", " + next_import)
if len(next_statement.split("\n")[-1]) + 1 > line_length:
statement = (self._add_comments(comments, "{0},".format(statement)) +
"\n{0}{1}".format(white_space, next_import))
comments = None
else:
statement += ", " + next_import
return statement + ("," if self.config['include_trailing_comma'] else "") + ")"
def _output_vertical(self, statement, imports, white_space, indent, line_length, comments):
first_import = self._add_comments(comments, imports.pop(0) + ",") + "\n" + white_space
return "{0}({1}{2}{3})".format(
statement,
first_import,
(",\n" + white_space).join(imports),
"," if self.config['include_trailing_comma'] else "",
)
def _output_hanging_indent(self, statement, imports, white_space, indent, line_length, comments):
statement += imports.pop(0)
while imports:
next_import = imports.pop(0)
next_statement = self._add_comments(comments, statement + ", " + next_import)
if len(next_statement.split("\n")[-1]) + 3 > line_length:
next_statement = (self._add_comments(comments, "{0}, \\".format(statement)) +
"\n{0}{1}".format(indent, next_import))
comments = None
statement = next_statement
return statement
def _output_vertical_hanging_indent(self, statement, imports, white_space, indent, line_length, comments):
return "{0}({1}\n{2}{3}{4}\n)".format(
statement,
self._add_comments(comments),
indent,
(",\n" + indent).join(imports),
"," if self.config['include_trailing_comma'] else "",
)
def _output_vertical_grid_common(self, statement, imports, white_space, indent, line_length, comments):
statement += self._add_comments(comments, "(") + "\n" + indent + imports.pop(0)
while imports:
next_import = imports.pop(0)
next_statement = "{0}, {1}".format(statement, next_import)
if len(next_statement.split("\n")[-1]) + 1 > line_length:
next_statement = "{0},\n{1}{2}".format(statement, indent, next_import)
statement = next_statement
if self.config['include_trailing_comma']:
statement += ','
return statement
def _output_vertical_grid(self, statement, imports, white_space, indent, line_length, comments):
return self._output_vertical_grid_common(statement, imports, white_space, indent, line_length, comments) + ")"
def _output_vertical_grid_grouped(self, statement, imports, white_space, indent, line_length, comments):
return self._output_vertical_grid_common(statement, imports, white_space, indent, line_length, comments) + "\n)"
def _output_noqa(self, statement, imports, white_space, indent, line_length, comments):
retval = '{0}{1}'.format(statement, ', '.join(imports))
comment_str = ' '.join(comments)
if comments:
if len(retval) + 4 + len(comment_str) <= line_length:
return '{0} # {1}'.format(retval, comment_str)
else:
if len(retval) <= line_length:
return retval
if comments:
if "NOQA" in comments:
return '{0} # {1}'.format(retval, comment_str)
else:
return '{0} # NOQA {1}'.format(retval, comment_str)
else:
return '{0} # NOQA'.format(retval)
@staticmethod
def _strip_comments(line, comments=None):
"""Removes comments from import line."""
if comments is None:
comments = []
new_comments = False
comment_start = line.find("#")
if comment_start != -1:
comments.append(line[comment_start + 1:].strip())
new_comments = True
line = line[:comment_start]
return line, comments, new_comments
@staticmethod
def _format_simplified(import_line):
import_line = import_line.strip()
if import_line.startswith("from "):
import_line = import_line.replace("from ", "")
import_line = import_line.replace(" import ", ".")
elif import_line.startswith("import "):
import_line = import_line.replace("import ", "")
return import_line
@staticmethod
def _format_natural(import_line):
import_line = import_line.strip()
if not import_line.startswith("from ") and not import_line.startswith("import "):
if not "." in import_line:
return "import {0}".format(import_line)
parts = import_line.split(".")
end = parts.pop(-1)
return "from {0} import {1}".format(".".join(parts), end)
return import_line
def _skip_line(self, line):
skip_line = self._in_quote
if self.index == 1 and line.startswith("#"):
self._in_top_comment = True
return True
elif self._in_top_comment:
if not line.startswith("#"):
self._in_top_comment = False
self._first_comment_index_end = self.index
if '"' in line or "'" in line:
index = 0
if self._first_comment_index_start == -1 and (line.startswith('"') or line.startswith("'")):
self._first_comment_index_start = self.index
while index < len(line):
if line[index] == "\\":
index += 1
elif self._in_quote:
if line[index:index + len(self._in_quote)] == self._in_quote:
self._in_quote = False
if self._first_comment_index_end < self._first_comment_index_start:
self._first_comment_index_end = self.index
elif line[index] in ("'", '"'):
long_quote = line[index:index + 3]
if long_quote in ('"""', "'''"):
self._in_quote = long_quote
index += 2
else:
self._in_quote = line[index]
elif line[index] == "#":
break
index += 1
return skip_line or self._in_quote or self._in_top_comment
def _strip_syntax(self, import_string):
import_string = import_string.replace("_import", "[[i]]")
for remove_syntax in ['\\', '(', ')', ',']:
import_string = import_string.replace(remove_syntax, " ")
import_list = import_string.split()
for key in ('from', 'import'):
if key in import_list:
import_list.remove(key)
import_string = ' '.join(import_list)
import_string = import_string.replace("[[i]]", "_import")
return import_string.replace("{ ", "{|").replace(" }", "|}")
def _parse(self):
"""Parses a python file taking out and categorizing imports."""
self._in_quote = False
self._in_top_comment = False
while not self._at_end():
line = self._get_line()
statement_index = self.index
skip_line = self._skip_line(line)
if line in self._section_comments and not skip_line:
if self.import_index == -1:
self.import_index = self.index - 1
continue
if "isort:imports-" in line and line.startswith("#"):
section = line.split("isort:imports-")[-1].split()[0].upper()
self.place_imports[section] = []
self.import_placements[line] = section
if ";" in line:
for part in (part.strip() for part in line.split(";")):
if part and not part.startswith("from ") and not part.startswith("import "):
skip_line = True
import_type = self._import_type(line)
if not import_type or skip_line:
self.out_lines.append(line)
continue
for line in (line.strip() for line in line.split(";")):
import_type = self._import_type(line)
if not import_type:
self.out_lines.append(line)
continue
line = line.replace("\t", " ")
if self.import_index == -1:
self.import_index = self.index - 1
nested_comments = {}
import_string, comments, new_comments = self._strip_comments(line)
stripped_line = [part for part in self._strip_syntax(import_string).strip().split(" ") if part]
if import_type == "from" and len(stripped_line) == 2 and stripped_line[1] != "*" and new_comments:
nested_comments[stripped_line[-1]] = comments[0]
if "(" in line and not self._at_end():
while not line.strip().endswith(")") and not self._at_end():
line, comments, new_comments = self._strip_comments(self._get_line(), comments)
stripped_line = self._strip_syntax(line).strip()
if import_type == "from" and stripped_line and not " " in stripped_line and new_comments:
nested_comments[stripped_line] = comments[-1]
import_string += "\n" + line
else:
while line.strip().endswith("\\"):
line, comments, new_comments = self._strip_comments(self._get_line(), comments)
stripped_line = self._strip_syntax(line).strip()
if import_type == "from" and stripped_line and not " " in stripped_line and new_comments:
nested_comments[stripped_line] = comments[-1]
if import_string.strip().endswith(" import") or line.strip().startswith("import "):
import_string += "\n" + line
else:
import_string = import_string.rstrip().rstrip("\\") + line.lstrip()
if import_type == "from":
import_string = import_string.replace("import(", "import (")
parts = import_string.split(" import ")
from_import = parts[0].split(" ")
import_string = " import ".join([from_import[0] + " " + "".join(from_import[1:])] + parts[1:])
imports = [item.replace("{|", "{ ").replace("|}", " }") for item in
self._strip_syntax(import_string).split()]
if "as" in imports and (imports.index('as') + 1) < len(imports):
while "as" in imports:
index = imports.index('as')
if import_type == "from":
module = imports[0] + "." + imports[index - 1]
self.as_map[module] = imports[index + 1]
else:
module = imports[index - 1]
self.as_map[module] = imports[index + 1]
if not self.config['combine_as_imports']:
self.comments['straight'][module] = comments
comments = []
del imports[index:index + 2]
if import_type == "from":
import_from = imports.pop(0)
placed_module = self.place_module(import_from)
if placed_module == '':
print(
"WARNING: could not place module {0} of line {1} --"
" Do you need to define a default section?".format(import_from, line)
)
root = self.imports[placed_module][import_type]
for import_name in imports:
associated_comment = nested_comments.get(import_name)
if associated_comment:
self.comments['nested'].setdefault(import_from, {})[import_name] = associated_comment
comments.pop(comments.index(associated_comment))
if comments:
self.comments['from'].setdefault(import_from, []).extend(comments)
if len(self.out_lines) > max(self.import_index, self._first_comment_index_end, 1) - 1:
last = self.out_lines and self.out_lines[-1].rstrip() or ""
while (last.startswith("#") and not last.endswith('"""') and not last.endswith("'''") and not
'isort:imports-' in last):
self.comments['above']['from'].setdefault(import_from, []).insert(0, self.out_lines.pop(-1))
if len(self.out_lines) > max(self.import_index - 1, self._first_comment_index_end, 1) - 1:
last = self.out_lines[-1].rstrip()
else:
last = ""
if statement_index - 1 == self.import_index:
self.import_index -= len(self.comments['above']['from'].get(import_from, []))
if root.get(import_from, False):
root[import_from].update(imports)
else:
root[import_from] = set(imports)
else:
for module in imports:
if comments:
self.comments['straight'][module] = comments
comments = None
if len(self.out_lines) > max(self.import_index, self._first_comment_index_end, 1) - 1:
last = self.out_lines and self.out_lines[-1].rstrip() or ""
while (last.startswith("#") and not last.endswith('"""') and not last.endswith("'''")
and not 'isort:imports-' in last):
self.comments['above']['straight'].setdefault(module, []).insert(0,
self.out_lines.pop(-1))
if len(self.out_lines) > max(self.import_index - 1, self._first_comment_index_end,
1) - 1:
last = self.out_lines[-1].rstrip()
else:
last = ""
if self.index - 1 == self.import_index:
self.import_index -= len(self.comments['above']['straight'].get(module, []))
placed_module = self.place_module(module)
if placed_module == '':
print(
"WARNING: could not place module {0} of line {1} --"
" Do you need to define a default section?".format(import_from, line)
)
self.imports[placed_module][import_type].add(module)
def coding_check(fname, default='utf-8'):
# see https://www.python.org/dev/peps/pep-0263/
pattern = re.compile(br'coding[:=]\s*([-\w.]+)')
coding = default
with io.open(fname, 'rb') as f:
for line_number, line in enumerate(f, 1):
groups = re.findall(pattern, line)
if groups:
coding = groups[0].decode('ascii')
break
            if line_number >= 2:
break
return coding
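# Editor's usage sketch (not part of the original module): per PEP 263,
# coding_check only honours a declaration near the top of the file, e.g. a file
# beginning with "# -*- coding: latin-1 -*-" yields 'latin-1', while a file
# with no such marker falls back to the 'utf-8' default.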
|
julienmalard/Tikon | refs/heads/master | setup.py | 1 | from setuptools import setup, find_packages
def leer(arch):
with open(arch, 'r', encoding='utf-8') as d:
return d.read()
setup(
name='tikon',
version=leer('tikon/versión.txt').strip(),
packages=find_packages(),
url='https://tikon.readthedocs.io',
download_url='https://github.com/julienmalard/Tikon',
license='GNU 3',
author='Julien Jean Malard',
author_email='[email protected]',
description='Modelos de redes agroecológicas',
long_description=leer('README.md'),
install_requires=[
'numpy', 'matplotlib', 'scipy', 'SALib', 'spotpy', 'chardet', 'pandas', 'taqdir', 'bibtexparser',
'shapely', 'ennikkai', 'xarray', 'seaborn', 'pyproj', 'geopy', 'pcse', 'tradssat', 'pyshp', 'pillow', 'babel'
],
classifiers=[
'License :: OSI Approved :: GNU General Public License v3 (GPLv3)',
'Programming Language :: Python :: 3 :: Only',
'Natural Language :: Spanish'
],
include_package_data=True,
package_data={
        # Include these files from the packages:
'': ['*.csv', 'versión.txt', '*.json', '*.txt'],
},
)
|
jlnaudin/x-drone | refs/heads/master | MissionPlanner-master/packages/IronPython.StdLib.2.7.4/content/Lib/collections.py | 76 | __all__ = ['Counter', 'deque', 'defaultdict', 'namedtuple', 'OrderedDict']
# For bootstrapping reasons, the collection ABCs are defined in _abcoll.py.
# They should however be considered an integral part of collections.py.
from _abcoll import *
import _abcoll
__all__ += _abcoll.__all__
from _collections import deque, defaultdict
from operator import itemgetter as _itemgetter
from keyword import iskeyword as _iskeyword
import sys as _sys
import heapq as _heapq
from itertools import repeat as _repeat, chain as _chain, starmap as _starmap
try:
from thread import get_ident as _get_ident
except ImportError:
from dummy_thread import get_ident as _get_ident
################################################################################
### OrderedDict
################################################################################
class OrderedDict(dict):
'Dictionary that remembers insertion order'
# An inherited dict maps keys to values.
# The inherited dict provides __getitem__, __len__, __contains__, and get.
# The remaining methods are order-aware.
# Big-O running times for all methods are the same as regular dictionaries.
# The internal self.__map dict maps keys to links in a doubly linked list.
# The circular doubly linked list starts and ends with a sentinel element.
# The sentinel element never gets deleted (this simplifies the algorithm).
# Each link is stored as a list of length three: [PREV, NEXT, KEY].
def __init__(self, *args, **kwds):
'''Initialize an ordered dictionary. The signature is the same as
regular dictionaries, but keyword arguments are not recommended because
their insertion order is arbitrary.
'''
if len(args) > 1:
raise TypeError('expected at most 1 arguments, got %d' % len(args))
try:
self.__root
except AttributeError:
self.__root = root = [] # sentinel node
root[:] = [root, root, None]
self.__map = {}
self.__update(*args, **kwds)
def __setitem__(self, key, value, PREV=0, NEXT=1, dict_setitem=dict.__setitem__):
'od.__setitem__(i, y) <==> od[i]=y'
# Setting a new item creates a new link at the end of the linked list,
# and the inherited dictionary is updated with the new key/value pair.
if key not in self:
root = self.__root
last = root[PREV]
last[NEXT] = root[PREV] = self.__map[key] = [last, root, key]
dict_setitem(self, key, value)
def __delitem__(self, key, PREV=0, NEXT=1, dict_delitem=dict.__delitem__):
'od.__delitem__(y) <==> del od[y]'
# Deleting an existing item uses self.__map to find the link which gets
# removed by updating the links in the predecessor and successor nodes.
dict_delitem(self, key)
link_prev, link_next, key = self.__map.pop(key)
link_prev[NEXT] = link_next
link_next[PREV] = link_prev
def __iter__(self):
'od.__iter__() <==> iter(od)'
# Traverse the linked list in order.
NEXT, KEY = 1, 2
root = self.__root
curr = root[NEXT]
while curr is not root:
yield curr[KEY]
curr = curr[NEXT]
def __reversed__(self):
'od.__reversed__() <==> reversed(od)'
# Traverse the linked list in reverse order.
PREV, KEY = 0, 2
root = self.__root
curr = root[PREV]
while curr is not root:
yield curr[KEY]
curr = curr[PREV]
def clear(self):
'od.clear() -> None. Remove all items from od.'
for node in self.__map.itervalues():
del node[:]
root = self.__root
root[:] = [root, root, None]
self.__map.clear()
dict.clear(self)
# -- the following methods do not depend on the internal structure --
def keys(self):
'od.keys() -> list of keys in od'
return list(self)
def values(self):
'od.values() -> list of values in od'
return [self[key] for key in self]
def items(self):
'od.items() -> list of (key, value) pairs in od'
return [(key, self[key]) for key in self]
def iterkeys(self):
'od.iterkeys() -> an iterator over the keys in od'
return iter(self)
def itervalues(self):
        'od.itervalues() -> an iterator over the values in od'
for k in self:
yield self[k]
def iteritems(self):
        'od.iteritems() -> an iterator over the (key, value) pairs in od'
for k in self:
yield (k, self[k])
update = MutableMapping.update
__update = update # let subclasses override update without breaking __init__
__marker = object()
def pop(self, key, default=__marker):
'''od.pop(k[,d]) -> v, remove specified key and return the corresponding
value. If key is not found, d is returned if given, otherwise KeyError
is raised.
'''
if key in self:
result = self[key]
del self[key]
return result
if default is self.__marker:
raise KeyError(key)
return default
def setdefault(self, key, default=None):
'od.setdefault(k[,d]) -> od.get(k,d), also set od[k]=d if k not in od'
if key in self:
return self[key]
self[key] = default
return default
def popitem(self, last=True):
'''od.popitem() -> (k, v), return and remove a (key, value) pair.
Pairs are returned in LIFO order if last is true or FIFO order if false.
'''
if not self:
raise KeyError('dictionary is empty')
key = next(reversed(self) if last else iter(self))
value = self.pop(key)
return key, value
def __repr__(self, _repr_running={}):
'od.__repr__() <==> repr(od)'
call_key = id(self), _get_ident()
if call_key in _repr_running:
return '...'
_repr_running[call_key] = 1
try:
if not self:
return '%s()' % (self.__class__.__name__,)
return '%s(%r)' % (self.__class__.__name__, self.items())
finally:
del _repr_running[call_key]
def __reduce__(self):
'Return state information for pickling'
items = [[k, self[k]] for k in self]
inst_dict = vars(self).copy()
for k in vars(OrderedDict()):
inst_dict.pop(k, None)
if inst_dict:
return (self.__class__, (items,), inst_dict)
return self.__class__, (items,)
def copy(self):
'od.copy() -> a shallow copy of od'
return self.__class__(self)
@classmethod
def fromkeys(cls, iterable, value=None):
'''OD.fromkeys(S[, v]) -> New ordered dictionary with keys from S.
If not specified, the value defaults to None.
'''
self = cls()
for key in iterable:
self[key] = value
return self
def __eq__(self, other):
'''od.__eq__(y) <==> od==y. Comparison to another OD is order-sensitive
while comparison to a regular mapping is order-insensitive.
'''
if isinstance(other, OrderedDict):
return len(self)==len(other) and self.items() == other.items()
return dict.__eq__(self, other)
def __ne__(self, other):
'od.__ne__(y) <==> od!=y'
return not self == other
# -- the following methods support python 3.x style dictionary views --
def viewkeys(self):
"od.viewkeys() -> a set-like object providing a view on od's keys"
return KeysView(self)
def viewvalues(self):
"od.viewvalues() -> an object providing a view on od's values"
return ValuesView(self)
def viewitems(self):
"od.viewitems() -> a set-like object providing a view on od's items"
return ItemsView(self)
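# Editor's sketch (illustrative, not in the upstream module): OrderedDict keeps
# insertion order while otherwise behaving like a dict, e.g.
#     d = OrderedDict([('b', 1), ('a', 2)])
#     d['c'] = 3
#     list(d.keys())        # -> ['b', 'a', 'c']
#     d.popitem()           # -> ('c', 3), LIFO by default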
################################################################################
### namedtuple
################################################################################
def namedtuple(typename, field_names, verbose=False, rename=False):
"""Returns a new subclass of tuple with named fields.
>>> Point = namedtuple('Point', 'x y')
>>> Point.__doc__ # docstring for the new class
'Point(x, y)'
>>> p = Point(11, y=22) # instantiate with positional args or keywords
>>> p[0] + p[1] # indexable like a plain tuple
33
>>> x, y = p # unpack like a regular tuple
>>> x, y
(11, 22)
    >>> p.x + p.y # fields also accessible by name
33
>>> d = p._asdict() # convert to a dictionary
>>> d['x']
11
>>> Point(**d) # convert from a dictionary
Point(x=11, y=22)
>>> p._replace(x=100) # _replace() is like str.replace() but targets named fields
Point(x=100, y=22)
"""
# Parse and validate the field names. Validation serves two purposes,
# generating informative error messages and preventing template injection attacks.
if isinstance(field_names, basestring):
field_names = field_names.replace(',', ' ').split() # names separated by whitespace and/or commas
field_names = tuple(map(str, field_names))
if rename:
names = list(field_names)
seen = set()
for i, name in enumerate(names):
if (not all(c.isalnum() or c=='_' for c in name) or _iskeyword(name)
or not name or name[0].isdigit() or name.startswith('_')
or name in seen):
names[i] = '_%d' % i
seen.add(name)
field_names = tuple(names)
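        # Editor's example (hypothetical input): with rename=True,
        # namedtuple('T', 'abc def abc') yields fields ('abc', '_1', '_2'):
        # 'def' is a keyword and the second 'abc' is a duplicate, so each is
        # replaced by an underscore-prefixed positional name.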
for name in (typename,) + field_names:
if not all(c.isalnum() or c=='_' for c in name):
raise ValueError('Type names and field names can only contain alphanumeric characters and underscores: %r' % name)
if _iskeyword(name):
raise ValueError('Type names and field names cannot be a keyword: %r' % name)
if name[0].isdigit():
raise ValueError('Type names and field names cannot start with a number: %r' % name)
seen_names = set()
for name in field_names:
if name.startswith('_') and not rename:
raise ValueError('Field names cannot start with an underscore: %r' % name)
if name in seen_names:
raise ValueError('Encountered duplicate field name: %r' % name)
seen_names.add(name)
# Create and fill-in the class template
numfields = len(field_names)
argtxt = repr(field_names).replace("'", "")[1:-1] # tuple repr without parens or quotes
reprtxt = ', '.join('%s=%%r' % name for name in field_names)
template = '''class %(typename)s(tuple):
'%(typename)s(%(argtxt)s)' \n
__slots__ = () \n
_fields = %(field_names)r \n
def __new__(_cls, %(argtxt)s):
'Create new instance of %(typename)s(%(argtxt)s)'
return _tuple.__new__(_cls, (%(argtxt)s)) \n
@classmethod
def _make(cls, iterable, new=tuple.__new__, len=len):
'Make a new %(typename)s object from a sequence or iterable'
result = new(cls, iterable)
if len(result) != %(numfields)d:
raise TypeError('Expected %(numfields)d arguments, got %%d' %% len(result))
return result \n
def __repr__(self):
'Return a nicely formatted representation string'
return '%(typename)s(%(reprtxt)s)' %% self \n
def _asdict(self):
'Return a new OrderedDict which maps field names to their values'
return OrderedDict(zip(self._fields, self)) \n
def _replace(_self, **kwds):
'Return a new %(typename)s object replacing specified fields with new values'
result = _self._make(map(kwds.pop, %(field_names)r, _self))
if kwds:
raise ValueError('Got unexpected field names: %%r' %% kwds.keys())
return result \n
def __getnewargs__(self):
'Return self as a plain tuple. Used by copy and pickle.'
return tuple(self) \n\n''' % locals()
for i, name in enumerate(field_names):
template += " %s = _property(_itemgetter(%d), doc='Alias for field number %d')\n" % (name, i, i)
if verbose:
print template
# Execute the template string in a temporary namespace and
# support tracing utilities by setting a value for frame.f_globals['__name__']
namespace = dict(_itemgetter=_itemgetter, __name__='namedtuple_%s' % typename,
OrderedDict=OrderedDict, _property=property, _tuple=tuple)
try:
exec template in namespace
except SyntaxError, e:
raise SyntaxError(e.message + ':\n' + template)
result = namespace[typename]
# For pickling to work, the __module__ variable needs to be set to the frame
    # where the named tuple is created.  Bypass this step in environments where
# sys._getframe is not defined (Jython for example) or sys._getframe is not
# defined for arguments greater than 0 (IronPython).
try:
result.__module__ = _sys._getframe(1).f_globals.get('__name__', '__main__')
except (AttributeError, ValueError):
pass
return result
########################################################################
### Counter
########################################################################
class Counter(dict):
'''Dict subclass for counting hashable items. Sometimes called a bag
or multiset. Elements are stored as dictionary keys and their counts
are stored as dictionary values.
>>> c = Counter('abcdeabcdabcaba') # count elements from a string
>>> c.most_common(3) # three most common elements
[('a', 5), ('b', 4), ('c', 3)]
>>> sorted(c) # list all unique elements
['a', 'b', 'c', 'd', 'e']
>>> ''.join(sorted(c.elements())) # list elements with repetitions
'aaaaabbbbcccdde'
>>> sum(c.values()) # total of all counts
15
>>> c['a'] # count of letter 'a'
5
>>> for elem in 'shazam': # update counts from an iterable
... c[elem] += 1 # by adding 1 to each element's count
>>> c['a'] # now there are seven 'a'
7
>>> del c['b'] # remove all 'b'
>>> c['b'] # now there are zero 'b'
0
>>> d = Counter('simsalabim') # make another counter
>>> c.update(d) # add in the second counter
>>> c['a'] # now there are nine 'a'
9
>>> c.clear() # empty the counter
>>> c
Counter()
Note: If a count is set to zero or reduced to zero, it will remain
in the counter until the entry is deleted or the counter is cleared:
>>> c = Counter('aaabbc')
>>> c['b'] -= 2 # reduce the count of 'b' by two
>>> c.most_common() # 'b' is still in, but its count is zero
[('a', 3), ('c', 1), ('b', 0)]
'''
# References:
# http://en.wikipedia.org/wiki/Multiset
# http://www.gnu.org/software/smalltalk/manual-base/html_node/Bag.html
# http://www.demo2s.com/Tutorial/Cpp/0380__set-multiset/Catalog0380__set-multiset.htm
# http://code.activestate.com/recipes/259174/
# Knuth, TAOCP Vol. II section 4.6.3
def __init__(self, iterable=None, **kwds):
'''Create a new, empty Counter object. And if given, count elements
from an input iterable. Or, initialize the count from another mapping
of elements to their counts.
>>> c = Counter() # a new, empty counter
>>> c = Counter('gallahad') # a new counter from an iterable
>>> c = Counter({'a': 4, 'b': 2}) # a new counter from a mapping
>>> c = Counter(a=4, b=2) # a new counter from keyword args
'''
super(Counter, self).__init__()
self.update(iterable, **kwds)
def __missing__(self, key):
'The count of elements not in the Counter is zero.'
# Needed so that self[missing_item] does not raise KeyError
return 0
def most_common(self, n=None):
'''List the n most common elements and their counts from the most
common to the least. If n is None, then list all element counts.
>>> Counter('abcdeabcdabcaba').most_common(3)
[('a', 5), ('b', 4), ('c', 3)]
'''
# Emulate Bag.sortedByCount from Smalltalk
if n is None:
return sorted(self.iteritems(), key=_itemgetter(1), reverse=True)
return _heapq.nlargest(n, self.iteritems(), key=_itemgetter(1))
def elements(self):
'''Iterator over elements repeating each as many times as its count.
>>> c = Counter('ABCABC')
>>> sorted(c.elements())
['A', 'A', 'B', 'B', 'C', 'C']
# Knuth's example for prime factors of 1836: 2**2 * 3**3 * 17**1
>>> prime_factors = Counter({2: 2, 3: 3, 17: 1})
>>> product = 1
>>> for factor in prime_factors.elements(): # loop over factors
... product *= factor # and multiply them
>>> product
1836
Note, if an element's count has been set to zero or is a negative
number, elements() will ignore it.
'''
# Emulate Bag.do from Smalltalk and Multiset.begin from C++.
return _chain.from_iterable(_starmap(_repeat, self.iteritems()))
# Override dict methods where necessary
@classmethod
def fromkeys(cls, iterable, v=None):
# There is no equivalent method for counters because setting v=1
# means that no element can have a count greater than one.
raise NotImplementedError(
'Counter.fromkeys() is undefined. Use Counter(iterable) instead.')
def update(self, iterable=None, **kwds):
'''Like dict.update() but add counts instead of replacing them.
Source can be an iterable, a dictionary, or another Counter instance.
>>> c = Counter('which')
>>> c.update('witch') # add elements from another iterable
>>> d = Counter('watch')
>>> c.update(d) # add elements from another counter
>>> c['h'] # four 'h' in which, witch, and watch
4
'''
# The regular dict.update() operation makes no sense here because the
        # replace behavior results in some of the original untouched counts
        # being mixed in with the other counts for a mishmash that
        # doesn't have a straightforward interpretation in most counting
# contexts. Instead, we implement straight-addition. Both the inputs
# and outputs are allowed to contain zero and negative counts.
if iterable is not None:
if isinstance(iterable, Mapping):
if self:
self_get = self.get
for elem, count in iterable.iteritems():
self[elem] = self_get(elem, 0) + count
else:
super(Counter, self).update(iterable) # fast path when counter is empty
else:
self_get = self.get
for elem in iterable:
self[elem] = self_get(elem, 0) + 1
if kwds:
self.update(kwds)
def subtract(self, iterable=None, **kwds):
'''Like dict.update() but subtracts counts instead of replacing them.
Counts can be reduced below zero. Both the inputs and outputs are
allowed to contain zero and negative counts.
Source can be an iterable, a dictionary, or another Counter instance.
>>> c = Counter('which')
>>> c.subtract('witch') # subtract elements from another iterable
>>> c.subtract(Counter('watch')) # subtract elements from another counter
>>> c['h'] # 2 in which, minus 1 in witch, minus 1 in watch
0
>>> c['w'] # 1 in which, minus 1 in witch, minus 1 in watch
-1
'''
if iterable is not None:
self_get = self.get
if isinstance(iterable, Mapping):
for elem, count in iterable.items():
self[elem] = self_get(elem, 0) - count
else:
for elem in iterable:
self[elem] = self_get(elem, 0) - 1
if kwds:
self.subtract(kwds)
def copy(self):
'Return a shallow copy.'
return self.__class__(self)
def __reduce__(self):
return self.__class__, (dict(self),)
def __delitem__(self, elem):
'Like dict.__delitem__() but does not raise KeyError for missing values.'
if elem in self:
super(Counter, self).__delitem__(elem)
def __repr__(self):
if not self:
return '%s()' % self.__class__.__name__
items = ', '.join(map('%r: %r'.__mod__, self.most_common()))
return '%s({%s})' % (self.__class__.__name__, items)
# Multiset-style mathematical operations discussed in:
# Knuth TAOCP Volume II section 4.6.3 exercise 19
# and at http://en.wikipedia.org/wiki/Multiset
#
# Outputs guaranteed to only include positive counts.
#
# To strip negative and zero counts, add-in an empty counter:
# c += Counter()
def __add__(self, other):
'''Add counts from two counters.
>>> Counter('abbb') + Counter('bcc')
Counter({'b': 4, 'c': 2, 'a': 1})
'''
if not isinstance(other, Counter):
return NotImplemented
result = Counter()
for elem, count in self.items():
newcount = count + other[elem]
if newcount > 0:
result[elem] = newcount
for elem, count in other.items():
if elem not in self and count > 0:
result[elem] = count
return result
def __sub__(self, other):
''' Subtract count, but keep only results with positive counts.
>>> Counter('abbbc') - Counter('bccd')
Counter({'b': 2, 'a': 1})
'''
if not isinstance(other, Counter):
return NotImplemented
result = Counter()
for elem, count in self.items():
newcount = count - other[elem]
if newcount > 0:
result[elem] = newcount
for elem, count in other.items():
if elem not in self and count < 0:
result[elem] = 0 - count
return result
def __or__(self, other):
'''Union is the maximum of value in either of the input counters.
>>> Counter('abbb') | Counter('bcc')
Counter({'b': 3, 'c': 2, 'a': 1})
'''
if not isinstance(other, Counter):
return NotImplemented
result = Counter()
for elem, count in self.items():
other_count = other[elem]
newcount = other_count if count < other_count else count
if newcount > 0:
result[elem] = newcount
for elem, count in other.items():
if elem not in self and count > 0:
result[elem] = count
return result
def __and__(self, other):
''' Intersection is the minimum of corresponding counts.
>>> Counter('abbb') & Counter('bcc')
Counter({'b': 1})
'''
if not isinstance(other, Counter):
return NotImplemented
result = Counter()
for elem, count in self.items():
other_count = other[elem]
newcount = count if count < other_count else other_count
if newcount > 0:
result[elem] = newcount
return result
if __name__ == '__main__':
# verify that instances can be pickled
from cPickle import loads, dumps
Point = namedtuple('Point', 'x, y', True)
p = Point(x=10, y=20)
assert p == loads(dumps(p))
# test and demonstrate ability to override methods
class Point(namedtuple('Point', 'x y')):
__slots__ = ()
@property
def hypot(self):
return (self.x ** 2 + self.y ** 2) ** 0.5
def __str__(self):
return 'Point: x=%6.3f y=%6.3f hypot=%6.3f' % (self.x, self.y, self.hypot)
for p in Point(3, 4), Point(14, 5/7.):
print p
class Point(namedtuple('Point', 'x y')):
'Point class with optimized _make() and _replace() without error-checking'
__slots__ = ()
_make = classmethod(tuple.__new__)
def _replace(self, _map=map, **kwds):
return self._make(_map(kwds.get, ('x', 'y'), self))
print Point(11, 22)._replace(x=100)
Point3D = namedtuple('Point3D', Point._fields + ('z',))
print Point3D.__doc__
import doctest
TestResults = namedtuple('TestResults', 'failed attempted')
print TestResults(*doctest.testmod())
|
citrix-openstack-build/neutron | refs/heads/master | neutron/db/migration/__init__.py | 8 | # vim: tabstop=4 shiftwidth=4 softtabstop=4
#
# Copyright 2012 New Dream Network, LLC (DreamHost)
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
# @author: Mark McClain, DreamHost
def should_run(active_plugins, migrate_plugins):
if '*' in migrate_plugins:
return True
else:
return set(active_plugins) & set(migrate_plugins)
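# Editor's usage sketch (plugin names are illustrative): a migration tagged for
# specific plugins only runs when one of them is active, e.g.
#     should_run(['ml2'], ['*'])            -> True
#     should_run(['ml2'], ['openvswitch'])  -> set() (empty, treated as falsy)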
|
dcifuen/cloudbday | refs/heads/master | src/appengine_config.py | 1 | # -*- coding: utf-8 -*-
"""
App Engine specific config
"""
def namespace_manager_default_namespace_for_request():
"""
Handles the namespace resolution based on the environment and the domain
    of the logged-in user. This lets us test without touching production data
while we are in staging
:return: None if no user is logged in, staging-<domain> for staging,
just the domain otherwise
"""
from google.appengine.api import users
user = users.get_current_user()
if not user:
return None
domain = user.email().split('@', 1)[1]
from birthday import constants, get_environment
environment = get_environment()
namespace = 'staging-%s' % domain if environment == constants.ENV_STAGING \
else domain
return namespace
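# Editor's example (domain is illustrative): for a logged-in user
# '[email protected]' this returns 'example.com' in production and
# 'staging-example.com' when get_environment() reports ENV_STAGING.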
def gae_mini_profiler_should_profile_production():
"""Uncomment the first two lines to enable GAE Mini Profiler on production
for admin accounts"""
from google.appengine.api import users
return users.is_current_user_admin()
return False
def webapp_add_wsgi_middleware(app):
from google.appengine.ext.appstats import recording
app = recording.appstats_wsgi_middleware(app)
return app
|
ArduPilot/MissionPlanner | refs/heads/master | LogAnalyzer/py2exe/setup.py | 20 | import os
from distutils.core import setup
import py2exe
Mydata_files = []
for files in os.listdir('./tests/'):
f1 = './tests/' + files
if os.path.isfile(f1): # skip directories
f2 = 'tests', [f1]
Mydata_files.append(f2)
setup(
console=['runner.py'],
data_files = Mydata_files,
)
|
liwenlongonly/HelloPython | refs/heads/master | mysite/west/tests.py | 24123 | from django.test import TestCase
# Create your tests here.
|
saz/django-inplaceedit | refs/heads/master | inplaceeditform/tag_utils.py | 21 | # Copyright (c) 2010-2013 by Yaco Sistemas <[email protected]> or <[email protected]>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with this programe. If not, see <http://www.gnu.org/licenses/>.
from django import template
from django.utils.encoding import smart_str
from django.template.loader import render_to_string
def parse_args_kwargs(parser, token):
"""
Parse uniformly args and kwargs from a templatetag
Usage::
For parsing a template like this:
{% footag my_contents,height=10,zoom=20 as myvar %}
You simply do this:
@register.tag
def footag(parser, token):
args, kwargs = parse_args_kwargs(parser, token)
"""
bits = token.contents.split(' ')
if len(bits) <= 1:
raise template.TemplateSyntaxError("'%s' takes at least one argument" % bits[0])
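    # Editor's note (an assumption, not from the original source): the offsets
    # 13 and 14 below look hard-coded for a tag whose name plus trailing space
    # is 13 characters long (e.g. 'inplace_edit '); reusing this helper with a
    # different tag name would require adjusting them.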
if token.contents[13] == '"':
end_quote = token.contents.index('"', 14) + 1
args = [template.Variable(token.contents[13:end_quote])]
kwargs_start = end_quote
else:
try:
next_space = token.contents.index(' ', 14)
kwargs_start = next_space + 1
except ValueError:
next_space = None
kwargs_start = None
args = [template.Variable(token.contents[13:next_space])]
kwargs = {}
kwargs_list = token.contents[kwargs_start:].split(',')
for kwargs_item in kwargs_list:
if '=' in kwargs_item:
k, v = kwargs_item.split('=', 1)
k = k.strip()
kwargs[k] = template.Variable(v)
return args, kwargs
def get_args_and_kwargs(args, kwargs, context):
out_args = [arg.resolve(context) for arg in args]
out_kwargs = dict([(smart_str(k, 'ascii'), v.resolve(context)) for k, v in kwargs.items()])
return out_args, out_kwargs
class RenderWithArgsAndKwargsNode(template.Node):
"""
Node for templatetags which renders templates with parsed args and kwargs
Usage::
class FooNode(RenderWithArgsAndKwargsNode):
def prepare_context(self, context, args, kwargs):
context['result_list'] = kwargs['result_list']
return context
@register.tag
def footag(parser, token):
args, kwargs = parse_args_kwargs(parser, token)
return FooNode(args, kwargs, template='footag.html')
"""
def __init__(self, args, kwargs, template):
self.args = args
self.kwargs = kwargs
self.template = template
def prepare_context(self, args, kwargs, context):
"""
Hook for overriding in subclasses.
Note that "args" and "kwargs" parameters are already resolved with context
"""
return context
def render(self, context):
args, kwargs = get_args_and_kwargs(self.args, self.kwargs, context)
context = self.prepare_context(args, kwargs, context)
return render_to_string(self.template, context)
|
lferr/charm | refs/heads/dev | charm/schemes/pkenc/pkenc_paillier99.py | 3 | '''
Pascal Paillier (Public-Key)
| From: "Public-Key Cryptosystems Based on Composite Degree Residuosity Classes"
| Published in: EUROCRYPT 1999
| Available from: http://link.springer.com/chapter/10.1007%2F3-540-48910-X_16
| Notes:
* type public-key encryption (public key)
* setting: Integer
:Authors: J Ayo Akinyele
:Date: 4/2011 (updated 2/2016)
'''
from charm.toolbox.integergroup import lcm,integer,toInt
from charm.toolbox.PKEnc import PKEnc
debug = False
"""A ciphertext class with homomorphic properties"""
class Ciphertext(dict):
"""
    This tests the additively homomorphic properties of
the Paillier encryption scheme.
>>> from charm.toolbox.integergroup import RSAGroup
>>> group = RSAGroup()
>>> pai = Pai99(group)
>>> (public_key, secret_key) = pai.keygen()
>>> msg_1=12345678987654321
>>> msg_2=12345761234123409
>>> msg_3 = msg_1 + msg_2
>>> cipher_1 = pai.encrypt(public_key, msg_1)
>>> cipher_2 = pai.encrypt(public_key, msg_2)
>>> cipher_3 = cipher_1 + cipher_2
>>> decrypted_msg_3 = pai.decrypt(public_key, secret_key, cipher_3)
>>> decrypted_msg_3 == msg_3
True
"""
def __init__(self, ct, pk, key):
dict.__init__(self, ct)
self.pk, self.key = pk, key
def __add__(self, other):
if type(other) == int: # rhs must be Cipher
lhs = dict.__getitem__(self, self.key)
return Ciphertext({self.key:lhs * ((self.pk['g'] ** other) % self.pk['n2']) },
self.pk, self.key)
else: # neither are plain ints
lhs = dict.__getitem__(self, self.key)
rhs = dict.__getitem__(other, self.key)
return Ciphertext({self.key:(lhs * rhs) % self.pk['n2']},
self.pk, self.key)
def __mul__(self, other):
if type(other) == int:
lhs = dict.__getitem__(self, self.key)
return Ciphertext({self.key:(lhs ** other)}, self.pk, self.key)
def randomize(self, r): # need to provide random value
lhs = dict.__getitem__(self, self.key)
rhs = (integer(r) ** self.pk['n']) % self.pk['n2']
        return Ciphertext({self.key:(lhs * rhs) % self.pk['n2']}, self.pk, self.key)
def __str__(self):
value = dict.__str__(self)
return value # + ", pk =" + str(pk)
class Pai99(PKEnc):
def __init__(self, groupObj):
PKEnc.__init__(self)
global group
group = groupObj
def L(self, u, n):
# computes L(u) => ((u - 1) / n)
U = integer(int(u) - 1)
if int(U) == 0:
return integer(0, n)
return U / n
def keygen(self, secparam=1024):
(p, q, n) = group.paramgen(secparam)
lam = lcm(p - 1, q - 1)
n2 = n ** 2
g = group.random(n2)
u = (self.L(((g % n2) ** lam), n) % n) ** -1
pk, sk = {'n':n, 'g':g, 'n2':n2}, {'lamda':lam, 'u':u}
return (pk, sk)
def encrypt(self, pk, m):
g, n, n2 = pk['g'], pk['n'], pk['n2']
r = group.random(pk['n'])
c = ((g % n2) ** m) * ((r % n2) ** n)
return Ciphertext({'c':c}, pk, 'c')
def decrypt(self, pk, sk, ct):
n, n2 = pk['n'], pk['n2']
m = ((self.L(ct['c'] ** sk['lamda'], n) % n) * sk['u']) % n
return toInt(m)
def encode(self, modulus, message):
# takes a string and represents as a bytes object
elem = integer(message)
return elem % modulus
def decode(self, pk, element):
pass
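# Editor's sketch (not in the original module): besides the additive property
# shown in the doctest above, Ciphertext.__mul__ gives homomorphic scalar
# multiplication, i.e. Enc(m) ** k decrypts to k * m.
if __name__ == "__main__":
    from charm.toolbox.integergroup import RSAGroup
    demo_group = RSAGroup()
    demo_pai = Pai99(demo_group)
    (demo_pk, demo_sk) = demo_pai.keygen()
    demo_ct = demo_pai.encrypt(demo_pk, 3)
    # demo_ct * 4 exponentiates the ciphertext, multiplying the plaintext by 4
    assert demo_pai.decrypt(demo_pk, demo_sk, demo_ct * 4) == 12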
|
artemsok/sockeye | refs/heads/master | sockeye/config.py | 2 | # Copyright 2017 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"). You may not
# use this file except in compliance with the License. A copy of the License
# is located at
#
# http://aws.amazon.com/apache2.0/
#
# or in the "license" file accompanying this file. This file is distributed on
# an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
# express or implied. See the License for the specific language governing
# permissions and limitations under the License.
import copy
import inspect
import yaml
class TaggedYamlObjectMetaclass(yaml.YAMLObjectMetaclass):
def __init__(cls, name, bases, kwds):
cls.yaml_tag = "!" + name
new_kwds = {}
new_kwds.update(kwds)
new_kwds['yaml_tag'] = "!" + name
super().__init__(name, bases, new_kwds)
class Config(yaml.YAMLObject, metaclass=TaggedYamlObjectMetaclass):
"""
Base configuration object that supports freezing of members and YAML (de-)serialization.
Actual Configuration should subclass this object.
"""
def __init__(self):
self.__add_frozen()
def __setattr__(self, key, value):
if hasattr(self, '_frozen') and getattr(self, '_frozen'):
raise AttributeError("Cannot set '%s' in frozen config" % key)
if value == self:
raise AttributeError("Cannot set self as attribute")
object.__setattr__(self, key, value)
def __setstate__(self, state):
"""Pickle protocol implementation."""
# We first take the serialized state:
self.__dict__.update(state)
# Then we take the constructors default values for missing arguments in order to stay backwards compatible
# This way we can add parameters to Config objects and still load old models.
init_signature = inspect.signature(self.__init__)
for param_name, param in init_signature.parameters.items():
if param.default is not param.empty:
if not hasattr(self, param_name):
object.__setattr__(self, param_name, param.default)
def freeze(self):
"""
Freezes this Config object, disallowing modification or addition of any parameters.
"""
if getattr(self, '_frozen'):
return
object.__setattr__(self, "_frozen", True)
for k, v in self.__dict__.items():
if isinstance(v, Config) and k != "self":
v.freeze() # pylint: disable= no-member
def __repr__(self):
return "Config[%s]" % ", ".join("%s=%s" % (str(k), str(v)) for k, v in sorted(self.__dict__.items()))
def __eq__(self, other):
if type(other) is not type(self):
return False
for k, v in self.__dict__.items():
if k != "self":
if k not in other.__dict__:
return False
if self.__dict__[k] != other.__dict__[k]:
return False
return True
def __del_frozen(self):
"""
Removes _frozen attribute from this instance and all its child configurations.
"""
self.__delattr__('_frozen')
for attr, val in self.__dict__.items():
if isinstance(val, Config) and hasattr(val, '_frozen'):
val.__del_frozen() # pylint: disable= no-member
def __add_frozen(self):
"""
Adds _frozen attribute to this instance and all its child configurations.
"""
setattr(self, "_frozen", False)
for attr, val in self.__dict__.items():
if isinstance(val, Config):
val.__add_frozen() # pylint: disable= no-member
def save(self, fname: str):
"""
Saves this Config (without the frozen state) to a file called fname.
:param fname: Name of file to store this Config in.
"""
obj = copy.deepcopy(self)
obj.__del_frozen()
with open(fname, 'w') as out:
yaml.dump(obj, out, default_flow_style=False)
@staticmethod
def load(fname: str) -> 'Config':
"""
Returns a Config object loaded from a file. The loaded object is not frozen.
:param fname: Name of file to load the Config from.
:return: Configuration.
"""
with open(fname) as inp:
obj = yaml.load(inp)
obj.__add_frozen()
return obj
def copy(self, **kwargs):
"""
Create a copy of the config object, optionally modifying some of the attributes.
For example `nn_config.copy(num_hidden=512)` will create a copy of `nn_config` where the attribute `num_hidden`
will be set to the new value of num_hidden.
:param kwargs:
:return: A deep copy of the config object.
"""
copy_obj = copy.deepcopy(self)
for name, value in kwargs.items():
object.__setattr__(copy_obj, name, value)
return copy_obj
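# A minimal usage sketch (ModelConfig and the file name are illustrative):
#
#   class ModelConfig(Config):
#       def __init__(self, num_hidden: int = 256) -> None:
#           super().__init__()
#           self.num_hidden = num_hidden
#
#   config = ModelConfig().copy(num_hidden=512)
#   config.freeze()
#   config.save("model.yaml")
#   loaded = ModelConfig.load("model.yaml")  # loaded copy is not frozen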
|
JRepoInd/PyGithub | refs/heads/master | github/tests/Equality.py | 39 | # -*- coding: utf-8 -*-
# ########################## Copyrights and license ############################
# #
# Copyright 2013 Vincent Jacques <[email protected]> #
# #
# This file is part of PyGithub. http://jacquev6.github.com/PyGithub/ #
# #
# PyGithub is free software: you can redistribute it and/or modify it under #
# the terms of the GNU Lesser General Public License as published by the Free #
# Software Foundation, either version 3 of the License, or (at your option) #
# any later version. #
# #
# PyGithub is distributed in the hope that it will be useful, but WITHOUT ANY #
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS #
# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more #
# details. #
# #
# You should have received a copy of the GNU Lesser General Public License #
# along with PyGithub. If not, see <http://www.gnu.org/licenses/>. #
# #
# ##############################################################################
import Framework
class Equality(Framework.TestCase):
def testUserEquality(self):
u1 = self.g.get_user("jacquev6")
u2 = self.g.get_user("jacquev6")
self.assertTrue(u1 == u2)
self.assertFalse(u1 != u2)
self.assertEqual(u1, u2)
def testUserDifference(self):
u1 = self.g.get_user("jacquev6")
u2 = self.g.get_user("OddBloke")
self.assertFalse(u1 == u2)
self.assertTrue(u1 != u2)
self.assertNotEqual(u1, u2)
def testBranchEquality(self):
# Erf, equality of NonCompletableGithubObjects will be difficult to implement
# because even their _rawData can differ. (Here, the avatar_url is not equal)
# (CompletableGithubObjects are compared by their API url, which is a good key)
r = self.g.get_user().get_repo("PyGithub")
b1 = r.get_branch("develop")
b2 = r.get_branch("develop")
self.assertNotEqual(b1._rawData, b2._rawData)
|
pivotaccess2007/RapidSMS-Rwanda | refs/heads/master | apps/ajax/app.py | 5 | #!/usr/bin/env python
# vim: ai ts=4 sts=4 et sw=4
import rapidsms
import cgi, urlparse, traceback
from threading import Thread
from SocketServer import ThreadingMixIn
from BaseHTTPServer import BaseHTTPRequestHandler, HTTPServer
from django.utils.simplejson import JSONEncoder
from django.db.models.query import QuerySet
class App(rapidsms.app.App):
"""This App does nothing by itself. It exists only to serve other Apps, by
providing an easy (and standard) way for them to communicate between their
WebUI and RapidSMS App object.
When RapidSMS starts, this app starts an HTTPServer (port 8001 as default,
but configurable via rapidsms.ini) in a worker thread, and watches for any
incoming HTTP requests matching */app/method*. These requests, along with
their GET parameters and POST form, are passed on to the named app.
Examples:
method URL app method args
====== === === ====== ====
GET /breakfast/toast breakfast ajax_GET_toast { }
POST /breakfast/waffles breakfast ajax_POST_waffles { }, { }
POST /breakfast/eggs?x=1 breakfast ajax_POST_eggs { "x": 1 }, {}
Any data that is returned by the handler method is JSON encoded, and sent
back to the WebUI in response. Since the _webui_ app includes jQuery with
every view, this makes it very easy for the WebUIs of other apps to query
their running App object for state. See the _training_ app for an example.
But wait! AJAX can't cross domains, so a request to port 8001 from the WebUI
won't work! This is handled by the WebUI bundled with this app, that proxies
all requests to /ajax/(.+) to the right place, on the server side. I cannot
conceive of a situation where this would be a problem - but keep it in mind,
and don't forget to prepend "/ajax/" to your AJAX URLs."""
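    # A sketch of a hypothetical handler app (the "breakfast" names are
    # illustrative):
    #
    #   class App(rapidsms.app.App):
    #       def ajax_GET_toast(self, params):
    #           return {"slices": 2}
    #
    # A GET to /ajax/breakfast/toast?slices=2 would then be dispatched to
    # ajax_GET_toast with params == {"slices": ["2"]}, and the return value
    # would be JSON-encoded into the response.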
class Server(ThreadingMixIn, HTTPServer):
pass
class MyJsonEncoder(JSONEncoder):
def default(self, o):
# if this object has its own preference
# for JSON serialization, prioritize that
if hasattr(o, "__json__"):
return o.__json__()
elif type(o) == QuerySet:
return list(o)
# otherwise, revert to the usual behavior
return JSONEncoder.default(self, o)
class RequestHandler(BaseHTTPRequestHandler):
def __find_app(self, name):
# inspect the name of each active app,
# returning as soon as we find a match
for app in self.server.app.router.apps:
if app.slug == name:
return app
# no app by that
# name was found
return None
# handle both GET and POST with
# the same method
def do_GET(self): return self.process()
def do_POST(self): return self.process()
def process(self):
def response(code, output, json=True):
self.send_response(code)
mime_type = "application/json" if json else "text/plain"
self.send_header("content-type", mime_type)
self.end_headers()
if json:
json = App.MyJsonEncoder().encode(output)
self.wfile.write(json)
# otherwise, write the raw response.
# it doesn't make much sense to have
# error messages encoded as JSON...
else: self.wfile.write(output)
# HTTP2xx represents success
return (code>=200 and code <=299)
# should look something like:
# /alpha/bravo?charlie=delta
#
# this request will be parsed to the "bravo"
# method of the "alpha" app, with the params:
# { "charlie": ["delta"] }
#
# any other path format will return an http404
# error, for the time being. params are optional.
url = urlparse.urlparse(self.path)
path_parts = url.path.split("/")
# abort if the url didn't look right
# TODO: better error message here
if len(path_parts) != 3:
return response(404, "FAIL.")
# resolve the first part of the url into an app
# (via the router), and abort if it wasn't valid
app_name = path_parts[1]
app = self.__find_app(app_name)
if (app is None):
return response(404,
"Invalid app: %s" % app_name)
# same for the request name within the app
# (FYI, self.command returns GET, POST, etc)
meth_name = "ajax_%s_%s" % (self.command, path_parts[2])
if not hasattr(app, meth_name):
return response(404,
"Invalid method: %s" % meth_name)
# everything appears to be well, so call the
# target method, and return the response (as
# a string, for now)
try:
method = getattr(app, meth_name)
                params = cgi.parse_qs(url.query)
args = [params]
# for post requests, we'll also need to parse
# the form data, and hand it to the method
if self.command == "POST":
form = {}
# parse the form data via the CGI lib. this is
# a horrible mess, but supports all kinds of
# encodings (multipart, in particular)
storage = cgi.FieldStorage(
fp = self.rfile,
headers = self.headers,
environ = {
"REQUEST_METHOD": "POST",
"CONTENT_TYPE": self.headers["content-type"] })
# convert the fieldstorage object into a dict,
# to keep it simple for the handler methods.
# TODO: maybe make this a util if it's useful
# elsewhere. it isn't, for the time being.
for key in storage.keys():
v = storage.getlist(key)
# where possible, just store the values as singular,
# to avoid CGIs usual post["id"][0] verbosity
if len(v) > 1: form[key] = v
else: form[key] = v[0]
args.append(form)
# call the method, and send back whatever data
# structure was returned, serialized with JSON
output = method(*args)
return response(200, output)
# something raised during the request, so
# return a useless http error to the requester
except Exception, err:
self.server.app.warning(traceback.format_exc())
return response(500, unicode(err), False)
# this does nothing, except prevent HTTP
# requests being echoed to the screen
def log_request(*args):
pass
def configure(self, host=None, port=None):
self.host = host
self.port = port
def start(self):
# create the webserver, through which the
# AJAX requests from the WebUI will arrive
self.server = self.Server((self.host, self.port), self.RequestHandler)
self.server.app = self
# start the server in a separate thread, and daemonize it
# to prevent it from hanging once the main thread terminates
self.thread = Thread(target=self.server.serve_forever)
self.thread.daemon = True
self.thread.start()
|
rjschwei/azure-sdk-for-python | refs/heads/master | azure-batch/azure/batch/models/job_schedule_list_options.py | 3 | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from msrest.serialization import Model
class JobScheduleListOptions(Model):
"""Additional parameters for the JobSchedule_list operation.
:param filter: An OData $filter clause.
:type filter: str
:param select: An OData $select clause.
:type select: str
:param expand: An OData $expand clause.
:type expand: str
:param max_results: The maximum number of items to return in the response.
A maximum of 1000 job schedules can be returned. Default value: 1000 .
:type max_results: int
:param timeout: The maximum time that the server can spend processing the
request, in seconds. The default is 30 seconds. Default value: 30 .
:type timeout: int
:param client_request_id: The caller-generated request identity, in the
form of a GUID with no decoration such as curly braces, e.g.
9C4D50EE-2D56-4CD3-8152-34347DC9F2B0.
:type client_request_id: str
:param return_client_request_id: Whether the server should return the
client-request-id in the response. Default value: False .
:type return_client_request_id: bool
:param ocp_date: The time the request was issued. Client libraries
typically set this to the current system clock time; set it explicitly if
you are calling the REST API directly.
:type ocp_date: datetime
"""
def __init__(self, filter=None, select=None, expand=None, max_results=1000, timeout=30, client_request_id=None, return_client_request_id=False, ocp_date=None):
self.filter = filter
self.select = select
self.expand = expand
self.max_results = max_results
self.timeout = timeout
self.client_request_id = client_request_id
self.return_client_request_id = return_client_request_id
self.ocp_date = ocp_date
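# Example usage (a sketch; the filter expression is illustrative):
#   options = JobScheduleListOptions(
#       filter="state eq 'active'", max_results=100)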
|
tkaitchuck/nupic | refs/heads/master | external/darwin64/lib/python2.6/site-packages/yaml/dumper.py | 543 |
__all__ = ['BaseDumper', 'SafeDumper', 'Dumper']
from emitter import *
from serializer import *
from representer import *
from resolver import *
class BaseDumper(Emitter, Serializer, BaseRepresenter, BaseResolver):
def __init__(self, stream,
default_style=None, default_flow_style=None,
canonical=None, indent=None, width=None,
allow_unicode=None, line_break=None,
encoding=None, explicit_start=None, explicit_end=None,
version=None, tags=None):
Emitter.__init__(self, stream, canonical=canonical,
indent=indent, width=width,
allow_unicode=allow_unicode, line_break=line_break)
Serializer.__init__(self, encoding=encoding,
explicit_start=explicit_start, explicit_end=explicit_end,
version=version, tags=tags)
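        # Note: Representer and SafeRepresenter define no __init__ of their
        # own, so this call resolves to BaseRepresenter.__init__, matching
        # this class's bases.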
Representer.__init__(self, default_style=default_style,
default_flow_style=default_flow_style)
Resolver.__init__(self)
class SafeDumper(Emitter, Serializer, SafeRepresenter, Resolver):
def __init__(self, stream,
default_style=None, default_flow_style=None,
canonical=None, indent=None, width=None,
allow_unicode=None, line_break=None,
encoding=None, explicit_start=None, explicit_end=None,
version=None, tags=None):
Emitter.__init__(self, stream, canonical=canonical,
indent=indent, width=width,
allow_unicode=allow_unicode, line_break=line_break)
Serializer.__init__(self, encoding=encoding,
explicit_start=explicit_start, explicit_end=explicit_end,
version=version, tags=tags)
SafeRepresenter.__init__(self, default_style=default_style,
default_flow_style=default_flow_style)
Resolver.__init__(self)
class Dumper(Emitter, Serializer, Representer, Resolver):
def __init__(self, stream,
default_style=None, default_flow_style=None,
canonical=None, indent=None, width=None,
allow_unicode=None, line_break=None,
encoding=None, explicit_start=None, explicit_end=None,
version=None, tags=None):
Emitter.__init__(self, stream, canonical=canonical,
indent=indent, width=width,
allow_unicode=allow_unicode, line_break=line_break)
Serializer.__init__(self, encoding=encoding,
explicit_start=explicit_start, explicit_end=explicit_end,
version=version, tags=tags)
Representer.__init__(self, default_style=default_style,
default_flow_style=default_flow_style)
Resolver.__init__(self)
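# Usage sketch: pass one of these classes as the Dumper argument to select
# the corresponding representer/resolver stack, e.g.
#   yaml.dump(data, Dumper=SafeDumper)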
|
18098924759/Wox | refs/heads/master | PythonHome/Lib/site-packages/_markerlib/__init__.py | 1008 | try:
import ast
from _markerlib.markers import default_environment, compile, interpret
except ImportError:
if 'ast' in globals():
raise
def default_environment():
return {}
def compile(marker):
def marker_fn(environment=None, override=None):
# 'empty markers are True' heuristic won't install extra deps.
return not marker.strip()
marker_fn.__doc__ = marker
return marker_fn
def interpret(marker, environment=None, override=None):
return compile(marker)()
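# Usage sketch: with the ast-based implementation available,
#   interpret('python_version >= "2.6"')
# evaluates the marker against default_environment(); under the fallback
# above, any non-empty marker simply evaluates to False.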
|
sbhowmik89/oppia | refs/heads/develop | core/controllers/editor.py | 1 | # coding: utf-8
# Copyright 2014 The Oppia Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Controllers for the editor view."""
import imghdr
import logging
import jinja2
from core.controllers import base
from core.domain import config_domain
from core.domain import dependency_registry
from core.domain import email_manager
from core.domain import event_services
from core.domain import exp_domain
from core.domain import exp_services
from core.domain import fs_domain
from core.domain import gadget_registry
from core.domain import interaction_registry
from core.domain import rights_manager
from core.domain import rte_component_registry
from core.domain import rule_domain
from core.domain import stats_services
from core.domain import user_services
from core.domain import value_generators_domain
from core.platform import models
import feconf
import utils
current_user_services = models.Registry.import_current_user_services()
# The frontend template for a new state. It is sent to the frontend when the
# exploration editor page is first loaded, so that new states can be
# added in a way that is completely client-side.
# IMPORTANT: Before adding this state to an existing exploration, the
# state name and the destination of the default rule should first be
# changed to the desired new state name.
NEW_STATE_TEMPLATE = {
'content': [{
'type': 'text',
'value': ''
}],
'interaction': exp_domain.State.NULL_INTERACTION_DICT,
'param_changes': [],
'unresolved_answers': {},
}
MODERATOR_REQUEST_FORUM_URL_DEFAULT_VALUE = (
'https://moderator/request/forum/url')
MODERATOR_REQUEST_FORUM_URL = config_domain.ConfigProperty(
'moderator_request_forum_url', {'type': 'unicode'},
'A link to the forum for nominating explorations to be featured '
'in the gallery',
default_value=MODERATOR_REQUEST_FORUM_URL_DEFAULT_VALUE)
def get_value_generators_js():
"""Return a string that concatenates the JS for all value generators."""
all_value_generators = (
value_generators_domain.Registry.get_all_generator_classes())
value_generators_js = ''
for _, generator_cls in all_value_generators.iteritems():
value_generators_js += generator_cls.get_js_template()
return value_generators_js
def _require_valid_version(version_from_payload, exploration_version):
"""Check that the payload version matches the given exploration version."""
if version_from_payload is None:
raise base.BaseHandler.InvalidInputException(
'Invalid POST request: a version must be specified.')
if version_from_payload != exploration_version:
raise base.BaseHandler.InvalidInputException(
'Trying to update version %s of exploration from version %s, '
'which is too old. Please reload the page and try again.'
% (exploration_version, version_from_payload))
def require_editor(handler):
"""Decorator that checks if the user can edit the given exploration."""
def test_editor(self, exploration_id, escaped_state_name=None, **kwargs):
"""Gets the user and exploration id if the user can edit it.
Args:
self: the handler instance
exploration_id: the exploration id
escaped_state_name: the URL-escaped state name, if it exists
**kwargs: any other arguments passed to the handler
Returns:
The relevant handler, if the user is authorized to edit this
exploration.
Raises:
self.PageNotFoundException: if no such exploration or state exists.
self.UnauthorizedUserException: if the user exists but does not
have the right credentials.
"""
if not self.user_id:
self.redirect(current_user_services.create_login_url(
self.request.uri))
return
if self.username in config_domain.BANNED_USERNAMES.value:
raise self.UnauthorizedUserException(
'You do not have the credentials to access this page.')
try:
exploration = exp_services.get_exploration_by_id(exploration_id)
except:
raise self.PageNotFoundException
if not rights_manager.Actor(self.user_id).can_edit(
rights_manager.ACTIVITY_TYPE_EXPLORATION, exploration_id):
raise self.UnauthorizedUserException(
'You do not have the credentials to edit this exploration.',
self.user_id)
if not escaped_state_name:
return handler(self, exploration_id, **kwargs)
state_name = self.unescape_state_name(escaped_state_name)
if state_name not in exploration.states:
logging.error('Could not find state: %s' % state_name)
logging.error('Available states: %s' % exploration.states.keys())
raise self.PageNotFoundException
return handler(self, exploration_id, state_name, **kwargs)
return test_editor
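# Example usage (a sketch; MyHandler is illustrative): handlers decorate
# their methods so that they receive the exploration id (and, optionally,
# the unescaped state name):
#
#   class MyHandler(EditorHandler):
#       @require_editor
#       def put(self, exploration_id):
#           ...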
class EditorHandler(base.BaseHandler):
"""Base class for all handlers for the editor page."""
# The page name to use as a key for generating CSRF tokens.
PAGE_NAME_FOR_CSRF = 'editor'
class ExplorationPage(EditorHandler):
"""The editor page for a single exploration."""
EDITOR_PAGE_DEPENDENCY_IDS = ['codemirror']
def get(self, exploration_id):
"""Handles GET requests."""
if exploration_id in base.DISABLED_EXPLORATIONS.value:
self.render_template(
'error/disabled_exploration.html', iframe_restriction=None)
return
exploration = exp_services.get_exploration_by_id(
exploration_id, strict=False)
if (exploration is None or
not rights_manager.Actor(self.user_id).can_view(
rights_manager.ACTIVITY_TYPE_EXPLORATION, exploration_id)):
self.redirect('/')
return
can_edit = (
bool(self.user_id) and
self.username not in config_domain.BANNED_USERNAMES.value and
rights_manager.Actor(self.user_id).can_edit(
rights_manager.ACTIVITY_TYPE_EXPLORATION, exploration_id))
interaction_ids = (
interaction_registry.Registry.get_all_interaction_ids())
interaction_dependency_ids = (
interaction_registry.Registry.get_deduplicated_dependency_ids(
interaction_ids))
dependencies_html, additional_angular_modules = (
dependency_registry.Registry.get_deps_html_and_angular_modules(
interaction_dependency_ids + self.EDITOR_PAGE_DEPENDENCY_IDS))
interaction_templates = (
rte_component_registry.Registry.get_html_for_all_components() +
interaction_registry.Registry.get_interaction_html(
interaction_ids))
interaction_validators_html = (
interaction_registry.Registry.get_validators_html(
interaction_ids))
gadget_types = gadget_registry.Registry.get_all_gadget_types()
gadget_templates = (
gadget_registry.Registry.get_gadget_html(gadget_types))
self.values.update({
'GADGET_SPECS': gadget_registry.Registry.get_all_specs(),
'INTERACTION_SPECS': interaction_registry.Registry.get_all_specs(),
'PANEL_SPECS': feconf.PANELS_PROPERTIES,
'DEFAULT_OBJECT_VALUES': rule_domain.get_default_object_values(),
'additional_angular_modules': additional_angular_modules,
'can_delete': rights_manager.Actor(
self.user_id).can_delete(
rights_manager.ACTIVITY_TYPE_EXPLORATION, exploration_id),
'can_edit': can_edit,
'can_modify_roles': rights_manager.Actor(
self.user_id).can_modify_roles(
rights_manager.ACTIVITY_TYPE_EXPLORATION, exploration_id),
'can_publicize': rights_manager.Actor(
self.user_id).can_publicize(
rights_manager.ACTIVITY_TYPE_EXPLORATION, exploration_id),
'can_publish': rights_manager.Actor(
self.user_id).can_publish(
rights_manager.ACTIVITY_TYPE_EXPLORATION, exploration_id),
'can_release_ownership': rights_manager.Actor(
self.user_id).can_release_ownership(
rights_manager.ACTIVITY_TYPE_EXPLORATION, exploration_id),
'can_unpublicize': rights_manager.Actor(
self.user_id).can_unpublicize(
rights_manager.ACTIVITY_TYPE_EXPLORATION, exploration_id),
'can_unpublish': rights_manager.Actor(
self.user_id).can_unpublish(
rights_manager.ACTIVITY_TYPE_EXPLORATION, exploration_id),
'dependencies_html': jinja2.utils.Markup(dependencies_html),
'gadget_templates': jinja2.utils.Markup(gadget_templates),
'interaction_templates': jinja2.utils.Markup(
interaction_templates),
'interaction_validators_html': jinja2.utils.Markup(
interaction_validators_html),
'moderator_request_forum_url': MODERATOR_REQUEST_FORUM_URL.value,
'nav_mode': feconf.NAV_MODE_CREATE,
'value_generators_js': jinja2.utils.Markup(
get_value_generators_js()),
'title': exploration.title,
'ALL_LANGUAGE_CODES': feconf.ALL_LANGUAGE_CODES,
'ALLOWED_GADGETS': feconf.ALLOWED_GADGETS,
'ALLOWED_INTERACTION_CATEGORIES': (
feconf.ALLOWED_INTERACTION_CATEGORIES),
# This is needed for the exploration preview.
'CATEGORIES_TO_COLORS': feconf.CATEGORIES_TO_COLORS,
'INVALID_PARAMETER_NAMES': feconf.INVALID_PARAMETER_NAMES,
'NEW_STATE_TEMPLATE': NEW_STATE_TEMPLATE,
'SHOW_TRAINABLE_UNRESOLVED_ANSWERS': (
feconf.SHOW_TRAINABLE_UNRESOLVED_ANSWERS),
'TAG_REGEX': feconf.TAG_REGEX,
})
self.render_template('editor/exploration_editor.html')
class ExplorationHandler(EditorHandler):
"""Page with editor data for a single exploration."""
PAGE_NAME_FOR_CSRF = 'editor'
def _get_exploration_data(self, exploration_id, version=None):
"""Returns a description of the given exploration."""
try:
exploration = exp_services.get_exploration_by_id(
exploration_id, version=version)
except:
raise self.PageNotFoundException
states = {}
for state_name in exploration.states:
state_dict = exploration.states[state_name].to_dict()
state_dict['unresolved_answers'] = (
stats_services.get_top_unresolved_answers_for_default_rule(
exploration_id, state_name))
states[state_name] = state_dict
editor_dict = {
'category': exploration.category,
'exploration_id': exploration_id,
'init_state_name': exploration.init_state_name,
'language_code': exploration.language_code,
'objective': exploration.objective,
'param_changes': exploration.param_change_dicts,
'param_specs': exploration.param_specs_dict,
'rights': rights_manager.get_exploration_rights(
exploration_id).to_dict(),
'show_state_editor_tutorial_on_load': (
self.user_id and not self.has_seen_editor_tutorial),
'skin_customizations': exploration.skin_instance.to_dict()[
'skin_customizations'],
'states': states,
'tags': exploration.tags,
'title': exploration.title,
'version': exploration.version,
}
return editor_dict
def get(self, exploration_id):
"""Gets the data for the exploration overview page."""
if not rights_manager.Actor(self.user_id).can_view(
rights_manager.ACTIVITY_TYPE_EXPLORATION, exploration_id):
raise self.PageNotFoundException
version = self.request.get('v', default_value=None)
self.values.update(
self._get_exploration_data(exploration_id, version=version))
self.render_json(self.values)
@require_editor
def put(self, exploration_id):
"""Updates properties of the given exploration."""
exploration = exp_services.get_exploration_by_id(exploration_id)
version = self.payload.get('version')
_require_valid_version(version, exploration.version)
commit_message = self.payload.get('commit_message')
change_list = self.payload.get('change_list')
try:
exp_services.update_exploration(
self.user_id, exploration_id, change_list, commit_message)
except utils.ValidationError as e:
raise self.InvalidInputException(e)
self.values.update(self._get_exploration_data(exploration_id))
self.render_json(self.values)
@require_editor
def delete(self, exploration_id):
"""Deletes the given exploration."""
role = self.request.get('role')
if not role:
role = None
if role == rights_manager.ROLE_ADMIN:
if not self.is_admin:
logging.error(
'%s tried to delete an exploration, but is not an admin.'
% self.user_id)
raise self.UnauthorizedUserException(
'User %s does not have permissions to delete exploration '
'%s' % (self.user_id, exploration_id))
elif role == rights_manager.ROLE_MODERATOR:
if not self.is_moderator:
logging.error(
'%s tried to delete an exploration, but is not a '
'moderator.' % self.user_id)
raise self.UnauthorizedUserException(
'User %s does not have permissions to delete exploration '
'%s' % (self.user_id, exploration_id))
elif role is not None:
raise self.InvalidInputException('Invalid role: %s' % role)
logging.info(
'%s %s tried to delete exploration %s' %
(role, self.user_id, exploration_id))
exploration = exp_services.get_exploration_by_id(exploration_id)
can_delete = rights_manager.Actor(self.user_id).can_delete(
rights_manager.ACTIVITY_TYPE_EXPLORATION, exploration.id)
if not can_delete:
raise self.UnauthorizedUserException(
'User %s does not have permissions to delete exploration %s' %
(self.user_id, exploration_id))
is_exploration_cloned = rights_manager.is_exploration_cloned(
exploration_id)
exp_services.delete_exploration(
self.user_id, exploration_id, force_deletion=is_exploration_cloned)
logging.info(
'%s %s deleted exploration %s' %
(role, self.user_id, exploration_id))
class ExplorationRightsHandler(EditorHandler):
"""Handles management of exploration editing rights."""
PAGE_NAME_FOR_CSRF = 'editor'
@require_editor
def put(self, exploration_id):
"""Updates the editing rights for the given exploration."""
exploration = exp_services.get_exploration_by_id(exploration_id)
version = self.payload.get('version')
_require_valid_version(version, exploration.version)
is_public = self.payload.get('is_public')
is_publicized = self.payload.get('is_publicized')
is_community_owned = self.payload.get('is_community_owned')
new_member_username = self.payload.get('new_member_username')
new_member_role = self.payload.get('new_member_role')
viewable_if_private = self.payload.get('viewable_if_private')
if new_member_username:
if not rights_manager.Actor(
self.user_id).can_modify_roles(
rights_manager.ACTIVITY_TYPE_EXPLORATION,
exploration_id):
raise self.UnauthorizedUserException(
'Only an owner of this exploration can add or change '
'roles.')
new_member_id = user_services.get_user_id_from_username(
new_member_username)
if new_member_id is None:
raise Exception(
'Sorry, we could not find the specified user.')
rights_manager.assign_role_for_exploration(
self.user_id, exploration_id, new_member_id, new_member_role)
elif is_public is not None:
exploration = exp_services.get_exploration_by_id(exploration_id)
if is_public:
try:
exploration.validate(strict=True)
except utils.ValidationError as e:
raise self.InvalidInputException(e)
exp_services.publish_exploration_and_update_user_profiles(
self.user_id, exploration_id)
exp_services.index_explorations_given_ids([exploration_id])
else:
rights_manager.unpublish_exploration(
self.user_id, exploration_id)
exp_services.delete_documents_from_search_index([
exploration_id])
elif is_publicized is not None:
exploration = exp_services.get_exploration_by_id(exploration_id)
if is_publicized:
try:
exploration.validate(strict=True)
except utils.ValidationError as e:
raise self.InvalidInputException(e)
rights_manager.publicize_exploration(
self.user_id, exploration_id)
else:
rights_manager.unpublicize_exploration(
self.user_id, exploration_id)
elif is_community_owned:
exploration = exp_services.get_exploration_by_id(exploration_id)
try:
exploration.validate(strict=True)
except utils.ValidationError as e:
raise self.InvalidInputException(e)
rights_manager.release_ownership_of_exploration(
self.user_id, exploration_id)
elif viewable_if_private is not None:
rights_manager.set_private_viewability_of_exploration(
self.user_id, exploration_id, viewable_if_private)
else:
raise self.InvalidInputException(
'No change was made to this exploration.')
self.render_json({
'rights': rights_manager.get_exploration_rights(
exploration_id).to_dict()
})
class ExplorationModeratorRightsHandler(EditorHandler):
"""Handles management of exploration rights by moderators."""
PAGE_NAME_FOR_CSRF = 'editor'
@base.require_moderator
def put(self, exploration_id):
"""Updates the publication status of the given exploration, and sends
an email to all its owners.
"""
exploration = exp_services.get_exploration_by_id(exploration_id)
action = self.payload.get('action')
email_body = self.payload.get('email_body')
version = self.payload.get('version')
_require_valid_version(version, exploration.version)
if action not in feconf.VALID_MODERATOR_ACTIONS:
raise self.InvalidInputException('Invalid moderator action.')
# If moderator emails can be sent, check that all the prerequisites are
# satisfied, otherwise do nothing.
if feconf.REQUIRE_EMAIL_ON_MODERATOR_ACTION:
if not email_body:
raise self.InvalidInputException(
'Moderator actions should include an email to the '
'recipient.')
email_manager.require_moderator_email_prereqs_are_satisfied()
# Perform the moderator action.
if action == 'unpublish_exploration':
rights_manager.unpublish_exploration(
self.user_id, exploration_id)
exp_services.delete_documents_from_search_index([
exploration_id])
elif action == 'publicize_exploration':
try:
exploration.validate(strict=True)
except utils.ValidationError as e:
raise self.InvalidInputException(e)
rights_manager.publicize_exploration(
self.user_id, exploration_id)
else:
raise self.InvalidInputException(
'No change was made to this exploration.')
exp_rights = rights_manager.get_exploration_rights(exploration_id)
# If moderator emails can be sent, send an email to the all owners of
# the exploration notifying them of the change.
if feconf.REQUIRE_EMAIL_ON_MODERATOR_ACTION:
for owner_id in exp_rights.owner_ids:
email_manager.send_moderator_action_email(
self.user_id, owner_id,
feconf.VALID_MODERATOR_ACTIONS[action]['email_intent'],
exploration.title, email_body)
self.render_json({
'rights': exp_rights.to_dict(),
})
class ResolvedAnswersHandler(EditorHandler):
"""Allows learners' answers for a state to be marked as resolved."""
PAGE_NAME_FOR_CSRF = 'editor'
@require_editor
def put(self, exploration_id, state_name):
"""Marks learners' answers as resolved."""
resolved_answers = self.payload.get('resolved_answers')
if not isinstance(resolved_answers, list):
raise self.InvalidInputException(
'Expected a list of resolved answers; received %s.' %
resolved_answers)
if 'resolved_answers' in self.payload:
event_services.DefaultRuleAnswerResolutionEventHandler.record(
exploration_id, state_name, resolved_answers)
self.render_json({})
class UntrainedAnswersHandler(EditorHandler):
"""Returns answers that learners have submitted, but that Oppia hasn't been
explicitly trained to respond to be an exploration author.
"""
NUMBER_OF_TOP_ANSWERS_PER_RULE = 50
def get(self, exploration_id, escaped_state_name):
"""Handles GET requests."""
try:
exploration = exp_services.get_exploration_by_id(exploration_id)
except:
raise self.PageNotFoundException
state_name = self.unescape_state_name(escaped_state_name)
if state_name not in exploration.states:
# If trying to access a non-existing state, there is no training
# data associated with it.
self.render_json({'unhandled_answers': []})
return
state = exploration.states[state_name]
# TODO(bhenning): Answers should be bound to a particular exploration
# version or interaction ID.
# TODO(bhenning): If the top 100 answers have already been classified,
# then this handler will always return an empty list.
# TODO(bhenning): This entire function will not work as expected until
# the answers storage backend stores answers in a non-lossy way.
# Currently, answers are stored as HTML strings and they are not able
# to be converted back to the original objects they started as, so the
# normalization calls in this function will not work correctly on those
# strings. Once this happens, this handler should also be tested.
# The total number of possible answers is 100 because it requests the
# top 50 answers matched to the default rule and the top 50 answers
# matched to a fuzzy rule individually.
answers = stats_services.get_top_state_rule_answers(
exploration_id, state_name, [
exp_domain.DEFAULT_RULESPEC_STR, rule_domain.FUZZY_RULE_TYPE],
self.NUMBER_OF_TOP_ANSWERS_PER_RULE)
interaction = state.interaction
unhandled_answers = []
if feconf.SHOW_TRAINABLE_UNRESOLVED_ANSWERS and interaction.id:
interaction_instance = (
interaction_registry.Registry.get_interaction_by_id(
interaction.id))
try:
# Normalize the answers.
for answer in answers:
answer['value'] = interaction_instance.normalize_answer(
answer['value'])
trained_answers = set()
for answer_group in interaction.answer_groups:
for rule_spec in answer_group.rule_specs:
if rule_spec.rule_type == rule_domain.FUZZY_RULE_TYPE:
trained_answers.update(
interaction_instance.normalize_answer(trained)
for trained
in rule_spec.inputs['training_data'])
# Include all the answers which have been confirmed to be
# associated with the default outcome.
trained_answers.update(set(
interaction_instance.normalize_answer(confirmed)
for confirmed
in interaction.confirmed_unclassified_answers))
unhandled_answers = [
answer for answer in answers
if answer['value'] not in trained_answers
]
except Exception as e:
logging.warning(
'Error loading untrained answers for interaction %s: %s.' %
(interaction.id, e))
self.render_json({
'unhandled_answers': unhandled_answers
})
class ExplorationDownloadHandler(EditorHandler):
"""Downloads an exploration as a zip file, or dict of YAML strings
representing states.
"""
def get(self, exploration_id):
"""Handles GET requests."""
try:
exploration = exp_services.get_exploration_by_id(exploration_id)
except:
raise self.PageNotFoundException
if not rights_manager.Actor(self.user_id).can_view(
rights_manager.ACTIVITY_TYPE_EXPLORATION, exploration_id):
raise self.PageNotFoundException
version = self.request.get('v', default_value=exploration.version)
output_format = self.request.get('output_format', default_value='zip')
width = int(self.request.get('width', default_value=80))
# If the title of the exploration has changed, we use the new title
filename = 'oppia-%s-v%s' % (
utils.to_ascii(exploration.title.replace(' ', '')), version)
if output_format == feconf.OUTPUT_FORMAT_ZIP:
self.response.headers['Content-Type'] = 'text/plain'
self.response.headers['Content-Disposition'] = (
'attachment; filename=%s.zip' % str(filename))
self.response.write(
exp_services.export_to_zip_file(exploration_id, version))
elif output_format == feconf.OUTPUT_FORMAT_JSON:
self.render_json(exp_services.export_states_to_yaml(
exploration_id, version=version, width=width))
else:
raise self.InvalidInputException(
'Unrecognized output format %s' % output_format)
class StateDownloadHandler(EditorHandler):
"""Downloads a state as a YAML string."""
def get(self, exploration_id):
"""Handles GET requests."""
try:
exploration = exp_services.get_exploration_by_id(exploration_id)
except:
raise self.PageNotFoundException
if not rights_manager.Actor(self.user_id).can_view(
rights_manager.ACTIVITY_TYPE_EXPLORATION, exploration_id):
raise self.PageNotFoundException
version = self.request.get('v', default_value=exploration.version)
width = int(self.request.get('width', default_value=80))
try:
state = self.request.get('state')
except:
raise self.InvalidInputException('State not found')
exploration_dict = exp_services.export_states_to_yaml(
exploration_id, version=version, width=width)
if state not in exploration_dict:
raise self.PageNotFoundException
self.response.write(exploration_dict[state])
class ExplorationResourcesHandler(EditorHandler):
"""Manages assets associated with an exploration."""
@require_editor
def get(self, exploration_id):
"""Handles GET requests."""
fs = fs_domain.AbstractFileSystem(
fs_domain.ExplorationFileSystem(exploration_id))
dir_list = fs.listdir('')
self.render_json({'filepaths': dir_list})
class ExplorationSnapshotsHandler(EditorHandler):
"""Returns the exploration snapshot history."""
def get(self, exploration_id):
"""Handles GET requests."""
try:
snapshots = exp_services.get_exploration_snapshots_metadata(
exploration_id)
except:
raise self.PageNotFoundException
# Patch `snapshots` to use the editor's display name.
for snapshot in snapshots:
if snapshot['committer_id'] != feconf.SYSTEM_COMMITTER_ID:
snapshot['committer_id'] = user_services.get_username(
snapshot['committer_id'])
self.render_json({
'snapshots': snapshots,
})
class ExplorationRevertHandler(EditorHandler):
"""Reverts an exploration to an older version."""
@require_editor
def post(self, exploration_id):
"""Handles POST requests."""
current_version = self.payload.get('current_version')
revert_to_version = self.payload.get('revert_to_version')
if not isinstance(revert_to_version, int):
raise self.InvalidInputException(
'Expected an integer version to revert to; received %s.' %
revert_to_version)
if not isinstance(current_version, int):
raise self.InvalidInputException(
'Expected an integer current version; received %s.' %
current_version)
if revert_to_version < 1 or revert_to_version >= current_version:
raise self.InvalidInputException(
'Cannot revert to version %s from version %s.' %
(revert_to_version, current_version))
exp_services.revert_exploration(
self.user_id, exploration_id, current_version, revert_to_version)
self.render_json({})
class ExplorationStatisticsHandler(EditorHandler):
"""Returns statistics for an exploration."""
def get(self, exploration_id, exploration_version):
"""Handles GET requests."""
try:
exp_services.get_exploration_by_id(exploration_id)
except:
raise self.PageNotFoundException
self.render_json(stats_services.get_exploration_stats(
exploration_id, exploration_version))
class ExplorationStatsVersionsHandler(EditorHandler):
"""Returns statistics versions for an exploration."""
def get(self, exploration_id):
"""Handles GET requests."""
try:
exp_services.get_exploration_by_id(exploration_id)
except:
raise self.PageNotFoundException
self.render_json({
'versions': stats_services.get_versions_for_exploration_stats(
exploration_id)})
class StateRulesStatsHandler(EditorHandler):
"""Returns detailed learner answer statistics for a state."""
def get(self, exploration_id, escaped_state_name):
"""Handles GET requests."""
try:
exploration = exp_services.get_exploration_by_id(exploration_id)
except:
raise self.PageNotFoundException
state_name = self.unescape_state_name(escaped_state_name)
if state_name not in exploration.states:
logging.error('Could not find state: %s' % state_name)
logging.error('Available states: %s' % exploration.states.keys())
raise self.PageNotFoundException
self.render_json({
'rules_stats': stats_services.get_state_rules_stats(
exploration_id, state_name)
})
class ImageUploadHandler(EditorHandler):
"""Handles image uploads."""
@require_editor
def post(self, exploration_id):
"""Saves an image uploaded by a content creator."""
raw = self.request.get('image')
filename = self.payload.get('filename')
if not raw:
raise self.InvalidInputException('No image supplied')
file_format = imghdr.what(None, h=raw)
if file_format not in feconf.ACCEPTED_IMAGE_FORMATS_AND_EXTENSIONS:
allowed_formats = ', '.join(
feconf.ACCEPTED_IMAGE_FORMATS_AND_EXTENSIONS.keys())
raise Exception('Image file not recognized: it should be in '
'one of the following formats: %s.' %
allowed_formats)
if not filename:
raise self.InvalidInputException('No filename supplied')
if '/' in filename or '..' in filename:
raise self.InvalidInputException(
'Filenames should not include slashes (/) or consecutive dot '
'characters.')
if '.' in filename:
dot_index = filename.rfind('.')
primary_name = filename[:dot_index]
extension = filename[dot_index + 1:].lower()
if (extension not in
feconf.ACCEPTED_IMAGE_FORMATS_AND_EXTENSIONS[file_format]):
raise self.InvalidInputException(
'Expected a filename ending in .%s; received %s' %
(file_format, filename))
else:
primary_name = filename
filepath = '%s.%s' % (primary_name, file_format)
fs = fs_domain.AbstractFileSystem(
fs_domain.ExplorationFileSystem(exploration_id))
if fs.isfile(filepath):
raise self.InvalidInputException(
'A file with the name %s already exists. Please choose a '
'different name.' % filepath)
fs.commit(self.user_id, filepath, raw)
self.render_json({'filepath': filepath})
class ChangeListSummaryHandler(EditorHandler):
"""Returns a summary of a changelist applied to a given exploration."""
@require_editor
def post(self, exploration_id):
"""Handles POST requests."""
change_list = self.payload.get('change_list')
version = self.payload.get('version')
current_exploration = exp_services.get_exploration_by_id(
exploration_id)
if version != current_exploration.version:
# TODO(sll): Improve this.
self.render_json({
'error': (
'Sorry! Someone else has edited and committed changes to '
'this exploration while you were editing it. We suggest '
'opening another browser tab -- which will load the new '
'version of the exploration -- then transferring your '
'changes there. We will try to make this easier in the '
'future -- we have not done it yet because figuring out '
'how to merge different people\'s changes is hard. '
'(Trying to edit version %s, but the current version is '
'%s.).' % (version, current_exploration.version)
)
})
else:
utils.recursively_remove_key(change_list, '$$hashKey')
summary = exp_services.get_summary_of_change_list(
current_exploration, change_list)
updated_exploration = exp_services.apply_change_list(
exploration_id, change_list)
warning_message = ''
try:
updated_exploration.validate(strict=True)
except utils.ValidationError as e:
warning_message = unicode(e)
self.render_json({
'summary': summary,
'warning_message': warning_message
})
class StartedTutorialEventHandler(EditorHandler):
"""Records that this user has started the state editor tutorial."""
def post(self):
"""Handles GET requests."""
user_services.record_user_started_state_editor_tutorial(self.user_id)
|
gregorynicholas/flask-funktional | refs/heads/develop | setup.py | 1 | #!/usr/bin/env python
"""
flask-funktional
~~~~~~~~~~~~~~~~
flask extension which hopes to make functional testing easier.
links
`````
* `documentation <http://gregorynicholas.github.io/flask-funktional>`_
* `package <http://packages.python.org/flask-funktional>`_
* `source <http://github.com/gregorynicholas/flask-funktional>`_
* `development version
<http://github.com/gregorynicholas/flask-funktional>`_
"""
from setuptools import setup
with open("requirements.txt", "r") as f:
requires = f.readlines()
with open("README.md", "r") as f:
long_description = f.read()
setup(
name="flask-funktional",
version="0.0.1",
url='http://github.com/gregorynicholas/flask-funktional',
license='MIT',
author='gregorynicholas',
author_email='[email protected]',
description=__doc__,
long_description=long_description,
py_modules=['flask_funktional'],
zip_safe=False,
platforms='any',
install_requires=requires,
tests_require=[
'blinker==1.2',
],
test_suite='flask_funktional_tests',
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
'Topic :: Software Development :: Libraries :: Python Modules'
]
)
|
marcospy/TJBot | refs/heads/master | tjbot/commands/auto_question.py | 1 | # coding: utf-8
from commands.command_handler import CommandHandler
class EnableAutoQuestion(CommandHandler):
def handle(self):
if not self.user.questao_automatica_ativa:
self.user.questao_automatica_ativa = True
self.user.save()
self.reply_text('Questão automática ativada com sucesso.')
class DisableAutoQuestion(CommandHandler):
def handle(self):
if self.user.questao_automatica_ativa:
self.user.questao_automatica_ativa = False
self.user.save()
reply = 'Questão automática desativada com sucesso.'
else:
reply = 'Questão automática não está ativa, nada feito.'
self.reply_text(reply)
|
osakared/midifile.py | refs/heads/master | midi_file.py | 1 | import struct
class Note(object):
"""Represents a single midi note"""
note_names = ['A', 'A#', 'B', 'C', 'C#', 'D', 'D#', 'E', 'F', 'F#', 'G', 'G#']
def __init__(self, channel, pitch, velocity, start, duration = 0):
self.channel = channel
self.pitch = pitch
self.velocity = velocity
self.start = start
self.duration = duration
def __str__(self):
s = Note.note_names[(self.pitch - 9) % 12]
s += str(self.pitch / 12 - 1)
s += " " + str(self.velocity)
s += " (" + str(self.start) + "-" + str(self.start + self.duration) + ")"
return s
def get_end(self):
return self.start + self.duration
def notes_from_xml(element):
track = []
for child in element.childNodes:
if child.attributes and (child.tagName == 'MidiNote' or child.tagName == 'TempMidiNote'):
try:
track.append(Note(0, int(child.getAttribute('pitch')), int(child.getAttribute('velocity')), float(child.getAttribute('start')), float(child.getAttribute('duration'))))
except Exception, e:
print "Cannot parse MidiNote or TempMidiNote: " + str(e)
return track
def notes_to_str(notes):
s = ""
for note in notes:
s += str(note) + " "
return s
class MidiFile(object):
"""Represents the Notes in a midi file"""
def read_byte(self, file):
return struct.unpack('B', file.read(1))[0]
def read_variable_length(self, file, counter):
counter -= 1
num = self.read_byte(file)
if num & 0x80:
num = num & 0x7F
while True:
counter -= 1
c = self.read_byte(file)
num = (num << 7) + (c & 0x7F)
if not (c & 0x80):
break
return (num, counter)
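    # Worked example: the byte sequence 0x81 0x48 decodes to
    # ((0x81 & 0x7F) << 7) + 0x48 = 200, consuming two bytes from counter.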
def __init__(self, file_name):
        self.tempo = 120 # default BPM; set-tempo meta events (0x51) are skipped
try:
file = open(file_name, 'rb')
if file.read(4) != 'MThd': raise Exception('Not a midi file')
self.file_name = file_name
size = struct.unpack('>i', file.read(4))[0]
if size != 6: raise Exception('Unusual midi file with non-6 sized header')
self.format = struct.unpack('>h', file.read(2))[0]
self.track_count = struct.unpack('>h', file.read(2))[0]
self.time_division = struct.unpack('>h', file.read(2))[0]
# Now to fill out the arrays with the notes
self.tracks = []
for i in range(0, self.track_count):
self.tracks.append([])
abs_time = 0.0
for track in self.tracks:
if file.read(4) != 'MTrk': raise Exception('Not a valid track')
size = struct.unpack('>i', file.read(4))[0]
# To keep track of running status
last_flag = None
while size > 0:
delta, size = self.read_variable_length(file, size)
delta /= float(self.time_division)
abs_time += delta
size -= 1
flag = self.read_byte(file)
# Sysex, which we aren't interested in
if flag == 0xF0 or flag == 0xF7:
# print "Sysex"
while True:
size -= 1
if self.read_byte(file) == 0xF7: break
# Meta, which we also aren't interested in
elif flag == 0xFF:
size -= 1
type = self.read_byte(file)
if type == 0x2F:
break
# print "Meta: " + str(type)
length, size = self.read_variable_length(file, size)
message = file.read(length)
# if type not in [0x0, 0x7, 0x20, 0x2F, 0x51, 0x54, 0x58, 0x59, 0x7F]:
# print message
# Midi messages
else:
if flag & 0x80:
                            type_and_channel = flag
size -= 1
param1 = self.read_byte(file)
last_flag = flag
else:
type_and_channel = last_flag
param1 = flag
type = ((type_and_channel & 0xF0) >> 4)
channel = type_and_channel & 0xF
                        # program change (0xC) and channel pressure (0xD)
                        # messages carry only one data byte
                        if type in (0xC, 0xD):
                            param2 = 0
                        else:
                            size -= 1
                            param2 = self.read_byte(file)
# For now, anyway, we only care about midi ons and midi offs
if type == 0x9:
track.append(Note(channel, param1, param2, abs_time))
elif type == 0x8:
for note in reversed(track):
if note.channel == channel and note.pitch == param1:
note.duration = abs_time - note.start
break
except Exception, e:
print "Cannot parse midi file: " + str(e)
finally:
file.close()
def __str__(self):
s = ""
for i, track in enumerate(self.tracks):
s += "Track " + str(i+1) + "\n"
for note in track:
s += str(note) + "\n"
return s
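# Usage sketch (the file name is illustrative):
#   midi = MidiFile('song.mid')
#   print midi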
|
PulsePod/old-www-do-not-use | refs/heads/master | lib/python2.7/site-packages/pip/vendor/distlib/_backport/sysconfig.py | 80 | # -*- coding: utf-8 -*-
#
# Copyright (C) 2012 The Python Software Foundation.
# See LICENSE.txt and CONTRIBUTORS.txt.
#
"""Access to Python's configuration information."""
import codecs
import os
import re
import sys
from os.path import pardir, realpath
try:
import configparser
except ImportError:
import ConfigParser as configparser
__all__ = [
'get_config_h_filename',
'get_config_var',
'get_config_vars',
'get_makefile_filename',
'get_path',
'get_path_names',
'get_paths',
'get_platform',
'get_python_version',
'get_scheme_names',
'parse_config_h',
]
def _safe_realpath(path):
try:
return realpath(path)
except OSError:
return path
if sys.executable:
_PROJECT_BASE = os.path.dirname(_safe_realpath(sys.executable))
else:
# sys.executable can be empty if argv[0] has been changed and Python is
# unable to retrieve the real program name
_PROJECT_BASE = _safe_realpath(os.getcwd())
if os.name == "nt" and "pcbuild" in _PROJECT_BASE[-8:].lower():
_PROJECT_BASE = _safe_realpath(os.path.join(_PROJECT_BASE, pardir))
# PC/VS7.1
if os.name == "nt" and "\\pc\\v" in _PROJECT_BASE[-10:].lower():
_PROJECT_BASE = _safe_realpath(os.path.join(_PROJECT_BASE, pardir, pardir))
# PC/AMD64
if os.name == "nt" and "\\pcbuild\\amd64" in _PROJECT_BASE[-14:].lower():
_PROJECT_BASE = _safe_realpath(os.path.join(_PROJECT_BASE, pardir, pardir))
def is_python_build():
for fn in ("Setup.dist", "Setup.local"):
if os.path.isfile(os.path.join(_PROJECT_BASE, "Modules", fn)):
return True
return False
_PYTHON_BUILD = is_python_build()
_cfg_read = False
def _ensure_cfg_read():
global _cfg_read
if not _cfg_read:
from distlib.resources import finder
_finder = finder('distlib._backport')
_cfgfile = _finder.find('sysconfig.cfg')
assert _cfgfile, 'sysconfig.cfg exists'
with _cfgfile.as_stream() as s:
_SCHEMES.readfp(s)
if _PYTHON_BUILD:
for scheme in ('posix_prefix', 'posix_home'):
_SCHEMES.set(scheme, 'include', '{srcdir}/Include')
_SCHEMES.set(scheme, 'platinclude', '{projectbase}/.')
_cfg_read = True
_SCHEMES = configparser.RawConfigParser()
_VAR_REPL = re.compile(r'\{([^{]*?)\}')
def _expand_globals(config):
_ensure_cfg_read()
if config.has_section('globals'):
globals = config.items('globals')
else:
globals = tuple()
sections = config.sections()
for section in sections:
if section == 'globals':
continue
for option, value in globals:
if config.has_option(section, option):
continue
config.set(section, option, value)
config.remove_section('globals')
# now expanding local variables defined in the cfg file
#
for section in config.sections():
variables = dict(config.items(section))
def _replacer(matchobj):
name = matchobj.group(1)
if name in variables:
return variables[name]
return matchobj.group(0)
for option, value in config.items(section):
config.set(section, option, _VAR_REPL.sub(_replacer, value))
#_expand_globals(_SCHEMES)
# FIXME don't rely on sys.version here, its format is an implementation detail
# of CPython, use sys.version_info or sys.hexversion
_PY_VERSION = sys.version.split()[0]
_PY_VERSION_SHORT = sys.version[:3]
_PY_VERSION_SHORT_NO_DOT = _PY_VERSION[0] + _PY_VERSION[2]
_PREFIX = os.path.normpath(sys.prefix)
_EXEC_PREFIX = os.path.normpath(sys.exec_prefix)
_CONFIG_VARS = None
_USER_BASE = None
def _subst_vars(path, local_vars):
"""In the string `path`, replace tokens like {some.thing} with the
corresponding value from the map `local_vars`.
If there is no corresponding value, leave the token unchanged.
"""
def _replacer(matchobj):
name = matchobj.group(1)
if name in local_vars:
return local_vars[name]
elif name in os.environ:
return os.environ[name]
return matchobj.group(0)
return _VAR_REPL.sub(_replacer, path)
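# Worked example: _subst_vars('{userbase}/lib', {'userbase': '/opt/py'})
# returns '/opt/py/lib'; tokens with no corresponding value are left
# unchanged.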
def _extend_dict(target_dict, other_dict):
target_keys = target_dict.keys()
for key, value in other_dict.items():
if key in target_keys:
continue
target_dict[key] = value
def _expand_vars(scheme, vars):
res = {}
if vars is None:
vars = {}
_extend_dict(vars, get_config_vars())
for key, value in _SCHEMES.items(scheme):
if os.name in ('posix', 'nt'):
value = os.path.expanduser(value)
res[key] = os.path.normpath(_subst_vars(value, vars))
return res
def format_value(value, vars):
def _replacer(matchobj):
name = matchobj.group(1)
if name in vars:
return vars[name]
return matchobj.group(0)
return _VAR_REPL.sub(_replacer, value)
def _get_default_scheme():
if os.name == 'posix':
# the default scheme for posix is posix_prefix
return 'posix_prefix'
return os.name
def _getuserbase():
env_base = os.environ.get("PYTHONUSERBASE", None)
def joinuser(*args):
return os.path.expanduser(os.path.join(*args))
# what about 'os2emx', 'riscos' ?
if os.name == "nt":
base = os.environ.get("APPDATA") or "~"
if env_base:
return env_base
else:
return joinuser(base, "Python")
if sys.platform == "darwin":
framework = get_config_var("PYTHONFRAMEWORK")
if framework:
if env_base:
return env_base
else:
return joinuser("~", "Library", framework, "%d.%d" %
sys.version_info[:2])
if env_base:
return env_base
else:
return joinuser("~", ".local")
def _parse_makefile(filename, vars=None):
"""Parse a Makefile-style file.
A dictionary containing name/value pairs is returned. If an
optional dictionary is passed in as the second argument, it is
used instead of a new dictionary.
"""
# Regexes needed for parsing Makefile (and similar syntaxes,
# like old-style Setup files).
    _variable_rx = re.compile(r"([a-zA-Z][a-zA-Z0-9_]+)\s*=\s*(.*)")
_findvar1_rx = re.compile(r"\$\(([A-Za-z][A-Za-z0-9_]*)\)")
_findvar2_rx = re.compile(r"\${([A-Za-z][A-Za-z0-9_]*)}")
if vars is None:
vars = {}
done = {}
notdone = {}
with codecs.open(filename, encoding='utf-8', errors="surrogateescape") as f:
lines = f.readlines()
for line in lines:
if line.startswith('#') or line.strip() == '':
continue
m = _variable_rx.match(line)
if m:
n, v = m.group(1, 2)
v = v.strip()
# `$$' is a literal `$' in make
tmpv = v.replace('$$', '')
if "$" in tmpv:
notdone[n] = v
else:
try:
v = int(v)
except ValueError:
# insert literal `$'
done[n] = v.replace('$$', '$')
else:
done[n] = v
# do variable interpolation here
variables = list(notdone.keys())
# Variables with a 'PY_' prefix in the makefile. These need to
# be made available without that prefix through sysconfig.
# Special care is needed to ensure that variable expansion works, even
# if the expansion uses the name without a prefix.
renamed_variables = ('CFLAGS', 'LDFLAGS', 'CPPFLAGS')
while len(variables) > 0:
for name in tuple(variables):
value = notdone[name]
m = _findvar1_rx.search(value) or _findvar2_rx.search(value)
if m is not None:
n = m.group(1)
found = True
if n in done:
item = str(done[n])
elif n in notdone:
# get it on a subsequent round
found = False
elif n in os.environ:
# do it like make: fall back to environment
item = os.environ[n]
elif n in renamed_variables:
if (name.startswith('PY_') and
name[3:] in renamed_variables):
item = ""
elif 'PY_' + n in notdone:
found = False
else:
item = str(done['PY_' + n])
else:
done[n] = item = ""
if found:
after = value[m.end():]
value = value[:m.start()] + item + after
if "$" in after:
notdone[name] = value
else:
try:
value = int(value)
except ValueError:
done[name] = value.strip()
else:
done[name] = value
variables.remove(name)
if (name.startswith('PY_') and
name[3:] in renamed_variables):
name = name[3:]
if name not in done:
done[name] = value
else:
# bogus variable reference (e.g. "prefix=$/opt/python");
# just drop it since we can't deal
done[name] = value
variables.remove(name)
# strip spurious spaces
for k, v in done.items():
if isinstance(v, str):
done[k] = v.strip()
# save the results in the global dictionary
vars.update(done)
return vars
def get_makefile_filename():
"""Return the path of the Makefile."""
if _PYTHON_BUILD:
return os.path.join(_PROJECT_BASE, "Makefile")
if hasattr(sys, 'abiflags'):
config_dir_name = 'config-%s%s' % (_PY_VERSION_SHORT, sys.abiflags)
else:
config_dir_name = 'config'
return os.path.join(get_path('stdlib'), config_dir_name, 'Makefile')
def _init_posix(vars):
"""Initialize the module as appropriate for POSIX systems."""
# load the installed Makefile:
makefile = get_makefile_filename()
try:
_parse_makefile(makefile, vars)
except IOError as e:
msg = "invalid Python installation: unable to open %s" % makefile
if hasattr(e, "strerror"):
msg = msg + " (%s)" % e.strerror
raise IOError(msg)
# load the installed pyconfig.h:
config_h = get_config_h_filename()
try:
with open(config_h) as f:
parse_config_h(f, vars)
except IOError as e:
msg = "invalid Python installation: unable to open %s" % config_h
if hasattr(e, "strerror"):
msg = msg + " (%s)" % e.strerror
raise IOError(msg)
# On AIX, there are wrong paths to the linker scripts in the Makefile
# -- these paths are relative to the Python source, but when installed
# the scripts are in another directory.
if _PYTHON_BUILD:
vars['LDSHARED'] = vars['BLDSHARED']
def _init_non_posix(vars):
"""Initialize the module as appropriate for NT"""
# set basic install directories
vars['LIBDEST'] = get_path('stdlib')
vars['BINLIBDEST'] = get_path('platstdlib')
vars['INCLUDEPY'] = get_path('include')
vars['SO'] = '.pyd'
vars['EXE'] = '.exe'
vars['VERSION'] = _PY_VERSION_SHORT_NO_DOT
vars['BINDIR'] = os.path.dirname(_safe_realpath(sys.executable))
#
# public APIs
#
def parse_config_h(fp, vars=None):
"""Parse a config.h-style file.
A dictionary containing name/value pairs is returned. If an
optional dictionary is passed in as the second argument, it is
used instead of a new dictionary.
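    For example (illustrative), "#define HAVE_UNISTD_H 1" yields
    vars['HAVE_UNISTD_H'] == 1, and "/* #undef HAVE_FOO */" yields
    vars['HAVE_FOO'] == 0.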
"""
if vars is None:
vars = {}
define_rx = re.compile("#define ([A-Z][A-Za-z0-9_]+) (.*)\n")
undef_rx = re.compile("/[*] #undef ([A-Z][A-Za-z0-9_]+) [*]/\n")
while True:
line = fp.readline()
if not line:
break
m = define_rx.match(line)
if m:
n, v = m.group(1, 2)
try:
v = int(v)
except ValueError:
pass
vars[n] = v
else:
m = undef_rx.match(line)
if m:
vars[m.group(1)] = 0
return vars
def get_config_h_filename():
"""Return the path of pyconfig.h."""
if _PYTHON_BUILD:
if os.name == "nt":
inc_dir = os.path.join(_PROJECT_BASE, "PC")
else:
inc_dir = _PROJECT_BASE
else:
inc_dir = get_path('platinclude')
return os.path.join(inc_dir, 'pyconfig.h')
def get_scheme_names():
"""Return a tuple containing the schemes names."""
return tuple(sorted(_SCHEMES.sections()))
def get_path_names():
"""Return a tuple containing the paths names."""
# xxx see if we want a static list
return _SCHEMES.options('posix_prefix')
def get_paths(scheme=_get_default_scheme(), vars=None, expand=True):
"""Return a mapping containing an install scheme.
``scheme`` is the install scheme name. If not provided, it will
return the default scheme for the current platform.
"""
_ensure_cfg_read()
if expand:
return _expand_vars(scheme, vars)
else:
return dict(_SCHEMES.items(scheme))
def get_path(name, scheme=_get_default_scheme(), vars=None, expand=True):
"""Return a path corresponding to the scheme.
``scheme`` is the install scheme name.
"""
return get_paths(scheme, vars, expand)[name]
def get_config_vars(*args):
"""With no arguments, return a dictionary of all configuration
variables relevant for the current platform.
On Unix, this means every variable defined in Python's installed Makefile;
On Windows and Mac OS it's a much smaller set.
With arguments, return a list of values that result from looking up
each argument in the configuration variable dictionary.
"""
global _CONFIG_VARS
if _CONFIG_VARS is None:
_CONFIG_VARS = {}
# Normalized versions of prefix and exec_prefix are handy to have;
# in fact, these are the standard versions used most places in the
# distutils2 module.
_CONFIG_VARS['prefix'] = _PREFIX
_CONFIG_VARS['exec_prefix'] = _EXEC_PREFIX
_CONFIG_VARS['py_version'] = _PY_VERSION
_CONFIG_VARS['py_version_short'] = _PY_VERSION_SHORT
        _CONFIG_VARS['py_version_nodot'] = _PY_VERSION_SHORT_NO_DOT
_CONFIG_VARS['base'] = _PREFIX
_CONFIG_VARS['platbase'] = _EXEC_PREFIX
_CONFIG_VARS['projectbase'] = _PROJECT_BASE
try:
_CONFIG_VARS['abiflags'] = sys.abiflags
except AttributeError:
# sys.abiflags may not be defined on all platforms.
_CONFIG_VARS['abiflags'] = ''
if os.name in ('nt', 'os2'):
_init_non_posix(_CONFIG_VARS)
if os.name == 'posix':
_init_posix(_CONFIG_VARS)
# Setting 'userbase' is done below the call to the
# init function to enable using 'get_config_var' in
# the init-function.
        if sys.version_info >= (2, 6):
_CONFIG_VARS['userbase'] = _getuserbase()
if 'srcdir' not in _CONFIG_VARS:
_CONFIG_VARS['srcdir'] = _PROJECT_BASE
else:
_CONFIG_VARS['srcdir'] = _safe_realpath(_CONFIG_VARS['srcdir'])
# Convert srcdir into an absolute path if it appears necessary.
# Normally it is relative to the build directory. However, during
# testing, for example, we might be running a non-installed python
# from a different directory.
if _PYTHON_BUILD and os.name == "posix":
base = _PROJECT_BASE
try:
cwd = os.getcwd()
except OSError:
cwd = None
if (not os.path.isabs(_CONFIG_VARS['srcdir']) and
base != cwd):
# srcdir is relative and we are not in the same directory
# as the executable. Assume executable is in the build
# directory and make srcdir absolute.
srcdir = os.path.join(base, _CONFIG_VARS['srcdir'])
_CONFIG_VARS['srcdir'] = os.path.normpath(srcdir)
if sys.platform == 'darwin':
kernel_version = os.uname()[2] # Kernel version (8.4.3)
major_version = int(kernel_version.split('.')[0])
if major_version < 8:
# On Mac OS X before 10.4, check if -arch and -isysroot
# are in CFLAGS or LDFLAGS and remove them if they are.
# This is needed when building extensions on a 10.3 system
# using a universal build of python.
for key in ('LDFLAGS', 'BASECFLAGS',
# a number of derived variables. These need to be
# patched up as well.
'CFLAGS', 'PY_CFLAGS', 'BLDSHARED'):
flags = _CONFIG_VARS[key]
                    flags = re.sub(r'-arch\s+\w+\s', ' ', flags)
                    flags = re.sub(r'-isysroot [^ \t]*', ' ', flags)
_CONFIG_VARS[key] = flags
else:
# Allow the user to override the architecture flags using
# an environment variable.
# NOTE: This name was introduced by Apple in OSX 10.5 and
# is used by several scripting languages distributed with
# that OS release.
if 'ARCHFLAGS' in os.environ:
arch = os.environ['ARCHFLAGS']
for key in ('LDFLAGS', 'BASECFLAGS',
# a number of derived variables. These need to be
# patched up as well.
'CFLAGS', 'PY_CFLAGS', 'BLDSHARED'):
flags = _CONFIG_VARS[key]
                        flags = re.sub(r'-arch\s+\w+\s', ' ', flags)
flags = flags + ' ' + arch
_CONFIG_VARS[key] = flags
# If we're on OSX 10.5 or later and the user tries to
# compiles an extension using an SDK that is not present
# on the current machine it is better to not use an SDK
# than to fail.
#
# The major usecase for this is users using a Python.org
# binary installer on OSX 10.6: that installer uses
# the 10.4u SDK, but that SDK is not installed by default
# when you install Xcode.
#
CFLAGS = _CONFIG_VARS.get('CFLAGS', '')
                m = re.search(r'-isysroot\s+(\S+)', CFLAGS)
if m is not None:
sdk = m.group(1)
if not os.path.exists(sdk):
for key in ('LDFLAGS', 'BASECFLAGS',
# a number of derived variables. These need to be
# patched up as well.
'CFLAGS', 'PY_CFLAGS', 'BLDSHARED'):
flags = _CONFIG_VARS[key]
                            flags = re.sub(r'-isysroot\s+\S+(\s|$)', ' ', flags)
_CONFIG_VARS[key] = flags
if args:
vals = []
for name in args:
vals.append(_CONFIG_VARS.get(name))
return vals
else:
return _CONFIG_VARS
def get_config_var(name):
"""Return the value of a single variable using the dictionary returned by
'get_config_vars()'.
Equivalent to get_config_vars().get(name)
"""
return get_config_vars().get(name)
def get_platform():
"""Return a string that identifies the current platform.
This is used mainly to distinguish platform-specific build directories and
platform-specific built distributions. Typically includes the OS name
and version and the architecture (as supplied by 'os.uname()'),
although the exact information included depends on the OS; eg. for IRIX
the architecture isn't particularly important (IRIX only runs on SGI
hardware), but for Linux the kernel version isn't particularly
important.
Examples of returned values:
linux-i586
linux-alpha (?)
solaris-2.6-sun4u
irix-5.3
irix64-6.2
Windows will return one of:
win-amd64 (64bit Windows on AMD64 (aka x86_64, Intel64, EM64T, etc)
win-ia64 (64bit Windows on Itanium)
win32 (all others - specifically, sys.platform is returned)
For other non-POSIX platforms, currently just returns 'sys.platform'.
"""
if os.name == 'nt':
# sniff sys.version for architecture.
prefix = " bit ("
i = sys.version.find(prefix)
if i == -1:
return sys.platform
j = sys.version.find(")", i)
look = sys.version[i+len(prefix):j].lower()
if look == 'amd64':
return 'win-amd64'
if look == 'itanium':
return 'win-ia64'
return sys.platform
if os.name != "posix" or not hasattr(os, 'uname'):
# XXX what about the architecture? NT is Intel or Alpha,
# Mac OS is M68k or PPC, etc.
return sys.platform
# Try to distinguish various flavours of Unix
osname, host, release, version, machine = os.uname()
# Convert the OS name to lowercase, remove '/' characters
# (to accommodate BSD/OS), and translate spaces (for "Power Macintosh")
osname = osname.lower().replace('/', '')
machine = machine.replace(' ', '_')
machine = machine.replace('/', '-')
if osname[:5] == "linux":
# At least on Linux/Intel, 'machine' is the processor --
# i386, etc.
# XXX what about Alpha, SPARC, etc?
return "%s-%s" % (osname, machine)
elif osname[:5] == "sunos":
if release[0] >= "5": # SunOS 5 == Solaris 2
osname = "solaris"
release = "%d.%s" % (int(release[0]) - 3, release[2:])
# fall through to standard osname-release-machine representation
elif osname[:4] == "irix": # could be "irix64"!
return "%s-%s" % (osname, release)
elif osname[:3] == "aix":
return "%s-%s.%s" % (osname, version, release)
elif osname[:6] == "cygwin":
osname = "cygwin"
rel_re = re.compile(r'[\d.]+')
m = rel_re.match(release)
if m:
release = m.group()
elif osname[:6] == "darwin":
#
# For our purposes, we'll assume that the system version from
# distutils' perspective is what MACOSX_DEPLOYMENT_TARGET is set
# to. This makes the compatibility story a bit more sane because the
# machine is going to compile and link as if it were
# MACOSX_DEPLOYMENT_TARGET.
cfgvars = get_config_vars()
macver = cfgvars.get('MACOSX_DEPLOYMENT_TARGET')
if True:
# Always calculate the release of the running machine,
# needed to determine if we can build fat binaries or not.
macrelease = macver
# Get the system version. Reading this plist is a documented
# way to get the system version (see the documentation for
# the Gestalt Manager)
try:
f = open('/System/Library/CoreServices/SystemVersion.plist')
except IOError:
# We're on a plain darwin box, fall back to the default
# behaviour.
pass
else:
try:
m = re.search(r'<key>ProductUserVisibleVersion</key>\s*'
r'<string>(.*?)</string>', f.read())
finally:
f.close()
if m is not None:
macrelease = '.'.join(m.group(1).split('.')[:2])
# else: fall back to the default behaviour
if not macver:
macver = macrelease
if macver:
release = macver
osname = "macosx"
if ((macrelease + '.') >= '10.4.' and
'-arch' in get_config_vars().get('CFLAGS', '').strip()):
# The universal build will build fat binaries, but not on
# systems before 10.4
#
# Try to detect 4-way universal builds, those have machine-type
# 'universal' instead of 'fat'.
machine = 'fat'
cflags = get_config_vars().get('CFLAGS')
                archs = re.findall(r'-arch\s+(\S+)', cflags)
archs = tuple(sorted(set(archs)))
if len(archs) == 1:
machine = archs[0]
elif archs == ('i386', 'ppc'):
machine = 'fat'
elif archs == ('i386', 'x86_64'):
machine = 'intel'
elif archs == ('i386', 'ppc', 'x86_64'):
machine = 'fat3'
elif archs == ('ppc64', 'x86_64'):
machine = 'fat64'
elif archs == ('i386', 'ppc', 'ppc64', 'x86_64'):
machine = 'universal'
else:
raise ValueError(
"Don't know machine value for archs=%r" % (archs,))
elif machine == 'i386':
# On OSX the machine type returned by uname is always the
# 32-bit variant, even if the executable architecture is
# the 64-bit variant
if sys.maxsize >= 2**32:
machine = 'x86_64'
elif machine in ('PowerPC', 'Power_Macintosh'):
# Pick a sane name for the PPC architecture.
# See 'i386' case
if sys.maxsize >= 2**32:
machine = 'ppc64'
else:
machine = 'ppc'
return "%s-%s-%s" % (osname, release, machine)
def get_python_version():
return _PY_VERSION_SHORT
def _print_dict(title, data):
for index, (key, value) in enumerate(sorted(data.items())):
if index == 0:
print('%s: ' % (title))
print('\t%s = "%s"' % (key, value))
def _main():
"""Display all information sysconfig detains."""
print('Platform: "%s"' % get_platform())
print('Python version: "%s"' % get_python_version())
print('Current installation scheme: "%s"' % _get_default_scheme())
print()
_print_dict('Paths', get_paths())
print()
_print_dict('Variables', get_config_vars())
if __name__ == '__main__':
_main()
|
endolith/scikit-image | refs/heads/master | skimage/transform/tests/__init__.py | 672 | from ..._shared.testing import setup_test, teardown_test
def setup():
setup_test()
def teardown():
teardown_test()
|
platinhom/CADDHom | refs/heads/master | python/bioinformatics/blastp.py | 1 | """
Taken from ProDy
(http://www.csb.pitt.edu/prody/_modules/prody/proteins/blastpdb.html)
"""
import re
import time
import urllib2
import xml.etree.cElementTree as etree
from urllib import urlencode
def blast_pdb(sequence, nhits=250, expect=1e-10, timeout=60, pause=1):
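    """Submit `sequence` as a blastp query against the PDB database via
    NCBI's Blast.cgi, poll every `pause` seconds until the search is
    ready or `timeout` seconds have elapsed, write the raw XML reply to
    'blastp.xml' and return the parsed ElementTree root."""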
query = {
'DATABASE': 'pdb',
'ENTREZ_QUERY': '(none)',
'PROGRAM': 'blastp',
'EXPECT': expect,
'HITLIST_SIZE': nhits,
'CMD': 'Put',
'QUERY': sequence
}
url = 'http://blast.ncbi.nlm.nih.gov/Blast.cgi'
data = urlencode(query)
request = urllib2.Request(
url, data=data, headers={'User-agent': 'protutils'}
)
response = urllib2.urlopen(request)
html = response.read()
    m = re.search(r'RID =\s?(.*?)\n', html)
if m:
rid = m.group(1)
else:
raise Exception('Could not parse response.')
query = {
'ALIGNMENTS': 500,
'DESCRIPTIONS': 500,
'FORMAT_TYPE': 'XML',
'RID': rid,
'CMD': 'Get'
}
data = urlencode(query)
slept = 0
while slept < timeout:
request = urllib2.Request(
url, data=data, headers={'User-agent': 'protutils'}
)
response = urllib2.urlopen(request)
results = response.read()
m = re.search('Status=(.*?)\n', results)
if not m:
break
elif m.group(1).strip().upper() == 'READY':
break
else:
time.sleep(pause)
slept += pause
with open('blastp.xml', 'w') as f:
f.write(results)
return etree.XML(results)
def xml_dict(root, tag_prefix):
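    """Map each child of `root` to a dict entry keyed by its tag with
    `tag_prefix` stripped; childless elements map to their text, while
    non-leaf elements map to the element itself."""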
d = {}
regex = re.compile(r'{0}(.*)'.format(tag_prefix))
for element in root:
tag = element.tag
m = regex.search(tag)
if m:
key = m.group(1)
if len(element) == 0:
d[key] = element.text
else:
d[key] = element
return d
class BLASTPDBRecord(object):
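    """Parsed result of a blastp search of `sequence` against the PDB;
    hits are stored in self.hits, sorted by percent identity with the
    best match first."""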
def __init__(self, sequence, nhits=250, expect=1e-10, timeout=60, pause=1):
self.qseq = sequence
root = blast_pdb(sequence, nhits, expect, timeout, pause)
root = xml_dict(root, 'BlastOutput_')
self.query_id = root['query-ID']
        if len(sequence) != int(root['query-len']):
            raise ValueError('Sequence length does not match query length')
self.param = xml_dict(root['param'][0], 'Parameters_')
hits = []
for elem in root['iterations']:
for child in xml_dict(elem, 'Iteration_')['hits']:
hit = xml_dict(child, 'Hit_')
data = xml_dict(hit['hsps'][0], 'Hsp_')
for key in ['align-len', 'gaps', 'hit-frame', 'hit-from',
'hit-to', 'identity', 'positive', 'query-frame',
'query-from', 'query-to']:
data[key] = int(data[key])
for key in ['evalue', 'bit-score', 'score']:
data[key] = float(data[key])
p_identity = (data['identity'] /
float(data['query-to'] - data['query-from'] + 1)
* 100)
p_overlap = ((data['align-len'] - data['gaps']) /
float(len(sequence)) * 100)
data['percent_identity'] = p_identity
data['percent_overlap'] = p_overlap
__, gi, __, pdb, chain = hit['id'].split('|')
data['gi'] = gi
data['pdb'] = pdb
data['chain'] = chain
data['def'] = hit['def']
hits.append(data)
hits.sort(key=lambda x: x['percent_identity'], reverse=True)
self.hits = hits
def get_hits(self, percent_identity=90.0, percent_overlap=70.0):
hits = {}
for hit in self.hits:
if hit['percent_identity'] < percent_identity:
break
if hit['percent_overlap'] < percent_overlap:
continue
key = '{pdb}_{chain}'.format(**hit)
hits[key] = hit
return hits
def get_best(self):
return self.hits[0]
def ranking(self):
return {
'{pdb}_{chain}'.format(**hit): hit[
'percent_identity'
] for hit in self.hits
} |
GeyerA/android_external_chromium_org | refs/heads/master | tools/telemetry/telemetry/core/platform/platform_backend.py | 23 | # Copyright (c) 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
class PlatformBackend(object):
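    """Interface for platform-specific measurements; platforms override
    the capability checks and the measurement hooks they support."""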
def IsRawDisplayFrameRateSupported(self):
return False
# pylint: disable=W0613
def StartRawDisplayFrameRateMeasurement(self):
raise NotImplementedError()
def StopRawDisplayFrameRateMeasurement(self):
raise NotImplementedError()
def GetRawDisplayFrameRateMeasurements(self):
raise NotImplementedError()
def SetFullPerformanceModeEnabled(self, enabled): # pylint: disable=W0613
pass
def CanMonitorThermalThrottling(self):
return False
def IsThermallyThrottled(self):
raise NotImplementedError()
def HasBeenThermallyThrottled(self):
raise NotImplementedError()
def GetSystemCommitCharge(self):
raise NotImplementedError()
def GetMemoryStats(self, pid): # pylint: disable=W0613
return {}
def GetIOStats(self, pid): # pylint: disable=W0613
return {}
def GetChildPids(self, pid): # pylint: disable=W0613
raise NotImplementedError()
def GetCommandLine(self, pid):
raise NotImplementedError()
def GetOSName(self):
raise NotImplementedError()
def GetOSVersionName(self):
return None
def CanFlushIndividualFilesFromSystemCache(self):
raise NotImplementedError()
def FlushEntireSystemCache(self):
raise NotImplementedError()
def FlushSystemCacheForDirectory(self, directory, ignoring=None):
raise NotImplementedError()
|
c0710204/edx-platform | refs/heads/master | common/lib/xmodule/xmodule/tests/test_mako_module.py | 261 | """ Test mako_module.py """
from unittest import TestCase
from mock import Mock
from xmodule.mako_module import MakoModuleDescriptor
class MakoModuleTest(TestCase):
""" Test MakoModuleDescriptor """
def test_render_template_check(self):
mock_system = Mock()
mock_system.render_template = None
with self.assertRaises(TypeError):
MakoModuleDescriptor(mock_system, {})
del mock_system.render_template
with self.assertRaises(TypeError):
MakoModuleDescriptor(mock_system, {})
|
hogarthj/ansible | refs/heads/devel | lib/ansible/utils/module_docs_fragments/validate.py | 30 | # Copyright (c) 2015 Ansible, Inc
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
class ModuleDocFragment(object):
# Standard documentation fragment
DOCUMENTATION = '''
options:
validate:
description:
- The validation command to run before copying into place. The path to the file to
validate is passed in via '%s' which must be present as in the example below.
The command is passed securely so shell features like expansion and pipes won't work.
'''
|
edlunde-dnastar/StarCluster | refs/heads/vanilla_improvements | starcluster/plugins/pypkginstaller.py | 18 | # Copyright 2009-2014 Justin Riley
#
# This file is part of StarCluster.
#
# StarCluster is free software: you can redistribute it and/or modify it under
# the terms of the GNU Lesser General Public License as published by the Free
# Software Foundation, either version 3 of the License, or (at your option) any
# later version.
#
# StarCluster is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
# details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with StarCluster. If not, see <http://www.gnu.org/licenses/>.
"""Install python packages using pip
Packages are downloaded/installed in parallel, allowing for faster installs
when using many nodes.
For example to install the flask and SQLAlchemy packages on all the nodes::
[plugin webapp-packages]
setup_class = starcluster.plugins.pypkginstaller.PyPkgInstaller
packages = flask, SQLAlchemy
It can also be used to install the development version of packages from
github, for instance if you want to install the master branch of IPython
and the latest released version of some dependencies::
[plugin ipython-dev]
setup_class = starcluster.plugins.pypkginstaller.PyPkgInstaller
install_command = pip install -U %s
packages = pyzmq,
python-msgpack,
git+http://github.com/ipython/ipython.git
"""
from starcluster.clustersetup import DefaultClusterSetup
from starcluster.logger import log
from starcluster.utils import print_timing
class PyPkgInstaller(DefaultClusterSetup):
"""Install Python packages with pip."""
def __init__(self, packages="", install_command="pip install %s"):
super(PyPkgInstaller, self).__init__()
self.install_command = install_command
self.packages = [p.strip() for p in packages.split(",") if p.strip()]
@print_timing("PyPkgInstaller")
def install_packages(self, nodes, dest='all nodes'):
log.info("Installing Python packages on %s:" % dest)
commands = [self.install_command % p for p in self.packages]
for command in commands:
log.info("$ " + command)
cmd = "\n".join(commands)
for node in nodes:
self.pool.simple_job(node.ssh.execute, (cmd,), jobid=node.alias)
self.pool.wait(len(nodes))
def run(self, nodes, master, user, user_shell, volumes):
self.install_packages(nodes)
def on_add_node(self, node, nodes, master, user, user_shell, volumes):
self.install_packages([node], dest=node.alias)
def on_remove_node(self, node, nodes, master, user, user_shell, volumes):
raise NotImplementedError("on_remove_node method not implemented")
|
lampwins/netbox | refs/heads/develop | netbox/extras/management/commands/nbshell.py | 1 | import code
import platform
import sys
from django import get_version
from django.apps import apps
from django.conf import settings
from django.core.management.base import BaseCommand
APPS = ['circuits', 'dcim', 'extras', 'ipam', 'secrets', 'tenancy', 'users', 'virtualization']
BANNER_TEXT = """### NetBox interactive shell ({node})
### Python {python} | Django {django} | NetBox {netbox}
### lsmodels() will show available models. Use help(<model>) for more info.""".format(
node=platform.node(),
python=platform.python_version(),
django=get_version(),
netbox=settings.VERSION
)
class Command(BaseCommand):
help = "Start the Django shell with all NetBox models already imported"
django_models = {}
def _lsmodels(self):
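        """Print the models imported into the shell, grouped by app."""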
for app, models in self.django_models.items():
app_name = apps.get_app_config(app).verbose_name
print('{}:'.format(app_name))
for m in models:
print(' {}'.format(m))
def get_namespace(self):
namespace = {}
# Gather Django models and constants from each app
for app in APPS:
self.django_models[app] = []
# Load models from each app
for model in apps.get_app_config(app).get_models():
namespace[model.__name__] = model
self.django_models[app].append(model.__name__)
# Constants
try:
app_constants = sys.modules['{}.constants'.format(app)]
for name in dir(app_constants):
namespace[name] = getattr(app_constants, name)
except KeyError:
pass
# Load convenience commands
namespace.update({
'lsmodels': self._lsmodels,
})
return namespace
    def handle(self, **options):
        # code.interact() always returns None, so there is nothing to return
        code.interact(banner=BANNER_TEXT, local=self.get_namespace())
|
TheBiggerGuy/pypcappy | refs/heads/master | pypcappy/blocks/__init__.py | 1 | #!/usr/bin/env python3
from importlib import import_module
from .abstract import AbstractBlock
"""
from .sectionHeaderBlock import SectionHeaderBlock
from .customBlock import CustomBlock
from .interfaceDescriptionBlock import InterfaceDescriptionBlock
from .enhancedPacketBlock import EnhancedPacketBlock
BLOCK_TYPES = {
0x0A0D0D0A: SectionHeaderBlock,
0x00000001: InterfaceDescriptionBlock,
0x00000002: ToDoBlock, #'!OBSOLETE! Packet Block',
0x00000003: ToDoBlock, #'Simple Packet Block (SPB)',
0x00000004: ToDoBlock, #'Name Resolution Block (NRB)',
0x00000005: ToDoBlock, #'Interface Statistics Block (ISB)',
0x00000006: EnhancedPacketBlock,
0x00000BAD: CustomBlock,
0x40000BAD: CustomBlock
}
"""
class ToDoBlock(AbstractBlock):
TYPE_IDS = []
def __init__(self, byteorder, block_type, data):
super().__init__(byteorder, block_type, data, skip=True)
def block_type_name(self):
return 'TODO Block'
class BlockLibary(object):
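    """Registry mapping pcapng block type ids to block classes; lookups
    of unregistered ids fall back to ToDoBlock."""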
def __init__(self):
self.blocks = {}
def add(self, type_id, block):
if type_id < 0 or type_id > 0xFFFFFFFF:
            raise ValueError('Invalid block type id: ' + str(type_id))
self.blocks[type_id] = block
def __getitem__(self, type_id):
if type_id < 0 or type_id > 0xFFFFFFFF:
            raise ValueError('Invalid block type id: ' + str(type_id))
return self.blocks.get(type_id, ToDoBlock)
BLOCK_TYPES = BlockLibary()
for block_name in ['Abstract', 'SectionHeader', 'Custom', 'InterfaceDescription', 'EnhancedPacket']:
module = import_module('.' + block_name.lower(), package='pypcappy.blocks')
block = getattr(module, block_name + 'Block')
for type_id in block.TYPE_IDS:
BLOCK_TYPES.add(type_id, block)
|
ThomasFeher/audacity | refs/heads/master | lib-src/lv2/lv2/plugins/eg-amp.lv2/waflib/extras/autowaf.py | 176 | #! /usr/bin/env python
# encoding: utf-8
# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file
import glob
import os
import subprocess
import sys
from waflib import Configure,Context,Logs,Node,Options,Task,Utils
from waflib.TaskGen import feature,before,after
global g_is_child
g_is_child=False
global g_step
g_step=0
@feature('c','cxx')
@after('apply_incpaths')
def include_config_h(self):
self.env.append_value('INCPATHS',self.bld.bldnode.abspath())
def set_options(opt,debug_by_default=False):
global g_step
if g_step>0:
return
dirs_options=opt.add_option_group('Installation directories','')
for k in('--prefix','--destdir'):
option=opt.parser.get_option(k)
if option:
opt.parser.remove_option(k)
dirs_options.add_option(option)
dirs_options.add_option('--bindir',type='string',help="Executable programs [Default: PREFIX/bin]")
dirs_options.add_option('--configdir',type='string',help="Configuration data [Default: PREFIX/etc]")
dirs_options.add_option('--datadir',type='string',help="Shared data [Default: PREFIX/share]")
dirs_options.add_option('--includedir',type='string',help="Header files [Default: PREFIX/include]")
dirs_options.add_option('--libdir',type='string',help="Libraries [Default: PREFIX/lib]")
dirs_options.add_option('--mandir',type='string',help="Manual pages [Default: DATADIR/man]")
dirs_options.add_option('--docdir',type='string',help="HTML documentation [Default: DATADIR/doc]")
if debug_by_default:
opt.add_option('--optimize',action='store_false',default=True,dest='debug',help="Build optimized binaries")
else:
opt.add_option('--debug',action='store_true',default=False,dest='debug',help="Build debuggable binaries")
opt.add_option('--pardebug',action='store_true',default=False,dest='pardebug',help="Build parallel-installable debuggable libraries with D suffix")
opt.add_option('--grind',action='store_true',default=False,dest='grind',help="Run tests in valgrind")
opt.add_option('--strict',action='store_true',default=False,dest='strict',help="Use strict compiler flags and show all warnings")
opt.add_option('--ultra-strict',action='store_true',default=False,dest='ultra_strict',help="Use even stricter compiler flags (likely to trigger many warnings in library headers)")
opt.add_option('--docs',action='store_true',default=False,dest='docs',help="Build documentation - requires doxygen")
opt.add_option('--lv2-user',action='store_true',default=False,dest='lv2_user',help="Install LV2 bundles to user location")
opt.add_option('--lv2-system',action='store_true',default=False,dest='lv2_system',help="Install LV2 bundles to system location")
dirs_options.add_option('--lv2dir',type='string',help="LV2 bundles [Default: LIBDIR/lv2]")
g_step=1
def check_header(conf,lang,name,define='',mandatory=True):
includes=''
if sys.platform=="darwin":
includes='/opt/local/include'
if lang=='c':
check_func=conf.check_cc
elif lang=='cxx':
check_func=conf.check_cxx
else:
Logs.error("Unknown header language `%s'"%lang)
return
if define!='':
check_func(header_name=name,includes=includes,define_name=define,mandatory=mandatory)
else:
check_func(header_name=name,includes=includes,mandatory=mandatory)
def nameify(name):
return name.replace('/','_').replace('++','PP').replace('-','_').replace('.','_')
def define(conf,var_name,value):
conf.define(var_name,value)
conf.env[var_name]=value
def check_pkg(conf,name,**args):
if args['uselib_store'].lower()in conf.env['AUTOWAF_LOCAL_LIBS']:
return
class CheckType:
OPTIONAL=1
MANDATORY=2
var_name='CHECKED_'+nameify(args['uselib_store'])
check=not var_name in conf.env
mandatory=not'mandatory'in args or args['mandatory']
if not check and'atleast_version'in args:
checked_version=conf.env['VERSION_'+name]
if checked_version and checked_version<args['atleast_version']:
check=True;
if not check and mandatory and conf.env[var_name]==CheckType.OPTIONAL:
check=True;
if check:
found=None
pkg_var_name='PKG_'+name.replace('-','_')
pkg_name=name
if conf.env.PARDEBUG:
args['mandatory']=False
found=conf.check_cfg(package=pkg_name+'D',args="--cflags --libs",**args)
if found:
pkg_name+='D'
if mandatory:
args['mandatory']=True
if not found:
found=conf.check_cfg(package=pkg_name,args="--cflags --libs",**args)
if found:
conf.env[pkg_var_name]=pkg_name
if'atleast_version'in args:
conf.env['VERSION_'+name]=args['atleast_version']
if mandatory:
conf.env[var_name]=CheckType.MANDATORY
else:
conf.env[var_name]=CheckType.OPTIONAL
def normpath(path):
if sys.platform=='win32':
return os.path.normpath(path).replace('\\','/')
else:
return os.path.normpath(path)
def configure(conf):
global g_step
if g_step>1:
return
def append_cxx_flags(flags):
conf.env.append_value('CFLAGS',flags)
conf.env.append_value('CXXFLAGS',flags)
print('')
display_header('Global Configuration')
if Options.options.docs:
conf.load('doxygen')
conf.env['DOCS']=Options.options.docs
conf.env['DEBUG']=Options.options.debug or Options.options.pardebug
conf.env['PARDEBUG']=Options.options.pardebug
conf.env['PREFIX']=normpath(os.path.abspath(os.path.expanduser(conf.env['PREFIX'])))
def config_dir(var,opt,default):
if opt:
conf.env[var]=normpath(opt)
else:
conf.env[var]=normpath(default)
opts=Options.options
prefix=conf.env['PREFIX']
config_dir('BINDIR',opts.bindir,os.path.join(prefix,'bin'))
config_dir('SYSCONFDIR',opts.configdir,os.path.join(prefix,'etc'))
config_dir('DATADIR',opts.datadir,os.path.join(prefix,'share'))
config_dir('INCLUDEDIR',opts.includedir,os.path.join(prefix,'include'))
config_dir('LIBDIR',opts.libdir,os.path.join(prefix,'lib'))
config_dir('MANDIR',opts.mandir,os.path.join(conf.env['DATADIR'],'man'))
config_dir('DOCDIR',opts.docdir,os.path.join(conf.env['DATADIR'],'doc'))
if Options.options.lv2dir:
conf.env['LV2DIR']=Options.options.lv2dir
elif Options.options.lv2_user:
if sys.platform=="darwin":
conf.env['LV2DIR']=os.path.join(os.getenv('HOME'),'Library/Audio/Plug-Ins/LV2')
elif sys.platform=="win32":
conf.env['LV2DIR']=os.path.join(os.getenv('APPDATA'),'LV2')
else:
conf.env['LV2DIR']=os.path.join(os.getenv('HOME'),'.lv2')
elif Options.options.lv2_system:
if sys.platform=="darwin":
conf.env['LV2DIR']='/Library/Audio/Plug-Ins/LV2'
elif sys.platform=="win32":
conf.env['LV2DIR']=os.path.join(os.getenv('COMMONPROGRAMFILES'),'LV2')
else:
conf.env['LV2DIR']=os.path.join(conf.env['LIBDIR'],'lv2')
else:
conf.env['LV2DIR']=os.path.join(conf.env['LIBDIR'],'lv2')
conf.env['LV2DIR']=normpath(conf.env['LV2DIR'])
if Options.options.docs:
doxygen=conf.find_program('doxygen')
if not doxygen:
conf.fatal("Doxygen is required to build with --docs")
dot=conf.find_program('dot')
if not dot:
conf.fatal("Graphviz (dot) is required to build with --docs")
if Options.options.debug:
if conf.env['MSVC_COMPILER']:
conf.env['CFLAGS']=['/Od','/Zi','/MTd']
conf.env['CXXFLAGS']=['/Od','/Zi','/MTd']
conf.env['LINKFLAGS']=['/DEBUG']
else:
conf.env['CFLAGS']=['-O0','-g']
conf.env['CXXFLAGS']=['-O0','-g']
else:
if conf.env['MSVC_COMPILER']:
conf.env['CFLAGS']=['/MD']
conf.env['CXXFLAGS']=['/MD']
append_cxx_flags(['-DNDEBUG'])
if Options.options.ultra_strict:
Options.options.strict=True
conf.env.append_value('CFLAGS',['-Wredundant-decls','-Wstrict-prototypes','-Wmissing-prototypes','-Wcast-qual'])
conf.env.append_value('CXXFLAGS',['-Wcast-qual'])
if Options.options.strict:
conf.env.append_value('CFLAGS',['-pedantic','-Wshadow'])
conf.env.append_value('CXXFLAGS',['-ansi','-Wnon-virtual-dtor','-Woverloaded-virtual'])
append_cxx_flags(['-Wall','-Wcast-align','-Wextra','-Wmissing-declarations','-Wno-unused-parameter','-Wstrict-overflow','-Wundef','-Wwrite-strings','-fstrict-overflow'])
if not conf.check_cc(fragment='''
#ifndef __clang__
#error
#endif
int main() { return 0; }''',features='c',mandatory=False,execute=False,msg='Checking for clang'):
append_cxx_flags(['-Wlogical-op','-Wsuggest-attribute=noreturn','-Wunsafe-loop-optimizations'])
if not conf.env['MSVC_COMPILER']:
append_cxx_flags(['-fshow-column'])
conf.env.prepend_value('CFLAGS','-I'+os.path.abspath('.'))
conf.env.prepend_value('CXXFLAGS','-I'+os.path.abspath('.'))
display_msg(conf,"Install prefix",conf.env['PREFIX'])
display_msg(conf,"Debuggable build",str(conf.env['DEBUG']))
display_msg(conf,"Build documentation",str(conf.env['DOCS']))
print('')
g_step=2
def set_c99_mode(conf):
if conf.env.MSVC_COMPILER:
conf.env.append_unique('CFLAGS',['-TP'])
else:
conf.env.append_unique('CFLAGS',['-std=c99'])
def set_local_lib(conf,name,has_objects):
var_name='HAVE_'+nameify(name.upper())
define(conf,var_name,1)
if has_objects:
if type(conf.env['AUTOWAF_LOCAL_LIBS'])!=dict:
conf.env['AUTOWAF_LOCAL_LIBS']={}
conf.env['AUTOWAF_LOCAL_LIBS'][name.lower()]=True
else:
if type(conf.env['AUTOWAF_LOCAL_HEADERS'])!=dict:
conf.env['AUTOWAF_LOCAL_HEADERS']={}
conf.env['AUTOWAF_LOCAL_HEADERS'][name.lower()]=True
def append_property(obj,key,val):
if hasattr(obj,key):
setattr(obj,key,getattr(obj,key)+val)
else:
setattr(obj,key,val)
def use_lib(bld,obj,libs):
abssrcdir=os.path.abspath('.')
libs_list=libs.split()
for l in libs_list:
in_headers=l.lower()in bld.env['AUTOWAF_LOCAL_HEADERS']
in_libs=l.lower()in bld.env['AUTOWAF_LOCAL_LIBS']
if in_libs:
append_property(obj,'use',' lib%s '%l.lower())
append_property(obj,'framework',bld.env['FRAMEWORK_'+l])
if in_headers or in_libs:
inc_flag='-iquote '+os.path.join(abssrcdir,l.lower())
for f in['CFLAGS','CXXFLAGS']:
if not inc_flag in bld.env[f]:
bld.env.prepend_value(f,inc_flag)
else:
append_property(obj,'uselib',' '+l)
@feature('c','cxx')
@before('apply_link')
def version_lib(self):
if sys.platform=='win32':
self.vnum=None
if self.env['PARDEBUG']:
applicable=['cshlib','cxxshlib','cstlib','cxxstlib']
if[x for x in applicable if x in self.features]:
self.target=self.target+'D'
def set_lib_env(conf,name,version):
'Set up environment for local library as if found via pkg-config.'
NAME=name.upper()
major_ver=version.split('.')[0]
pkg_var_name='PKG_'+name.replace('-','_')+'_'+major_ver
lib_name='%s-%s'%(name,major_ver)
if conf.env.PARDEBUG:
lib_name+='D'
conf.env[pkg_var_name]=lib_name
conf.env['INCLUDES_'+NAME]=['${INCLUDEDIR}/%s-%s'%(name,major_ver)]
conf.env['LIBPATH_'+NAME]=[conf.env.LIBDIR]
conf.env['LIB_'+NAME]=[lib_name]
def display_header(title):
Logs.pprint('BOLD',title)
def display_msg(conf,msg,status=None,color=None):
color='CYAN'
if type(status)==bool and status or status=="True":
color='GREEN'
elif type(status)==bool and not status or status=="False":
color='YELLOW'
Logs.pprint('BOLD'," *",sep='')
Logs.pprint('NORMAL',"%s"%msg.ljust(conf.line_just-3),sep='')
Logs.pprint('BOLD',":",sep='')
Logs.pprint(color,status)
def link_flags(env,lib):
return' '.join(map(lambda x:env['LIB_ST']%x,env['LIB_'+lib]))
def compile_flags(env,lib):
return' '.join(map(lambda x:env['CPPPATH_ST']%x,env['INCLUDES_'+lib]))
def set_recursive():
global g_is_child
g_is_child=True
def is_child():
global g_is_child
return g_is_child
def build_pc(bld,name,version,version_suffix,libs,subst_dict={}):
'''Build a pkg-config file for a library.
name -- uppercase variable name (e.g. 'SOMENAME')
version -- version string (e.g. '1.2.3')
version_suffix -- name version suffix (e.g. '2')
libs -- string/list of dependencies (e.g. 'LIBFOO GLIB')
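	For example (illustrative), build_pc(bld, 'LIBFOO', '1.2.3', '2', 'GLIB')
	generates libfoo-2.pc with GLIB link and compile flags substituted in.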
'''
pkg_prefix=bld.env['PREFIX']
if pkg_prefix[-1]=='/':
pkg_prefix=pkg_prefix[:-1]
target=name.lower()
if version_suffix!='':
target+='-'+version_suffix
if bld.env['PARDEBUG']:
target+='D'
target+='.pc'
libdir=bld.env['LIBDIR']
if libdir.startswith(pkg_prefix):
libdir=libdir.replace(pkg_prefix,'${exec_prefix}')
includedir=bld.env['INCLUDEDIR']
if includedir.startswith(pkg_prefix):
includedir=includedir.replace(pkg_prefix,'${prefix}')
obj=bld(features='subst',source='%s.pc.in'%name.lower(),target=target,install_path=os.path.join(bld.env['LIBDIR'],'pkgconfig'),exec_prefix='${prefix}',PREFIX=pkg_prefix,EXEC_PREFIX='${prefix}',LIBDIR=libdir,INCLUDEDIR=includedir)
if type(libs)!=list:
libs=libs.split()
subst_dict[name+'_VERSION']=version
subst_dict[name+'_MAJOR_VERSION']=version[0:version.find('.')]
for i in libs:
subst_dict[i+'_LIBS']=link_flags(bld.env,i)
lib_cflags=compile_flags(bld.env,i)
if lib_cflags=='':
lib_cflags=' '
subst_dict[i+'_CFLAGS']=lib_cflags
obj.__dict__.update(subst_dict)
def build_dir(name,subdir):
if is_child():
return os.path.join('build',name,subdir)
else:
return os.path.join('build',subdir)
def make_simple_dox(name):
name=name.lower()
NAME=name.upper()
try:
top=os.getcwd()
os.chdir(build_dir(name,'doc/html'))
page='group__%s.html'%name
if not os.path.exists(page):
return
for i in[['%s_API '%NAME,''],['%s_DEPRECATED '%NAME,''],['group__%s.html'%name,''],[' ',''],['<script.*><\/script>',''],['<hr\/><a name="details" id="details"><\/a><h2>.*<\/h2>',''],['<link href=\"tabs.css\" rel=\"stylesheet\" type=\"text\/css\"\/>',''],['<img class=\"footer\" src=\"doxygen.png\" alt=\"doxygen\"\/>','Doxygen']]:
os.system("sed -i 's/%s/%s/g' %s"%(i[0],i[1],page))
os.rename('group__%s.html'%name,'index.html')
for i in(glob.glob('*.png')+glob.glob('*.html')+glob.glob('*.js')+glob.glob('*.css')):
if i!='index.html'and i!='style.css':
os.remove(i)
os.chdir(top)
os.chdir(build_dir(name,'doc/man/man3'))
for i in glob.glob('*.3'):
os.system("sed -i 's/%s_API //' %s"%(NAME,i))
for i in glob.glob('_*'):
os.remove(i)
os.chdir(top)
	except Exception as e:
Logs.error("Failed to fix up %s documentation: %s"%(name,e))
def build_dox(bld,name,version,srcdir,blddir,outdir='',versioned=True):
if not bld.env['DOCS']:
return
if is_child():
src_dir=os.path.join(srcdir,name.lower())
doc_dir=os.path.join(blddir,name.lower(),'doc')
else:
src_dir=srcdir
doc_dir=os.path.join(blddir,'doc')
subst_tg=bld(features='subst',source='doc/reference.doxygen.in',target='doc/reference.doxygen',install_path='',name='doxyfile')
subst_dict={name+'_VERSION':version,name+'_SRCDIR':os.path.abspath(src_dir),name+'_DOC_DIR':os.path.abspath(doc_dir)}
subst_tg.__dict__.update(subst_dict)
subst_tg.post()
docs=bld(features='doxygen',doxyfile='doc/reference.doxygen')
docs.post()
outname=name.lower()
if versioned:
outname+='-%d'%int(version[0:version.find('.')])
bld.install_files(os.path.join('${DOCDIR}',outname,outdir,'html'),bld.path.get_bld().ant_glob('doc/html/*'))
for i in range(1,8):
bld.install_files('${MANDIR}/man%d'%i,bld.path.get_bld().ant_glob('doc/man/man%d/*'%i,excl='**/_*'))
def build_version_files(header_path,source_path,domain,major,minor,micro):
header_path=os.path.abspath(header_path)
source_path=os.path.abspath(source_path)
text="int "+domain+"_major_version = "+str(major)+";\n"
text+="int "+domain+"_minor_version = "+str(minor)+";\n"
text+="int "+domain+"_micro_version = "+str(micro)+";\n"
try:
o=open(source_path,'w')
o.write(text)
o.close()
except IOError:
Logs.error('Failed to open %s for writing\n'%source_path)
sys.exit(-1)
text="#ifndef __"+domain+"_version_h__\n"
text+="#define __"+domain+"_version_h__\n"
text+="extern const char* "+domain+"_revision;\n"
text+="extern int "+domain+"_major_version;\n"
text+="extern int "+domain+"_minor_version;\n"
text+="extern int "+domain+"_micro_version;\n"
text+="#endif /* __"+domain+"_version_h__ */\n"
try:
o=open(header_path,'w')
o.write(text)
o.close()
except IOError:
Logs.warn('Failed to open %s for writing\n'%header_path)
sys.exit(-1)
return None
def build_i18n_pot(bld,srcdir,dir,name,sources,copyright_holder=None):
Logs.info('Generating pot file from %s'%name)
pot_file='%s.pot'%name
cmd=['xgettext','--keyword=_','--keyword=N_','--keyword=S_','--from-code=UTF-8','-o',pot_file]
if copyright_holder:
cmd+=['--copyright-holder="%s"'%copyright_holder]
cmd+=sources
Logs.info('Updating '+pot_file)
subprocess.call(cmd,cwd=os.path.join(srcdir,dir))
def build_i18n_po(bld,srcdir,dir,name,sources,copyright_holder=None):
pwd=os.getcwd()
os.chdir(os.path.join(srcdir,dir))
pot_file='%s.pot'%name
po_files=glob.glob('po/*.po')
for po_file in po_files:
cmd=['msgmerge','--update',po_file,pot_file]
Logs.info('Updating '+po_file)
subprocess.call(cmd)
os.chdir(pwd)
def build_i18n_mo(bld,srcdir,dir,name,sources,copyright_holder=None):
pwd=os.getcwd()
os.chdir(os.path.join(srcdir,dir))
pot_file='%s.pot'%name
po_files=glob.glob('po/*.po')
for po_file in po_files:
mo_file=po_file.replace('.po','.mo')
cmd=['msgfmt','-c','-f','-o',mo_file,po_file]
Logs.info('Generating '+po_file)
subprocess.call(cmd)
os.chdir(pwd)
def build_i18n(bld,srcdir,dir,name,sources,copyright_holder=None):
build_i18n_pot(bld,srcdir,dir,name,sources,copyright_holder)
build_i18n_po(bld,srcdir,dir,name,sources,copyright_holder)
build_i18n_mo(bld,srcdir,dir,name,sources,copyright_holder)
def cd_to_build_dir(ctx,appname):
orig_dir=os.path.abspath(os.curdir)
top_level=(len(ctx.stack_path)>1)
if top_level:
os.chdir(os.path.join('build',appname))
else:
os.chdir('build')
Logs.pprint('GREEN',"Waf: Entering directory `%s'"%os.path.abspath(os.getcwd()))
def cd_to_orig_dir(ctx,child):
if child:
os.chdir(os.path.join('..','..'))
else:
os.chdir('..')
def pre_test(ctx,appname,dirs=['src']):
diropts=''
for i in dirs:
diropts+=' -d '+i
cd_to_build_dir(ctx,appname)
clear_log=open('lcov-clear.log','w')
try:
try:
subprocess.call(('lcov %s -z'%diropts).split(),stdout=clear_log,stderr=clear_log)
except:
Logs.warn('Failed to run lcov, no coverage report will be generated')
finally:
clear_log.close()
def post_test(ctx,appname,dirs=['src'],remove=['*boost*','c++*']):
diropts=''
for i in dirs:
diropts+=' -d '+i
coverage_log=open('lcov-coverage.log','w')
coverage_lcov=open('coverage.lcov','w')
coverage_stripped_lcov=open('coverage-stripped.lcov','w')
try:
try:
base='.'
if g_is_child:
base='..'
subprocess.call(('lcov -c %s -b %s'%(diropts,base)).split(),stdout=coverage_lcov,stderr=coverage_log)
subprocess.call(['lcov','--remove','coverage.lcov']+remove,stdout=coverage_stripped_lcov,stderr=coverage_log)
if not os.path.isdir('coverage'):
os.makedirs('coverage')
subprocess.call('genhtml -o coverage coverage-stripped.lcov'.split(),stdout=coverage_log,stderr=coverage_log)
except:
Logs.warn('Failed to run lcov, no coverage report will be generated')
finally:
coverage_stripped_lcov.close()
coverage_lcov.close()
coverage_log.close()
print('')
Logs.pprint('GREEN',"Waf: Leaving directory `%s'"%os.path.abspath(os.getcwd()))
top_level=(len(ctx.stack_path)>1)
if top_level:
cd_to_orig_dir(ctx,top_level)
print('')
Logs.pprint('BOLD','Coverage:',sep='')
print('<file://%s>\n\n'%os.path.abspath('coverage/index.html'))
def run_test(ctx,appname,test,desired_status=0,dirs=['src'],name='',header=False):
s=test
if type(test)==type([]):
		s=' '.join(test)
if header:
Logs.pprint('BOLD','** Test',sep='')
Logs.pprint('NORMAL','%s'%s)
cmd=test
if Options.options.grind:
cmd='valgrind '+test
if subprocess.call(cmd,shell=True)==desired_status:
Logs.pprint('GREEN','** Pass %s'%name)
return True
else:
Logs.pprint('RED','** FAIL %s'%name)
return False
def run_tests(ctx,appname,tests,desired_status=0,dirs=['src'],name='*',headers=False):
failures=0
diropts=''
for i in dirs:
diropts+=' -d '+i
for i in tests:
if not run_test(ctx,appname,i,desired_status,dirs,i,headers):
failures+=1
print('')
if failures==0:
Logs.pprint('GREEN','** Pass: All %s.%s tests passed'%(appname,name))
else:
Logs.pprint('RED','** FAIL: %d %s.%s tests failed'%(failures,appname,name))
def run_ldconfig(ctx):
if(ctx.cmd=='install'and not ctx.env['RAN_LDCONFIG']and ctx.env['LIBDIR']and not'DESTDIR'in os.environ and not Options.options.destdir):
try:
Logs.info("Waf: Running `/sbin/ldconfig %s'"%ctx.env['LIBDIR'])
subprocess.call(['/sbin/ldconfig',ctx.env['LIBDIR']])
ctx.env['RAN_LDCONFIG']=True
except:
pass
def write_news(name,in_files,out_file,top_entries=None,extra_entries=None):
import rdflib
import textwrap
from time import strftime,strptime
doap=rdflib.Namespace('http://usefulinc.com/ns/doap#')
dcs=rdflib.Namespace('http://ontologi.es/doap-changeset#')
rdfs=rdflib.Namespace('http://www.w3.org/2000/01/rdf-schema#')
foaf=rdflib.Namespace('http://xmlns.com/foaf/0.1/')
rdf=rdflib.Namespace('http://www.w3.org/1999/02/22-rdf-syntax-ns#')
m=rdflib.ConjunctiveGraph()
try:
for i in in_files:
m.parse(i,format='n3')
except:
Logs.warn('Error parsing data, unable to generate NEWS')
return
proj=m.value(None,rdf.type,doap.Project)
for f in m.triples([proj,rdfs.seeAlso,None]):
if f[2].endswith('.ttl'):
m.parse(f[2],format='n3')
entries={}
for r in m.triples([proj,doap.release,None]):
release=r[2]
revision=m.value(release,doap.revision,None)
date=m.value(release,doap.created,None)
blamee=m.value(release,dcs.blame,None)
changeset=m.value(release,dcs.changeset,None)
dist=m.value(release,doap['file-release'],None)
if revision and date and blamee and changeset:
entry='%s (%s) stable;\n'%(name,revision)
for i in m.triples([changeset,dcs.item,None]):
item=textwrap.wrap(m.value(i[2],rdfs.label,None),width=79)
entry+='\n * '+'\n '.join(item)
if dist and top_entries is not None:
if not str(dist)in top_entries:
top_entries[str(dist)]=[]
top_entries[str(dist)]+=['%s: %s'%(name,'\n '.join(item))]
if extra_entries:
for i in extra_entries[str(dist)]:
entry+='\n * '+i
entry+='\n\n --'
blamee_name=m.value(blamee,foaf.name,None)
blamee_mbox=m.value(blamee,foaf.mbox,None)
if blamee_name and blamee_mbox:
entry+=' %s <%s>'%(blamee_name,blamee_mbox.replace('mailto:',''))
entry+=' %s\n\n'%(strftime('%a, %d %b %Y %H:%M:%S +0000',strptime(date,'%Y-%m-%d')))
entries[(date,revision)]=entry
else:
Logs.warn('Ignored incomplete %s release description'%name)
if len(entries)>0:
news=open(out_file,'w')
for e in sorted(entries.keys(),reverse=True):
news.write(entries[e])
news.close()
|
baverman/supp | refs/heads/master | tests/test_assistant_location.py | 1 | import os
from supp.assistant import location, _loc
from supp.project import Project
from .helpers import sp
def tlocation(source, pos, project=None, filename=None, debug=False):
debug = debug or os.environ.get('DEBUG')
return location(project or Project(), source, pos, filename, debug=debug)
def test_instance_attributes_locations():
source, p = sp('''\
class Boo(Bar):
def baz(self):
self.bar = 10
def foo(self):
self.bar = 20
def boo(self):
self.b|ar = 30
''')
result = tlocation(source, p[0])
assert result == [[{'loc': (3, 8), 'file': '<string>'},
{'loc': (6, 8), 'file': '<string>'}]]
def test_module_name_location():
source, p = sp('''\
def foo(): pass
boo = 10
f|oo
|boo
''')
loc, = tlocation(source, p[0])
assert loc['loc'] == (1, 4)
loc, = tlocation(source, p[1])
assert loc['loc'] == (2, 0)
def test_imported_name_location(project):
project.add_m('testp.testm', '''\
boo = 20
''')
source, p = sp('''\
import testp.te|stm
from testp.testm import b|oo
bo|o
from . import tes|tm
''')
loc, = tlocation(source, p[0], project, filename=project.get_m('testp.testm2'))
assert loc['loc'] == (1, 0)
assert loc['file'] == project.get_m('testp.testm')
loc, = tlocation(source, p[1], project, filename=project.get_m('testp.testm2'))
assert loc['loc'] == (1, 0)
assert loc['file'] == project.get_m('testp.testm')
locs = tlocation(source, p[2], project, filename=project.get_m('testp.testm2'))
assert locs == [_loc((2, 24), project.get_m('testp.testm2')),
_loc((1, 0), project.get_m('testp.testm'))]
locs = tlocation(source, p[3], project, filename=project.get_m('testp.testm2'))
assert locs == [_loc((1, 0), project.get_m('testp.testm'))]
def test_imported_attr_location(project):
project.add_m('testp.testm', '''\
bar = 10
def boo():
pass
foo = boo
''')
source, p = sp('''\
import testp.testm
from testp import testm
from testp import testm as am
testm.bo|o
am.bo|o
testm.fo|o
testp.tes|tm.boo
from testp.tes|tm.boo import foo
''')
loc, = tlocation(source, p[0], project, filename=project.get_m('testp.testm2'))
assert loc['loc'] == (3, 4)
assert loc['file'] == project.get_m('testp.testm')
loc, = tlocation(source, p[1], project, filename=project.get_m('testp.testm2'))
assert loc['loc'] == (3, 4)
assert loc['file'] == project.get_m('testp.testm')
locs = tlocation(source, p[2], project, filename=project.get_m('testp.testm2'))
assert locs == [
_loc((6, 0), project.get_m('testp.testm')),
]
locs = tlocation(source, p[3], project, filename=project.get_m('testp.testm2'))
assert locs == [
_loc((0, 0), project.get_m('testp.testm2')),
_loc((1, 0), project.get_m('testp.testm')),
]
locs = tlocation(source, p[4], project, filename=project.get_m('testp.testm2'))
assert locs == [
_loc((1, 0), project.get_m('testp.testm')),
]
# def test_boo():
# project = Project(['/home/bobrov/work/supp'])
# source = open(__file__.rstrip('c')).read()
# loc, fname = tlocation(source, (4, 23), project, filename=__file__)
# print loc, fname
# assert False
|
tensorflow/graphics | refs/heads/master | tensorflow_graphics/util/__init__.py | 1 | # Copyright 2020 The TensorFlow Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Util module."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
# pylint: disable=g-import-not-at-top
from tensorflow_graphics.util.doc import _import_tfg_docs
if _import_tfg_docs():
from tensorflow_graphics.util import asserts
from tensorflow_graphics.util import export_api
from tensorflow_graphics.util import safe_ops
from tensorflow_graphics.util import shape
from tensorflow_graphics.util import test_case
from tensorflow_graphics.util import tfg_flags
# pylint: enable=g-import-not-at-top
# The util modules are not exported.
__all__ = []
|
Azulinho/ansible | refs/heads/devel | lib/ansible/modules/cloud/digital_ocean/digital_ocean_sshkey.py | 23 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# Copyright: Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: digital_ocean_sshkey
short_description: Manage DigitalOcean SSH keys
description:
- Create/delete DigitalOcean SSH keys.
version_added: "2.4"
author: "Patrick Marques (@pmarques)"
options:
state:
description:
- Indicate desired state of the target.
default: present
choices: ['present', 'absent']
fingerprint:
description:
- This is a unique identifier for the SSH key used to delete a key
required: false
default: None
version_added: 2.4
name:
description:
- The name for the SSH key
required: false
default: None
ssh_pub_key:
description:
- The Public SSH key to add.
required: false
default: None
oauth_token:
description:
- DigitalOcean OAuth token.
required: true
version_added: 2.4
notes:
- Version 2 of DigitalOcean API is used.
requirements:
- "python >= 2.6"
'''
EXAMPLES = '''
- name: "Create ssh key"
digital_ocean_sshkey:
oauth_token: "{{ oauth_token }}"
name: "My SSH Public Key"
ssh_pub_key: "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAAAQQDDHr/jh2Jy4yALcK4JyWbVkPRaWmhck3IgCoeOO3z1e2dBowLh64QAM+Qb72pxekALga2oi4GvT+TlWNhzPH4V example"
state: present
register: result
- name: "Delete ssh key"
digital_ocean_sshkey:
oauth_token: "{{ oauth_token }}"
state: "absent"
fingerprint: "3b:16:bf:e4:8b:00:8b:b8:59:8c:a9:d3:f0:19:45:fa"
'''
RETURN = '''
# Digital Ocean API info https://developers.digitalocean.com/documentation/v2/#list-all-keys
data:
description: This is only present when C(state=present)
returned: when C(state=present)
type: dict
sample: {
"ssh_key": {
"id": 512189,
"fingerprint": "3b:16:bf:e4:8b:00:8b:b8:59:8c:a9:d3:f0:19:45:fa",
"name": "My SSH Public Key",
"public_key": "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAAAQQDDHr/jh2Jy4yALcK4JyWbVkPRaWmhck3IgCoeOO3z1e2dBowLh64QAM+Qb72pxekALga2oi4GvT+TlWNhzPH4V example"
}
}
'''
import json
import hashlib
import base64
from ansible.module_utils.basic import env_fallback
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.urls import fetch_url
class Response(object):
def __init__(self, resp, info):
self.body = None
if resp:
self.body = resp.read()
self.info = info
@property
def json(self):
if not self.body:
if "body" in self.info:
return json.loads(self.info["body"])
return None
try:
return json.loads(self.body)
except ValueError:
return None
@property
def status_code(self):
return self.info["status"]
class Rest(object):
def __init__(self, module, headers):
self.module = module
self.headers = headers
self.baseurl = 'https://api.digitalocean.com/v2'
def _url_builder(self, path):
if path[0] == '/':
path = path[1:]
return '%s/%s' % (self.baseurl, path)
def send(self, method, path, data=None, headers=None):
url = self._url_builder(path)
data = self.module.jsonify(data)
timeout = self.module.params['timeout']
resp, info = fetch_url(self.module, url, data=data, headers=self.headers, method=method, timeout=timeout)
        # Exceptions in fetch_url may result in a status of -1; this check
        # ensures a proper error message reaches the user in all cases.
if info['status'] == -1:
self.module.fail_json(msg=info['msg'])
return Response(resp, info)
def get(self, path, data=None, headers=None):
return self.send('GET', path, data, headers)
def put(self, path, data=None, headers=None):
return self.send('PUT', path, data, headers)
def post(self, path, data=None, headers=None):
return self.send('POST', path, data, headers)
def delete(self, path, data=None, headers=None):
return self.send('DELETE', path, data, headers)
def core(module):
api_token = module.params['oauth_token']
state = module.params['state']
fingerprint = module.params['fingerprint']
name = module.params['name']
ssh_pub_key = module.params['ssh_pub_key']
rest = Rest(module, {'Authorization': 'Bearer {0}'.format(api_token),
'Content-type': 'application/json'})
fingerprint = fingerprint or ssh_key_fingerprint(ssh_pub_key)
response = rest.get('account/keys/{0}'.format(fingerprint))
status_code = response.status_code
json = response.json
if status_code not in (200, 404):
module.fail_json(msg='Error getting ssh key [{0}: {1}]'.format(
status_code, response.json['message']), fingerprint=fingerprint)
    if state == 'present':
        if status_code == 404:
            # If the key was not found, create it.
if module.check_mode:
module.exit_json(changed=True)
payload = {
'name': name,
'public_key': ssh_pub_key
}
response = rest.post('account/keys', data=payload)
status_code = response.status_code
json = response.json
if status_code == 201:
module.exit_json(changed=True, data=json)
module.fail_json(msg='Error creating ssh key [{0}: {1}]'.format(
status_code, response.json['message']))
elif status_code == 200:
        # If the key was found, check whether the name needs to be updated
if name is None or json['ssh_key']['name'] == name:
module.exit_json(changed=False, data=json)
if module.check_mode:
module.exit_json(changed=True)
payload = {
'name': name,
}
response = rest.put('account/keys/{0}'.format(fingerprint), data=payload)
status_code = response.status_code
json = response.json
if status_code == 200:
module.exit_json(changed=True, data=json)
module.fail_json(msg='Error updating ssh key name [{0}: {1}]'.format(
status_code, response.json['message']), fingerprint=fingerprint)
    elif state == 'absent':
if status_code == 404:
module.exit_json(changed=False)
if module.check_mode:
module.exit_json(changed=True)
response = rest.delete('account/keys/{0}'.format(fingerprint))
status_code = response.status_code
json = response.json
if status_code == 204:
module.exit_json(changed=True)
        module.fail_json(msg='Error deleting ssh key [{0}: {1}]'.format(
status_code, response.json['message']))
def ssh_key_fingerprint(ssh_pub_key):
key = ssh_pub_key.split(None, 2)[1]
fingerprint = hashlib.md5(base64.decodestring(key)).hexdigest()
return ':'.join(a + b for a, b in zip(fingerprint[::2], fingerprint[1::2]))
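# Illustrative behaviour (editor's sketch, not from the module's tests): for a
# public key string such as "ssh-rsa AAAAB3... comment", the base64 blob is
# decoded, MD5-hashed, and the hex digest rendered as colon-separated byte
# pairs, e.g. "3b:16:bf:e4:8b:00:8b:b8:59:8c:a9:d3:f0:19:45:fa".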
def main():
module = AnsibleModule(
argument_spec=dict(
state=dict(choices=['present', 'absent'], default='present'),
fingerprint=dict(aliases=['id'], required=False),
name=dict(required=False),
ssh_pub_key=dict(required=False),
oauth_token=dict(
no_log=True,
# Support environment variable for DigitalOcean OAuth Token
fallback=(env_fallback, ['DO_API_TOKEN', 'DO_API_KEY', 'DO_OAUTH_TOKEN']),
required=True,
),
validate_certs=dict(type='bool', default=True),
timeout=dict(type='int', default=30),
),
required_one_of=(
('fingerprint', 'ssh_pub_key'),
),
supports_check_mode=True,
)
core(module)
if __name__ == '__main__':
main()
|
maljac/odoomrp-wip | refs/heads/8.0 | stock_picking_wave_management/__init__.py | 240 | # -*- coding: utf-8 -*-
##############################################################################
# For copyright and license notices, see __openerp__.py file in root directory
##############################################################################
from . import models
from . import wizard
|
icodezjb/bytom | refs/heads/dev | vendor/github.com/tendermint/abci/example/python3/abci/msg.py | 8 | from .wire import decode_string
# map type_byte to message name
message_types = {
0x01: "echo",
0x02: "flush",
0x03: "info",
0x04: "set_option",
0x21: "deliver_tx",
0x22: "check_tx",
0x23: "commit",
0x24: "add_listener",
0x25: "rm_listener",
}
# return the decoded arguments of abci messages
class RequestDecoder():
def __init__(self, reader):
self.reader = reader
def echo(self):
return decode_string(self.reader)
def flush(self):
return
def info(self):
return
def set_option(self):
return decode_string(self.reader), decode_string(self.reader)
def deliver_tx(self):
return decode_string(self.reader)
def check_tx(self):
return decode_string(self.reader)
def commit(self):
return
def add_listener(self):
# TODO
return
def rm_listener(self):
# TODO
return
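# Dispatch sketch (editor's illustration; `reader` stands for a hypothetical
# stream object wrapping the connection):
#   name = message_types[type_byte]            # e.g. 0x01 -> "echo"
#   args = getattr(RequestDecoder(reader), name)()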
|
karstenw/nodebox-pyobjc | refs/heads/master | examples/New Functions/Example filelist.py | 1 | print "The Documents folder with paths:"
mydocs = filelist("~/Documents")
print u"\n".join(mydocs)
|
skapfer/rubber | refs/heads/master | src/latex_modules/dvipdfm.py | 1 | # This file is part of Rubber and thus covered by the GPL
import rubber.dvip_tool
import rubber.module_interface
class Module (rubber.module_interface.Module):
def __init__ (self, document, opt):
self.dep = rubber.dvip_tool.Dvip_Tool_Dep_Node (document, 'dvipdfm')
|
SuYiling/chrome_depot_tools | refs/heads/master | third_party/coverage/templite.py | 123 | """A simple Python template renderer, for a nano-subset of Django syntax."""
# Coincidentally named the same as http://code.activestate.com/recipes/496702/
import re, sys
class Templite(object):
"""A simple template renderer, for a nano-subset of Django syntax.
Supported constructs are extended variable access::
        {{var.modifier.modifier|filter|filter}}
loops::
{% for var in list %}...{% endfor %}
and ifs::
{% if var %}...{% endif %}
Comments are within curly-hash markers::
{# This will be ignored #}
Construct a Templite with the template text, then use `render` against a
dictionary context to create a finished string.
"""
def __init__(self, text, *contexts):
"""Construct a Templite with the given `text`.
`contexts` are dictionaries of values to use for future renderings.
These are good for filters and global values.
"""
self.text = text
self.context = {}
for context in contexts:
self.context.update(context)
# Split the text to form a list of tokens.
toks = re.split(r"(?s)({{.*?}}|{%.*?%}|{#.*?#})", text)
# Parse the tokens into a nested list of operations. Each item in the
# list is a tuple with an opcode, and arguments. They'll be
# interpreted by TempliteEngine.
#
# When parsing an action tag with nested content (if, for), the current
# ops list is pushed onto ops_stack, and the parsing continues in a new
# ops list that is part of the arguments to the if or for op.
ops = []
ops_stack = []
for tok in toks:
if tok.startswith('{{'):
# Expression: ('exp', expr)
ops.append(('exp', tok[2:-2].strip()))
elif tok.startswith('{#'):
# Comment: ignore it and move on.
continue
elif tok.startswith('{%'):
# Action tag: split into words and parse further.
words = tok[2:-2].strip().split()
if words[0] == 'if':
# If: ('if', (expr, body_ops))
if_ops = []
assert len(words) == 2
ops.append(('if', (words[1], if_ops)))
ops_stack.append(ops)
ops = if_ops
elif words[0] == 'for':
# For: ('for', (varname, listexpr, body_ops))
assert len(words) == 4 and words[2] == 'in'
for_ops = []
ops.append(('for', (words[1], words[3], for_ops)))
ops_stack.append(ops)
ops = for_ops
elif words[0].startswith('end'):
# Endsomething. Pop the ops stack
ops = ops_stack.pop()
assert ops[-1][0] == words[0][3:]
else:
raise SyntaxError("Don't understand tag %r" % words)
else:
ops.append(('lit', tok))
assert not ops_stack, "Unmatched action tag: %r" % ops_stack[-1][0]
self.ops = ops
def render(self, context=None):
"""Render this template by applying it to `context`.
`context` is a dictionary of values to use in this rendering.
"""
# Make the complete context we'll use.
ctx = dict(self.context)
if context:
ctx.update(context)
# Run it through an engine, and return the result.
engine = _TempliteEngine(ctx)
engine.execute(self.ops)
return "".join(engine.result)
class _TempliteEngine(object):
"""Executes Templite objects to produce strings."""
def __init__(self, context):
self.context = context
self.result = []
def execute(self, ops):
"""Execute `ops` in the engine.
Called recursively for the bodies of if's and loops.
"""
for op, args in ops:
if op == 'lit':
self.result.append(args)
elif op == 'exp':
try:
self.result.append(str(self.evaluate(args)))
except:
exc_class, exc, _ = sys.exc_info()
new_exc = exc_class("Couldn't evaluate {{ %s }}: %s"
% (args, exc))
raise new_exc
elif op == 'if':
expr, body = args
if self.evaluate(expr):
self.execute(body)
elif op == 'for':
var, lis, body = args
vals = self.evaluate(lis)
for val in vals:
self.context[var] = val
self.execute(body)
else:
raise AssertionError("TempliteEngine doesn't grok op %r" % op)
def evaluate(self, expr):
"""Evaluate an expression.
`expr` can have pipes and dots to indicate data access and filtering.
"""
if "|" in expr:
pipes = expr.split("|")
value = self.evaluate(pipes[0])
for func in pipes[1:]:
value = self.evaluate(func)(value)
elif "." in expr:
dots = expr.split('.')
value = self.evaluate(dots[0])
for dot in dots[1:]:
try:
value = getattr(value, dot)
except AttributeError:
value = value[dot]
if hasattr(value, '__call__'):
value = value()
else:
value = self.context[expr]
return value
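if __name__ == '__main__':
    # Quick demonstration (editor's sketch, not part of the original module):
    # filters are plain callables supplied through a context dictionary.
    demo = Templite(
        "Hello {{name|shout}}{% if excited %}!{% endif %}",
        {'shout': lambda s: s.upper()},
    )
    print(demo.render({'name': 'world', 'excited': True}))  # Hello WORLD!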
|
sanjayankur31/nest-simulator | refs/heads/master | pynest/nest/tests/test_changing_tic_base.py | 20 | # -*- coding: utf-8 -*-
#
# test_changing_tic_base.py
#
# This file is part of NEST.
#
# Copyright (C) 2004 The NEST Initiative
#
# NEST is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 2 of the License, or
# (at your option) any later version.
#
# NEST is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with NEST. If not, see <http://www.gnu.org/licenses/>.
import unittest
import nest
import numpy as np
@nest.ll_api.check_stack
class TestChangingTicBase(unittest.TestCase):
eps = 1e-7 # Tolerance value
    # The defaults of iaf_psc_exp_ps_lossless contain the time of the last spike, converted to ms from step=-1.
# As the initialized step-value is negative, there is no need to account for the change in tic-base.
# However, because the value in the defaults is converted to ms, it will differ from the reference value.
# The model is therefore ignored.
ignored_models = ['iaf_psc_exp_ps_lossless']
def setUp(self):
nest.ResetKernel()
def test_models(self):
"""Time objects in models correctly updated"""
# Generate a dictionary of reference values for each model.
reference = {}
for model in nest.Models():
if model in self.ignored_models:
continue
try:
reference[model] = nest.GetDefaults(model)
except nest.kernel.NESTError:
# If we can't get the defaults, we ignore the model.
pass
# Change the tic-base.
nest.SetKernelStatus({'tics_per_ms': 1500., 'resolution': 0.5})
# At this point, Time objects in models should have been updated to
# account for the new tic-base. Values in model defaults should therefore
# be equal (within a tolerance) to the reference values.
failing_models = []
for model in reference.keys():
model_reference = reference[model]
model_defaults = nest.GetDefaults(model)
# Remove entries where the item contains more than one value, as this causes issues when comparing.
array_keys = [key for key, value in model_defaults.items()
if isinstance(value, (list, tuple, dict, np.ndarray))]
for key in array_keys:
del model_defaults[key]
del model_reference[key]
keydiff = []
for key, value in model_defaults.items():
# value may not be a number, so we test for equality first.
# If it's not equal to the reference value, we assume it is a number.
if value != model_reference[key] and abs(value - model_reference[key]) > self.eps:
print(value - model_reference[key])
keydiff.append([key, model_reference[key], value])
# If any keys have values different from the reference, the model fails.
if len(keydiff) > 0:
print(model, keydiff)
failing_models.append(model)
# No models should fail for the test to pass.
self.assertEqual([], failing_models)
def _assert_ticbase_change_raises_and_reset(self, after_call):
"""Assert that changing tic-base raises a NESTError, and reset the kernel"""
with self.assertRaises(nest.kernel.NESTError, msg='after calling "{}"'.format(after_call)):
nest.SetKernelStatus({'tics_per_ms': 1500., 'resolution': 0.5})
nest.ResetKernel()
def test_prohibit_change_tic_base(self):
"""Getting error when changing tic-base in prohibited conditions"""
nest.CopyModel('iaf_psc_alpha', 'alpha_copy')
self._assert_ticbase_change_raises_and_reset('CopyModel')
nest.SetDefaults("multimeter", {"record_to": "ascii"})
self._assert_ticbase_change_raises_and_reset('SetDefaults')
nest.Create('multimeter')
self._assert_ticbase_change_raises_and_reset('Create')
nest.Simulate(10.)
self._assert_ticbase_change_raises_and_reset('Simulate')
def suite():
suite = unittest.makeSuite(TestChangingTicBase, 'test')
return suite
def run():
runner = unittest.TextTestRunner(verbosity=2)
runner.run(suite())
if __name__ == "__main__":
run()
|
tuxfux-hlp-notes/python-batches | refs/heads/master | archieves/batch-62/files/myenv/lib/python2.7/sre_compile.py | 4 | /usr/lib/python2.7/sre_compile.py |
victorzhao/miniblink49 | refs/heads/master | third_party/WebKit/Tools/Scripts/webkitpy/layout_tests/layout_package/bot_test_expectations.py | 42 | # Copyright (C) 2013 Google Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the Google name nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Generates a fake TestExpectations file consisting of flaky tests from the bot
corresponding to the given port."""
import json
import logging
import os.path
import urllib
import urllib2
from webkitpy.layout_tests.port import builders
from webkitpy.layout_tests.models.test_expectations import TestExpectations
from webkitpy.layout_tests.models.test_expectations import TestExpectationLine
_log = logging.getLogger(__name__)
# results.json v4 format:
# {
# 'version': 4,
# 'builder name' : {
# 'blinkRevision': [],
# 'tests': {
#       'directory': { # Each path component is a dictionary.
# 'testname.html': {
# 'expected' : 'FAIL', # expectation name
# 'results': [], # Run-length encoded result.
# 'times': [],
# 'bugs': [], # bug urls
# }
# }
# }
# 'buildNumbers': [],
# 'secondsSinceEpoch': [],
# 'chromeRevision': [],
# 'failure_map': { } # Map from letter code to expectation name.
# },
class ResultsJSON(object):
TESTS_KEY = 'tests'
FAILURE_MAP_KEY = 'failure_map'
RESULTS_KEY = 'results'
EXPECTATIONS_KEY = 'expected'
BUGS_KEY = 'bugs'
RLE_LENGTH = 0
RLE_VALUE = 1
# results.json was originally designed to support
# multiple builders in one json file, so the builder_name
# is needed to figure out which builder this json file
# refers to (and thus where the results are stored)
def __init__(self, builder_name, json_dict):
self.builder_name = builder_name
self._json = json_dict
def _walk_trie(self, trie, parent_path):
for name, value in trie.items():
full_path = os.path.join(parent_path, name)
            # FIXME: If we ever have a test directory named self.RESULTS_KEY
            # ("results"), this logic will break!
if self.RESULTS_KEY not in value:
for path, results in self._walk_trie(value, full_path):
yield path, results
else:
yield full_path, value
def walk_results(self, full_path=''):
tests_trie = self._json[self.builder_name][self.TESTS_KEY]
return self._walk_trie(tests_trie, parent_path='')
def expectation_for_type(self, type_char):
return self._json[self.builder_name][self.FAILURE_MAP_KEY][type_char]
# Knowing how to parse the run-length-encoded values in results.json
# is a detail of this class.
def occurances_and_type_from_result_item(self, item):
return item[self.RLE_LENGTH], item[self.RLE_VALUE]
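# Run-length encoding example (editor's illustration): a "results" value of
# [[3, 'P'], [1, 'F']] means three consecutive passes followed by one failure;
# occurances_and_type_from_result_item([3, 'P']) returns (3, 'P').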
class BotTestExpectationsFactory(object):
RESULTS_URL_PREFIX = 'http://test-results.appspot.com/testfile?master=ChromiumWebkit&testtype=layout-tests&name=results-small.json&builder='
def _results_json_for_port(self, port_name, builder_category):
if builder_category == 'deps':
builder = builders.deps_builder_name_for_port_name(port_name)
else:
builder = builders.builder_name_for_port_name(port_name)
if not builder:
return None
return self._results_json_for_builder(builder)
def _results_json_for_builder(self, builder):
results_url = self.RESULTS_URL_PREFIX + urllib.quote(builder)
try:
_log.debug('Fetching flakiness data from appengine.')
return ResultsJSON(builder, json.load(urllib2.urlopen(results_url)))
except urllib2.URLError as error:
_log.warning('Could not retrieve flakiness data from the bot. url: %s', results_url)
_log.warning(error)
def expectations_for_port(self, port_name, builder_category='layout'):
# FIXME: This only grabs release builder's flakiness data. If we're running debug,
# when we should grab the debug builder's data.
# FIXME: What should this do if there is no debug builder for a port, e.g. we have
# no debug XP builder? Should it use the release bot or another Windows debug bot?
# At the very least, it should log an error.
results_json = self._results_json_for_port(port_name, builder_category)
if not results_json:
return None
return BotTestExpectations(results_json)
def expectations_for_builder(self, builder):
results_json = self._results_json_for_builder(builder)
if not results_json:
return None
return BotTestExpectations(results_json)
class BotTestExpectations(object):
# FIXME: Get this from the json instead of hard-coding it.
RESULT_TYPES_TO_IGNORE = ['N', 'X', 'Y']
# specifiers arg is used in unittests to avoid the static dependency on builders.
def __init__(self, results_json, specifiers=None):
self.results_json = results_json
self.specifiers = specifiers or set(builders.specifiers_for_builder(results_json.builder_name))
def _line_from_test_and_flaky_types_and_bug_urls(self, test_path, flaky_types, bug_urls):
line = TestExpectationLine()
line.original_string = test_path
line.name = test_path
line.filename = test_path
line.path = test_path # FIXME: Should this be normpath?
line.matching_tests = [test_path]
line.bugs = bug_urls if bug_urls else ["Bug(gardener)"]
line.expectations = sorted(map(self.results_json.expectation_for_type, flaky_types))
line.specifiers = self.specifiers
return line
def flakes_by_path(self, only_ignore_very_flaky):
"""Sets test expectations to bot results if there are at least two distinct results."""
flakes_by_path = {}
for test_path, entry in self.results_json.walk_results():
results_dict = entry[self.results_json.RESULTS_KEY]
flaky_types = self._flaky_types_in_results(results_dict, only_ignore_very_flaky)
if len(flaky_types) <= 1:
continue
flakes_by_path[test_path] = sorted(map(self.results_json.expectation_for_type, flaky_types))
return flakes_by_path
def unexpected_results_by_path(self):
"""For tests with unexpected results, returns original expectations + results."""
def exp_to_string(exp):
return TestExpectations.EXPECTATIONS_TO_STRING.get(exp, None).upper()
def string_to_exp(string):
# Needs a bit more logic than the method above,
# since a PASS is 0 and evaluates to False.
result = TestExpectations.EXPECTATIONS.get(string.lower(), None)
            if result is not None:
return result
raise ValueError(string)
unexpected_results_by_path = {}
for test_path, entry in self.results_json.walk_results():
# Expectations for this test. No expectation defaults to PASS.
exp_string = entry.get(self.results_json.EXPECTATIONS_KEY, u'PASS')
# All run-length-encoded results for this test.
results_dict = entry.get(self.results_json.RESULTS_KEY, {})
# Set of expectations for this test.
expectations = set(map(string_to_exp, exp_string.split(' ')))
# Set of distinct results for this test.
result_types = self._flaky_types_in_results(results_dict)
# Distinct results as non-encoded strings.
result_strings = map(self.results_json.expectation_for_type, result_types)
# Distinct resulting expectations.
result_exp = map(string_to_exp, result_strings)
expected = lambda e: TestExpectations.result_was_expected(e, expectations, False)
additional_expectations = set(e for e in result_exp if not expected(e))
# Test did not have unexpected results.
if not additional_expectations:
continue
expectations.update(additional_expectations)
unexpected_results_by_path[test_path] = sorted(map(exp_to_string, expectations))
return unexpected_results_by_path
def expectation_lines(self, only_ignore_very_flaky=False):
lines = []
for test_path, entry in self.results_json.walk_results():
results_array = entry[self.results_json.RESULTS_KEY]
flaky_types = self._flaky_types_in_results(results_array, only_ignore_very_flaky)
if len(flaky_types) > 1:
bug_urls = entry.get(self.results_json.BUGS_KEY)
line = self._line_from_test_and_flaky_types_and_bug_urls(test_path, flaky_types, bug_urls)
lines.append(line)
return lines
def _flaky_types_in_results(self, run_length_encoded_results, only_ignore_very_flaky=False):
results_map = {}
seen_results = {}
for result_item in run_length_encoded_results:
_, result_type = self.results_json.occurances_and_type_from_result_item(result_item)
if result_type in self.RESULT_TYPES_TO_IGNORE:
continue
if only_ignore_very_flaky and result_type not in seen_results:
# Only consider a short-lived result if we've seen it more than once.
# Otherwise, we include lots of false-positives due to tests that fail
# for a couple runs and then start passing.
# FIXME: Maybe we should make this more liberal and consider it a flake
# even if we only see that failure once.
seen_results[result_type] = True
continue
results_map[result_type] = True
return results_map.keys()
|
cgvarela/Impala | refs/heads/cdh5-trunk | shell/ext-py/sqlparse-0.1.14/sqlparse/utils.py | 99 | '''
Created on 17/05/2012
@author: piranna
'''
import re
try:
from collections import OrderedDict
except ImportError:
OrderedDict = None
if OrderedDict:
class Cache(OrderedDict):
"""Cache with LRU algorithm using an OrderedDict as basis
"""
def __init__(self, maxsize=100):
OrderedDict.__init__(self)
self._maxsize = maxsize
def __getitem__(self, key, *args, **kwargs):
# Get the key and remove it from the cache, or raise KeyError
value = OrderedDict.__getitem__(self, key)
del self[key]
# Insert the (key, value) pair on the front of the cache
OrderedDict.__setitem__(self, key, value)
# Return the value from the cache
return value
def __setitem__(self, key, value, *args, **kwargs):
# Key was inserted before, remove it so we put it at front later
if key in self:
del self[key]
            # Too many items in the cache, remove the least recently used
elif len(self) >= self._maxsize:
self.popitem(False)
# Insert the (key, value) pair on the front of the cache
OrderedDict.__setitem__(self, key, value, *args, **kwargs)
else:
class Cache(dict):
"""Cache that reset when gets full
"""
def __init__(self, maxsize=100):
dict.__init__(self)
self._maxsize = maxsize
def __setitem__(self, key, value, *args, **kwargs):
            # Reset the cache if we have too many cached entries and start over
if len(self) >= self._maxsize:
self.clear()
# Insert the (key, value) pair on the front of the cache
dict.__setitem__(self, key, value, *args, **kwargs)
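# Behaviour sketch (editor's illustration) for the OrderedDict-backed Cache;
# the dict fallback above simply clears itself when full:
#   c = Cache(maxsize=2)
#   c['a'] = 1; c['b'] = 2
#   _ = c['a']      # re-inserted at the most-recently-used end
#   c['c'] = 3      # evicts 'b', the least recently used entry
#   assert 'a' in c and 'b' not in c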
def memoize_generator(func):
"""Memoize decorator for generators
    Store `func` results in a cache according to their arguments, as 'memoize'
    does, but this one works on generators instead of regular functions.
    Obviously, this is only useful if the generator will always yield the same
    values for each specific set of parameters...
"""
cache = Cache()
def wrapped_func(*args, **kwargs):
# params = (args, kwargs)
params = (args, tuple(sorted(kwargs.items())))
# Look if cached
try:
cached = cache[params]
# Not cached, exec and store it
except KeyError:
cached = []
for item in func(*args, **kwargs):
cached.append(item)
yield item
cache[params] = cached
# Cached, yield its items
else:
for item in cached:
yield item
return wrapped_func
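# Usage sketch (editor's illustration):
#   @memoize_generator
#   def squares(n):
#       for i in range(n):
#           yield i * i
#   list(squares(3))  # computed and cached: [0, 1, 4]
#   list(squares(3))  # replayed from the cache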
# This regular expression replaces the home-cooked parser that was here before.
# It is much faster, but requires an extra post-processing step to get the
# desired results (that are compatible with what you would expect from the
# str.splitlines() method).
#
# It matches groups of characters: newlines, quoted strings, or unquoted text,
# and splits on that basis. The post-processing step puts those back together
# into the actual lines of SQL.
SPLIT_REGEX = re.compile(r"""
(
(?: # Start of non-capturing group
(?:\r\n|\r|\n) | # Match any single newline, or
[^\r\n'"]+ | # Match any character series without quotes or
# newlines, or
"(?:[^"\\]|\\.)*" | # Match double-quoted strings, or
'(?:[^'\\]|\\.)*' # Match single quoted strings
)
)
""", re.VERBOSE)
LINE_MATCH = re.compile(r'(\r\n|\r|\n)')
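# Illustrative behaviour of split_unquoted_newlines below (editor's sketch):
#   split_unquoted_newlines("select 'a\nb';\nselect 2;")
#   -> ["select 'a\nb';", "select 2;"]
# The newline inside the quoted string is kept verbatim; only the unquoted
# newline starts a new output line.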
def split_unquoted_newlines(text):
"""Split a string on all unquoted newlines.
Unlike str.splitlines(), this will ignore CR/LF/CR+LF if the requisite
character is inside of a string."""
lines = SPLIT_REGEX.split(text)
outputlines = ['']
for line in lines:
if not line:
continue
elif LINE_MATCH.match(line):
outputlines.append('')
else:
outputlines[-1] += line
return outputlines |
srvg/ansible | refs/heads/devel | test/lib/ansible_test/_data/sanity/code-smell/runtime-metadata.py | 8 | #!/usr/bin/env python
"""Schema validation of ansible-core's ansible_builtin_runtime.yml and collection's meta/runtime.yml"""
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import datetime
import os
import re
import sys
from distutils.version import StrictVersion, LooseVersion
from functools import partial
import yaml
from voluptuous import All, Any, MultipleInvalid, PREVENT_EXTRA
from voluptuous import Required, Schema, Invalid
from voluptuous.humanize import humanize_error
from ansible.module_utils.six import string_types
from ansible.utils.version import SemanticVersion
def isodate(value, check_deprecation_date=False, is_tombstone=False):
"""Validate a datetime.date or ISO 8601 date string."""
# datetime.date objects come from YAML dates, these are ok
if isinstance(value, datetime.date):
removal_date = value
else:
# make sure we have a string
msg = 'Expected ISO 8601 date string (YYYY-MM-DD), or YAML date'
if not isinstance(value, string_types):
raise Invalid(msg)
        # From Python 3.7 on, there is datetime.date.fromisoformat(). For older versions,
# we have to do things manually.
if not re.match('^[0-9]{4}-[0-9]{2}-[0-9]{2}$', value):
raise Invalid(msg)
try:
removal_date = datetime.datetime.strptime(value, '%Y-%m-%d').date()
except ValueError:
raise Invalid(msg)
# Make sure date is correct
today = datetime.date.today()
if is_tombstone:
# For a tombstone, the removal date must be in the past
if today < removal_date:
raise Invalid(
'The tombstone removal_date (%s) must not be after today (%s)' % (removal_date, today))
else:
# For a deprecation, the removal date must be in the future. Only test this if
# check_deprecation_date is truish, to avoid checks to suddenly start to fail.
if check_deprecation_date and today > removal_date:
raise Invalid(
'The deprecation removal_date (%s) must be after today (%s)' % (removal_date, today))
return value
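# For example (editor's illustration): with check_deprecation_date=True,
# isodate('2999-01-01') passes while a past date raises Invalid, since a
# deprecation's removal date must lie after today; with is_tombstone=True the
# relation flips and the removal date must not be in the future.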
def removal_version(value, is_ansible, current_version=None, is_tombstone=False):
"""Validate a removal version string."""
msg = (
'Removal version must be a string' if is_ansible else
'Removal version must be a semantic version (https://semver.org/)'
)
if not isinstance(value, string_types):
raise Invalid(msg)
try:
if is_ansible:
version = StrictVersion()
version.parse(value)
version = LooseVersion(value) # We're storing Ansible's version as a LooseVersion
else:
version = SemanticVersion()
version.parse(value)
if version.major != 0 and (version.minor != 0 or version.patch != 0):
raise Invalid('removal_version (%r) must be a major release, not a minor or patch release '
'(see specification at https://semver.org/)' % (value, ))
if current_version is not None:
if is_tombstone:
# For a tombstone, the removal version must not be in the future
if version > current_version:
raise Invalid('The tombstone removal_version (%r) must not be after the '
'current version (%s)' % (value, current_version))
else:
# For a deprecation, the removal version must be in the future
if version <= current_version:
raise Invalid('The deprecation removal_version (%r) must be after the '
'current version (%s)' % (value, current_version))
except ValueError:
raise Invalid(msg)
return value
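# For example (editor's illustration): with is_ansible=False and
# current_version=SemanticVersion('1.4.0'), '2.0.0' is a valid deprecation
# removal_version, while '1.0.0' raises Invalid (not after the current
# version) and '2.1.0' raises Invalid (not a major release).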
def any_value(value):
"""Accepts anything."""
return value
def get_ansible_version():
"""Return current ansible-core version"""
from ansible.release import __version__
return LooseVersion('.'.join(__version__.split('.')[:3]))
def get_collection_version():
"""Return current collection version, or None if it is not available"""
import importlib.util
collection_detail_path = os.path.join(os.path.dirname(os.path.dirname(os.path.dirname(__file__))),
'collection_detail.py')
collection_detail_spec = importlib.util.spec_from_file_location('collection_detail', collection_detail_path)
collection_detail = importlib.util.module_from_spec(collection_detail_spec)
sys.modules['collection_detail'] = collection_detail
collection_detail_spec.loader.exec_module(collection_detail)
# noinspection PyBroadException
try:
result = collection_detail.read_manifest_json('.') or collection_detail.read_galaxy_yml('.')
return SemanticVersion(result['version'])
except Exception: # pylint: disable=broad-except
# We do not care why it fails, in case we cannot get the version
# just return None to indicate "we don't know".
return None
def validate_metadata_file(path, is_ansible, check_deprecation_dates=False):
"""Validate explicit runtime metadata file"""
try:
with open(path, 'r') as f_path:
routing = yaml.safe_load(f_path)
except yaml.error.MarkedYAMLError as ex:
print('%s:%d:%d: YAML load failed: %s' % (path, ex.context_mark.line +
1, ex.context_mark.column + 1, re.sub(r'\s+', ' ', str(ex))))
return
except Exception as ex: # pylint: disable=broad-except
print('%s:%d:%d: YAML load failed: %s' %
(path, 0, 0, re.sub(r'\s+', ' ', str(ex))))
return
if is_ansible:
current_version = get_ansible_version()
else:
current_version = get_collection_version()
# Updates to schema MUST also be reflected in the documentation
# ~https://docs.ansible.com/ansible/devel/dev_guide/developing_collections.html
# plugin_routing schema
avoid_additional_data = Schema(
Any(
{
Required('removal_version'): any_value,
'warning_text': any_value,
},
{
Required('removal_date'): any_value,
'warning_text': any_value,
}
),
extra=PREVENT_EXTRA
)
deprecation_schema = All(
# The first schema validates the input, and the second makes sure no extra keys are specified
Schema(
{
'removal_version': partial(removal_version, is_ansible=is_ansible,
current_version=current_version),
'removal_date': partial(isodate, check_deprecation_date=check_deprecation_dates),
'warning_text': Any(*string_types),
}
),
avoid_additional_data
)
tombstoning_schema = All(
# The first schema validates the input, and the second makes sure no extra keys are specified
Schema(
{
'removal_version': partial(removal_version, is_ansible=is_ansible,
current_version=current_version, is_tombstone=True),
'removal_date': partial(isodate, is_tombstone=True),
'warning_text': Any(*string_types),
}
),
avoid_additional_data
)
plugin_routing_schema = Any(
Schema({
('deprecation'): Any(deprecation_schema),
('tombstone'): Any(tombstoning_schema),
('redirect'): Any(*string_types),
}, extra=PREVENT_EXTRA),
)
list_dict_plugin_routing_schema = [{str_type: plugin_routing_schema}
for str_type in string_types]
plugin_schema = Schema({
('action'): Any(None, *list_dict_plugin_routing_schema),
('become'): Any(None, *list_dict_plugin_routing_schema),
('cache'): Any(None, *list_dict_plugin_routing_schema),
('callback'): Any(None, *list_dict_plugin_routing_schema),
('cliconf'): Any(None, *list_dict_plugin_routing_schema),
('connection'): Any(None, *list_dict_plugin_routing_schema),
('doc_fragments'): Any(None, *list_dict_plugin_routing_schema),
('filter'): Any(None, *list_dict_plugin_routing_schema),
('httpapi'): Any(None, *list_dict_plugin_routing_schema),
('inventory'): Any(None, *list_dict_plugin_routing_schema),
('lookup'): Any(None, *list_dict_plugin_routing_schema),
('module_utils'): Any(None, *list_dict_plugin_routing_schema),
('modules'): Any(None, *list_dict_plugin_routing_schema),
('netconf'): Any(None, *list_dict_plugin_routing_schema),
('shell'): Any(None, *list_dict_plugin_routing_schema),
('strategy'): Any(None, *list_dict_plugin_routing_schema),
('terminal'): Any(None, *list_dict_plugin_routing_schema),
('test'): Any(None, *list_dict_plugin_routing_schema),
('vars'): Any(None, *list_dict_plugin_routing_schema),
}, extra=PREVENT_EXTRA)
# import_redirection schema
import_redirection_schema = Any(
Schema({
('redirect'): Any(*string_types),
# import_redirect doesn't currently support deprecation
}, extra=PREVENT_EXTRA)
)
list_dict_import_redirection_schema = [{str_type: import_redirection_schema}
for str_type in string_types]
# top level schema
schema = Schema({
# All of these are optional
('plugin_routing'): Any(plugin_schema),
('import_redirection'): Any(None, *list_dict_import_redirection_schema),
# requires_ansible: In the future we should validate this with SpecifierSet
('requires_ansible'): Any(*string_types),
('action_groups'): dict,
}, extra=PREVENT_EXTRA)
# Ensure schema is valid
try:
schema(routing)
except MultipleInvalid as ex:
for error in ex.errors:
# No way to get line/column numbers
print('%s:%d:%d: %s' % (path, 0, 0, humanize_error(routing, error)))
def main():
"""Validate runtime metadata"""
paths = sys.argv[1:] or sys.stdin.read().splitlines()
collection_legacy_file = 'meta/routing.yml'
collection_runtime_file = 'meta/runtime.yml'
# This is currently disabled, because if it is enabled this test can start failing
# at a random date. For this to be properly activated, we (a) need to be able to return
# codes for this test, and (b) make this error optional.
check_deprecation_dates = False
for path in paths:
if path == collection_legacy_file:
print('%s:%d:%d: %s' % (path, 0, 0, ("Should be called '%s'" % collection_runtime_file)))
continue
validate_metadata_file(
path,
is_ansible=path not in (collection_legacy_file, collection_runtime_file),
check_deprecation_dates=check_deprecation_dates)
if __name__ == '__main__':
main()
|
albmarvil/The-Eternal-Sorrow | refs/heads/master | dependencies/luabind/boost-build/test/startup_v1.py | 4 | #!/usr/bin/python
# Copyright 2002 Dave Abrahams
# Copyright 2003, 2004, 2005 Vladimir Prus
# Distributed under the Boost Software License, Version 1.0.
# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
from BoostBuild import Tester
import os
import re
def expect_substring(actual,expected):
return actual.find(expected) != -1
def match_re(actual,expected):
return re.match(expected,actual,re.DOTALL) != None
# Test the v1 startup behavior
t = Tester(
executable='bjam'
, match=match_re
, boost_build_path=''
, pass_toolset=0
)
t.set_tree('startup')
#if os.name == 'nt':
# t.run_build_system(
# status=1, stdout="You didn't set BOOST_ROOT", match = expect_substring)
t.run_build_system(
extra_args = '-sBOOST_ROOT=.', status=1
, stdout=r'''Unable to load Boost\.Build: could not find "boost-build\.jam".'''
)
os.chdir('no-bootstrap1')
t.run_build_system(
extra_args = '-sBOOST_ROOT=.', status=1
, stdout=r'''Unable to load Boost\.Build: could not find build system\.'''
+ r'''.*attempted to load the build system by invoking'''
+ r'''.*'boost-build ;'.*'''
+ r'''but we were unable to find "bootstrap\.jam"'''
)
# Descend to a subdirectory which /doesn't/ contain a boost-build.jam
# file, and try again to test the crawl-up behavior.
os.chdir('subdir')
t.run_build_system(
extra_args = '-sBOOST_ROOT=.', status=1
, stdout=r'''Unable to load Boost\.Build: could not find build system\.'''
+ r'''.*attempted to load the build system by invoking'''
+ r'''.*'boost-build ;'.*'''
+ r'''but we were unable to find "bootstrap\.jam"'''
)
os.chdir('../../no-bootstrap2')
t.run_build_system(
extra_args = '-sBOOST_ROOT=.', status=1
, stdout=r'''Unable to load Boost\.Build: could not find build system\.'''
+ r'''.*attempted to load the build system by invoking'''
+ r'''.*'boost-build \. ;'.*'''
+ r'''but we were unable to find "bootstrap\.jam"'''
)
os.chdir('../no-bootstrap3')
t.run_build_system(
extra_args = '-sBOOST_ROOT=.', status=1
, stdout=r'''Unable to load Boost.Build
.*boost-build.jam" was found.*
However, it failed to call the "boost-build" rule'''
)
# test bootstrapping based on BOOST_BUILD_PATH
os.chdir('../bootstrap-env')
t.run_build_system(
extra_args = '-sBOOST_ROOT=../boost-root -sBOOST_BUILD_PATH=../boost-root/build'
, stdout = 'build system bootstrapped'
)
# test bootstrapping based on an explicit path in boost-build.jam
os.chdir('../bootstrap-explicit')
t.run_build_system(
extra_args = '-sBOOST_ROOT=../boost-root'
, stdout = 'build system bootstrapped'
)
# test bootstrapping based on BOOST_ROOT
os.chdir('../bootstrap-implicit')
t.run_build_system(
extra_args = '-sBOOST_ROOT=../boost-root'
, stdout = 'build system bootstrapped'
)
t.cleanup()
|