"""This is a simple implementation of the ITranslationDomain interface.
"""
from zope.component import getUtility
from zope.interface import implementer
from zope.i18n import interpolate
from zope.i18n.interfaces import INegotiator
from zope.i18n.interfaces import ITranslationDomain
@implementer(ITranslationDomain)
class SimpleTranslationDomain:
"""This is the simplest implementation of the ITranslationDomain I
could come up with.
The constructor takes one optional argument 'messages', which will be
used to do the translation. The 'messages' attribute has to have the
following structure:
{('language', 'msg_id'): 'message', ...}
Note: This Translation Domain does not use message catalogs.
"""
# See zope.i18n.interfaces.ITranslationDomain
domain = None
def __init__(self, domain, messages=None):
"""Initializes the object. No arguments are needed."""
self.domain = (
domain.decode("utf-8") if isinstance(domain, bytes) else domain)
self.messages = messages if messages is not None else {}
assert self.messages is not None
def translate(self, msgid, mapping=None, context=None,
target_language=None, default=None, msgid_plural=None,
default_plural=None, number=None):
'''See interface ITranslationDomain'''
# Find out what the target language should be
if target_language is None and context is not None:
langs = [m[0] for m in self.messages.keys()]
# Let's negotiate the language to translate to. :)
negotiator = getUtility(INegotiator)
target_language = negotiator.getLanguage(langs, context)
# Find a translation; if nothing is found, use the default
# value
if default is None:
default = str(msgid)
text = self.messages.get((target_language, msgid))
if text is None:
text = default
        return interpolate(text, mapping)

# === zope.i18n 5.1 :: zope/i18n/simpletranslationdomain.py ===
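
# Illustrative usage sketch (not part of the package): SimpleTranslationDomain
# keeps its catalog as a {(language, msgid): text} mapping; with an explicit
# target_language no INegotiator utility is needed.  The domain name and the
# messages below are made up for the example.
from zope.i18n.simpletranslationdomain import SimpleTranslationDomain

messages = {('de', 'greeting'): 'Hallo $name!',
            ('en', 'greeting'): 'Hello $name!'}
domain = SimpleTranslationDomain('example.domain', messages)
print(domain.translate('greeting', mapping={'name': 'Welt'},
                       target_language='de'))
# -> Hallo Welt!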
"""i18n support.
"""
import re
from zope.component import queryUtility
# MessageFactory is not used, but it might be here for BBB reasons,
# as it could be imported by other packages.
from zope.i18nmessageid import Message
from zope.i18nmessageid import MessageFactory # noqa
from zope.i18n.config import ALLOWED_LANGUAGES
from zope.i18n.interfaces import IFallbackTranslationDomainFactory
from zope.i18n.interfaces import INegotiator
from zope.i18n.interfaces import ITranslationDomain
# Set up regular expressions for finding interpolation variables in text.
# NAME_RE must exactly match the expression of the same name in the
# zope.tal.taldefs module:
NAME_RE = r"[a-zA-Z_][-a-zA-Z0-9_]*"
_interp_regex = re.compile(r'(?<!\$)(\$(?:(%(n)s)|{(%(n)s)}))'
% ({'n': NAME_RE}))
class _FallbackNegotiator:
def getLanguage(self, _allowed, _context):
return None
_fallback_negotiator = _FallbackNegotiator()
def negotiate(context):
"""
Negotiate language.
This only works if the languages are set globally, otherwise each
message catalog needs to do the language negotiation.
If no languages are set, this always returns None:
>>> import zope.i18n as i18n
>>> from zope.component import queryUtility
>>> old_allowed_languages = i18n.ALLOWED_LANGUAGES
>>> i18n.ALLOWED_LANGUAGES = None
>>> i18n.negotiate('anything') is None
True
If languages are set, but there is no ``INegotiator`` utility,
this returns None:
>>> i18n.ALLOWED_LANGUAGES = ('en',)
>>> queryUtility(i18n.INegotiator) is None
True
>>> i18n.negotiate('anything') is None
True
.. doctest::
:hide:
>>> i18n.ALLOWED_LANGUAGES = old_allowed_languages
"""
if ALLOWED_LANGUAGES is None:
return None
negotiator = queryUtility(INegotiator, default=_fallback_negotiator)
return negotiator.getLanguage(ALLOWED_LANGUAGES, context)
def translate(msgid, domain=None, mapping=None, context=None,
target_language=None, default=None, msgid_plural=None,
default_plural=None, number=None):
"""Translate text.
First setup some test components:
>>> from zope import component, interface
>>> import zope.i18n.interfaces
>>> @interface.implementer(zope.i18n.interfaces.ITranslationDomain)
... class TestDomain:
...
... def __init__(self, **catalog):
... self.catalog = catalog
...
... def translate(self, text, *_, **__):
... return self.catalog[text]
Normally, the translation system will use a domain utility:
>>> component.provideUtility(TestDomain(eek=u"ook"), name='my.domain')
>>> print(translate(u"eek", 'my.domain'))
ook
If no domain is given, or if there is no domain utility
for the given domain, then the text isn't translated:
>>> print(translate(u"eek"))
eek
    Moreover, the returned text is always a text string, never bytes:
>>> not isinstance(translate('eek', 'your.domain'), bytes)
True
A fallback domain factory can be provided. This is normally used
for testing:
>>> def fallback(domain=u""):
... return TestDomain(eek=u"test-from-" + domain)
>>> interface.directlyProvides(
... fallback,
... zope.i18n.interfaces.IFallbackTranslationDomainFactory,
... )
>>> component.provideUtility(fallback)
>>> print(translate(u"eek"))
test-from-
>>> print(translate(u"eek", 'your.domain'))
test-from-your.domain
If no target language is provided, but a context is and we were able to
find a translation domain, we will use the `negotiate` function to
attempt to determine the language to translate to:
.. doctest::
:hide:
>>> from zope import i18n
>>> old_negotiate = i18n.negotiate
>>> def test_negotiate(context):
... print("Negotiating for %r" % (context,))
... return 'en'
>>> i18n.negotiate = test_negotiate
>>> print(translate('eek', 'your.domain', context='context'))
Negotiating for 'context'
test-from-your.domain
.. doctest::
:hide:
>>> i18n.negotiate = old_negotiate
"""
if isinstance(msgid, Message):
domain = msgid.domain
default = msgid.default
mapping = msgid.mapping
msgid_plural = msgid.msgid_plural
default_plural = msgid.default_plural
number = msgid.number
if default is None:
default = str(msgid)
if msgid_plural is not None and default_plural is None:
default_plural = str(msgid_plural)
if domain:
util = queryUtility(ITranslationDomain, domain)
if util is None:
util = queryUtility(IFallbackTranslationDomainFactory)
if util is not None:
util = util(domain)
else:
util = queryUtility(IFallbackTranslationDomainFactory)
if util is not None:
util = util()
if util is None:
return interpolate(default, mapping)
if target_language is None and context is not None:
target_language = negotiate(context)
return util.translate(
msgid, mapping, context, target_language, default,
msgid_plural, default_plural, number)
def interpolate(text, mapping=None):
"""Insert the data passed from mapping into the text.
First setup a test mapping:
>>> mapping = {"name": "Zope", "version": 3}
In the text we can use substitution slots like $varname or ${varname}:
>>> print(interpolate(u"This is $name version ${version}.", mapping))
This is Zope version 3.
Interpolation variables can be used more than once in the text:
>>> print(interpolate(
... u"This is $name version ${version}. ${name} $version!", mapping))
This is Zope version 3. Zope 3!
    If a variable is not found in the mapping, or the '$$' escape form is
    used, no substitution happens:
>>> print(interpolate(
... u"This is $name $version. $unknown $$name $${version}.", mapping))
This is Zope 3. $unknown $$name $${version}.
>>> print(interpolate(u"This is ${name}"))
This is ${name}
If a mapping value is a message id itself it is interpolated, too:
>>> from zope.i18nmessageid import Message
>>> print(interpolate(u"This is $meta.",
... mapping={'meta': Message(u"$name $version",
... mapping=mapping)}))
This is Zope 3.
"""
def replace(match):
whole, param1, param2 = match.groups()
value = mapping.get(param1 or param2, whole)
if isinstance(value, Message):
value = interpolate(value, value.mapping)
return str(value)
if not text or not mapping:
return text
    return _interp_regex.sub(replace, text)

# === zope.i18n 5.1 :: zope/i18n/__init__.py ===
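
# Illustrative sketch (not part of the module): a Message created with
# MessageFactory carries its own domain, default and mapping, so translate()
# above can fall back to interpolating the default when no translation domain
# utility is registered for that domain.  'example.domain' is made up.
from zope.i18n import translate
from zope.i18nmessageid import MessageFactory

_ = MessageFactory('example.domain')
msg = _('greeting', default='Hello $name!', mapping={'name': 'World'})
print(translate(msg))
# -> Hello World!  (assuming no 'example.domain' translation domain utility
#    is registered; otherwise that domain would be asked first)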
"""Interfaces related to Locales
"""
import datetime
import re
from zope.interface import Attribute
from zope.interface import Interface
from zope.schema import Bool
from zope.schema import Choice
from zope.schema import Date
from zope.schema import Dict
from zope.schema import Field
from zope.schema import Int
from zope.schema import List
from zope.schema import Text
from zope.schema import TextLine
from zope.schema import Tuple
class ILocaleProvider(Interface):
"""This interface is our connection to the Zope 3 service. From it
we can request various Locale objects that can perform all sorts of
fancy operations.
    This service will be a singleton global service, since it does not make
    much sense to have many locale facilities, especially since this one will
    be so complete, as we will use the ICU XML files as data. """
def loadLocale(language=None, country=None, variant=None):
"""Load the locale with the specs that are given by the arguments of
the method. Note that the LocaleProvider must know where to get the
locales from."""
def getLocale(language=None, country=None, variant=None):
"""Get the Locale object for a particular language, country and
variant."""
class ILocaleIdentity(Interface):
"""Identity information class for ILocale objects.
    Four pieces of information identify a locale:
      o language -- Language in which all of the locale's text information
        is returned.
      o script -- Script in which all of the locale's text information is
        returned.
      o territory -- Territory for which the locale's information is
        appropriate. None means all territories in which the language is
        spoken.
      o variant -- Sometimes there are regional or historical differences
        even within a certain country. For these cases we use the variant
        field. A good example is the time before the Euro in Germany; a
        valid variant for that period would be 'PREEURO'.
Note that all of these attributes are read-only once they are set (usually
done in the constructor)!
This object is also used to uniquely identify a locale.
"""
language = TextLine(
title="Language Type",
description="The language for which a locale is applicable.",
constraint=re.compile(r'[a-z]{2}').match,
required=True,
readonly=True)
script = TextLine(
title="Script Type",
description=("""The script for which the language/locale is
applicable."""),
constraint=re.compile(r'[a-z]*').match)
territory = TextLine(
title="Territory Type",
description="The territory for which a locale is applicable.",
constraint=re.compile(r'[A-Z]{2}').match,
required=True,
readonly=True)
variant = TextLine(
title="Variant Type",
description="The variant for which a locale is applicable.",
constraint=re.compile(r'[a-zA-Z]*').match,
required=True,
readonly=True)
version = Field(
title="Locale Version",
description="The value of this field is an ILocaleVersion object.",
readonly=True)
def __repr__(self):
"""Defines the representation of the id, which should be a compact
string that references the language, country and variant."""
class ILocaleVersion(Interface):
"""Represents the version of a locale.
The locale version is part of the ILocaleIdentity object.
"""
number = TextLine(
title="Version Number",
description="The version number of the locale.",
        constraint=re.compile(r'^([0-9]\.)*[0-9]$').match,
required=True,
readonly=True)
generationDate = Date(
title="Generation Date",
description="Specifies the creation date of the locale.",
constraint=lambda date: date < datetime.datetime.now(),
readonly=True)
notes = Text(
title="Notes",
description="Some release notes for the version of this locale.",
readonly=True)
class ILocaleDisplayNames(Interface):
"""Localized Names of common text strings.
This object contains localized strings for many terms, including
language, script and territory names. But also keys and types used
throughout the locale object are localized here.
"""
languages = Dict(
title="Language type to translated name",
key_type=TextLine(title="Language Type"),
value_type=TextLine(title="Language Name"))
scripts = Dict(
title="Script type to script name",
key_type=TextLine(title="Script Type"),
value_type=TextLine(title="Script Name"))
territories = Dict(
title="Territory type to translated territory name",
key_type=TextLine(title="Territory Type"),
value_type=TextLine(title="Territory Name"))
variants = Dict(
title="Variant type to name",
key_type=TextLine(title="Variant Type"),
value_type=TextLine(title="Variant Name"))
keys = Dict(
title="Key type to name",
key_type=TextLine(title="Key Type"),
value_type=TextLine(title="Key Name"))
types = Dict(
title="Type type and key to localized name",
key_type=Tuple(title="Type Type and Key"),
value_type=TextLine(title="Type Name"))
class ILocaleTimeZone(Interface):
"""Represents and defines various timezone information. It mainly manages
all the various names for a timezone and the cities contained in it.
Important: ILocaleTimeZone objects are not intended to provide
implementations for the standard datetime module timezone support. They
    are merely used for Locale support.
"""
type = TextLine(
title="Time Zone Type",
description="Standard name of the timezone for unique referencing.",
required=True,
readonly=True)
cities = List(
title="Cities",
description="Cities in Timezone",
value_type=TextLine(title="City Name"),
required=True,
readonly=True)
names = Dict(
title="Time Zone Names",
description="Various names of the timezone.",
key_type=Choice(
title="Time Zone Name Type",
values=("generic", "standard", "daylight")),
value_type=Tuple(title="Time Zone Name and Abbreviation",
min_length=2, max_length=2),
required=True,
readonly=True)
class ILocaleFormat(Interface):
"""Specifies a format for a particular type of data."""
type = TextLine(
title="Format Type",
description="The name of the format",
required=False,
readonly=True)
displayName = TextLine(
title="Display Name",
description="Name of the calendar, for example 'gregorian'.",
required=False,
readonly=True)
pattern = TextLine(
title="Format Pattern",
description="The pattern that is used to format the object.",
required=True,
readonly=True)
class ILocaleFormatLength(Interface):
"""The format length describes a class of formats."""
type = Choice(
title="Format Length Type",
description="Name of the format length",
values=("full", "long", "medium", "short")
)
default = TextLine(
title="Default Format",
description="The name of the defaulkt format.")
formats = Dict(
title="Formats",
description="Maps format types to format objects",
key_type=TextLine(title="Format Type"),
value_type=Field(
title="Format Object",
description="Values are ILocaleFormat objects."),
required=True,
readonly=True)
class ILocaleMonthContext(Interface):
"""Specifices a usage context for month names"""
type = TextLine(
title="Month context type",
description="Name of the month context, format or stand-alone.")
defaultWidth = TextLine(
title="Default month name width",
default="wide")
months = Dict(
title="Month Names",
description=("A mapping of month name widths to a mapping of"
"corresponding month names."),
key_type=Choice(
title="Width type",
values=("wide", "abbreviated", "narrow")),
value_type=Dict(
title="Month name",
key_type=Int(title="Type", min=1, max=12),
value_type=TextLine(title="Month Name"))
)
class ILocaleDayContext(Interface):
"""Specifices a usage context for days names"""
type = TextLine(
title="Day context type",
description="Name of the day context, format or stand-alone.")
defaultWidth = TextLine(
title="Default day name width",
default="wide")
days = Dict(
title="Day Names",
description=("A mapping of day name widths to a mapping of"
"corresponding day names."),
key_type=Choice(
title="Width type",
values=("wide", "abbreviated", "narrow")),
value_type=Dict(
title="Day name",
key_type=Choice(
title="Type",
values=("sun", "mon", "tue", "wed",
"thu", "fri", "sat")),
value_type=TextLine(title="Day Name"))
)
class ILocaleCalendar(Interface):
"""There is a massive amount of information contained in the calendar,
which made it attractive to be added."""
type = TextLine(
title="Calendar Type",
description="Name of the calendar, for example 'gregorian'.")
defaultMonthContext = TextLine(
title="Default month context",
default="format")
monthContexts = Dict(
title="Month Contexts",
description=("A mapping of month context types to "
"ILocaleMonthContext objects"),
key_type=Choice(title="Type",
values=("format", "stand-alone")),
value_type=Field(title="ILocaleMonthContext object"))
# BBB: leftover from CLDR 1.0
months = Dict(
title="Month Names",
description="A mapping of all month names and abbreviations",
key_type=Int(title="Type", min=1, max=12),
value_type=Tuple(title="Month Name and Abbreviation",
min_length=2, max_length=2))
defaultDayContext = TextLine(
title="Default day context",
default="format")
dayContexts = Dict(
title="Day Contexts",
description=("A mapping of day context types to "
"ILocaleDayContext objects"),
key_type=Choice(title="Type",
values=("format", "stand-alone")),
value_type=Field(title="ILocaleDayContext object"))
# BBB: leftover from CLDR 1.0
days = Dict(
title="Weekdays Names",
description="A mapping of all month names and abbreviations",
key_type=Choice(title="Type",
values=("sun", "mon", "tue", "wed",
"thu", "fri", "sat")),
value_type=Tuple(title="Weekdays Name and Abbreviation",
min_length=2, max_length=2))
week = Dict(
title="Week Information",
description="Contains various week information",
key_type=Choice(
title="Type",
description=("""
            Various week information:
- 'minDays' is just an integer between 1 and 7.
- 'firstDay' specifies the first day of the week by integer.
- The 'weekendStart' and 'weekendEnd' are tuples of the form
(weekDayNumber, datetime.time)
"""),
values=("minDays", "firstDay",
"weekendStart", "weekendEnd")))
am = TextLine(title="AM String")
pm = TextLine(title="PM String")
eras = Dict(
title="Era Names",
key_type=Int(title="Type", min=0),
value_type=Tuple(title="Era Name and Abbreviation",
min_length=2, max_length=2))
defaultDateFormat = TextLine(title="Default Date Format Type")
dateFormats = Dict(
title="Date Formats",
description="Contains various Date Formats.",
key_type=Choice(
title="Type",
description="Name of the format length",
values=("full", "long", "medium", "short")),
value_type=Field(title="ILocaleFormatLength object"))
defaultTimeFormat = TextLine(title="Default Time Format Type")
timeFormats = Dict(
title="Time Formats",
description="Contains various Time Formats.",
key_type=Choice(
title="Type",
description="Name of the format length",
values=("full", "long", "medium", "short")),
value_type=Field(title="ILocaleFormatLength object"))
defaultDateTimeFormat = TextLine(title="Default Date-Time Format Type")
dateTimeFormats = Dict(
title="Date-Time Formats",
description="Contains various Date-Time Formats.",
key_type=Choice(
title="Type",
description="Name of the format length",
values=("full", "long", "medium", "short")),
value_type=Field(title="ILocaleFormatLength object"))
def getMonthNames():
"""Return a list of month names."""
def getMonthTypeFromName(name):
"""Return the type of the month with the right name."""
def getMonthAbbreviations():
"""Return a list of month abbreviations."""
def getMonthTypeFromAbbreviation(abbr):
"""Return the type of the month with the right abbreviation."""
def getDayNames():
"""Return a list of weekday names."""
def getDayTypeFromName(name):
"""Return the id of the weekday with the right name."""
def getDayAbbr():
"""Return a list of weekday abbreviations."""
def getDayTypeFromAbbr(abbr):
"""Return the id of the weekday with the right abbr."""
def isWeekend(datetime):
"""Determines whether a the argument lies in a weekend."""
def getFirstDayName():
"""Return the the type of the first day in the week."""
class ILocaleDates(Interface):
"""This object contains various data about dates, times and time zones."""
localizedPatternChars = TextLine(
title="Localized Pattern Characters",
description="Localized pattern characters used in dates and times")
calendars = Dict(
title="Calendar type to ILocaleCalendar",
key_type=Choice(
title="Calendar Type",
values=("gregorian",
"arabic",
"chinese",
"civil-arabic",
"hebrew",
"japanese",
"thai-buddhist")),
value_type=Field(title="Calendar",
description="This is a ILocaleCalendar object."))
timezones = Dict(
title="Time zone type to ILocaleTimezone",
key_type=TextLine(title="Time Zone type"),
value_type=Field(title="Time Zone",
description="This is a ILocaleTimeZone object."))
def getFormatter(category, length=None, name=None, calendar="gregorian"):
"""Get a date/time formatter.
`category` must be one of 'date', 'dateTime', 'time'.
The 'length' specifies the output length of the value. The allowed
values are: 'short', 'medium', 'long' and 'full'. If no length was
specified, the default length is chosen.
"""
class ILocaleCurrency(Interface):
"""Defines a particular currency."""
type = TextLine(title="Type")
symbol = TextLine(title="Symbol")
displayName = TextLine(title="Official Name")
symbolChoice = Bool(title="Symbol Choice")
class ILocaleNumbers(Interface):
"""This object contains various data about numbers and currencies."""
symbols = Dict(
title="Number Symbols",
key_type=Choice(
title="Format Name",
values=("decimal", "group", "list", "percentSign",
"nativeZeroDigit", "patternDigit", "plusSign",
"minusSign", "exponential", "perMille",
"infinity", "nan")),
value_type=TextLine(title="Symbol"))
defaultDecimalFormat = TextLine(title="Default Decimal Format Type")
decimalFormats = Dict(
title="Decimal Formats",
description="Contains various Decimal Formats.",
key_type=Choice(
title="Type",
description="Name of the format length",
values=("full", "long", "medium", "short")),
value_type=Field(title="ILocaleFormatLength object"))
defaultScientificFormat = TextLine(title="Default Scientific Format Type")
scientificFormats = Dict(
title="Scientific Formats",
description="Contains various Scientific Formats.",
key_type=Choice(
title="Type",
description="Name of the format length",
values=("full", "long", "medium", "short")),
value_type=Field(title="ILocaleFormatLength object"))
defaultPercentFormat = TextLine(title="Default Percent Format Type")
percentFormats = Dict(
title="Percent Formats",
description="Contains various Percent Formats.",
key_type=Choice(
title="Type",
description="Name of the format length",
values=("full", "long", "medium", "short")),
value_type=Field(title="ILocaleFormatLength object"))
defaultCurrencyFormat = TextLine(title="Default Currency Format Type")
currencyFormats = Dict(
title="Currency Formats",
description="Contains various Currency Formats.",
key_type=Choice(
title="Type",
description="Name of the format length",
values=("full", "long", "medium", "short")),
value_type=Field(title="ILocaleFormatLength object"))
currencies = Dict(
title="Currencies",
description="Contains various Currency data.",
key_type=TextLine(
title="Type",
description="Name of the format length"),
value_type=Field(title="ILocaleCurrency object"))
def getFormatter(category, length=None, name=""):
"""Get the NumberFormat based on the category, length and name of the
format.
The 'category' specifies the type of number format you would like to
have. The available options are: 'decimal', 'percent', 'scientific',
'currency'.
The 'length' specifies the output length of the number. The allowed
values are: 'short', 'medium', 'long' and 'full'. If no length was
specified, the default length is chosen.
Every length can have actually several formats. In this case these
formats are named and you can specify the name here. If no name was
specified, the first unnamed format is chosen.
"""
def getDefaultCurrency():
"""Get the default currency."""
_orientations = ["left-to-right", "right-to-left",
"top-to-bottom", "bottom-to-top"]
class ILocaleOrientation(Interface):
"""Information about the orientation of text."""
characters = Choice(
title="Orientation of characters",
values=_orientations,
default="left-to-right"
)
lines = Choice(
title="Orientation of characters",
values=_orientations,
default="top-to-bottom"
)
class ILocale(Interface):
"""This class contains all important information about the locale.
Usually a Locale is identified using a specific language, country and
variant. However, the country and variant are optional, so that a lookup
hierarchy develops. It is easy to recognize that a locale that is missing
    the variant is more generally applicable than the one with the variant.
Therefore, if a specific Locale does not contain the required information,
it should look one level higher. There will be a root locale that
specifies none of the above identifiers.
"""
id = Field(
title="Locale identity",
description="ILocaleIdentity object identifying the locale.",
required=True,
readonly=True)
displayNames = Field(
title="Display Names",
description=("""ILocaleDisplayNames object that contains localized
names."""))
dates = Field(
title="Dates",
description="ILocaleDates object that contains date/time data.")
numbers = Field(
title="Numbers",
description="ILocaleNumbers object that contains number data.")
orientation = Field(
title="Orientation",
description="ILocaleOrientation with text orientation info.")
delimiters = Dict(
title="Delimiters",
description="Contains various Currency data.",
key_type=Choice(
title="Delimiter Type",
description="Delimiter name.",
values=("quotationStart",
"quotationEnd",
"alternateQuotationStart",
"alternateQuotationEnd")),
value_type=Field(title="Delimiter symbol"))
def getLocaleID():
"""Return a locale id as specified in the LDML specification"""
class ILocaleInheritance(Interface):
"""Locale inheritance support.
    Locale-related objects implementing this interface are able to ask for their
inherited self. For example, 'en_US.dates.monthNames' can call on itself
'getInheritedSelf()' and get the value for 'en.dates.monthNames'.
"""
__parent__ = Attribute("The parent in the location hierarchy")
__name__ = TextLine(
title="The name within the parent",
description=("""The parent can be traversed with this name to get
the object."""))
def getInheritedSelf():
"""Return itself but in the next higher up Locale."""
class IAttributeInheritance(ILocaleInheritance):
"""Provides inheritance properties for attributes"""
def __setattr__(name, value):
"""Set a new attribute on the object.
When a value is set on any inheritance-aware object and the value
also implements ILocaleInheritance, then we need to set the
'__parent__' and '__name__' attribute on the value.
"""
def __getattribute__(name):
"""Return the value of the attribute with the specified name.
If an attribute is not found or is None, the next higher up Locale
object is consulted."""
class IDictionaryInheritance(ILocaleInheritance):
"""Provides inheritance properties for dictionary keys"""
def __setitem__(key, value):
"""Set a new item on the object.
Here we assume that the value does not require any inheritance, so
that we do not set '__parent__' or '__name__' on the value.
"""
def __getitem__(key):
"""Return the value of the item with the specified name.
        If a key is not found or is None, the next higher up Locale
object is consulted.
"""
class ICollator(Interface):
"""Provide support for collating text strings
This interface will typically be provided by adapting a locale.
"""
def key(text):
"""Return a collation key for the given text.
"""
def cmp(text1, text2):
"""Compare two text strings.
        The return value is negative if text1 < text2, 0 if they are
equal, and positive if text1 > text2.
""" | zope.i18n | /zope.i18n-5.1-py3-none-any.whl/zope/i18n/interfaces/locales.py | locales.py |
"""Internationalization of content objects.
"""
from zope.interface import Attribute
from zope.interface import Interface
from zope.schema import Choice
from zope.schema import Dict
from zope.schema import Field
from zope.schema import TextLine
class II18nAware(Interface):
"""Internationalization aware content object."""
def getDefaultLanguage():
"""Return the default language."""
def setDefaultLanguage(language):
"""Set the default language, which will be used if the language is not
specified, or not available.
"""
def getAvailableLanguages():
"""Find all the languages that are available."""
class IMessageCatalog(Interface):
"""A catalog (mapping) of message ids to message text strings.
This interface provides a method for translating a message or message id,
including text with interpolation. The message catalog basically serves
as a fairly simple mapping object.
A single message catalog represents a specific language and domain.
Therefore you will have the following constructor arguments:
language -- The language of the returned messages. This is a read-only
attribute.
domain -- The translation domain for these messages. This is a read-only
attribute. See ITranslationService.
When we refer to text here, we mean text that follows the standard Zope 3
text representation.
    Note: The IReadMessageCatalog is the absolute minimal version required for
the TranslationService to function.
"""
def getMessage(msgid):
"""Get the appropriate text for the given message id.
An exception is raised if the message id is not found.
"""
def queryMessage(msgid, default=None):
"""Look for the appropriate text for the given message id.
If the message id is not found, default is returned.
"""
def getPluralMessage(singular, plural, n):
"""Get the appropriate text for the given message id and the
plural id.
An exception is raised if nothing was found.
"""
def queryPluralMessage(singular, plural, n, dft1=None, dft2=None):
"""Look for the appropriate text for the given message id and the
plural id.
If `n` is evaluated as a singular and the id is not found,
`dft1` is returned.
If `n` is evaluated as a plural and the plural id is not found,
`dft2` is returned.
"""
language = TextLine(
title="Language",
description="The language the catalog translates to.",
required=True)
domain = TextLine(
title="Domain",
description="The domain the catalog is registered for.",
required=True)
def getIdentifier():
"""Return a identifier for this message catalog. Note that this
identifier does not have to be unique as several message catalog
could serve the same domain and language.
Also, there are no restrictions on the form of the identifier.
"""
class IGlobalMessageCatalog(IMessageCatalog):
def reload():
"""Reload and parse .po file"""
class ITranslationDomain(Interface):
"""The Translation Domain utility
This interface provides methods for translating text, including text with
interpolation.
When we refer to text here, we mean text that follows the standard Zope 3
text representation.
The domain is used to specify which translation to use. Different
    products will often use a specific domain to name the translations supplied
with the product.
A favorite example is: How do you translate 'Sun'? Is it our star, the
abbreviation of Sunday or the company? Specifying the domain, such as
'Stars' or 'DaysOfWeek' will solve this problem for us.
Standard arguments in the methods described below:
msgid -- The id of the message that should be translated. This may be
an implicit or an explicit message id.
mapping -- The object to get the interpolation data from.
target_language -- The language to translate to.
msgid_plural -- The id of the plural message that should be translated.
number -- The number of items linked to the plural of the message.
context -- An object that provides contextual information for
determining client language preferences. It must implement
or have an adapter that implements IUserPreferredLanguages.
               It will be used to determine the language to translate to if
target_language is not specified explicitly.
Also note that language tags are defined by RFC 1766.
"""
domain = TextLine(
title="Domain Name",
description="The name of the domain this object represents.",
required=True)
def translate(msgid, mapping=None, context=None, target_language=None,
default=None, msgid_plural=None, default_plural=None,
number=None):
"""Return the translation for the message referred to by msgid.
Return the default if no translation is found.
However, the method does a little more than a vanilla translation.
The method also looks for a possible language to translate to.
        After a translation it also replaces any $name variables
inside the post-translation string with data from `mapping`. If a
value of `mapping` is a Message it is also translated before
interpolation.
"""
class IFallbackTranslationDomainFactory(Interface):
"""Factory for creating fallback translation domains
Fallback translation domains are primarily used for testing or
debugging i18n.
"""
def __call__(domain_id=""):
"""Return a fallback translation domain for the given domain id.
"""
class ITranslator(Interface):
"""A collaborative object which contains the domain, context, and locale.
    It is expected that the object be constructed with enough information
    to find the domain, context, and target language.
"""
def translate(msgid, mapping=None, default=None,
msgid_plural=None, default_plural=None, number=None):
"""Translate the source msgid using the given mapping.
See ITranslationService for details.
"""
class IMessageImportFilter(Interface):
"""The Import Filter for Translation Service Messages.
Classes implementing this interface should usually be Adaptors, as
they adapt the IEditableTranslationService interface."""
def importMessages(domains, languages, file):
"""Import all messages that are defined in the specified domains and
languages.
Note that some implementations might limit to only one domain and
one language. A good example for that is a GettextFile.
"""
class ILanguageAvailability(Interface):
def getAvailableLanguages():
"""Return a sequence of 3-tuples for available languages
        Each 3-tuple should be of the form (language, country, variant) so as
to be suitable for passing to methods in
zope.i18n.interfaces.locales.ILocaleProvider.
"""
class IUserPreferredLanguages(Interface):
"""This interface provides language negotiation based on user preferences.
See: http://www.w3.org/Protocols/rfc2616/rfc2616-sec14.html#sec14.4
"""
def getPreferredLanguages():
"""Return a sequence of user preferred languages.
The sequence is sorted in order of quality, with the most preferred
languages first.
"""
class IModifiableUserPreferredLanguages(IUserPreferredLanguages):
def setPreferredLanguages(languages):
"""Set a sequence of user preferred languages.
The sequence should be sorted in order of quality, with the most
preferred languages first.
"""
class IMessageExportFilter(Interface):
"""The Export Filter for Translation Service Messages.
Classes implementing this interface should usually be Adaptors, as
they adapt the IEditableTranslationService interface."""
def exportMessages(domains, languages):
"""Export all messages that are defined in the specified domains and
languages.
Note that some implementations might limit to only one domain and
one language. A good example for that is a GettextFile.
"""
class INegotiator(Interface):
"""A language negotiation service.
"""
def getLanguage(langs, env):
"""Return the matching language to use.
The decision of which language to use is based on the list of
available languages, and the given user environment. An
IUserPreferredLanguages adapter for the environment is obtained and
the list of acceptable languages is retrieved from the environment.
If no match is found between the list of available languages and the
list of acceptable languages, None is returned.
Arguments:
langs -- sequence of languages (not necessarily ordered)
env -- environment passed to the service to determine a sequence
               of user preferred languages
"""
# TODO: I'd like for there to be a symmetric interface method, one in
# which an adapter is gotten for both the first arg and the second
# arg. I.e. getLanguage(obj, env)
# But this isn't a good match for the ITranslationService.translate()
# method. :(
class IUserPreferredCharsets(Interface):
"""This interface provides charset negotiation based on user preferences.
"""
def getPreferredCharsets():
"""Return a sequence of user preferred charsets. Note that the order
should describe the order of preference. Therefore the first
character set in the list is the most preferred one.
"""
class IFormat(Interface):
"""A generic formatting class. It basically contains the parsing and
construction method for the particular object the formatting class
handles.
The constructor will always require a pattern (specific to the object).
"""
def setPattern(pattern):
"""Overwrite the old formatting pattern with the new one."""
def getPattern():
"""Get the currently used pattern."""
def parse(text, pattern=None):
"""Parse the text and convert it to an object, which is returned."""
def format(obj, pattern=None):
"""Format an object to a string using the pattern as a rule."""
class INumberFormat(IFormat):
r"""Specific number formatting interface. Here are the formatting
rules (I modified the rules from ICU a bit, since I think they did not
agree well with the real world XML formatting strings):
.. code-block:: none
posNegPattern := ({subpattern};{subpattern} | {subpattern})
subpattern := {padding}{prefix}{padding}{integer}{fraction}
{exponential}{padding}{suffix}{padding}
prefix := '\u0000'..'\uFFFD' - specialCharacters *
suffix := '\u0000'..'\uFFFD' - specialCharacters *
integer := {digitField}'0'
fraction := {decimalPoint}{digitField}
exponential := E integer
digitField := ( {digitField} {groupingSeparator} |
{digitField} '0'* |
'0'* |
{optionalDigitField} )
optionalDigitField := ( {digitField} {groupingSeparator} |
{digitField} '#'* |
'#'* )
groupingSeparator := ,
decimalPoint := .
padding := * '\u0000'..'\uFFFD'
Possible pattern symbols::
0 A digit. Always show this digit even if the value is zero.
# A digit, suppressed if zero
. Placeholder for decimal separator
, Placeholder for grouping separator
E Separates mantissa and exponent for exponential formats
      ; Separates formats (that is, a positive number format versus a
negative number format)
- Default negative prefix. Note that the locale's minus sign
character is used.
+ If this symbol is specified the locale's plus sign character is
used.
% Multiply by 100, as percentage
? Multiply by 1000, as per mille
      \u00A4 This is the currency sign. It will be replaced by a currency
symbol. If it is present in a pattern, the monetary decimal
separator is used instead of the decimal separator.
\u00A4\u00A4 This is the international currency sign. It will be
replaced by an international currency symbol. If it is present
in a pattern, the monetary decimal separator is used instead of
the decimal separator.
X Any other characters can be used in the prefix or suffix
' Used to quote special characters in a prefix or suffix
"""
type = Field(
title="Type",
description=("The type into which a string is parsed. If ``None``, "
"then ``int`` will be used for whole numbers and "
"``float`` for decimals."),
default=None,
required=False)
symbols = Dict(
title="Number Symbols",
key_type=Choice(
title="Dictionary Class",
values=("decimal", "group", "list", "percentSign",
"nativeZeroDigit", "patternDigit", "plusSign",
"minusSign", "exponential", "perMille",
"infinity", "nan")),
value_type=TextLine(title="Symbol"))
class IDateTimeFormat(IFormat):
"""DateTime formatting and parsing interface. Here is a list of
possible characters and their meaning:
    ======  =====================  =================  =====================
    Symbol  Meaning                Presentation       Example
    ======  =====================  =================  =====================
    G       era designator         (Text)             AD
    y       year                   (Number)           1996
    M       month in year          (Text and Number)  July and 07
    d       day in month           (Number)           10
    h       hour in am/pm (1-12)   (Number)           12
    H       hour in day (0-23)     (Number)           0
    m       minute in hour         (Number)           30
    s       second in minute       (Number)           55
    S       millisecond            (Number)           978
    E       day in week            (Text and Number)  Tuesday
    D       day in year            (Number)           189
    F       day of week in month   (Number)           2 (2nd Wed in July)
    w       week in year           (Number)           27
    W       week in month          (Number)           2
    a       am/pm marker           (Text)             pm
    k       hour in day (1-24)     (Number)           24
    K       hour in am/pm (0-11)   (Number)           0
    z       time zone              (Text)             Pacific Standard Time
    '       escape for text
    ''      single quote                              '
    ======  =====================  =================  =====================
Meaning of the amount of characters:
Text
Four or more, use full form, <4, use short or abbreviated form if it
exists. (for example, "EEEE" produces "Monday", "EEE" produces "Mon")
Number
The minimum number of digits. Shorter numbers are zero-padded to this
amount (for example, if "m" produces "6", "mm" produces "06"). Year is
handled specially; that is, if the count of 'y' is 2, the Year will be
truncated to 2 digits. (for example, if "yyyy" produces "1997", "yy"
produces "97".)
Text and Number
Three or over, use text, otherwise use number. (for example, "M"
produces "1", "MM" produces "01", "MMM" produces "Jan", and "MMMM"
produces "January".) """
calendar = Attribute("""This object must implement ILocaleCalendar. See
                         this interface's documentation for details.""")

# === zope.i18n 5.1 :: zope/i18n/interfaces/__init__.py ===
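
# Illustrative sketch (not part of the interface module): the IFormat /
# INumberFormat contract exercised through a formatter obtained from a
# locale; the pattern and values are examples only.
from zope.i18n.locales import locales

formatter = locales.getLocale('en', 'US').numbers.getFormatter('decimal')
formatter.setPattern('#,##0.00')     # IFormat.setPattern()
print(formatter.format(1234.5))      # -> '1,234.50'

# INegotiator / IUserPreferredLanguages sketch: an object that directly
# provides IUserPreferredLanguages can serve as the 'env' argument of the
# bundled negotiator (zope.i18n.negotiator.negotiator).
from zope.interface import implementer
from zope.i18n.interfaces import IUserPreferredLanguages
from zope.i18n.negotiator import negotiator


@implementer(IUserPreferredLanguages)
class BrowserStub:
    def getPreferredLanguages(self):
        return ['de-at', 'de', 'en']


print(negotiator.getLanguage(['en', 'fr'], BrowserStub()))  # -> 'en'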
import os
from zope.interface import implementer
from zope.i18n.interfaces.locales import ILocaleProvider
class LoadLocaleError(Exception):
"""This error is raised if a locale cannot be loaded."""
@implementer(ILocaleProvider)
class LocaleProvider:
"""A locale provider that gets its data from the XML data."""
def __init__(self, locale_dir):
self._locales = {}
self._locale_dir = locale_dir
def _compute_filename(self, language, country, variant):
# Creating the filename
if language is None and country is None and variant is None:
filename = 'root.xml'
else:
filename = language
if country is not None:
filename += '_' + country
if variant is not None:
if '_' not in filename:
filename += '_'
filename += '_' + variant
filename += '.xml'
return filename
def loadLocale(self, language=None, country=None, variant=None):
"""See zope.i18n.interfaces.locales.ILocaleProvider"""
filename = self._compute_filename(language, country, variant)
# Making sure we have this locale
path = os.path.join(self._locale_dir, filename)
if not os.path.exists(path):
raise LoadLocaleError(
'The desired locale is not available.\nPath: %s' % path)
# Import here to avoid circular imports
from zope.i18n.locales.xmlfactory import LocaleFactory
# Let's get it!
locale = LocaleFactory(path)()
self._locales[(language, country, variant)] = locale
def getLocale(self, language=None, country=None, variant=None):
"""See zope.i18n.interfaces.locales.ILocaleProvider"""
# We want to be liberal in what we accept, but the standard is lower
# case language codes, upper case country codes, and upper case
# variants, so coerce case here.
if language:
language = language.lower()
if country:
country = country.upper()
if variant:
variant = variant.upper()
if (language, country, variant) not in self._locales:
self.loadLocale(language, country, variant)
        return self._locales[(language, country, variant)]

# === zope.i18n 5.1 :: zope/i18n/locales/provider.py ===
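
# Illustrative sketch (not part of the module): how the provider maps locale
# specs to LDML file names.  The directory below is a placeholder; zope.i18n
# also ships a ready-made provider instance as ``zope.i18n.locales.locales``.
from zope.i18n.locales.provider import LocaleProvider

provider = LocaleProvider('/path/to/ldml/xml/files')
print(provider._compute_filename(None, None, None))       # -> 'root.xml'
print(provider._compute_filename('de', 'DE', None))       # -> 'de_DE.xml'
print(provider._compute_filename('de', None, 'PREEURO'))  # -> 'de__PREEURO.xml'
# provider.getLocale('de', 'DE') would then load and cache 'de_DE.xml'.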
__docformat__ = 'restructuredtext'
from zope.deprecation import deprecate
from zope.interface import implementer
from zope.i18n.interfaces.locales import IAttributeInheritance
from zope.i18n.interfaces.locales import IDictionaryInheritance
from zope.i18n.interfaces.locales import ILocaleInheritance
class NoParentException(AttributeError):
pass
@implementer(ILocaleInheritance)
class Inheritance:
"""A simple base version of locale inheritance.
This object contains some shared code amongst the various
'ILocaleInheritance' implementations.
"""
# See zope.i18n.interfaces.locales.ILocaleInheritance
__parent__ = None
# See zope.i18n.interfaces.locales.ILocaleInheritance
__name__ = None
def getInheritedSelf(self):
"""See zope.i18n.interfaces.locales.ILocaleInheritance"""
if self.__parent__ is None:
raise NoParentException('No parent was specified.')
parent = self.__parent__.getInheritedSelf()
if isinstance(parent, dict):
return parent[self.__name__]
return getattr(parent, self.__name__)
@implementer(IAttributeInheritance)
class AttributeInheritance(Inheritance):
r"""Implementation of locale inheritance for attributes.
Example::
>>> from zope.i18n.locales.tests.test_docstrings import \
... LocaleInheritanceStub
>>> root = LocaleInheritanceStub()
>>> root.data = 'value'
>>> root.attr = 'bar value'
>>> root.data2 = AttributeInheritance()
>>> root.data2.attr = 'value2'
>>> locale = LocaleInheritanceStub(root)
>>> locale.attr = 'foo value'
>>> locale.data2 = AttributeInheritance()
Here is an attribute lookup directly from the locale::
>>> locale.data
'value'
>>> locale.attr
'foo value'
... however, we can also have any amount of nesting::
>>> locale.data2.attr
'value2'
Once we have looked up a particular attribute, it should be cached,
i.e. exist in the dictionary of this inheritance object::
>>> 'attr' in locale.data2.__dict__
True
>>> locale.data2.__dict__['attr']
'value2'
Make sure that None can be assigned as value as well::
>>> locale.data2.attr = None
>>> locale.data2.attr is None
True
"""
def __setattr__(self, name, value):
"""See zope.i18n.interfaces.locales.ILocaleInheritance"""
# If we have a value that can also inherit data from other locales, we
# set its parent and name, so that we know how to get to it.
if (ILocaleInheritance.providedBy(value) and
not name.startswith('__')):
value.__parent__ = self
value.__name__ = name
super().__setattr__(name, value)
def __getattr__(self, name):
"""See zope.i18n.interfaces.locales.ILocaleInheritance"""
try:
selfUp = self.getInheritedSelf()
except NoParentException:
# There was simply no parent anymore, so let's raise an error
# for good
raise AttributeError("'%s' object (or any of its parents) has no "
"attribute '%s'" % (self.__class__.__name__,
name))
else:
value = getattr(selfUp, name)
# Since a locale hierarchy never changes after startup, we can
# cache the value locally, saving the time to ever look it up
# again.
# Note that we cannot use the normal setattr function, since
# __setattr__ of this class tries to assign a parent and name,
# which we do not want to override.
super().__setattr__(name, value)
return value
@implementer(IDictionaryInheritance)
class InheritingDictionary(Inheritance, dict):
"""Implementation of a dictionary that can also inherit values.
Example::
>>> from zope.i18n.locales.tests.test_docstrings import \\
... LocaleInheritanceStub
>>> root = LocaleInheritanceStub()
>>> root.data = InheritingDictionary({1: 'one', 2: 'two', 3: 'three'})
>>> root.data2 = AttributeInheritance()
>>> root.data2.dict = InheritingDictionary({1: 'i', 2: 'ii', 3: 'iii'})
>>> locale = LocaleInheritanceStub(root)
>>> locale.data = InheritingDictionary({1: 'eins'})
>>> locale.data2 = AttributeInheritance()
>>> locale.data2.dict = InheritingDictionary({1: 'I'})
Here is a dictionary lookup directly from the locale::
>>> locale.data[1]
'eins'
>>> locale.data[2]
'two'
... however, we can also have any amount of nesting::
>>> locale.data2.dict[1]
'I'
>>> locale.data2.dict[2]
'ii'
We also have to overwrite `get`, `keys` and `items` since we want
to make sure that all upper locales are consulted before returning the
default or to construct the list of elements, respectively::
>>> locale.data2.dict.get(2)
'ii'
>>> locale.data2.dict.get(4) is None
True
>>> sorted(locale.data.keys())
[1, 2, 3]
>>> sorted(locale.data.items())
[(1, 'eins'), (2, 'two'), (3, 'three')]
We also override `values`::
>>> sorted(locale.data.values())
['eins', 'three', 'two']
Historically, `value` was a synonym of this method; it is still
available, but is deprecated::
>>> import warnings
>>> with warnings.catch_warnings(record=True) as w:
... sorted(locale.data.value())
['eins', 'three', 'two']
>>> print(w[0].message)
`value` is a deprecated synonym for `values`
"""
def __setitem__(self, name, value):
"""See zope.i18n.interfaces.locales.ILocaleInheritance"""
if ILocaleInheritance.providedBy(value):
value.__parent__ = self
value.__name__ = name
super().__setitem__(name, value)
def __getitem__(self, name):
"""See zope.i18n.interfaces.locales.ILocaleInheritance"""
if name not in self:
try:
selfUp = self.getInheritedSelf()
except NoParentException:
pass
else:
return selfUp.__getitem__(name)
return super().__getitem__(name)
def get(self, name, default=None):
"""See zope.i18n.interfaces.locales.ILocaleInheritance"""
try:
return self[name]
except KeyError:
return default
def _make_reified_inherited_dict(self):
try:
d = dict(self.getInheritedSelf())
except NoParentException:
d = {}
d.update(self)
return d
def items(self):
return self._make_reified_inherited_dict().items()
def keys(self):
return list(self._make_reified_inherited_dict().keys())
def values(self):
return list(self._make_reified_inherited_dict().values())
    value = deprecate("`value` is a deprecated synonym for `values`")(values)

# === zope.i18n 5.1 :: zope/i18n/locales/inheritance.py ===
=================
Locales XML Files
=================
The XML locale files were produced as part of the Unicode Common Locale
Data Repository (CLDR). They are provided here under the Unicode Terms of
Use (see http://unicode.org/copyright.html).
CLDR Web site
-------------
http://www.unicode.org/cldr/
Locale Data Markup Language
---------------------------
The XML files follow the now public and completed LDML format.
The DTD can be found at
http://www.unicode.org/cldr/dtd/1.1/ldml.dtd
The specification is at
http://www.unicode.org/reports/tr35/tr35-2.html
Download::
http://www.unicode.org/cldr/repository_access.html
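
Usage
-----

A sketch of how these files are consumed (the path below is a placeholder;
the bundled ``LocaleProvider`` normally computes it from the locale spec)::

  from zope.i18n.locales.xmlfactory import LocaleFactory
  locale = LocaleFactory('/path/to/locales/de_DE.xml')()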

.. source: zope.i18n 5.1 :: zope/i18n/locales/README.txt
"""XML Locale-related objects and functions
"""
from datetime import date
from datetime import time
from xml.dom.minidom import parse as parseXML
from zope.i18n.locales import Locale
from zope.i18n.locales import LocaleCalendar
from zope.i18n.locales import LocaleCurrency
from zope.i18n.locales import LocaleDates
from zope.i18n.locales import LocaleDayContext
from zope.i18n.locales import LocaleDisplayNames
from zope.i18n.locales import LocaleFormat
from zope.i18n.locales import LocaleFormatLength
from zope.i18n.locales import LocaleIdentity
from zope.i18n.locales import LocaleMonthContext
from zope.i18n.locales import LocaleNumbers
from zope.i18n.locales import LocaleOrientation
from zope.i18n.locales import LocaleTimeZone
from zope.i18n.locales import LocaleVersion
from zope.i18n.locales import calendarAliases
from zope.i18n.locales import dayMapping
from zope.i18n.locales.inheritance import InheritingDictionary
class LocaleFactory:
"""This class creates a Locale object from an ICU XML file."""
def __init__(self, path):
"""Initialize factory."""
self._path = path
# Mainly for testing
if path:
self._data = parseXML(path).documentElement
def _getText(self, nodelist):
rc = ''
for node in nodelist:
if node.nodeType == node.TEXT_NODE:
rc = rc + node.data
return rc
def _extractVersion(self, identity_node):
"""Extract the Locale's version info based on data from the DOM
tree.
Example::
>>> factory = LocaleFactory(None)
>>> from xml.dom.minidom import parseString
>>> xml = '''
... <identity>
... <version number="1.0">Some notes</version>
... <generation date="2003-12-19" />
... <language type="de" />
... <territory type="DE" />
... </identity>'''
>>> dom = parseString(xml)
>>> version = factory._extractVersion(dom.documentElement)
>>> version.number
'1.0'
>>> version.generationDate
datetime.date(2003, 12, 19)
>>> version.notes
'Some notes'
"""
number = generationDate = notes = None
# Retrieve the version number and notes of the locale
nodes = identity_node.getElementsByTagName('version')
if nodes:
number = nodes[0].getAttribute('number')
notes = self._getText(nodes[0].childNodes)
# Retrieve the generationDate of the locale
nodes = identity_node.getElementsByTagName('generation')
if nodes:
year, month, day = nodes[0].getAttribute('date').split('-')
generationDate = date(int(year), int(month), int(day))
return LocaleVersion(number, generationDate, notes)
def _extractIdentity(self):
"""Extract the Locale's identity object based on info from the DOM
tree.
Example::
>>> from xml.dom.minidom import parseString
>>> xml = '''
... <ldml>
... <identity>
... <version number="1.0"/>
... <generation date="2003-12-19" />
... <language type="en" />
... <territory type="US" />
... <variant type="POSIX" />
... </identity>
... </ldml>'''
>>> factory = LocaleFactory(None)
>>> factory._data = parseString(xml).documentElement
>>> id = factory._extractIdentity()
>>> id.language
'en'
>>> id.script is None
True
>>> id.territory
'US'
>>> id.variant
'POSIX'
>>> id.version.number
'1.0'
"""
id = LocaleIdentity()
identity = self._data.getElementsByTagName('identity')[0]
# Retrieve the language of the locale
nodes = identity.getElementsByTagName('language')
if nodes != []:
id.language = nodes[0].getAttribute('type') or None
# Retrieve the territory of the locale
nodes = identity.getElementsByTagName('territory')
if nodes != []:
id.territory = nodes[0].getAttribute('type') or None
        # Retrieve the variant of the locale
nodes = identity.getElementsByTagName('variant')
if nodes != []:
id.variant = nodes[0].getAttribute('type') or None
id.version = self._extractVersion(identity)
return id
def _extractTypes(self, names_node):
"""Extract all types from the names_node.
Example::
>>> factory = LocaleFactory(None)
>>> from xml.dom.minidom import parseString
>>> xml = '''
... <displayNames>
... <types>
... <type type="Fallback" key="calendar"></type>
... <type type="buddhist" key="calendar">BUDDHIST</type>
... <type type="chinese" key="calendar">CHINESE</type>
... <type type="gregorian" key="calendar">GREGORIAN</type>
... <type type="stroke" key="collation">STROKE</type>
... <type type="traditional" key="collation">TRADITIONAL</type>
... </types>
... </displayNames>'''
>>> dom = parseString(xml)
>>> types = factory._extractTypes(dom.documentElement)
>>> keys = types.keys()
>>> keys.sort()
>>> keys[:2]
[('Fallback', 'calendar'), ('buddhist', 'calendar')]
>>> keys[2:4]
[('chinese', 'calendar'), ('gregorian', 'calendar')]
>>> keys[4:]
[('stroke', 'collation'), ('traditional', 'collation')]
>>> types[('chinese', 'calendar')]
'CHINESE'
>>> types[('stroke', 'collation')]
'STROKE'
"""
        # The 'types' node does not have to exist
types_nodes = names_node.getElementsByTagName('types')
if types_nodes == []:
return
# Retrieve all types
types = InheritingDictionary()
for type_node in types_nodes[0].getElementsByTagName('type'):
type = type_node.getAttribute('type')
key = type_node.getAttribute('key')
types[(type, key)] = self._getText(type_node.childNodes)
return types
def _extractDisplayNames(self):
"""Extract all display names from the DOM tree.
Example::
>>> from xml.dom.minidom import parseString
>>> xml = '''
... <ldml>
... <localeDisplayNames>
... <languages>
... <language type="Fallback"></language>
... <language type="aa">aa</language>
... <language type="ab">ab</language>
... </languages>
... <scripts>
... <script type="Arab">Arab</script>
... <script type="Armn">Armn</script>
... </scripts>
... <territories>
... <territory type="AD">AD</territory>
... <territory type="AE">AE</territory>
... </territories>
... <variants>
... <variant type="Fallback"></variant>
... <variant type="POSIX">POSIX</variant>
... </variants>
... <keys>
... <key type="calendar">CALENDAR</key>
... <key type="collation">COLLATION</key>
... </keys>
... <types>
... <type type="buddhist" key="calendar">BUDDHIST</type>
... <type type="stroke" key="collation">STROKE</type>
... </types>
... </localeDisplayNames>
... </ldml>'''
>>> factory = LocaleFactory(None)
>>> factory._data = parseString(xml).documentElement
>>> names = factory._extractDisplayNames()
>>> keys = names.languages.keys()
>>> keys.sort()
>>> keys
['Fallback', 'aa', 'ab']
>>> names.languages["aa"]
'aa'
>>> keys = names.scripts.keys()
>>> keys.sort()
>>> keys
['Arab', 'Armn']
>>> names.scripts["Arab"]
'Arab'
>>> keys = names.territories.keys()
>>> keys.sort()
>>> keys
['AD', 'AE']
>>> names.territories["AD"]
'AD'
>>> keys = names.variants.keys()
>>> keys.sort()
>>> keys
['Fallback', 'POSIX']
>>> names.variants["Fallback"]
''
>>> keys = names.keys.keys()
>>> keys.sort()
>>> keys
['calendar', 'collation']
>>> names.keys["calendar"]
'CALENDAR'
>>> names.types[("stroke", "collation")]
'STROKE'
"""
displayNames = LocaleDisplayNames()
        # Neither 'localeDisplayNames' nor 'scripts' has to exist
names_nodes = self._data.getElementsByTagName('localeDisplayNames')
if names_nodes == []:
return displayNames
for group_tag, single_tag in (('languages', 'language'),
('scripts', 'script'),
('territories', 'territory'),
('variants', 'variant'),
('keys', 'key')):
group_nodes = names_nodes[0].getElementsByTagName(group_tag)
if group_nodes == []:
continue
# Retrieve all children
elements = InheritingDictionary()
for element in group_nodes[0].getElementsByTagName(single_tag):
type = element.getAttribute('type')
elements[type] = self._getText(element.childNodes)
setattr(displayNames, group_tag, elements)
types = self._extractTypes(names_nodes[0])
if types is not None:
displayNames.types = types
return displayNames
def _extractMonths(self, months_node, calendar):
"""Extract all month entries from cal_node and store them in calendar.
Example::
>>> class CalendarStub(object):
... months = None
>>> calendar = CalendarStub()
>>> factory = LocaleFactory(None)
>>> from xml.dom.minidom import parseString
>>> xml = '''
... <months>
... <default type="format" />
... <monthContext type="format">
... <default type="wide" />
... <monthWidth type="wide">
... <month type="1">Januar</month>
... <month type="2">Februar</month>
... <month type="3">Maerz</month>
... <month type="4">April</month>
... <month type="5">Mai</month>
... <month type="6">Juni</month>
... <month type="7">Juli</month>
... <month type="8">August</month>
... <month type="9">September</month>
... <month type="10">Oktober</month>
... <month type="11">November</month>
... <month type="12">Dezember</month>
... </monthWidth>
... <monthWidth type="abbreviated">
... <month type="1">Jan</month>
... <month type="2">Feb</month>
... <month type="3">Mrz</month>
... <month type="4">Apr</month>
... <month type="5">Mai</month>
... <month type="6">Jun</month>
... <month type="7">Jul</month>
... <month type="8">Aug</month>
... <month type="9">Sep</month>
... <month type="10">Okt</month>
... <month type="11">Nov</month>
... <month type="12">Dez</month>
... </monthWidth>
... </monthContext>
... </months>'''
>>> dom = parseString(xml)
>>> factory._extractMonths(dom.documentElement, calendar)
The contexts and widths were introduced in CLDR 1.1; with them, month
names are looked up like this::
>>> calendar.defaultMonthContext
'format'
>>> ctx = calendar.monthContexts["format"]
>>> ctx.defaultWidth
'wide'
>>> names = [ctx.months["wide"][type] for type in range(1,13)]
>>> names[:7]
['Januar', 'Februar', 'Maerz', 'April', 'Mai', 'Juni', 'Juli']
>>> names[7:]
['August', 'September', 'Oktober', 'November', 'Dezember']
>>> abbrs = [ctx.months["abbreviated"][type]
... for type in range(1,13)]
>>> abbrs[:6]
['Jan', 'Feb', 'Mrz', 'Apr', 'Mai', 'Jun']
>>> abbrs[6:]
['Jul', 'Aug', 'Sep', 'Okt', 'Nov', 'Dez']
The old, CLDR 1.0 way of getting month names and abbreviations::
>>> names = [calendar.months.get(type, (None, None))[0]
... for type in range(1, 13)]
>>> names[:7]
['Januar', 'Februar', 'Maerz', 'April', 'Mai', 'Juni', 'Juli']
>>> names[7:]
['August', 'September', 'Oktober', 'November', 'Dezember']
>>> abbrs = [calendar.months.get(type, (None, None))[1]
... for type in range(1, 13)]
>>> abbrs[:6]
['Jan', 'Feb', 'Mrz', 'Apr', 'Mai', 'Jun']
>>> abbrs[6:]
['Jul', 'Aug', 'Sep', 'Okt', 'Nov', 'Dez']
If there are no months, nothing happens:
>>> calendar = CalendarStub()
>>> factory = LocaleFactory(None)
>>> xml = '''<months><default type="format" /></months>'''
>>> dom = parseString(xml)
>>> factory._extractMonths(dom.documentElement, calendar)
>>> calendar.months
"""
defaultMonthContext_node = months_node.getElementsByTagName('default')
if defaultMonthContext_node:
calendar.defaultMonthContext = defaultMonthContext_node[
0].getAttribute('type')
monthContext_nodes = months_node.getElementsByTagName('monthContext')
if not monthContext_nodes:
return
calendar.monthContexts = InheritingDictionary()
names_node = abbrs_node = None # BBB
for node in monthContext_nodes:
context_type = node.getAttribute('type')
mctx = LocaleMonthContext(context_type)
calendar.monthContexts[context_type] = mctx
defaultWidth_node = node.getElementsByTagName('default')
if defaultWidth_node:
mctx.defaultWidth = defaultWidth_node[0].getAttribute('type')
widths = InheritingDictionary()
mctx.months = widths
for width_node in node.getElementsByTagName('monthWidth'):
width_type = width_node.getAttribute('type')
width = InheritingDictionary()
widths[width_type] = width
for month_node in width_node.getElementsByTagName('month'):
mtype = int(month_node.getAttribute('type'))
width[mtype] = self._getText(month_node.childNodes)
if context_type == 'format':
if width_type == 'abbreviated':
abbrs_node = width_node
elif width_type == 'wide':
names_node = width_node
if not (names_node and abbrs_node):
return
# Get all month names
names = {}
for name_node in names_node.getElementsByTagName('month'):
type = int(name_node.getAttribute('type'))
names[type] = self._getText(name_node.childNodes)
# Get all month abbrs
abbrs = {}
for abbr_node in abbrs_node.getElementsByTagName('month'):
type = int(abbr_node.getAttribute('type'))
abbrs[type] = self._getText(abbr_node.childNodes)
# Put the info together
calendar.months = InheritingDictionary()
for type in range(1, 13):
calendar.months[type] = (names.get(type, None),
abbrs.get(type, None))
def _extractDays(self, days_node, calendar):
"""Extract all day entries from cal_node and store them in
calendar.
Example::
>>> class CalendarStub(object):
... days = None
>>> calendar = CalendarStub()
>>> factory = LocaleFactory(None)
>>> from xml.dom.minidom import parseString
>>> xml = '''
... <days>
... <default type="format" />
... <dayContext type="format">
... <default type="wide" />
... <dayWidth type="wide">
... <day type="sun">Sonntag</day>
... <day type="mon">Montag</day>
... <day type="tue">Dienstag</day>
... <day type="wed">Mittwoch</day>
... <day type="thu">Donnerstag</day>
... <day type="fri">Freitag</day>
... <day type="sat">Samstag</day>
... </dayWidth>
... <dayWidth type="abbreviated">
... <day type="sun">So</day>
... <day type="mon">Mo</day>
... <day type="tue">Di</day>
... <day type="wed">Mi</day>
... <day type="thu">Do</day>
... <day type="fri">Fr</day>
... <day type="sat">Sa</day>
... </dayWidth>
... </dayContext>
... </days>'''
>>> dom = parseString(xml)
>>> factory._extractDays(dom.documentElement, calendar)
Day contexts and widths were introduced in CLDR 1.1, here's
how to use them::
>>> calendar.defaultDayContext
'format'
>>> ctx = calendar.dayContexts["format"]
>>> ctx.defaultWidth
'wide'
>>> names = [ctx.days["wide"][type] for type in range(1,8)]
>>> names[:4]
['Montag', 'Dienstag', 'Mittwoch', 'Donnerstag']
>>> names[4:]
['Freitag', 'Samstag', 'Sonntag']
>>> abbrs = [ctx.days["abbreviated"][type] for type in range(1,8)]
>>> abbrs
['Mo', 'Di', 'Mi', 'Do', 'Fr', 'Sa', 'So']
And here's the old CLDR 1.0 way of getting day names and
abbreviations::
>>> names = [calendar.days.get(type, (None, None))[0]
... for type in range(1, 8)]
>>> names[:4]
['Montag', 'Dienstag', 'Mittwoch', 'Donnerstag']
>>> names[4:]
['Freitag', 'Samstag', 'Sonntag']
>>> abbrs = [calendar.days.get(type, (None, None))[1]
... for type in range(1, 8)]
>>> abbrs
['Mo', 'Di', 'Mi', 'Do', 'Fr', 'Sa', 'So']
If there are no days, nothing happens:
>>> calendar = CalendarStub()
>>> factory = LocaleFactory(None)
>>> xml = '''<days><default type="format" /></days>'''
>>> dom = parseString(xml)
>>> factory._extractDays(dom.documentElement, calendar)
>>> calendar.days
"""
defaultDayContext_node = days_node.getElementsByTagName('default')
if defaultDayContext_node:
calendar.defaultDayContext = defaultDayContext_node[
0].getAttribute('type')
dayContext_nodes = days_node.getElementsByTagName('dayContext')
if not dayContext_nodes:
return
calendar.dayContexts = InheritingDictionary()
names_node = abbrs_node = None # BBB
for node in dayContext_nodes:
context_type = node.getAttribute('type')
dctx = LocaleDayContext(context_type)
calendar.dayContexts[context_type] = dctx
defaultWidth_node = node.getElementsByTagName('default')
if defaultWidth_node:
dctx.defaultWidth = defaultWidth_node[0].getAttribute('type')
widths = InheritingDictionary()
dctx.days = widths
for width_node in node.getElementsByTagName('dayWidth'):
width_type = width_node.getAttribute('type')
width = InheritingDictionary()
widths[width_type] = width
for day_node in width_node.getElementsByTagName('day'):
dtype = dayMapping[day_node.getAttribute('type')]
width[dtype] = self._getText(day_node.childNodes)
if context_type == 'format':
if width_type == 'abbreviated':
abbrs_node = width_node
elif width_type == 'wide':
names_node = width_node
if not (names_node and abbrs_node):
return
# Get all weekday names
names = {}
for name_node in names_node.getElementsByTagName('day'):
type = dayMapping[name_node.getAttribute('type')]
names[type] = self._getText(name_node.childNodes)
# Get all weekday abbreviations
abbrs = {}
for abbr_node in abbrs_node.getElementsByTagName('day'):
type = dayMapping[abbr_node.getAttribute('type')]
abbrs[type] = self._getText(abbr_node.childNodes)
# Put the info together
calendar.days = InheritingDictionary()
for type in range(1, 8):
calendar.days[type] = (names.get(type, None),
abbrs.get(type, None))
def _extractWeek(self, cal_node, calendar):
"""Extract all week entries from cal_node and store them in
calendar.
Example::
>>> class CalendarStub(object):
... week = None
>>> calendar = CalendarStub()
>>> factory = LocaleFactory(None)
>>> from xml.dom.minidom import parseString
>>> xml = '''
... <calendar type="gregorian">
... <week>
... <minDays count="1"/>
... <firstDay day="sun"/>
... <weekendStart day="fri" time="18:00"/>
... <weekendEnd day="sun" time="18:00"/>
... </week>
... </calendar>'''
>>> dom = parseString(xml)
>>> factory._extractWeek(dom.documentElement, calendar)
>>> calendar.week['minDays']
1
>>> calendar.week['firstDay']
7
>>> calendar.week['weekendStart']
(5, datetime.time(18, 0))
>>> calendar.week['weekendEnd']
(7, datetime.time(18, 0))
"""
# See whether we have week entries
week_nodes = cal_node.getElementsByTagName('week')
if not week_nodes:
return
calendar.week = InheritingDictionary()
# Get the 'minDays' value if available
for node in week_nodes[0].getElementsByTagName('minDays'):
calendar.week['minDays'] = int(node.getAttribute('count'))
# Get the 'firstDay' value if available
for node in week_nodes[0].getElementsByTagName('firstDay'):
calendar.week['firstDay'] = dayMapping[node.getAttribute('day')]
# Get the 'weekendStart' value if available
for node in week_nodes[0].getElementsByTagName('weekendStart'):
day = dayMapping[node.getAttribute('day')]
time_args = map(int, node.getAttribute('time').split(':'))
calendar.week['weekendStart'] = (day, time(*time_args))
# Get the 'weekendEnd' value if available
for node in week_nodes[0].getElementsByTagName('weekendEnd'):
day = dayMapping[node.getAttribute('day')]
time_args = map(int, node.getAttribute('time').split(':'))
calendar.week['weekendEnd'] = (day, time(*time_args))
def _extractEras(self, cal_node, calendar):
"""Extract all era entries from cal_node and store them in
calendar.
Example::
>>> class CalendarStub(object):
... days = None
>>> calendar = CalendarStub()
>>> factory = LocaleFactory(None)
>>> from xml.dom.minidom import parseString
>>> xml = '''
... <calendar type="gregorian">
... <eras>
... <eraAbbr>
... <era type="0">BC</era>
... <era type="1">AD</era>
... </eraAbbr>
... <eraName>
... <era type="0">Before Christ</era>
... </eraName>
... </eras>
... </calendar>'''
>>> dom = parseString(xml)
>>> factory._extractEras(dom.documentElement, calendar)
>>> names = [calendar.eras.get(type, (None, None))[0]
... for type in range(2)]
>>> names
['Before Christ', None]
>>> abbrs = [calendar.eras.get(type, (None, None))[1]
... for type in range(2)]
>>> abbrs
['BC', 'AD']
"""
# See whether we have era names and abbreviations
eras_nodes = cal_node.getElementsByTagName('eras')
if not eras_nodes:
return
names_nodes = eras_nodes[0].getElementsByTagName('eraName')
abbrs_nodes = eras_nodes[0].getElementsByTagName('eraAbbr')
# Get all era names
names = {}
if names_nodes:
for name_node in names_nodes[0].getElementsByTagName('era'):
type = int(name_node.getAttribute('type'))
names[type] = self._getText(name_node.childNodes)
# Get all era abbreviations
abbrs = {}
if abbrs_nodes:
for abbr_node in abbrs_nodes[0].getElementsByTagName('era'):
type = int(abbr_node.getAttribute('type'))
abbrs[type] = self._getText(abbr_node.childNodes)
calendar.eras = InheritingDictionary()
for type in abbrs.keys():
calendar.eras[type] = (names.get(type, None),
abbrs.get(type, None))
def _extractFormats(self, formats_node, lengthNodeName, formatNodeName):
"""Extract all format entries from formats_node and return a
tuple of the form (defaultFormatType, {type: LocaleFormatLength, ...}).
Example::
>>> factory = LocaleFactory(None)
>>> from xml.dom.minidom import parseString
>>> xml = '''
... <dateFormats>
... <default type="medium"/>
... <dateFormatLength type="full">
... <dateFormat>
... <pattern>EEEE, MMMM d, yyyy</pattern>
... </dateFormat>
... </dateFormatLength>
... <dateFormatLength type="medium">
... <default type="DateFormatsKey2"/>
... <dateFormat type="DateFormatsKey2">
... <displayName>Standard Date</displayName>
... <pattern>MMM d, yyyy</pattern>
... </dateFormat>
... <dateFormat type="DateFormatsKey3">
... <pattern>MMM dd, yyyy</pattern>
... </dateFormat>
... </dateFormatLength>
... </dateFormats>'''
>>> dom = parseString(xml)
>>> default, lengths = factory._extractFormats(
... dom.documentElement, 'dateFormatLength', 'dateFormat')
>>> default
'medium'
>>> lengths["full"].formats[None].pattern
'EEEE, MMMM d, yyyy'
>>> lengths["medium"].default
'DateFormatsKey2'
>>> lengths["medium"].formats['DateFormatsKey3'].pattern
'MMM dd, yyyy'
>>> lengths["medium"].formats['DateFormatsKey2'].displayName
'Standard Date'
"""
formats_default = None
default_nodes = formats_node.getElementsByTagName('default')
if default_nodes:
formats_default = default_nodes[0].getAttribute('type')
lengths = InheritingDictionary()
for length_node in formats_node.getElementsByTagName(lengthNodeName):
type = length_node.getAttribute('type') or None
length = LocaleFormatLength(type)
default_nodes = length_node.getElementsByTagName('default')
if default_nodes:
length.default = default_nodes[0].getAttribute('type')
if length_node.getElementsByTagName(formatNodeName):
length.formats = InheritingDictionary()
for format_node in length_node.getElementsByTagName(
formatNodeName):
format = LocaleFormat()
format.type = format_node.getAttribute('type') or None
pattern_node = format_node.getElementsByTagName('pattern')[0]
format.pattern = self._getText(pattern_node.childNodes)
name_nodes = format_node.getElementsByTagName('displayName')
if name_nodes:
format.displayName = self._getText(
name_nodes[0].childNodes)
length.formats[format.type] = format
lengths[length.type] = length
return (formats_default, lengths)
def _extractCalendars(self, dates_node):
"""Extract all calendars and their specific information from the
Locale's DOM tree.
Example::
>>> factory = LocaleFactory(None)
>>> from xml.dom.minidom import parseString
>>> xml = '''
... <dates>
... <calendars>
... <calendar type="gregorian">
... <monthNames>
... <month type="1">January</month>
... <month type="12">December</month>
... </monthNames>
... <monthAbbr>
... <month type="1">Jan</month>
... <month type="12">Dec</month>
... </monthAbbr>
... <dayNames>
... <day type="sun">Sunday</day>
... <day type="sat">Saturday</day>
... </dayNames>
... <dayAbbr>
... <day type="sun">Sun</day>
... <day type="sat">Sat</day>
... </dayAbbr>
... <week>
... <minDays count="1"/>
... <firstDay day="sun"/>
... </week>
... <am>AM</am>
... <pm>PM</pm>
... <eras>
... <eraAbbr>
... <era type="0">BC</era>
... <era type="1">AD</era>
... </eraAbbr>
... </eras>
... <dateFormats>
... <dateFormatLength type="full">
... <dateFormat>
... <pattern>EEEE, MMMM d, yyyy</pattern>
... </dateFormat>
... </dateFormatLength>
... </dateFormats>
... <timeFormats>
... <default type="medium"/>
... <timeFormatLength type="medium">
... <timeFormat>
... <pattern>h:mm:ss a</pattern>
... </timeFormat>
... </timeFormatLength>
... </timeFormats>
... <dateTimeFormats>
... <dateTimeFormatLength>
... <dateTimeFormat>
... <pattern>{0} {1}</pattern>
... </dateTimeFormat>
... </dateTimeFormatLength>
... </dateTimeFormats>
... </calendar>
... <calendar type="buddhist">
... <eras>
... <era type="0">BE</era>
... </eras>
... </calendar>
... </calendars>
... </dates>'''
>>> dom = parseString(xml)
>>> calendars = factory._extractCalendars(dom.documentElement)
>>> keys = sorted(calendars.keys())
>>> keys
['buddhist', 'gregorian', 'thai-buddhist']
Note that "thai-buddhist" are added as an alias to "buddhist".
>>> calendars['buddhist'] is calendars['thai-buddhist']
True
If there are no calendars, nothing happens:
>>> xml = '''<dates />'''
>>> dom = parseString(xml)
>>> factory._extractCalendars(dom.documentElement)
"""
cals_nodes = dates_node.getElementsByTagName('calendars')
# no calendar node
if cals_nodes == []:
return None
calendars = InheritingDictionary()
for cal_node in cals_nodes[0].getElementsByTagName('calendar'):
# get the calendar type
type = cal_node.getAttribute('type')
calendar = LocaleCalendar(type)
# get month names and abbreviations
months_nodes = cal_node.getElementsByTagName('months')
if months_nodes:
self._extractMonths(months_nodes[0], calendar)
# get weekday names and abbreviations
days_nodes = cal_node.getElementsByTagName('days')
if days_nodes:
self._extractDays(days_nodes[0], calendar)
# get week information
self._extractWeek(cal_node, calendar)
# get am/pm designation values
nodes = cal_node.getElementsByTagName('am')
if nodes:
calendar.am = self._getText(nodes[0].childNodes)
nodes = cal_node.getElementsByTagName('pm')
if nodes:
calendar.pm = self._getText(nodes[0].childNodes)
# get era names and abbreviations
self._extractEras(cal_node, calendar)
for formatsName, lengthName, formatName in (
('dateFormats', 'dateFormatLength', 'dateFormat'),
('timeFormats', 'timeFormatLength', 'timeFormat'),
('dateTimeFormats', 'dateTimeFormatLength',
'dateTimeFormat')):
formats_nodes = cal_node.getElementsByTagName(formatsName)
if formats_nodes:
default, formats = self._extractFormats(
formats_nodes[0], lengthName, formatName)
setattr(calendar,
'default' + formatName[0].upper() + formatName[1:],
default)
setattr(calendar, formatsName, formats)
calendars[calendar.type] = calendar
if calendar.type in calendarAliases:
for alias in calendarAliases[calendar.type]:
calendars[alias] = calendar
return calendars
def _extractTimeZones(self, dates_node):
"""Extract all timezone information for the locale from the DOM
tree.
Example::
>>> factory = LocaleFactory(None)
>>> from xml.dom.minidom import parseString
>>> xml = '''
... <dates>
... <timeZoneNames>
... <zone type="America/Los_Angeles" >
... <long>
... <generic>Pacific Time</generic>
... <standard>Pacific Standard Time</standard>
... <daylight>Pacific Daylight Time</daylight>
... </long>
... <short>
... <generic>PT</generic>
... <standard>PST</standard>
... <daylight>PDT</daylight>
... </short>
... <exemplarCity>San Francisco</exemplarCity>
... </zone>
... <zone type="Europe/London">
... <long>
... <generic>British Time</generic>
... <standard>British Standard Time</standard>
... <daylight>British Daylight Time</daylight>
... </long>
... <exemplarCity>York</exemplarCity>
... </zone>
... </timeZoneNames>
... </dates>'''
>>> dom = parseString(xml)
>>> zones = factory._extractTimeZones(dom.documentElement)
>>> keys = sorted(zones.keys())
>>> keys
['America/Los_Angeles', 'Europe/London']
>>> zones["Europe/London"].names["generic"]
('British Time', None)
>>> zones["Europe/London"].cities
['York']
>>> zones["America/Los_Angeles"].names["generic"]
('Pacific Time', 'PT')
"""
tz_names = dates_node.getElementsByTagName('timeZoneNames')
if not tz_names:
return
zones = InheritingDictionary()
for node in tz_names[0].getElementsByTagName('zone'):
type = node.getAttribute('type')
zone = LocaleTimeZone(type)
# get the short and long name node
long = node.getElementsByTagName('long')
short = node.getElementsByTagName('short')
for type in ("generic", "standard", "daylight"):
# get long name
long_desc = None
if long:
long_nodes = long[0].getElementsByTagName(type)
if long_nodes:
long_desc = self._getText(long_nodes[0].childNodes)
# get short name
short_desc = None
if short:
short_nodes = short[0].getElementsByTagName(type)
if short_nodes:
short_desc = self._getText(short_nodes[0].childNodes)
if long_desc is not None or short_desc is not None:
zone.names[type] = (long_desc, short_desc)
for city in node.getElementsByTagName('exemplarCity'):
zone.cities.append(self._getText(city.childNodes))
zones[zone.type] = zone
return zones
def _extractDates(self):
"""Extract all date information from the DOM tree"""
dates_nodes = self._data.getElementsByTagName('dates')
if dates_nodes == []:
return
dates = LocaleDates()
calendars = self._extractCalendars(dates_nodes[0])
if calendars is not None:
dates.calendars = calendars
timezones = self._extractTimeZones(dates_nodes[0])
if timezones is not None:
dates.timezones = timezones
return dates
def _extractSymbols(self, numbers_node):
"""Extract all week entries from cal_node and store them in
calendar.
Example::
>>> factory = LocaleFactory(None)
>>> from xml.dom.minidom import parseString
>>> xml = '''
... <numbers>
... <symbols>
... <decimal>.</decimal>
... <group>,</group>
... <list>;</list>
... <percentSign>%</percentSign>
... <nativeZeroDigit>0</nativeZeroDigit>
... <patternDigit>#</patternDigit>
... <plusSign>+</plusSign>
... <minusSign>-</minusSign>
... <exponential>E</exponential>
... <perMille>o/oo</perMille>
... <infinity>oo</infinity>
... <nan>NaN</nan>
... </symbols>
... </numbers>'''
>>> dom = parseString(xml)
>>> symbols = factory._extractSymbols(dom.documentElement)
>>> symbols['list']
';'
>>> keys = sorted(symbols.keys())
>>> keys[:5]
['decimal', 'exponential', 'group', 'infinity', 'list']
>>> keys[5:9]
['minusSign', 'nan', 'nativeZeroDigit', 'patternDigit']
>>> keys[9:]
['perMille', 'percentSign', 'plusSign']
"""
# See whether we have symbols entries
symbols_nodes = numbers_node.getElementsByTagName('symbols')
if not symbols_nodes:
return
symbols = InheritingDictionary()
for name in ("decimal", "group", "list", "percentSign",
"nativeZeroDigit", "patternDigit", "plusSign",
"minusSign", "exponential", "perMille",
"infinity", "nan"):
nodes = symbols_nodes[0].getElementsByTagName(name)
if nodes:
symbols[name] = self._getText(nodes[0].childNodes)
return symbols
def _extractNumberFormats(self, numbers_node, numbers):
"""Extract all number formats from the numbers_node and save the data
in numbers.
Example::
>>> class Numbers(object):
... defaultDecimalFormat = None
... decimalFormats = None
... defaultScientificFormat = None
... scientificFormats = None
... defaultPercentFormat = None
... percentFormats = None
... defaultCurrencyFormat = None
... currencyFormats = None
>>> numbers = Numbers()
>>> factory = LocaleFactory(None)
>>> from xml.dom.minidom import parseString
>>> xml = '''
... <numbers>
... <decimalFormats>
... <decimalFormatLength type="long">
... <decimalFormat>
... <pattern>#,##0.###</pattern>
... </decimalFormat>
... </decimalFormatLength>
... </decimalFormats>
... <scientificFormats>
... <default type="long"/>
... <scientificFormatLength type="long">
... <scientificFormat>
... <pattern>0.000###E+00</pattern>
... </scientificFormat>
... </scientificFormatLength>
... <scientificFormatLength type="medium">
... <scientificFormat>
... <pattern>0.00##E+00</pattern>
... </scientificFormat>
... </scientificFormatLength>
... </scientificFormats>
... <percentFormats>
... <percentFormatLength type="long">
... <percentFormat>
... <pattern>#,##0%</pattern>
... </percentFormat>
... </percentFormatLength>
... </percentFormats>
... <currencyFormats>
... <currencyFormatLength type="long">
... <currencyFormat>
... <pattern>$ #,##0.00;($ #,##0.00)</pattern>
... </currencyFormat>
... </currencyFormatLength>
... </currencyFormats>
... </numbers>'''
>>> dom = parseString(xml)
>>> factory._extractNumberFormats(dom.documentElement, numbers)
>>> numbers.decimalFormats["long"].formats[None].pattern
'#,##0.###'
>>> numbers.defaultScientificFormat
'long'
>>> numbers.scientificFormats["long"].formats[None].pattern
'0.000###E+00'
>>> numbers.scientificFormats["medium"].formats[None].pattern
'0.00##E+00'
>>> numbers.percentFormats["long"].formats[None].pattern
'#,##0%'
>>> numbers.percentFormats.get("medium", None) is None
True
>>> numbers.currencyFormats["long"].formats[None].pattern
'$ #,##0.00;($ #,##0.00)'
>>> numbers.currencyFormats.get("medium", None) is None
True
"""
for category in ('decimal', 'scientific', 'percent', 'currency'):
formatsName = category + 'Formats'
lengthName = category + 'FormatLength'
formatName = category + 'Format'
defaultName = 'default' + formatName[0].upper() + formatName[1:]
formats_nodes = numbers_node.getElementsByTagName(formatsName)
if formats_nodes:
default, formats = self._extractFormats(
formats_nodes[0], lengthName, formatName)
setattr(numbers, defaultName, default)
setattr(numbers, formatsName, formats)
def _extractCurrencies(self, numbers_node):
"""Extract all currency definitions and their information from the
Locale's DOM tree.
Example::
>>> factory = LocaleFactory(None)
>>> from xml.dom.minidom import parseString
>>> xml = '''
... <numbers>
... <currencies>
... <currency type="USD">
... <displayName>Dollar</displayName>
... <symbol>$</symbol>
... </currency>
... <currency type ="JPY">
... <displayName>Yen</displayName>
... <symbol>Y</symbol>
... </currency>
... <currency type ="INR">
... <displayName>Rupee</displayName>
... <symbol choice="true">0<=Rf|1<=Ru|1<Rf</symbol>
... </currency>
... <currency type="PTE">
... <displayName>Escudo</displayName>
... <symbol>$</symbol>
... </currency>
... </currencies>
... </numbers>'''
>>> dom = parseString(xml)
>>> currencies = factory._extractCurrencies(dom.documentElement)
>>> keys = sorted(currencies.keys())
>>> keys
['INR', 'JPY', 'PTE', 'USD']
>>> currencies['USD'].symbol
'$'
>>> currencies['USD'].displayName
'Dollar'
>>> currencies['USD'].symbolChoice
False
"""
currs_nodes = numbers_node.getElementsByTagName('currencies')
if not currs_nodes:
return
currencies = InheritingDictionary()
for curr_node in currs_nodes[0].getElementsByTagName('currency'):
type = curr_node.getAttribute('type')
currency = LocaleCurrency(type)
nodes = curr_node.getElementsByTagName('symbol')
if nodes:
currency.symbol = self._getText(nodes[0].childNodes)
currency.symbolChoice = \
nodes[0].getAttribute('choice') == "true"
nodes = curr_node.getElementsByTagName('displayName')
if nodes:
currency.displayName = self._getText(nodes[0].childNodes)
currencies[type] = currency
return currencies
def _extractNumbers(self):
"""Extract all number information from the DOM tree"""
numbers_nodes = self._data.getElementsByTagName('numbers')
if not numbers_nodes:
return
numbers = LocaleNumbers()
symbols = self._extractSymbols(numbers_nodes[0])
if symbols is not None:
numbers.symbols = symbols
self._extractNumberFormats(numbers_nodes[0], numbers)
currencies = self._extractCurrencies(numbers_nodes[0])
if currencies is not None:
numbers.currencies = currencies
return numbers
def _extractDelimiters(self):
"""Extract all delimiter entries from the DOM tree.
Example::
>>> factory = LocaleFactory(None)
>>> from xml.dom.minidom import parseString
>>> xml = '''
... <ldml>
... <delimiters>
... <quotationStart>``</quotationStart>
... <quotationEnd>''</quotationEnd>
... <alternateQuotationStart>`</alternateQuotationStart>
... <alternateQuotationEnd>'</alternateQuotationEnd>
... </delimiters>
... <identity>
... <version number="1.0"/>
... <generation date="2003-12-19" />
... <language type="en" />
... <territory type="US" />
... <variant type="POSIX" />
... </identity>
... </ldml>'''
>>> dom = parseString(xml)
>>> factory._data = parseString(xml).documentElement
>>> delimiters = factory._extractDelimiters()
>>> delimiters["quotationStart"]
'``'
>>> delimiters["quotationEnd"]
"''"
>>> delimiters["alternateQuotationStart"]
'`'
>>> delimiters["alternateQuotationEnd"]
"'"
>>> factory().delimiters == delimiters
True
"""
# See whether we have delimiter entries
delimiters_nodes = self._data.getElementsByTagName('delimiters')
if not delimiters_nodes:
return
delimiters = InheritingDictionary()
for name in ('quotationStart', "quotationEnd",
"alternateQuotationStart", "alternateQuotationEnd"):
nodes = delimiters_nodes[0].getElementsByTagName(name)
if nodes:
delimiters[name] = self._getText(nodes[0].childNodes)
return delimiters
def _extractOrientation(self):
"""Extract orientation information.
>>> factory = LocaleFactory(None)
>>> from xml.dom.minidom import parseString
>>> xml = '''
... <ldml>
... <layout>
... <orientation lines="bottom-to-top"
... characters="right-to-left" />
... </layout>
... </ldml>'''
>>> dom = parseString(xml)
>>> factory._data = parseString(xml).documentElement
>>> orientation = factory._extractOrientation()
>>> orientation.lines
'bottom-to-top'
>>> orientation.characters
'right-to-left'
"""
orientation_nodes = self._data.getElementsByTagName('orientation')
if not orientation_nodes:
return
orientation = LocaleOrientation()
for name in ("characters", "lines"):
value = orientation_nodes[0].getAttribute(name)
if value:
setattr(orientation, name, value)
return orientation
def __call__(self):
"""Create the Locale."""
locale = Locale(self._extractIdentity())
names = self._extractDisplayNames()
if names is not None:
locale.displayNames = names
dates = self._extractDates()
if dates is not None:
locale.dates = dates
numbers = self._extractNumbers()
if numbers is not None:
locale.numbers = numbers
delimiters = self._extractDelimiters()
if delimiters is not None:
locale.delimiters = delimiters
orientation = self._extractOrientation()
if orientation is not None:
locale.orientation = orientation
# Unmapped:
#
# - <characters>
# - <measurement>
# - <collations>, <collation>
return locale | zope.i18n | /zope.i18n-5.1-py3-none-any.whl/zope/i18n/locales/xmlfactory.py | xmlfactory.py |
"""Locale and LocaleProvider Implementation.
"""
__docformat__ = 'restructuredtext'
import os
from datetime import date
from zope.interface import implementer
# Setup the locale directory
from zope import i18n
from zope.i18n.format import DateTimeFormat
from zope.i18n.format import NumberFormat
from zope.i18n.interfaces.locales import ILocale
from zope.i18n.interfaces.locales import ILocaleCalendar
from zope.i18n.interfaces.locales import ILocaleCurrency
from zope.i18n.interfaces.locales import ILocaleDates
from zope.i18n.interfaces.locales import ILocaleDayContext
from zope.i18n.interfaces.locales import ILocaleDisplayNames
from zope.i18n.interfaces.locales import ILocaleFormat
from zope.i18n.interfaces.locales import ILocaleFormatLength
from zope.i18n.interfaces.locales import ILocaleIdentity
from zope.i18n.interfaces.locales import ILocaleMonthContext
from zope.i18n.interfaces.locales import ILocaleNumbers
from zope.i18n.interfaces.locales import ILocaleOrientation
from zope.i18n.interfaces.locales import ILocaleTimeZone
from zope.i18n.interfaces.locales import ILocaleVersion
from zope.i18n.locales.inheritance import AttributeInheritance
from zope.i18n.locales.inheritance import InheritingDictionary
from zope.i18n.locales.inheritance import NoParentException
# LoadLocaleError is not used, but might be imported from here by others.
from zope.i18n.locales.provider import LoadLocaleError # noqa
from zope.i18n.locales.provider import LocaleProvider
LOCALEDIR = os.path.join(os.path.dirname(i18n.__file__), "locales", "data")
# Global LocaleProvider. We really just need this single one.
locales = LocaleProvider(LOCALEDIR)
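# A typical lookup, for illustration only (it assumes the locale data
# files shipped in LOCALEDIR are available):
#
#   german = locales.getLocale('de', 'DE')
#   formatter = german.dates.getFormatter('date', length='full')
#   formatter.format(date(2004, 2, 4))  # e.g. 'Mittwoch, 4. Februar 2004'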
# Define some constants that can be used
JANUARY = 1
FEBRUARY = 2
MARCH = 3
APRIL = 4
MAY = 5
JUNE = 6
JULY = 7
AUGUST = 8
SEPTEMBER = 9
OCTOBER = 10
NOVEMBER = 11
DECEMBER = 12
MONDAY = 1
TUESDAY = 2
WEDNESDAY = 3
THURSDAY = 4
FRIDAY = 5
SATURDAY = 6
SUNDAY = 7
dayMapping = {'mon': 1, 'tue': 2, 'wed': 3, 'thu': 4,
'fri': 5, 'sat': 6, 'sun': 7}
BC = 1
AD = 2
calendarAliases = {'islamic': ('arabic',),
'islamic-civil': ('civil-arabic',),
'buddhist': ('thai-buddhist', )}
@implementer(ILocaleIdentity)
class LocaleIdentity:
"""Represents a unique identification of the locale
This class does not have to deal with inheritance.
Examples::
>>> id = LocaleIdentity('en')
>>> id
<LocaleIdentity (en, None, None, None)>
>>> id = LocaleIdentity('en', 'latin')
>>> id
<LocaleIdentity (en, latin, None, None)>
>>> id = LocaleIdentity('en', 'latin', 'US')
>>> id
<LocaleIdentity (en, latin, US, None)>
>>> id = LocaleIdentity('en', 'latin', 'US', 'POSIX')
>>> id
<LocaleIdentity (en, latin, US, POSIX)>
>>> id = LocaleIdentity('en', None, 'US', 'POSIX')
>>> id
<LocaleIdentity (en, None, US, POSIX)>
"""
def __init__(self, language=None, script=None,
territory=None, variant=None):
"""Initialize object."""
self.language = language
self.script = script
self.territory = territory
self.variant = variant
def __repr__(self):
"""See zope.i18n.interfaces.ILocaleIdentity
"""
return "<LocaleIdentity ({}, {}, {}, {})>".format(
self.language, self.script, self.territory, self.variant)
@implementer(ILocaleVersion)
class LocaleVersion:
"""Represents a particular version of a locale
This class does not have to deal with inheritance.
Examples::
>>> from datetime import datetime
>>> (LocaleVersion('1.0', datetime(2004, 1, 1), 'no notes') ==
... LocaleVersion('1.0', datetime(2004, 1, 1), 'no notes again'))
True
>>> (LocaleVersion('1.0', datetime(2004, 1, 1), 'no notes') <
... LocaleVersion('1.0', datetime(2004, 1, 2), 'no notes again'))
True
>>> (LocaleVersion('1.0', datetime(2004, 1, 1), 'no notes') <
... LocaleVersion('0.9', datetime(2004, 1, 2), 'no notes again'))
True
>>> (LocaleVersion('1.0', datetime(2004, 1, 1), 'no notes') >
... LocaleVersion('0.9', datetime(2004, 1, 1), 'no notes again'))
True
"""
def __init__(self, number, generationDate, notes):
"""Initialize object."""
self.number = number
assert isinstance(generationDate, (date, type(None)))
self.generationDate = generationDate
self.notes = notes
def __lt__(self, other):
return ((self.generationDate, self.number) <
(other.generationDate, other.number))
def __eq__(self, other):
return ((self.generationDate, self.number) ==
(other.generationDate, other.number))
@implementer(ILocaleDisplayNames)
class LocaleDisplayNames(AttributeInheritance):
"""Locale display names with inheritable data.
Examples::
>>> from zope.i18n.locales.tests.test_docstrings import \\
... LocaleInheritanceStub
>>> root = LocaleInheritanceStub()
>>> root.displayNames = LocaleDisplayNames()
>>> root.displayNames.languages = ['en', 'de']
>>> root.displayNames.keys = ['foo', 'bar']
>>> locale = LocaleInheritanceStub(nextLocale=root)
>>> locale.displayNames = LocaleDisplayNames()
>>> locale.displayNames.keys = ['f', 'bahr']
Here you can see the inheritance in action::
>>> locale.displayNames.languages
['en', 'de']
>>> locale.displayNames.keys
['f', 'bahr']
"""
@implementer(ILocaleTimeZone)
class LocaleTimeZone:
"""Specifies one of the timezones of a specific locale.
The attributes of this class are not inherited, since all timezone
information is always provided together.
Example::
>>> tz = LocaleTimeZone('Europe/Berlin')
>>> tz.cities = ['Berlin']
>>> tz.names = {'standard': ('Mitteleuropaeische Zeit', 'MEZ'),
... 'daylight': ('Mitteleuropaeische Sommerzeit', 'MESZ')}
>>> tz.type
'Europe/Berlin'
>>> tz.cities
['Berlin']
"""
def __init__(self, type):
"""Initialize the object."""
self.type = type
self.cities = []
self.names = {}
@implementer(ILocaleFormat)
class LocaleFormat:
"""Specifies one of the format of a specific format length.
The attributes of this class are not inherited, since all format
information is always provided together. Note that this information by
itself is often not useful, since other calendar data is required to use
the specified pattern for formatting and parsing.
"""
def __init__(self, type=None):
"""Initialize the object."""
self.type = type
self.displayName = ""
self.pattern = ""
@implementer(ILocaleFormatLength)
class LocaleFormatLength(AttributeInheritance):
"""Specifies one of the format lengths of a specific quantity, like
numbers, dates, times and datetimes."""
def __init__(self, type=None):
"""Initialize the object."""
self.type = type
self.default = None
@implementer(ILocaleMonthContext)
class LocaleMonthContext(AttributeInheritance):
def __init__(self, type=None):
"""Initialize the object."""
self.type = type
self.default = "wide"
@implementer(ILocaleDayContext)
class LocaleDayContext(AttributeInheritance):
def __init__(self, type=None):
"""Initialize the object."""
self.type = type
self.default = "wide"
@implementer(ILocaleCalendar)
class LocaleCalendar(AttributeInheritance):
"""Represents locale data for a calendar, like 'gregorian'.
This object is particularly tricky, since the calendar not only inherits
from higher-up locales, but also from the specified gregorian calendar
available for this locale. This was done because most other calendars
have different year and era data, while everything else remains the same.
Example:
Even though the real 'Locale' object has no 'calendar' attribute, using
one here keeps the example simpler.
>>> from zope.i18n.locales.tests.test_docstrings import \\
... LocaleInheritanceStub
>>> root = LocaleInheritanceStub()
>>> root.calendar = LocaleCalendar('gregorian')
>>> locale = LocaleInheritanceStub(nextLocale=root)
>>> locale.calendar = LocaleCalendar('gregorian')
>>> root.calendar.months = InheritingDictionary(
... {1: (u"January", u"Jan"), 2: (u"February", u"Feb")})
>>> locale.calendar.months = InheritingDictionary(
... {2: (u"Februar", u"Feb"), 3: (u"Maerz", u"Mrz")})
>>> locale.calendar.getMonthNames()[:4]
['January', 'Februar', 'Maerz', None]
>>> locale.calendar.getMonthTypeFromName(u"January")
1
>>> locale.calendar.getMonthTypeFromName(u"Februar")
2
>>> locale.calendar.getMonthAbbreviations()[:4]
['Jan', 'Feb', 'Mrz', None]
>>> locale.calendar.getMonthTypeFromAbbreviation(u"Jan")
1
>>> locale.calendar.getMonthTypeFromAbbreviation(u"Mrz")
3
>>> root.calendar.days = InheritingDictionary(
... {1: (u"Monday", u"Mon"), 2: (u"Tuesday", u"Tue")})
>>> locale.calendar.days = InheritingDictionary(
... {2: (u"Dienstag", u"Die"), 3: (u"Mittwoch", u"Mit")})
>>> locale.calendar.getDayNames()[:4]
['Monday', 'Dienstag', 'Mittwoch', None]
>>> locale.calendar.getDayTypeFromName(u"Monday")
1
>>> locale.calendar.getDayTypeFromName(u"Dienstag")
2
>>> locale.calendar.getDayAbbreviations()[:4]
['Mon', 'Die', 'Mit', None]
>>> locale.calendar.getDayTypeFromAbbreviation(u"Mon")
1
>>> locale.calendar.getDayTypeFromAbbreviation(u"Die")
2
>>> root.calendar.week = {'firstDay': 1}
>>> locale.calendar.getFirstWeekDayName()
'Monday'
Let's test the direct attribute access as well.
>>> root.am = u"AM"
>>> root.pm = u"PM"
>>> locale.pm = u"nachm."
>>> locale.pm
'nachm.'
>>> locale.am
'AM'
Note that ``isWeekend`` is not implemented:
>>> locale.calendar.isWeekend(object())
False
>>> locale.calendar.isWeekend(None)
False
>>> locale.calendar.isWeekend('anything')
False
"""
def __init__(self, type):
"""Initialize the object."""
self.type = type
def getMonthNames(self):
"""See zope.i18n.interfaces.ILocaleCalendar"""
return [self.months.get(type, (None, None))[0]
for type in range(1, 13)]
def getMonthTypeFromName(self, name):
"""See zope.i18n.interfaces.ILocaleCalendar"""
for item in self.months.items():
if item[1][0] == name:
return item[0]
def getMonthAbbreviations(self):
"""See zope.i18n.interfaces.ILocaleCalendar"""
return [self.months.get(type, (None, None))[1]
for type in range(1, 13)]
def getMonthTypeFromAbbreviation(self, abbr):
"""See zope.i18n.interfaces.ILocaleCalendar"""
for item in self.months.items():
if item[1][1] == abbr:
return item[0]
def getDayNames(self):
"""See zope.i18n.interfaces.ILocaleCalendar"""
return [self.days.get(type, (None, None))[0] for type in range(1, 8)]
def getDayTypeFromName(self, name):
"""See zope.i18n.interfaces.ILocaleCalendar"""
for item in self.days.items():
if item[1][0] == name:
return item[0]
def getDayAbbreviations(self):
"""See zope.i18n.interfaces.ILocaleCalendar"""
return [self.days.get(type, (None, None))[1] for type in range(1, 8)]
def getDayTypeFromAbbreviation(self, abbr):
"""See zope.i18n.interfaces.ILocaleCalendar"""
for item in self.days.items():
if item[1][1] == abbr:
return item[0]
def isWeekend(self, datetime):
"""See zope.i18n.interfaces.ILocaleCalendar"""
# TODO: Implement this method
return False
def getFirstWeekDayName(self):
"""See zope.i18n.interfaces.ILocaleCalendar"""
firstDayNumber = self.week['firstDay']
return self.days[firstDayNumber][0]
@implementer(ILocaleDates)
class LocaleDates(AttributeInheritance):
"""Simple ILocaleDates implementation that can inherit data from other
locales.
Examples::
>>> from zope.i18n.tests.test_formats import LocaleCalendarStub as Stub
>>> from datetime import datetime, date, time
>>> dates = LocaleDates()
>>> cal = LocaleCalendar('gregorian')
>>> cal.months = Stub.months
>>> cal.days = Stub.days
>>> cal.am = Stub.am
>>> cal.pm = Stub.pm
>>> cal.eras = Stub.eras
>>> cal.week = {'firstDay': 1, 'minDays': 1}
>>> dates.calendars = {'gregorian': cal}
Setting up and accessing date format through a specific length
(very common scenario)::
>>> fulllength = LocaleFormatLength()
>>> format = LocaleFormat()
>>> format.pattern = u"EEEE, d. MMMM yyyy"
>>> fulllength.formats = {None: format}
>>> mediumlength = LocaleFormatLength()
>>> format = LocaleFormat()
>>> format.pattern = u"dd.MM.yyyy"
>>> mediumlength.formats = {None: format}
>>> cal.dateFormats = {'full': fulllength, 'medium': mediumlength}
>>> cal.defaultDateFormat = 'medium'
>>> formatter = dates.getFormatter('date')
>>> formatter.format(date(2004, 2, 4))
'04.02.2004'
>>> formatter = dates.getFormatter('date', length='full')
>>> formatter.format(date(2004, 2, 4))
'Mittwoch, 4. Februar 2004'
Let's also test the time formatter::
>>> fulllength = LocaleFormatLength()
>>> format = LocaleFormat()
>>> format.pattern = u"H:mm' Uhr 'z"
>>> fulllength.formats = {None: format}
>>> mediumlength = LocaleFormatLength()
>>> format = LocaleFormat()
>>> format.pattern = u"HH:mm:ss"
>>> mediumlength.formats = {None: format}
>>> cal.timeFormats = {'full': fulllength, 'medium': mediumlength}
>>> cal.defaultTimeFormat = 'medium'
>>> formatter = dates.getFormatter('time')
>>> formatter.format(time(12, 15, 00))
'12:15:00'
>>> formatter = dates.getFormatter('time', length='full')
>>> formatter.format(time(12, 15, 00))
'12:15 Uhr +000'
The datetime formatter is a bit special, since it is constructed from
the other two::
>>> length = LocaleFormatLength()
>>> format = LocaleFormat()
>>> format.pattern = u"{1} {0}"
>>> length.formats = {None: format}
>>> cal.dateTimeFormats = {None: length}
>>> formatter = dates.getFormatter('dateTime')
>>> formatter.format(datetime(2004, 2, 4, 12, 15, 00))
'04.02.2004 12:15:00'
>>> formatter = dates.getFormatter('dateTime', length='full')
>>> formatter.format(datetime(2004, 2, 4, 12, 15, 00))
'Mittwoch, 4. Februar 2004 12:15 Uhr +000'
Finally, we'll test some invalid input::
>>> dates.getFormatter('timeDate')
Traceback (most recent call last):
ValueError: Invalid category: timeDate
>>> dates.getFormatter('date', length='superlong')
Traceback (most recent call last):
ValueError: Invalid format length: superlong
>>> dates.getFormatter('date', calendar='irish-catholic')
Traceback (most recent call last):
ValueError: Invalid calendar: irish-catholic
"""
def getFormatter(self, category, length=None, name=None,
calendar="gregorian"):
"""See zope.i18n.interfaces.locales.ILocaleDates"""
if category not in ("date", "time", "dateTime"):
raise ValueError('Invalid category: %s' % category)
if calendar not in ("gregorian", "arabic", "chinese",
"civil-arabic", "hebrew", "japanese",
"thai-buddhist"):
raise ValueError('Invalid calendar: %s' % calendar)
if length not in ("short", "medium", "long", "full", None):
raise ValueError('Invalid format length: %s' % length)
cal = self.calendars[calendar]
formats = getattr(cal, category + 'Formats')
if length is None:
length = getattr(
cal,
'default' + category[0].upper() + category[1:] + 'Format',
list(formats.keys())[0])
# 'datetime' is always a bit special; we often do not have a length
# specification, but we need it for looking up the date and time
# formatters
if category == 'dateTime':
formatLength = formats.get(length, formats[None])
else:
formatLength = formats[length]
if name is None:
name = formatLength.default
format = formatLength.formats[name]
pattern = format.pattern
if category == 'dateTime':
date_pat = self.getFormatter(
'date', length, name, calendar).getPattern()
time_pat = self.getFormatter(
'time', length, name, calendar).getPattern()
pattern = pattern.replace('{1}', date_pat)
pattern = pattern.replace('{0}', time_pat)
return DateTimeFormat(pattern, cal)
@implementer(ILocaleCurrency)
class LocaleCurrency:
"""Simple implementation of ILocaleCurrency without inheritance support,
since it is not needed for a single currency."""
def __init__(self, type):
"""Initialize object."""
self.type = type
self.symbol = None
self.symbolChoice = False
self.displayName = None
@implementer(ILocaleNumbers)
class LocaleNumbers(AttributeInheritance):
"""Implementation of ILocaleCurrency including inheritance support.
Examples::
>>> numbers = LocaleNumbers()
>>> numbers.symbols = {
... 'decimal': ',', 'group': '.', 'list': ';', 'percentSign': '%',
... 'nativeZeroDigit': '0', 'patternDigit': '#', 'plusSign': '+',
... 'minusSign': '-', 'exponential': 'E', 'perMille': 'o/oo',
... 'infinity': 'oo', 'nan': 'N/A'}
Setting up and accessing totally unnamed decimal format
(very common scenario)::
>>> length = LocaleFormatLength()
>>> format = LocaleFormat()
>>> format.pattern = u"#,##0.###;-#,##0.###"
>>> length.formats = {None: format}
>>> numbers.decimalFormats = {None: length}
>>> formatter = numbers.getFormatter('decimal')
>>> formatter.format(3.4)
'3,4'
>>> formatter.format(-3.4567)
'-3,457'
>>> formatter.format(3210.4)
'3.210,4'
Setting up and accessing scientific formats with named format lengths::
>>> longlength = LocaleFormatLength('long')
>>> format = LocaleFormat()
>>> format.pattern = u"0.000###E+00"
>>> longlength.formats = {None: format}
>>> mediumlength = LocaleFormatLength('long')
>>> format = LocaleFormat()
>>> format.pattern = u"0.00##E+00"
>>> mediumlength.formats = {None: format}
>>> numbers.scientificFormats = {'long': longlength,
... 'medium': mediumlength}
>>> numbers.defaultScientificFormat = 'long'
>>> formatter = numbers.getFormatter('scientific')
>>> formatter.format(1234.5678)
'1,234568E+03'
>>> formatter = numbers.getFormatter('scientific', 'medium')
>>> formatter.format(1234.5678)
'1,2346E+03'
Setting up and accessing percent formats with named format lengths
and format names::
>>> longlength = LocaleFormatLength('long')
>>> fooformat = LocaleFormat()
>>> fooformat.pattern = u"0.##0%"
>>> barformat = LocaleFormat()
>>> barformat.pattern = u"0%"
>>> longlength.formats = {None: fooformat, 'bar': barformat}
>>> numbers.percentFormats = {'long': longlength}
>>> numbers.defaultPercentFormat = 'long'
>>> formatter = numbers.getFormatter('percent')
>>> formatter.format(123.45678)
'123,457%'
>>> formatter = numbers.getFormatter('percent', name='bar')
>>> formatter.format(123.45678)
'123%'
...using a default name::
>>> numbers.percentFormats['long'].default = 'bar'
>>> formatter = numbers.getFormatter('percent')
>>> formatter.format(123.45678)
'123%'
"""
def getFormatter(self, category, length=None, name=None):
"""See zope.i18n.interfaces.locales.ILocaleNumbers"""
assert category in ("decimal", "percent", "scientific", "currency")
assert length in ("short", "medium", "long", "full", None)
formats = getattr(self, category + 'Formats')
if length is None:
length = getattr(
self,
'default' + category[0].upper() + category[1:] + 'Format',
list(formats.keys())[0])
formatLength = formats[length]
if name is None:
name = formatLength.default
format = formatLength.formats[name]
return NumberFormat(format.pattern, self.symbols)
@implementer(ILocaleOrientation)
class LocaleOrientation(AttributeInheritance):
"""Implementation of ILocaleOrientation
"""
@implementer(ILocale)
class Locale(AttributeInheritance):
"""Implementation of the ILocale interface."""
def __init__(self, id):
self.id = id
def getLocaleID(self):
"""
Return the locale id.
Example::
>>> lid = LocaleIdentity('en', 'latin', 'US', 'POSIX')
>>> locale = Locale(lid)
>>> locale.getLocaleID()
'en_latin_US_POSIX'
>>> lid = LocaleIdentity('en', 'latin')
>>> locale = Locale(lid)
>>> locale.getLocaleID()
'en_latin'
>>> lid = LocaleIdentity()
>>> locale = Locale(lid)
>>> locale.getLocaleID()
''
"""
id = self.id
pieces = [x for x in
(id.language, id.script, id.territory, id.variant)
if x]
id_string = '_'.join(pieces)
# TODO: What about keys??? Where do I get this info from?
# Notice that 'pieces' is always empty.
pieces = [key + '=' + type for (key, type) in ()]
assert not pieces
if pieces: # pragma: no cover
id_string += '@' + ','.join(pieces)
return id_string
def getInheritedSelf(self):
"""See zope.i18n.interfaces.locales.ILocaleInheritance
This is the really interesting method that looks up the next (more
general) Locale object. This is used in case this locale object does
not have the required information.
This method works closely with the LocaleProvider.
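For example (illustrative only), a locale obtained through
``locales.getLocale('en', 'US', 'POSIX')`` falls back first to the
'en_US' locale, then to 'en', and finally to the root locale::

    locale = locales.getLocale('en', 'US', 'POSIX')
    locale.getInheritedSelf()                     # the 'en_US' locale
    locale.getInheritedSelf().getInheritedSelf()  # the 'en' locale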
"""
language = self.id.language
territory = self.id.territory
variant = self.id.variant
if variant is not None:
return locales.getLocale(language, territory, None)
elif territory is not None:
return locales.getLocale(language, None, None)
elif language is not None:
return locales.getLocale(None, None, None)
else:
# Well, this is bad; we are already at the root locale
raise NoParentException('Cannot find a more general locale.') | zope.i18n | /zope.i18n-5.1-py3-none-any.whl/zope/i18n/locales/__init__.py | __init__.py |
__docformat__ = "reStructuredText"
_marker = object()
class Message(str):
"""Message (Python implementation)
This is a string used as a message. It has a domain attribute that is
its source domain, and a default attribute that is its default text to
display when there is no translation. domain may be None meaning there is
no translation domain. default may also be None, in which case the
message id itself implicitly serves as the default text.
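A short example (the domain name and the texts are arbitrary)::

    >>> msg = Message('greeting', domain='myapp', default='Hello')
    >>> msg
    'greeting'
    >>> msg.domain, msg.default
    ('myapp', 'Hello')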
"""
__slots__ = (
'domain', 'default', 'mapping', '_readonly',
'msgid_plural', 'default_plural', 'number')
def __new__(cls, ustr, domain=_marker, default=_marker, mapping=_marker,
msgid_plural=_marker, default_plural=_marker, number=_marker):
self = str.__new__(cls, ustr)
if isinstance(ustr, self.__class__):
self.domain = ustr.domain
self.default = ustr.default
self.mapping = ustr.mapping
self.msgid_plural = ustr.msgid_plural
self.default_plural = ustr.default_plural
self.number = ustr.number
else:
self.domain = None
self.default = None
self.mapping = None
self.msgid_plural = None
self.default_plural = None
self.number = None
if domain is not _marker:
self.domain = domain
if default is not _marker:
self.default = default
if mapping is not _marker:
self.mapping = mapping
if msgid_plural is not _marker:
self.msgid_plural = msgid_plural
if default_plural is not _marker:
self.default_plural = default_plural
if number is not _marker:
self.number = number
if self.number is not None and not isinstance(
self.number, (int, float)):
raise TypeError('`number` should be an integer or a float')
self._readonly = True
return self
def __setattr__(self, key, value):
"""Message is immutable
It cannot be changed once the message id is created.
"""
if getattr(self, '_readonly', False):
raise TypeError('readonly attribute')
else:
return str.__setattr__(self, key, value)
def __getstate__(self):
return (
str(self), self.domain, self.default, self.mapping,
self.msgid_plural, self.default_plural, self.number)
def __reduce__(self):
return self.__class__, self.__getstate__()
# Name the fallback Python implementation to make it easier to test.
pyMessage = Message
try:
from ._zope_i18nmessageid_message import Message
except ImportError: # pragma: no cover
pass
class MessageFactory:
"""Factory for creating i18n messages."""
def __init__(self, domain):
self._domain = domain
def __call__(self, ustr, default=None, mapping=None,
msgid_plural=None, default_plural=None, number=None):
return Message(ustr, self._domain, default, mapping,
msgid_plural, default_plural, number) | zope.i18nmessageid | /zope.i18nmessageid-6.0.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl/zope/i18nmessageid/message.py | message.py |
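# Typical usage, for illustration only (the domain and message ids are
# arbitrary):
#
#   _ = MessageFactory('mydomain')
#   msg = _('page-title', default='Welcome!')
#   msg.domain, msg.default  # -> ('mydomain', 'Welcome!')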
"""Basic interfaces shared between different types of index.
"""
from zope.interface import Interface
class IInjection(Interface):
"""Interface for injecting documents into an index."""
def index_doc(docid, value):
"""Add a document to the index.
docid: int, identifying the document
value: the value to be indexed
return: None
This can also be used to reindex documents.
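A sketch of typical use, assuming ``index`` is some concrete
implementation (for instance a field or keyword index)::

    index.index_doc(1, 'some value')
    index.index_doc(1, 'another value')  # re-indexes document 1
    index.unindex_doc(1)                 # removes document 1 again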
"""
def unindex_doc(docid):
"""Remove a document from the index.
docid: int, identifying the document
return: None
This call is a no-op if the docid isn't in the index; in either case,
after this call the index should have no references to the docid.
"""
def clear():
"""Unindex all documents indexed by the index
"""
class IIndexSearch(Interface):
"""
Interface for searching indexes.
"""
def apply(query):
"""Apply an index to the given query
The type of the query is index specific.
TODO
This is somewhat problematic. It means that application
code that calls apply has to be aware of the
expected query type. This isn't too much of a problem now,
as we have no more general query language nor do we have
any sort of automatic query-form generation.
It would be nice to have a system later for having
query-form generation or, perhaps, some sort of query
language. At that point, we'll need some sort of way to
determine query types, presumably through introspection of
the index objects.
A result is returned that is:
- An IFBTree or an IFBucket mapping document ids to floating-point
scores for document ids of documents that match the query,
- An IFSet or IFTreeSet containing document ids of documents
that match the query, or
- None, indicating that the index could not use the query and
that the result should have no impact on determining a final
result.
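A sketch of how a caller typically combines results from several
indexes (illustrative; ``results`` and ``family`` stand for whatever
the application uses)::

    partial = index.apply(query)
    if partial is None:
        pass  # this index cannot contribute to the final result
    else:
        results = family.IF.intersection(results, partial)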
"""
class IIndexSort(Interface):
"""
Interface for sorting documents.
"""
def sort(docids, reverse=False, limit=None):
"""Sort document ids sequence using indexed values
If some of docids are not indexed they are skipped
from resulting iterable.
Return a sorted iterable of document ids. Limited by
value of the "limit" argument and optionally
reversed, using the "reverse" argument.
"""
class IStatistics(Interface):
"""An index that provides statistical information about itself."""
def documentCount():
"""Return the number of documents currently indexed."""
def wordCount():
"""Return the number of words currently indexed."""
class INBest(Interface):
"""Interface for an N-Best chooser."""
def add(item, score):
"""Record that item 'item' has score 'score'. No return value.
The N best-scoring items are remembered, where N was passed to
the constructor. 'item' can be anything. 'score' should be
a number, and larger numbers are considered better.
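Sketch of the intended behaviour for a hypothetical implementation
created with capacity N=2::

    nbest.add('a', 1.0)
    nbest.add('b', 3.0)
    nbest.add('c', 2.0)   # 'a' is dropped; it has the lowest score
    nbest.getbest()       # -> [('b', 3.0), ('c', 2.0)]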
"""
def addmany(sequence):
"""Like "for item, score in sequence: self.add(item, score)".
This is simply faster than calling add() len(seq) times.
"""
def getbest():
"""Return the (at most) N best-scoring items as a sequence.
The return value is a sequence of 2-tuples, (item, score), with
the largest score first. If .add() has been called fewer than
N times, this sequence will contain fewer than N pairs.
"""
def pop_smallest():
"""Return and remove the (item, score) pair with lowest score.
If len(self) is 0, raise IndexError.
To be clear, this is the lowest score among the N best-scoring
seen so far. This is most useful if the capacity of the NBest
object is never exceeded, in which case pop_smallest() allows
using the object as an ordinary smallest-in-first-out priority
queue.
"""
def __len__():
"""Return the number of (item, score) pairs currently known.
This is N (the value passed to the constructor), unless .add()
has been called fewer than N times.
"""
def capacity():
"""Return the maximum number of (item, score) pairs.
This is N (the value passed to the constructor).
""" | zope.index | /zope.index-6.0-cp38-cp38-macosx_11_0_arm64.whl/zope/index/interfaces.py | interfaces.py |
"""Topic index
"""
import BTrees
from persistent import Persistent
from zope.interface import implementer
from zope.index.interfaces import IIndexSearch
from zope.index.interfaces import IInjection
from zope.index.topic.interfaces import ITopicQuerying
@implementer(IInjection, ITopicQuerying, IIndexSearch)
class TopicIndex(Persistent):
"""
Topic index.
Implements :class:`zope.index.interfaces.IInjection`,
:class:`zope.index.interfaces.IIndexSearch` and
:class:`zope.index.topic.interfaces.ITopicQuerying`.
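A small usage sketch.  Real filters normally come from
``zope.index.topic.filter``; the minimal ``DivisibleBy2`` filter below
exists only for this example::

    >>> import BTrees
    >>> class DivisibleBy2(object):
    ...     "Collects docids whose indexed value is even."
    ...     family = BTrees.family32
    ...     def __init__(self, id):
    ...         self.id = id
    ...         self.clear()
    ...     def getId(self):
    ...         return self.id
    ...     def clear(self):
    ...         self._ids = self.family.IF.Set()
    ...     def getIds(self):
    ...         return self._ids
    ...     def index_doc(self, docid, value):
    ...         if value % 2 == 0:
    ...             self._ids.insert(docid)
    ...     def unindex_doc(self, docid):
    ...         if docid in self._ids:
    ...             self._ids.remove(docid)

    >>> index = TopicIndex()
    >>> index.addFilter(DivisibleBy2('even'))
    >>> for docid, value in [(1, 2), (2, 3), (3, 4)]:
    ...     index.index_doc(docid, value)

    >>> list(index.search('even'))
    [1, 3]
    >>> list(index.apply({'query': ['even'], 'operator': 'or'}))
    [1, 3]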
"""
family = BTrees.family32
def __init__(self, family=None):
if family is not None:
self.family = family
self.clear()
def clear(self):
# mapping filter id -> filter
self._filters = self.family.OO.BTree()
def addFilter(self, f):
""" Add filter 'f' with ID 'id' """
self._filters[f.getId()] = f
def delFilter(self, id):
""" remove a filter given by its ID 'id' """
del self._filters[id]
def clearFilters(self):
""" Clear existing filters of their docids, but leave them in place.
"""
for filter in self._filters.values():
filter.clear()
def index_doc(self, docid, obj):
"""index an object"""
for f in self._filters.values():
f.index_doc(docid, obj)
def unindex_doc(self, docid):
"""unindex an object"""
for f in self._filters.values():
f.unindex_doc(docid)
def search(self, query, operator='and'):
if isinstance(query, str):
query = [query]
if not isinstance(query, (tuple, list)):
raise TypeError(
'query argument must be a list/tuple of filter ids')
sets = []
for id in self._filters.keys():
if id in query:
docids = self._filters[id].getIds()
sets.append(docids)
if operator == 'or':
rs = self.family.IF.multiunion(sets)
elif operator == 'and':
# sort smallest to largest set so we intersect the smallest
# number of document identifiers possible
sets.sort(key=len)
rs = None
for set in sets:
rs = self.family.IF.intersection(rs, set)
if not rs:
break
else:
raise TypeError('Topic index only supports `and` and `or` '
'operators, not `%s`.' % operator)
if rs:
return rs
else:
return self.family.IF.Set()
def apply(self, query):
operator = 'and'
if isinstance(query, dict):
if 'operator' in query:
operator = query.pop('operator')
query = query['query']
return self.search(query, operator=operator) | zope.index | /zope.index-6.0-cp38-cp38-macosx_11_0_arm64.whl/zope/index/topic/index.py | index.py |
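# Illustrative usage sketch (not a doctest). It assumes the
# PythonFilteredSet filter shipped in zope.index.topic.filter, whose
# expression is evaluated with the indexed object bound to ``context``:
#
#   from zope.index.topic.filter import PythonFilteredSet
#   index = TopicIndex()
#   index.addFilter(PythonFilteredSet('odd', 'context % 2 == 1'))
#   index.addFilter(PythonFilteredSet('small', 'context < 3'))
#   for i in range(5):
#       index.index_doc(i, i)
#   list(index.search(['odd']))                          # [1, 3]
#   list(index.search(['odd', 'small'], operator='or'))  # [0, 1, 2, 3]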
"""Keyword index
"""
import BTrees
from BTrees.Length import Length
from persistent import Persistent
from zope.interface import implementer
from zope.index.interfaces import IIndexSearch
from zope.index.interfaces import IInjection
from zope.index.interfaces import IStatistics
from zope.index.keyword.interfaces import IKeywordQuerying
@implementer(IInjection, IStatistics, IIndexSearch, IKeywordQuerying)
class KeywordIndex(Persistent):
"""
Keyword index.
Implements :class:`zope.index.interfaces.IInjection`,
:class:`zope.index.interfaces.IStatistics`,
:class:`zope.index.interfaces.IIndexSearch` and
:class:`zope.index.keyword.interfaces.IKeywordQuerying`.
"""
family = BTrees.family32
# If a word is referenced by at least tree_threshold docids,
# use a TreeSet for that word instead of a Set.
tree_threshold = 64
def __init__(self, family=None):
if family is not None:
self.family = family
self.clear()
def clear(self):
"""Initialize forward and reverse mappings."""
# The forward index maps index keywords to a sequence of docids
self._fwd_index = self.family.OO.BTree()
# The reverse index maps a docid to its keywords
# TODO: Using a vocabulary might be the better choice to store
keywords since it would allow us to use integers instead of strings
self._rev_index = self.family.IO.BTree()
self._num_docs = Length(0)
def documentCount(self):
"""Return the number of documents in the index."""
return self._num_docs()
def wordCount(self):
"""Return the number of indexed words"""
return len(self._fwd_index)
def has_doc(self, docid):
return bool(docid in self._rev_index)
def normalize(self, seq):
"""Perform normalization on sequence of keywords.
Return normalized sequence. This method may be
overridden by subclasses.
"""
return seq
def index_doc(self, docid, seq):
if isinstance(seq, str):
raise TypeError('seq argument must be a list/tuple of strings')
old_kw = self._rev_index.get(docid, None)
if not seq:
if old_kw:
self.unindex_doc(docid)
return
seq = self.normalize(seq)
new_kw = self.family.OO.Set(seq)
if old_kw is None:
self._insert_forward(docid, new_kw)
self._insert_reverse(docid, new_kw)
self._num_docs.change(1)
else:
# determine added and removed keywords
kw_added = self.family.OO.difference(new_kw, old_kw)
kw_removed = self.family.OO.difference(old_kw, new_kw)
# removed keywords are removed from the forward index
for word in kw_removed:
fwd = self._fwd_index[word]
fwd.remove(docid)
if not fwd:
del self._fwd_index[word]
# now update reverse and forward indexes
self._insert_forward(docid, kw_added)
self._insert_reverse(docid, new_kw)
def unindex_doc(self, docid):
idx = self._fwd_index
try:
for word in self._rev_index[docid]:
idx[word].remove(docid)
if not idx[word]:
del idx[word]
except KeyError:
# 'WAAA! Inconsistent'
return
try:
del self._rev_index[docid]
except KeyError: # pragma: no cover
# 'WAAA! Inconsistent'
pass
self._num_docs.change(-1)
def _insert_forward(self, docid, words):
"""insert a sequence of words into the forward index """
idx = self._fwd_index
get_word_idx = idx.get
IF = self.family.IF
Set = IF.Set
TreeSet = IF.TreeSet
for word in words:
word_idx = get_word_idx(word)
if word_idx is None:
idx[word] = word_idx = Set()
word_idx.insert(docid)
if (not isinstance(word_idx, TreeSet) and
len(word_idx) >= self.tree_threshold):
# Convert to a TreeSet.
idx[word] = TreeSet(word_idx)
def _insert_reverse(self, docid, words):
""" add words to forward index """
if words:
self._rev_index[docid] = words
def search(self, query, operator='and'):
"""Execute a search given by 'query'."""
if isinstance(query, str):
query = [query]
query = self.normalize(query)
sets = []
for word in query:
docids = self._fwd_index.get(word, self.family.IF.Set())
sets.append(docids)
if operator == 'or':
rs = self.family.IF.multiunion(sets)
elif operator == 'and':
# sort smallest to largest set so we intersect the smallest
# number of document identifiers possible
sets.sort(key=len)
rs = None
for set in sets:
rs = self.family.IF.intersection(rs, set)
if not rs:
break
else:
raise TypeError('Keyword index only supports `and` and `or` '
'operators, not `%s`.' % operator)
if rs:
return rs
return self.family.IF.Set()
def apply(self, query):
operator = 'and'
if isinstance(query, dict):
if 'operator' in query:
operator = query['operator']
query = query['query']
return self.search(query, operator=operator)
def optimize(self):
"""Optimize the index. Call this after changing tree_threshold.
This converts internal data structures between
Sets and TreeSets based on tree_threshold.
"""
idx = self._fwd_index
IF = self.family.IF
Set = IF.Set
TreeSet = IF.TreeSet
items = list(self._fwd_index.items())
for word, word_idx in items:
if len(word_idx) >= self.tree_threshold:
if not isinstance(word_idx, TreeSet):
# Convert to a TreeSet.
idx[word] = TreeSet(word_idx)
else:
if isinstance(word_idx, TreeSet):
# Convert to a Set.
idx[word] = Set(word_idx)
class CaseInsensitiveKeywordIndex(KeywordIndex):
"""A case-normalizing keyword index (for strings as keywords)"""
def normalize(self, seq):
"""
Normalize by calling ``lower`` on every item in *seq*.
"""
return [w.lower() for w in seq] | zope.index | /zope.index-6.0-cp38-cp38-macosx_11_0_arm64.whl/zope/index/keyword/index.py | index.py |
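# Illustrative usage sketch (not a doctest), grounded in the classes above:
#
#   index = CaseInsensitiveKeywordIndex()
#   index.index_doc(1, ['Python', 'Zope'])
#   index.index_doc(2, ['python', 'indexing'])
#   list(index.search('python'))                          # [1, 2]
#   list(index.apply({'query': ['zope', 'indexing'],
#                     'operator': 'or'}))                 # [1, 2]
#   list(index.search(['zope', 'indexing']))              # [] ('and' semantics)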
===============
Field Indexes
===============
Field indexes index orderable values. Note that they don't check for
orderability. That is, all of the values added to the index must be
orderable together. It is up to applications to provide only mutually
orderable values.
>>> from zope.index.field import FieldIndex
>>> index = FieldIndex()
>>> index.index_doc(0, 6)
>>> index.index_doc(1, 26)
>>> index.index_doc(2, 94)
>>> index.index_doc(3, 68)
>>> index.index_doc(4, 30)
>>> index.index_doc(5, 68)
>>> index.index_doc(6, 82)
>>> index.index_doc(7, 30)
>>> index.index_doc(8, 43)
>>> index.index_doc(9, 15)
Field indexes are searched with ``apply``, which returns an instance of
``IFSet``. Let's write a function to display those sets portably
(across CPython and PyPy).
The argument to ``apply`` is a tuple with a minimum and maximum value.
>>> def show_ifset(ifset):
... print('IFSet(%s)' % list(ifset))
...
>>> show_ifset(index.apply((30, 70)))
IFSet([3, 4, 5, 7, 8])
A common mistake is to pass a single value. If anything other than a
two-tuple is passed, a type error is raised:
>>> index.apply('hi')
Traceback (most recent call last):
...
TypeError: ('two-length tuple expected', 'hi')
Open-ended ranges can be specified by providing None as an end point:
>>> show_ifset(index.apply((30, None)))
IFSet([2, 3, 4, 5, 6, 7, 8])
>>> show_ifset(index.apply((None, 70)))
IFSet([0, 1, 3, 4, 5, 7, 8, 9])
>>> show_ifset(index.apply((None, None)))
IFSet([0, 1, 2, 3, 4, 5, 6, 7, 8, 9])
To do an exact value search, supply equal minimum and maximum values:
>>> show_ifset(index.apply((30, 30)))
IFSet([4, 7])
>>> show_ifset(index.apply((70, 70)))
IFSet([])
Field indexes support basic statistics:
>>> index.documentCount()
10
>>> index.wordCount()
8
Documents can be reindexed:
>>> show_ifset(index.apply((15, 15)))
IFSet([9])
>>> index.index_doc(9, 14)
>>> show_ifset(index.apply((15, 15)))
IFSet([])
>>> show_ifset(index.apply((14, 14)))
IFSet([9])
Documents can be unindexed:
>>> index.unindex_doc(7)
>>> index.documentCount()
9
>>> index.wordCount()
8
>>> index.unindex_doc(8)
>>> index.documentCount()
8
>>> index.wordCount()
7
>>> show_ifset(index.apply((30, 70)))
IFSet([3, 4, 5])
Unindexing a document id that isn't present is ignored:
>>> index.unindex_doc(8)
>>> index.unindex_doc(80)
>>> index.documentCount()
8
>>> index.wordCount()
7
We can also clear the index entirely:
>>> index.clear()
>>> index.documentCount()
0
>>> index.wordCount()
0
>>> show_ifset(index.apply((30, 70)))
IFSet([])
Sorting
=======
Field indexes also implement the IIndexSort interface, which
provides a method for sorting document ids by their indexed
values.
>>> index.index_doc(1, 9)
>>> index.index_doc(2, 8)
>>> index.index_doc(3, 7)
>>> index.index_doc(4, 6)
>>> index.index_doc(5, 5)
>>> index.index_doc(6, 4)
>>> index.index_doc(7, 3)
>>> index.index_doc(8, 2)
>>> index.index_doc(9, 1)
>>> list(index.sort([4, 2, 9, 7, 3, 1, 5]))
[9, 7, 5, 4, 3, 2, 1]
We can also specify the ``reverse`` argument to reverse results:
>>> list(index.sort([4, 2, 9, 7, 3, 1, 5], reverse=True))
[1, 2, 3, 4, 5, 7, 9]
And as per IIndexSort, we can limit results by specifying the ``limit``
argument:
>>> list(index.sort([4, 2, 9, 7, 3, 1, 5], limit=3))
[9, 7, 5]
If we pass an id that is not indexed by this index, it won't be included
in the result.
>>> list(index.sort([2, 10]))
[2]
>>> index.clear()
Bugfix testing
==============
It happened at least once that a value dropped out of the forward index
while the index still contained the object; unindexing then broke:
>>> index.index_doc(0, 6)
>>> index.index_doc(1, 26)
>>> index.index_doc(2, 94)
>>> index.index_doc(3, 68)
>>> index.index_doc(4, 30)
>>> index.index_doc(5, 68)
>>> index.index_doc(6, 82)
>>> index.index_doc(7, 30)
>>> index.index_doc(8, 43)
>>> index.index_doc(9, 15)
>>> show_ifset(index.apply((None, None)))
IFSet([0, 1, 2, 3, 4, 5, 6, 7, 8, 9])
Here is the damage:
>>> del index._fwd_index[68]
Unindex should succeed:
>>> index.unindex_doc(5)
>>> index.unindex_doc(3)
>>> show_ifset(index.apply((None, None)))
IFSet([0, 1, 2, 4, 6, 7, 8, 9])
Optimizations
=============
There is an optimization which makes sure that nothing is changed in the
internal data structures if the value of the document was not changed.
To test this optimization we patch the index instance to make sure unindex_doc
is not called.
>>> def unindex_doc(doc_id):
... raise KeyError
>>> index.unindex_doc = unindex_doc
Now we get a KeyError if we try to change the value.
>>> index.index_doc(9, 14)
Traceback (most recent call last):
...
KeyError
Leaving the value unchanged doesn't call unindex_doc.
>>> index.index_doc(9, 15)
>>> show_ifset(index.apply((15, 15)))
IFSet([9])
| zope.index | /zope.index-6.0-cp38-cp38-macosx_11_0_arm64.whl/zope/index/field/README.rst | README.rst |
"""A sorting mixin class for FieldIndex-like indexes.
"""
import bisect
import heapq
from itertools import islice
from zope.interface import implementer
from zope.index.interfaces import IIndexSort
@implementer(IIndexSort)
class SortingIndexMixin:
"""
Implementation of :class:`zope.index.interfaces.IIndexSort`.
"""
_sorting_num_docs_attr = '_num_docs' # Length object
_sorting_fwd_index_attr = '_fwd_index' # forward BTree index
_sorting_rev_index_attr = '_rev_index' # reverse BTree index
def sort(self, docids, reverse=False, limit=None):
if (limit is not None) and (limit < 1):
raise ValueError('limit value must be 1 or greater')
numdocs = getattr(self, self._sorting_num_docs_attr).value
if not numdocs:
return
if not isinstance(docids,
(self.family.IF.Set, self.family.IF.TreeSet)):
docids = self.family.IF.Set(docids)
if not docids:
return
rlen = len(docids)
fwd_index = getattr(self, self._sorting_fwd_index_attr)
rev_index = getattr(self, self._sorting_rev_index_attr)
getValue = lambda x, d=-1: rev_index.get(x, d) # noqa: E731 use def
marker = object()
# use_lazy and use_nbest computations lifted wholesale from
# Zope2 catalog without questioning reasoning
use_lazy = rlen > numdocs * (rlen / 100 + 1)
use_nbest = limit and limit * 4 < rlen
# overrides for unit tests
if getattr(self, '_use_lazy', False):
use_lazy = True
if getattr(self, '_use_nbest', False):
use_nbest = True
if use_nbest:
# this is a sort with a limit that appears useful, try to
# take advantage of the fact that we can keep a smaller
# set of simultaneous values in memory; use generators
# and heapq functions to do so.
def nsort():
for docid in docids:
val = getValue(docid, marker)
if val is not marker:
yield (val, docid)
iterable = nsort()
if reverse:
# we use a generator as an iterable in the reverse
# sort case because the nlargest implementation does
# not manifest the whole thing into memory at once if
# we do so.
for val in heapq.nlargest(limit, iterable):
yield val[1]
else:
# lifted from heapq.nsmallest
it = iter(iterable)
result = sorted(islice(it, 0, limit))
if not result:
return
insort = bisect.insort
pop = result.pop
los = result[-1] # los --> Largest of the nsmallest
for elem in it:
if los <= elem:
continue
insort(result, elem)
pop()
los = result[-1]
for val in result:
yield val[1]
else:
if use_lazy and not reverse:
# Since this the sort is not reversed, and the number
# of results in the search result set is much larger
# than the number of items in this index, we assume it
# will be fastest to iterate over all of our forward
# BTree's items instead of using a full sort, as our
# forward index is already sorted in ascending order
# by value. The Zope 2 catalog implementation claims
# that this case is rarely exercised in practice.
n = 0
for stored_docids in fwd_index.values():
for docid in self.family.IF.intersection(docids,
stored_docids):
n += 1
yield docid
if limit and n >= limit:
return
else:
# If the result set is not much larger than the number
# of documents in this index, or if we need to sort in
# reverse order, use a non-lazy sort.
n = 0
for docid in sorted(docids, key=getValue, reverse=reverse):
if getValue(docid, marker) is not marker:
n += 1
yield docid
if limit and n >= limit:
return | zope.index | /zope.index-6.0-cp38-cp38-macosx_11_0_arm64.whl/zope/index/field/sorting.py | sorting.py |
"""Field index
"""
import BTrees
import persistent
import zope.interface
from BTrees.Length import Length
from zope.index import interfaces
from zope.index.field.sorting import SortingIndexMixin
_MARKER = object()
@zope.interface.implementer(
interfaces.IInjection,
interfaces.IStatistics,
interfaces.IIndexSearch,
)
class FieldIndex(SortingIndexMixin, persistent.Persistent):
"""
A field index.
Implements :class:`zope.index.interfaces.IInjection`,
:class:`zope.index.interfaces.IStatistics` and
:class:`zope.index.interfaces.IIndexSearch`.
"""
family = BTrees.family32
def __init__(self, family=None):
if family is not None:
self.family = family
self.clear()
def clear(self):
"""Initialize forward and reverse mappings."""
# The forward index maps indexed values to a sequence of docids
self._fwd_index = self.family.OO.BTree()
# The reverse index maps a docid to its index value
self._rev_index = self.family.IO.BTree()
self._num_docs = Length(0)
def documentCount(self):
"""See interface IStatistics"""
return self._num_docs()
def wordCount(self):
"""See interface IStatistics"""
return len(self._fwd_index)
def index_doc(self, docid, value):
"""See interface IInjection"""
rev_index = self._rev_index
if docid in rev_index:
if docid in self._fwd_index.get(value, ()):
# no need to index the doc, its already up to date
return
self.unindex_doc(docid)
# Insert into forward index.
set = self._fwd_index.get(value)
if set is None:
set = self.family.IF.TreeSet()
self._fwd_index[value] = set
set.insert(docid)
# increment doc count
self._num_docs.change(1)
# Insert into reverse index.
rev_index[docid] = value
def unindex_doc(self, docid):
"""See interface IInjection"""
rev_index = self._rev_index
value = rev_index.get(docid, _MARKER)
if value is _MARKER:
return # not in index
del rev_index[docid]
try:
set = self._fwd_index[value]
set.remove(docid)
except KeyError: # pragma: no cover
# This is fishy, but we don't want to raise an error.
# We should probably log something.
# but keep it from throwing a dirty exception
set = 1
if not set:
del self._fwd_index[value]
self._num_docs.change(-1)
def apply(self, query):
if len(query) != 2 or not isinstance(query, tuple):
raise TypeError("two-length tuple expected", query)
return self.family.IF.multiunion(
self._fwd_index.values(*query)) | zope.index | /zope.index-6.0-cp38-cp38-macosx_11_0_arm64.whl/zope/index/field/index.py | index.py |
"""Full text index with relevance ranking, using a cosine measure.
"""
import math
from zope.index.text.baseindex import BaseIndex
from zope.index.text.baseindex import inverse_doc_frequency
class CosineIndex(BaseIndex):
"""
Full text index with relevance ranking, using a cosine measure.
"""
def __init__(self, lexicon, family=None):
BaseIndex.__init__(self, lexicon, family=family)
# ._wordinfo for cosine is wid -> {docid -> weight};
# t -> D -> w(d, t)/W(d)
# ._docweight for cosine is
# docid -> W(docid)
# Most of the computation for computing a relevance score for the
# document occurs in the _search_wids() method. The code currently
# implements the cosine similarity function described in Managing
# Gigabytes, eq. 4.3, p. 187. The index_object() method
# precomputes some values that are independent of the particular
# query.
# The equation is
#
# sum(for t in I(d,q): w(d,t) * w(q,t))
# cosine(d, q) = -------------------------------------
# W(d) * W(q)
#
# where
# I(d, q) = the intersection of the terms in d and q.
#
# w(d, t) = 1 + log f(d, t)
# computed by doc_term_weight(); for a given word t,
# self._wordinfo[t] is a map from d to w(d, t).
#
# w(q, t) = log(1 + N/f(t))
# computed by inverse_doc_frequency()
#
# W(d) = sqrt(sum(for t in d: w(d, t) ** 2))
# computed by _get_frequencies(), and remembered in
# self._docweight[d]
#
# W(q) = sqrt(sum(for t in q: w(q, t) ** 2))
# computed by self.query_weight()
def _search_wids(self, wids):
if not wids:
return []
N = float(len(self._docweight))
L = []
DictType = type({})
for wid in wids:
assert wid in self._wordinfo # caller responsible for OOV
d2w = self._wordinfo[wid] # maps docid to w(docid, wid)
idf = inverse_doc_frequency(len(d2w), N) # an unscaled float
# print "idf = %.3f" % idf
if isinstance(d2w, DictType):
d2w = self.family.IF.Bucket(d2w)
L.append((d2w, idf))
return L
def query_weight(self, terms):
wids = []
for term in terms:
wids += self._lexicon.termToWordIds(term)
N = float(len(self._docweight))
sum = 0.0
for wid in self._remove_oov_wids(wids):
wt = inverse_doc_frequency(len(self._wordinfo[wid]), N)
sum += wt ** 2.0
return math.sqrt(sum)
def _get_frequencies(self, wids):
d = {}
dget = d.get
for wid in wids:
d[wid] = dget(wid, 0) + 1
Wsquares = 0.0
for wid, count in d.items():
w = doc_term_weight(count)
Wsquares += w * w
d[wid] = w
W = math.sqrt(Wsquares)
# print "W = %.3f" % W
for wid, weight in d.items():
# print i, ":", "%.3f" % weight,
d[wid] = weight / W
# print "->", d[wid]
return d, W
def doc_term_weight(count):
"""Return the doc-term weight for a term that appears count times."""
# implements w(d, t) = 1 + log f(d, t)
return 1.0 + math.log(count) | zope.index | /zope.index-6.0-cp38-cp38-macosx_11_0_arm64.whl/zope/index/text/cosineindex.py | cosineindex.py |
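# For example, a word occurring once in a document contributes
# w(d, t) = 1 + log(1) = 1.0, while ten occurrences contribute only
# 1 + log(10) ~= 3.3, so repeated words increase the weight sub-linearly.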
==============
Text Indexes
==============
Text indexes combine an inverted index and a lexicon to support text
indexing and searching. A text index can be created without passing
any arguments:
>>> from zope.index.text.textindex import TextIndex
>>> index = TextIndex()
By default, it uses an "Okapi" inverted index and a lexicon with a
pipeline consisting of a simple word splitter, a case normalizer,
and a stop-word remover.
We index text using the `index_doc` method:
>>> index.index_doc(1, u"the quick brown fox jumps over the lazy dog")
>>> index.index_doc(2,
... u"the brown fox and the yellow fox don't need the retriever")
>>> index.index_doc(3, u"""
... The Conservation Pledge
... =======================
...
... I give my pledge, as an American, to save, and faithfully
... to defend from waste, the natural resources of my Country;
... it's soils, minerals, forests, waters and wildlife.
... """)
>>> index.index_doc(4, u"Fran\xe7ois")
>>> word = (
... u"\N{GREEK SMALL LETTER DELTA}"
... u"\N{GREEK SMALL LETTER EPSILON}"
... u"\N{GREEK SMALL LETTER LAMDA}"
... u"\N{GREEK SMALL LETTER TAU}"
... u"\N{GREEK SMALL LETTER ALPHA}"
... )
>>> index.index_doc(5, word + u"\N{EM DASH}\N{GREEK SMALL LETTER ALPHA}")
>>> index.index_doc(6, u"""
... What we have here, is a failure to communicate.
... """)
>>> index.index_doc(7, u"""
... Hold on to your butts!
... """)
>>> index.index_doc(8, u"""
... The Zen of Python, by Tim Peters
...
... Beautiful is better than ugly.
... Explicit is better than implicit.
... Simple is better than complex.
... Complex is better than complicated.
... Flat is better than nested.
... Sparse is better than dense.
... Readability counts.
... Special cases aren't special enough to break the rules.
... Although practicality beats purity.
... Errors should never pass silently.
... Unless explicitly silenced.
... In the face of ambiguity, refuse the temptation to guess.
... There should be one-- and preferably only one --obvious way to do it.
... Although that way may not be obvious at first unless you're Dutch.
... Now is better than never.
... Although never is often better than *right* now.
... If the implementation is hard to explain, it's a bad idea.
... If the implementation is easy to explain, it may be a good idea.
... Namespaces are one honking great idea -- let's do more of those!
... """)
Then we can search using the apply method, which takes a search
string.
>>> [(k, "%.4f" % v) for (k, v) in index.apply(u'brown fox').items()]
[(1, '0.6153'), (2, '0.6734')]
>>> [(k, "%.4f" % v) for (k, v) in index.apply(u'quick fox').items()]
[(1, '0.6153')]
>>> [(k, "%.4f" % v) for (k, v) in index.apply(u'brown python').items()]
[]
>>> [(k, "%.4f" % v) for (k, v) in index.apply(u'dalmatian').items()]
[]
>>> [(k, "%.4f" % v) for (k, v) in index.apply(u'brown or python').items()]
[(1, '0.2602'), (2, '0.2529'), (8, '0.0934')]
>>> [(k, "%.4f" % v) for (k, v) in index.apply(u'butts').items()]
[(7, '0.6948')]
The outputs are mappings from document ids to float scores. Items
with higher scores are more relevant.
We can use unicode characters in search strings.
>>> [(k, "%.4f" % v) for (k, v) in index.apply(u"Fran\xe7ois").items()]
[(4, '0.7427')]
>>> [(k, "%.4f" % v) for (k, v) in index.apply(word).items()]
[(5, '0.7179')]
We can use globbing in search strings.
>>> [(k, "%.3f" % v) for (k, v) in index.apply('fo*').items()]
[(1, '2.179'), (2, '2.651'), (3, '2.041')]
Text indexes support basic statistics:
>>> index.documentCount()
8
>>> index.wordCount()
114
If we index the same document twice, once with a zero value, and then
with a normal value, it should still work:
>>> index2 = TextIndex()
>>> index2.index_doc(1, [])
>>> index2.index_doc(1, ["Zorro"])
>>> [(k, "%.4f" % v) for (k, v) in index2.apply("Zorro").items()]
[(1, '0.4545')]
Tracking Changes
================
If we index a document the first time it updates the _totaldoclen of
the underlying object.
>>> index = TextIndex()
>>> index.index._totaldoclen()
0
>>> index.index_doc(100, u"a new funky value")
>>> index.index._totaldoclen()
3
If we index it a second time, the underlying index length should not
be changed.
>>> index.index_doc(100, u"a new funky value")
>>> index.index._totaldoclen()
3
But if we change it the length changes too.
>>> index.index_doc(100, u"an even newer funky value")
>>> index.index._totaldoclen()
5
The same as for index_doc applies to unindex_doc: if an object that is
not indexed is unindexed, no index should change state.
>>> index.unindex_doc(100)
>>> index.index._totaldoclen()
0
>>> index.unindex_doc(100)
>>> index.index._totaldoclen()
0
| zope.index | /zope.index-6.0-cp38-cp38-macosx_11_0_arm64.whl/zope/index/text/README.rst | README.rst |
import array
class BitArray:
def __init__(self, buf=None):
self.bytes = array.array('B')
self.nbits = 0
self.bitsleft = 0
def tostring(self):
return self.bytes.tobytes()
def __getitem__(self, i):
byte, offset = divmod(i, 8)
mask = 2 ** offset
if self.bytes[byte] & mask:
return 1
else:
return 0
def __setitem__(self, i, val):
byte, offset = divmod(i, 8)
mask = 2 ** offset
if val:
self.bytes[byte] |= mask
else:
self.bytes[byte] &= ~mask
def __len__(self):
return self.nbits
def append(self, bit):
"""Append a 1 if bit is true or 1 if it is false."""
if self.bitsleft == 0:
self.bytes.append(0)
self.bitsleft = 8
self.__setitem__(self.nbits, bit)
self.nbits += 1
self.bitsleft -= 1
def __getstate__(self):
return self.nbits, self.bitsleft, self.tostring()
def __setstate__(self, xxx_todo_changeme):
(nbits, bitsleft, s) = xxx_todo_changeme
self.bytes = array.array('B', s)
self.nbits = nbits
self.bitsleft = bitsleft
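# Illustrative sketch (not a doctest): bits are appended least-significant
# first within each byte.
#
#   b = BitArray()
#   for bit in (1, 0, 1):
#       b.append(bit)
#   [b[i] for i in range(len(b))]   # [1, 0, 1]
#   b.tostring()                    # b'\x05' (binary 00000101)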
class RiceCode:
"""
Rice coding.
"""
len = 0
def __init__(self, m):
"""Constructor a RiceCode for m-bit values."""
if m < 0 or m > 16:
raise ValueError("m must be between 0 and 16")
self.init(m)
self.bits = BitArray()
def init(self, m):
self.m = m
self.lower = (1 << m) - 1
self.mask = 1 << (m - 1)
def append(self, val):
"""Append an item to the list."""
if val < 1:
raise ValueError("value >= 1 expected, got %s" % repr(val))
val -= 1
# emit the unary part of the code
q = val >> self.m
for i in range(q):
self.bits.append(1)
self.bits.append(0)
# emit the binary part
r = val & self.lower
mask = self.mask
while mask:
self.bits.append(r & mask)
mask >>= 1
self.len += 1
def __len__(self):
return self.len
def tolist(self):
"""Return the items as a list."""
l_ = []
i = 0 # bit offset
binary_range = list(range(self.m))
for j in range(self.len):
unary = 0
while self.bits[i] == 1:
unary += 1
i += 1
assert self.bits[i] == 0
i += 1
binary = 0
for k in binary_range:
binary = (binary << 1) | self.bits[i]
i += 1
l_.append((unary << self.m) + (binary + 1))
return l_
def tostring(self):
"""Return a binary string containing the encoded data.
The binary string may contain some extra zeros at the end.
"""
return self.bits.tostring()
def __getstate__(self):
return self.m, self.bits
def __setstate__(self, xxx_todo_changeme1):
(m, bits) = xxx_todo_changeme1
self.init(m)
self.bits = bits
def encode(m, l_):
"""
Encode elements in list *l* using a :class:`RiceCode` of size *m*.
"""
c = RiceCode(m)
for elt in l_:
c.append(elt)
assert c.tolist() == l_
return c
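# Illustrative sketch (not a doctest): Rice-code a list of positive
# integers and get the same values back; the encoded form is a few bytes
# rather than one machine word per element.
#
#   c = encode(6, [1, 7, 20, 64])
#   c.tolist()           # [1, 7, 20, 64]
#   len(c.tostring())    # 4 (bytes)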
def encode_deltas(l_):
"""Encode deltas in list *l* using a :class:`RiceCode` of size 6."""
if len(l_) == 1:
return l_[0], []
deltas = RiceCode(6)
deltas.append(l_[1] - l_[0])
for i in range(2, len(l_)):
deltas.append(l_[i] - l_[i - 1])
return l_[0], deltas
def decode_deltas(start, enc_deltas):
l_ = [start]
if not enc_deltas:
return l_
deltas = enc_deltas.tolist()
# Each delta is relative to the previously reconstructed value.
for d in deltas:
l_.append(l_[-1] + d)
return l_
def pickle_efficiency(bits=(4, 8, 12),
sizes=(10, 20, 50, 100, 200, 500, 1000, 2000, 5000),
elt_ranges=(10, 20, 50, 100, 200, 500, 1000)):
import collections
import pickle
import random
all_results = {}
for m in bits:
all_results[m] = collections.defaultdict(dict)
for size in sizes:
for elt_range in elt_ranges:
l_ = [random.randint(1, elt_range) for i in range(size)]
raw = pickle.dumps(l_, 1)
enc = pickle.dumps(encode(m, l_), 1)
all_results[m][size][elt_range] = "win" if len(
raw) > len(enc) else "lose"
return all_results | zope.index | /zope.index-6.0-cp38-cp38-macosx_11_0_arm64.whl/zope/index/text/ricecode.py | ricecode.py |
"""Abstract base class for full text index with relevance ranking.
"""
import math
import BTrees
from BTrees import Length
from BTrees.IOBTree import IOBTree
from persistent import Persistent
from zope.interface import implementer
from zope.index.interfaces import IInjection
from zope.index.interfaces import IStatistics
from zope.index.text import widcode
from zope.index.text.interfaces import IExtendedQuerying
from zope.index.text.interfaces import ILexiconBasedIndex
from zope.index.text.setops import mass_weightedIntersection
from zope.index.text.setops import mass_weightedUnion
@implementer(IInjection, IStatistics, ILexiconBasedIndex, IExtendedQuerying)
class BaseIndex(Persistent):
"""
Implements :class:`zope.index.interfaces.IInjection`,
:class:`zope.index.interfaces.IStatistics`,
:class:`zope.index.text.interfaces.IExtendedQuerying`
and :class:`zope.index.text.interfaces.ILexiconBasedIndex`.
"""
family = BTrees.family32
lexicon = property(lambda self: self._lexicon,)
def __init__(self, lexicon, family=None):
if family is not None:
self.family = family
self._lexicon = lexicon
self.clear()
def clear(self):
# wid -> {docid -> weight}; t -> D -> w(D, t)
# Different indexers have different notions of term weight, but we
# expect each indexer to use ._wordinfo to map wids to its notion
# of a docid-to-weight map.
# There are two kinds of OOV words: wid 0 is explicitly OOV,
# and it's possible that the lexicon will return a non-zero wid
# for a word we don't currently know about. For example, if we
# unindex the last doc containing a particular word, that wid
# remains in the lexicon, but is no longer in our _wordinfo map;
# lexicons can also be shared across indices, and some other index
# may introduce a lexicon word we've never seen.
# A word is in-vocabulary for this index if and only if
# _wordinfo.has_key(wid). Note that wid 0 must not be a key.
# This does not use the BTree family since wids are always "I"
# flavor trees.
self._wordinfo = IOBTree()
# docid -> weight
# Different indexers have different notions of doc weight, but we
# expect each indexer to use ._docweight to map docids to its
# notion of what a doc weight is.
self._docweight = self.family.IF.BTree()
# docid -> WidCode'd list of wids
# Used for un-indexing, and for phrase search.
self._docwords = self.family.IO.BTree()
# Use a BTree length for efficient length computation w/o conflicts
self.wordCount = Length.Length()
self.documentCount = Length.Length()
def wordCount(self):
"""Return the number of words in the index."""
# This must be overridden by subclasses which do not set the
# attribute on their instances.
raise NotImplementedError
def documentCount(self):
"""Return the number of documents in the index."""
# This must be overridden by subclasses which do not set the
# attribute on their instances.
raise NotImplementedError
def get_words(self, docid):
"""Return a list of the wordids for a given docid."""
return widcode.decode(self._docwords[docid])
# A subclass may wish to extend or override this.
def index_doc(self, docid, text):
if docid in self._docwords:
return self._reindex_doc(docid, text)
wids = self._lexicon.sourceToWordIds(text)
wid2weight, docweight = self._get_frequencies(wids)
self._mass_add_wordinfo(wid2weight, docid)
self._docweight[docid] = docweight
self._docwords[docid] = widcode.encode(wids)
try:
self.documentCount.change(1)
except AttributeError:
# upgrade documentCount to Length object
self.documentCount = Length.Length(len(self._docweight))
return len(wids)
# A subclass may wish to extend or override this. This is for adjusting
# to a new version of a doc that already exists. The goal is to be
# faster than simply unindexing the old version in its entirety and then
# adding the new version in its entirety.
def _reindex_doc(self, docid, text):
# Touch as few docid->w(docid, score) maps in ._wordinfo as possible.
old_wids = self.get_words(docid)
old_wid2w, old_docw = self._get_frequencies(old_wids)
new_wids = self._lexicon.sourceToWordIds(text)
new_wid2w, new_docw = self._get_frequencies(new_wids)
old_widset = self.family.IF.TreeSet(old_wid2w.keys())
new_widset = self.family.IF.TreeSet(new_wid2w.keys())
IF = self.family.IF
in_both_widset = IF.intersection(old_widset, new_widset)
only_old_widset = IF.difference(old_widset, in_both_widset)
only_new_widset = IF.difference(new_widset, in_both_widset)
del old_widset, new_widset
for wid in only_old_widset.keys():
self._del_wordinfo(wid, docid)
for wid in only_new_widset.keys():
self._add_wordinfo(wid, new_wid2w[wid], docid)
for wid in in_both_widset.keys():
# For the Okapi indexer, the "if" will trigger only for words
# whose counts have changed. For the cosine indexer, the "if"
# may trigger for every wid, since W(d) probably changed and
# W(d) is divided into every score.
newscore = new_wid2w[wid]
if old_wid2w[wid] != newscore:
self._add_wordinfo(wid, newscore, docid)
self._docweight[docid] = new_docw
self._docwords[docid] = widcode.encode(new_wids)
return len(new_wids)
# Subclass must override.
def _get_frequencies(self, wids):
# Compute term frequencies and a doc weight, whatever those mean
# to an indexer.
# Return pair:
# {wid0: w(d, wid0), wid1: w(d, wid1), ...],
# docweight
# The wid->weight mappings are fed into _add_wordinfo, and docweight
# becomes the value of _docweight[docid].
raise NotImplementedError
def has_doc(self, docid):
return docid in self._docwords
# A subclass may wish to extend or override this.
def unindex_doc(self, docid):
if docid not in self._docwords:
return
for wid in self.family.IF.TreeSet(self.get_words(docid)).keys():
self._del_wordinfo(wid, docid)
del self._docwords[docid]
del self._docweight[docid]
try:
self.documentCount.change(-1)
except AttributeError:
# upgrade documentCount to Length object
self.documentCount = Length.Length(len(self._docweight))
def search(self, term):
wids = self._lexicon.termToWordIds(term)
if not wids:
return None # All docs match
wids = self._remove_oov_wids(wids)
return mass_weightedUnion(self._search_wids(wids), self.family)
def search_glob(self, pattern):
wids = self._lexicon.globToWordIds(pattern)
wids = self._remove_oov_wids(wids)
return mass_weightedUnion(self._search_wids(wids), self.family)
def search_phrase(self, phrase):
wids = self._lexicon.termToWordIds(phrase)
cleaned_wids = self._remove_oov_wids(wids)
if len(wids) != len(cleaned_wids):
# At least one wid was OOV: can't possibly find it.
return self.family.IF.BTree()
scores = self._search_wids(wids)
hits = mass_weightedIntersection(scores, self.family)
if not hits:
return hits
code = widcode.encode(wids)
result = self.family.IF.BTree()
for docid, weight in hits.items():
docwords = self._docwords[docid]
if docwords.find(code) >= 0:
result[docid] = weight
return result
def _remove_oov_wids(self, wids):
return list(filter(self._wordinfo.has_key, wids))
# Subclass must override.
# The workhorse. Return a list of (IFBucket, weight) pairs, one pair
# for each wid t in wids. The IFBucket, times the weight, maps D to
# TF(D,t) * IDF(t) for every docid D containing t. wids must not
# contain any OOV words.
def _search_wids(self, wids):
raise NotImplementedError
# Subclass must override.
# It's not clear what it should do. It must return an upper bound on
# document scores for the query. It would be nice if a document score
divided by the query's query_weight gave the probability that a
# document was relevant, but nobody knows how to do that. For
# CosineIndex, the ratio is the cosine of the angle between the document
# and query vectors. For OkapiIndex, the ratio is a (probably
# unachievable) upper bound with no "intuitive meaning" beyond that.
def query_weight(self, terms):
raise NotImplementedError
DICT_CUTOFF = 10
def _add_wordinfo(self, wid, f, docid):
# Store a wordinfo in a dict as long as there are less than
# DICT_CUTOFF docids in the dict. Otherwise use an IFBTree.
# The pickle of a dict is smaller than the pickle of an
# IFBTree, substantially so for small mappings. Thus, we use
# a dictionary until the mapping reaches DICT_CUTOFF elements.
# The cutoff is chosen based on the implementation
# characteristics of Python dictionaries. The dict hashtable
# always has 2**N slots and is resized whenever it is 2/3s
# full. A pickled dict with 10 elts is half the size of an
# IFBTree with 10 elts, and 10 happens to be 2/3s of 2**4. So
# choose 10 as the cutoff for now.
# The IFBTree has a smaller in-memory representation than a
# dictionary, so pickle size isn't the only consideration when
# choosing the threshold. The pickle of a 500-elt dict is 92%
# of the size of the same IFBTree, but the dict uses more
# space when it is live in memory. An IFBTree stores two C
# arrays of ints, one for the keys and one for the values. It
# holds up to 120 key-value pairs in a single bucket.
doc2score = self._wordinfo.get(wid)
if doc2score is None:
doc2score = {} # XXX Holy ConflictError, Batman!
try:
self.wordCount.change(1)
except AttributeError:
# upgrade wordCount to Length object
self.wordCount = Length.Length(len(self._wordinfo))
self.wordCount.change(1)
else:
# _add_wordinfo() is called for each update. If the map
# size exceeds the DICT_CUTOFF, convert to an IFBTree.
# Obscure: First check the type. If it's not a dict, it
# can't need conversion, and then we can avoid an expensive
# len(IFBTree).
if (isinstance(doc2score, type({})) and
len(doc2score) == self.DICT_CUTOFF):
doc2score = self.family.IF.BTree(doc2score)
doc2score[docid] = f
self._wordinfo[wid] = doc2score # not redundant: Persistency!
# self._mass_add_wordinfo(wid2weight, docid)
#
# is the same as
#
# for wid, weight in wid2weight.items():
# self._add_wordinfo(wid, weight, docid)
#
# except that _mass_add_wordinfo doesn't require so many function calls.
def _mass_add_wordinfo(self, wid2weight, docid):
dicttype = type({})
get_doc2score = self._wordinfo.get
new_word_count = 0
for wid, weight in wid2weight.items():
doc2score = get_doc2score(wid)
if doc2score is None:
doc2score = {}
new_word_count += 1
elif (isinstance(doc2score, dicttype) and
len(doc2score) == self.DICT_CUTOFF):
doc2score = self.family.IF.BTree(doc2score)
doc2score[docid] = weight
self._wordinfo[wid] = doc2score # not redundant: Persistency!
try:
self.wordCount.change(new_word_count)
except AttributeError:
# upgrade wordCount to Length object
self.wordCount = Length.Length(len(self._wordinfo))
def _del_wordinfo(self, wid, docid):
doc2score = self._wordinfo[wid]
del doc2score[docid]
if doc2score:
self._wordinfo[wid] = doc2score # not redundant: Persistency!
else:
del self._wordinfo[wid]
try:
self.wordCount.change(-1)
except AttributeError:
# upgrade wordCount to Length object
self.wordCount = Length.Length(len(self._wordinfo))
def inverse_doc_frequency(term_count, num_items):
"""Return the inverse doc frequency for a term,
that appears in term_count items in a collection with num_items
total items.
"""
# implements IDF(q, t) = log(1 + N/f(t))
return math.log(1.0 + float(num_items) / term_count) | zope.index | /zope.index-6.0-cp38-cp38-macosx_11_0_arm64.whl/zope/index/text/baseindex.py | baseindex.py |
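# Worked example: in a 1000-document collection, a term that appears in a
# single document gets IDF = log(1 + 1000/1) ~= 6.91, while a term that
# appears in half the documents gets IDF = log(1 + 1000/500) ~= 1.10.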
"""Generic parser support: exception and parse tree nodes.
"""
from zope.interface import implementer
from zope.index.text.interfaces import IQueryParseTree
from zope.index.text.setops import mass_weightedIntersection
from zope.index.text.setops import mass_weightedUnion
class QueryError(Exception):
pass
class ParseError(Exception):
pass
@implementer(IQueryParseTree)
class ParseTreeNode:
_nodeType = None
def __init__(self, value):
self._value = value
def nodeType(self):
return self._nodeType
def getValue(self):
return self._value
def __repr__(self):
return "{}({!r})".format(self.__class__.__name__, self.getValue())
def terms(self):
t = []
for v in self.getValue():
t.extend(v.terms())
return t
def executeQuery(self, index):
raise NotImplementedError
class NotNode(ParseTreeNode):
_nodeType = "NOT"
def terms(self):
return []
def executeQuery(self, index):
raise QueryError("NOT parse tree node cannot be executed directly")
class AndNode(ParseTreeNode):
_nodeType = "AND"
def executeQuery(self, index):
L = []
Nots = []
for subnode in self.getValue():
if subnode.nodeType() == "NOT":
r = subnode.getValue().executeQuery(index)
# If None, technically it matches every doc, but we treat
# it as if it matched none (we want
# real_word AND NOT stop_word
# to act like plain real_word).
if r is not None:
Nots.append((r, 1))
else:
r = subnode.executeQuery(index)
# If None, technically it matches every doc, so needn't be
# included.
if r is not None:
L.append((r, 1))
set = mass_weightedIntersection(L, index.family)
if Nots:
notset = mass_weightedUnion(Nots, index.family)
set = index.family.IF.difference(set, notset)
return set
class OrNode(ParseTreeNode):
_nodeType = "OR"
def executeQuery(self, index):
weighted = []
for node in self.getValue():
r = node.executeQuery(index)
# If None, technically it matches every doc, but we treat
# it as if it matched none (we want
# real_word OR stop_word
# to act like plain real_word).
if r is not None:
weighted.append((r, 1))
return mass_weightedUnion(weighted, index.family)
class AtomNode(ParseTreeNode):
_nodeType = "ATOM"
def terms(self):
return [self.getValue()]
def executeQuery(self, index):
return index.search(self.getValue())
class PhraseNode(AtomNode):
_nodeType = "PHRASE"
def executeQuery(self, index):
return index.search_phrase(self.getValue())
class GlobNode(AtomNode):
_nodeType = "GLOB"
def executeQuery(self, index):
return index.search_glob(self.getValue()) | zope.index | /zope.index-6.0-cp38-cp38-macosx_11_0_arm64.whl/zope/index/text/parsetree.py | parsetree.py |
"""Text index.
"""
from persistent import Persistent
from zope.interface import implementer
from zope.index.interfaces import IIndexSearch
from zope.index.interfaces import IInjection
from zope.index.interfaces import IStatistics
from zope.index.text.lexicon import CaseNormalizer
from zope.index.text.lexicon import Lexicon
from zope.index.text.lexicon import Splitter
from zope.index.text.lexicon import StopWordRemover
from zope.index.text.okapiindex import OkapiIndex
from zope.index.text.queryparser import QueryParser
@implementer(IInjection, IIndexSearch, IStatistics)
class TextIndex(Persistent):
"""
Text index.
Implements :class:`zope.index.interfaces.IInjection` and
:class:`zope.index.interfaces.IIndexSearch`.
"""
def __init__(self, lexicon=None, index=None):
"""Provisional constructor.
This creates the lexicon and index if not passed in.
"""
_explicit_lexicon = True
if lexicon is None:
_explicit_lexicon = False
lexicon = Lexicon(Splitter(), CaseNormalizer(), StopWordRemover())
if index is None:
index = OkapiIndex(lexicon)
self.lexicon = _explicit_lexicon and lexicon or index.lexicon
self.index = index
def index_doc(self, docid, text):
self.index.index_doc(docid, text)
def unindex_doc(self, docid):
self.index.unindex_doc(docid)
def clear(self):
self.index.clear()
def documentCount(self):
"""Return the number of documents in the index."""
return self.index.documentCount()
def wordCount(self):
"""Return the number of words in the index."""
return self.index.wordCount()
def apply(self, querytext, start=0, count=None):
parser = QueryParser(self.lexicon)
tree = parser.parseQuery(querytext)
results = tree.executeQuery(self.index)
if results:
qw = self.index.query_weight(tree.terms())
# Hack to avoid ZeroDivisionError
if qw == 0:
qw = 1.0
qw *= 1.0
for docid, score in results.items():
try:
results[docid] = score / qw
except TypeError:
# We overflowed the score, perhaps wildly unlikely.
# Who knows.
results[docid] = 2**64 // 10
return results | zope.index | /zope.index-6.0-cp38-cp38-macosx_11_0_arm64.whl/zope/index/text/textindex.py | textindex.py |
"""Text-indexing interfaces
"""
from zope.interface import Attribute
from zope.interface import Interface
class ILexicon(Interface):
"""Object responsible for converting text to word identifiers."""
def termToWordIds(text):
"""Return a sequence of ids of the words parsed from the text.
The input text may be either a string or a list of strings.
Parse the text as if it consists of search terms, and skip
words that aren't in the lexicon.
"""
def sourceToWordIds(text):
"""Return a sequence of ids of the words parsed from the text.
The input text may be either a string or a list of strings.
Parse the text as if it comes from a source document, and
create new word ids for words that aren't (yet) in the
lexicon.
"""
def globToWordIds(pattern):
"""Return a sequence of ids of words matching the pattern.
The argument should be a single word using globbing syntax,
e.g. 'foo*' meaning anything starting with 'foo'.
Return the wids for all words in the lexicon that match the
pattern.
"""
def wordCount():
"""Return the number of unique terms in the lexicon."""
def get_word(wid):
"""Return the word for the given word id.
Raise KeyError if the word id is not in the lexicon.
"""
def get_wid(word):
Return the word id for the given word.
Return 0 if the word is not in the lexicon.
"""
def parseTerms(text):
"""Pass the text through the pipeline.
Return a list of words, normalized by the pipeline
(e.g. stopwords removed, case normalized etc.).
"""
def isGlob(word):
"""Return true if the word is a globbing pattern.
The word should be one of the words returned by parseTerms().
"""
class ILexiconBasedIndex(Interface):
""" Interface for indexes which hold a lexicon."""
lexicon = Attribute('Lexicon used by the index.')
class IQueryParser(Interface):
"""Interface for Query Parsers."""
def parseQuery(query):
"""Parse a query string.
Return a parse tree (which implements IQueryParseTree).
Some of the query terms may be ignored because they are
stopwords; use getIgnored() to find out which terms were
ignored. But if the entire query consists only of stop words,
or of stopwords and one or more negated terms, an exception is
raised.
May raise ParseTree.ParseError.
"""
def getIgnored():
"""Return the list of ignored terms.
Return the list of terms that were ignored by the most recent
call to parseQuery() because they were stopwords.
If parseQuery() was never called this returns None.
"""
def parseQueryEx(query):
"""Parse a query string.
Return a tuple (tree, ignored) where 'tree' is the parse tree
as returned by parseQuery(), and 'ignored' is a list of
ignored terms as returned by getIgnored().
May raise ParseTree.ParseError.
"""
class IQueryParseTree(Interface):
"""Interface for parse trees returned by parseQuery()."""
def nodeType():
"""Return the node type.
This is one of 'AND', 'OR', 'NOT', 'ATOM', 'PHRASE' or 'GLOB'.
"""
def getValue():
"""Return a node-type specific value.
For node type: Return:
'AND' a list of parse trees
'OR' a list of parse trees
'NOT' a parse tree
'ATOM' a string (representing a single search term)
'PHRASE' a string (representing a search phrase)
'GLOB' a string (representing a pattern, e.g. "foo*")
"""
def terms():
"""Return a list of all terms in this node, excluding NOT subtrees."""
def executeQuery(index):
"""Execute the query represented by this node against the index.
The index argument must implement the IIndex interface.
Return an IFBucket or IFBTree mapping document ids to scores
(higher scores mean better results).
May raise ParseTree.QueryError.
"""
class ISearchableText(Interface):
"""Interface that text-indexable objects should implement."""
def getSearchableText():
"""Return a sequence of unicode strings to be indexed.
Each unicode string in the returned sequence will be run
through the splitter pipeline; the combined stream of words
coming out of the pipeline will be indexed.
Returning None indicates that the object should not be indexed.
"""
class IPipelineElement(Interface):
""" An element in a lexicon's processing pipeline.
"""
def process(terms):
""" Transform each term in terms.
Return the sequence of transformed terms.
"""
class ISplitter(IPipelineElement):
""" Split text into a sequence of words.
"""
def processGlob(terms):
""" Transform terms, leaving globbing markers in place.
"""
class IExtendedQuerying(Interface):
"""An index that supports advanced search setups."""
def search(term):
"""Execute a search on a single term given as a string.
Return an IFBTree mapping docid to score, or None if all docs
match due to the lexicon returning no wids for the term (e.g.,
if the term is entirely composed of stopwords).
"""
def search_phrase(phrase):
"""Execute a search on a phrase given as a string.
Return an IFBtree mapping docid to score.
"""
def search_glob(pattern):
"""Execute a pattern search.
The pattern represents a set of words by using * and ?. For
example, "foo*" represents the set of all words in the lexicon
starting with "foo".
Return an IFBTree mapping docid to score.
"""
def query_weight(terms):
"""Return the weight for a set of query terms.
'terms' is a sequence of all terms included in the query,
although not terms with a not. If a term appears more than
once in a query, it should appear more than once in terms.
Nothing is defined about what "weight" means, beyond that the
result is an upper bound on document scores returned for the
query.
""" | zope.index | /zope.index-6.0-cp38-cp38-macosx_11_0_arm64.whl/zope/index/text/interfaces.py | interfaces.py |
import re
from sys import intern
from zope.interface import implementer
from zope.index.text import parsetree
from zope.index.text.interfaces import IQueryParser
# Create unique symbols for token types.
_AND = intern("AND")
_OR = intern("OR")
_NOT = intern("NOT")
_LPAREN = intern("(")
_RPAREN = intern(")")
_ATOM = intern("ATOM")
_EOF = intern("EOF")
# Map keyword string to token type.
_keywords = {
_AND: _AND,
_OR: _OR,
_NOT: _NOT,
_LPAREN: _LPAREN,
_RPAREN: _RPAREN,
}
# Regular expression to tokenize.
_tokenizer_regex = re.compile(r"""
# a paren
[()]
# or an optional hyphen
| -?
# followed by
(?:
# a string inside double quotes (and not containing these)
" [^"]* "
# or a non-empty stretch w/o whitespace, parens or double quotes
| [^()\s"]+
)
""", re.VERBOSE)
@implementer(IQueryParser)
class QueryParser:
"""
Implementation of
:class:`zope.index.text.interfaces.IQueryParser`.
This class is not thread-safe; each thread should have its own
instance.
"""
def __init__(self, lexicon):
self._lexicon = lexicon
self._ignored = None
# Public API methods
def parseQuery(self, query):
# Lexical analysis.
tokens = _tokenizer_regex.findall(query)
self._tokens = tokens
# classify tokens
self._tokentypes = [_keywords.get(token.upper(), _ATOM)
for token in tokens]
# add _EOF
self._tokens.append(_EOF)
self._tokentypes.append(_EOF)
self._index = 0
# Syntactical analysis.
self._ignored = [] # Ignored words in the query, for parseQueryEx
tree = self._parseOrExpr()
self._require(_EOF)
if tree is None:
raise parsetree.ParseError(
"Query contains only common words: %s" % repr(query))
return tree
def getIgnored(self):
return self._ignored
def parseQueryEx(self, query):
tree = self.parseQuery(query)
ignored = self.getIgnored()
return tree, ignored
# Recursive descent parser
def _require(self, tokentype):
if not self._check(tokentype):
t = self._tokens[self._index]
msg = "Token {!r} required, {!r} found".format(tokentype, t)
raise parsetree.ParseError(msg)
def _check(self, tokentype):
if self._tokentypes[self._index] is tokentype:
self._index += 1
return 1
else:
return 0
def _peek(self, tokentype):
return self._tokentypes[self._index] is tokentype
def _get(self, tokentype):
t = self._tokens[self._index]
self._require(tokentype)
return t
def _parseOrExpr(self):
L = []
L.append(self._parseAndExpr())
while self._check(_OR):
L.append(self._parseAndExpr())
L = list(filter(None, L))
if not L:
return None # Only stopwords
elif len(L) == 1:
return L[0]
else:
return parsetree.OrNode(L)
def _parseAndExpr(self):
L = []
t = self._parseTerm()
if t is not None:
L.append(t)
Nots = []
while True:
if self._check(_AND):
t = self._parseNotExpr()
if t is None:
continue
if isinstance(t, parsetree.NotNode):
Nots.append(t)
else:
L.append(t)
elif self._check(_NOT):
t = self._parseTerm()
if t is None:
continue # Only stopwords
Nots.append(parsetree.NotNode(t))
else:
break
if not L:
return None # Only stopwords
L.extend(Nots)
if len(L) == 1:
return L[0]
else:
return parsetree.AndNode(L)
def _parseNotExpr(self):
if self._check(_NOT):
t = self._parseTerm()
if t is None:
return None # Only stopwords
return parsetree.NotNode(t)
else:
return self._parseTerm()
def _parseTerm(self):
if self._check(_LPAREN):
tree = self._parseOrExpr()
self._require(_RPAREN)
else:
nodes = [self._parseAtom()]
while self._peek(_ATOM):
nodes.append(self._parseAtom())
nodes = list(filter(None, nodes))
if not nodes:
return None # Only stopwords
structure = sorted(
[(isinstance(nodes[i], parsetree.NotNode), i, nodes[i])
for i in range(len(nodes))])
nodes = [node for (bit, index, node) in structure]
if isinstance(nodes[0], parsetree.NotNode):
raise parsetree.ParseError(
"a term must have at least one positive word")
if len(nodes) == 1:
return nodes[0]
tree = parsetree.AndNode(nodes)
return tree
def _parseAtom(self):
term = self._get(_ATOM)
words = self._lexicon.parseTerms(term)
if not words:
self._ignored.append(term)
return None
if len(words) > 1:
tree = parsetree.PhraseNode(words)
elif self._lexicon.isGlob(words[0]):
tree = parsetree.GlobNode(words[0])
else:
tree = parsetree.AtomNode(words[0])
if term[0] == "-":
tree = parsetree.NotNode(tree)
return tree | zope.index | /zope.index-6.0-cp38-cp38-macosx_11_0_arm64.whl/zope/index/text/queryparser.py | queryparser.py |
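# Illustrative sketch (not a doctest) of the trees the parser builds,
# assuming a lexicon whose pipeline is just the default Splitter; exact
# reprs depend on the pipeline used:
#
#   from zope.index.text.lexicon import Lexicon, Splitter
#   parser = QueryParser(Lexicon(Splitter()))
#   parser.parseQuery('foo AND NOT bar')
#   # -> roughly AndNode([AtomNode('foo'), NotNode(AtomNode('bar'))])
#   parser.parseQuery('"quick fox" OR dog*')
#   # -> roughly OrNode([PhraseNode(['quick', 'fox']), GlobNode('dog*')])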
import os
import platform
from BTrees.Length import Length
from zope.index.text.baseindex import BaseIndex
from zope.index.text.baseindex import inverse_doc_frequency
_py_impl = getattr(platform, 'python_implementation', lambda: None)
_is_pypy = _py_impl() == 'PyPy'
PURE_PYTHON = os.environ.get('PURE_PYTHON') or _is_pypy
try:
from zope.index.text.okascore import score
except ImportError: # pragma: no cover
score = None
score = None if PURE_PYTHON else score
class OkapiIndex(BaseIndex):
"""
Full text index with relevance ranking, using an Okapi BM25 rank.
"""
# BM25 free parameters.
K1 = 1.2
B = 0.75
assert K1 >= 0.0
assert 0.0 <= B <= 1.0
def __init__(self, lexicon, family=None):
BaseIndex.__init__(self, lexicon, family=family)
# ._wordinfo for Okapi is
# wid -> {docid -> frequency}; t -> D -> f(D, t)
# ._docweight for Okapi is
# docid -> # of words in the doc
# This is just len(self._docwords[docid]), but _docwords is stored
# in compressed form, so uncompressing it just to count the list
# length would be ridiculously expensive.
# sum(self._docweight.values()), the total # of words in all docs
# This is a long for "better safe than sorry" reasons. It isn't
# used often enough that speed should matter.
self._totaldoclen = Length(0)
def index_doc(self, docid, text):
count = BaseIndex.index_doc(self, docid, text)
self._change_doc_len(count)
return count
def _reindex_doc(self, docid, text):
self._change_doc_len(-self._docweight[docid])
return BaseIndex._reindex_doc(self, docid, text)
def unindex_doc(self, docid):
if docid not in self._docwords:
return
self._change_doc_len(-self._docweight[docid])
BaseIndex.unindex_doc(self, docid)
def _change_doc_len(self, delta):
# Change total doc length used for scoring
delta = int(delta)
try:
self._totaldoclen.change(delta)
except AttributeError:
# Opportunistically upgrade _totaldoclen attribute to Length object
self._totaldoclen = Length(int(self._totaldoclen + delta))
# The workhorse. Return a list of (IFBucket, weight) pairs, one pair
# for each wid t in wids. The IFBucket, times the weight, maps D to
# TF(D,t) * IDF(t) for every docid D containing t.
# As currently written, the weights are always 1, and the IFBucket maps
# D to TF(D,t)*IDF(t) directly, where the product is computed as a float.
# NOTE: This may be overridden below, by a function that computes the
# same thing but with the inner scoring loop in C.
def _python_search_wids(self, wids):
if not wids:
return []
N = float(self.documentCount()) # total # of docs
try:
doclen = self._totaldoclen()
except TypeError:
# _totaldoclen has not yet been upgraded
doclen = self._totaldoclen
meandoclen = doclen / N
K1 = self.K1
B = self.B
K1_plus1 = K1 + 1.0
B_from1 = 1.0 - B
# f(D, t) * (k1 + 1)
# TF(D, t) = -------------------------------------------
# f(D, t) + k1 * ((1-b) + b*len(D)/E(len(D)))
L = []
docid2len = self._docweight
for t in wids:
d2f = self._wordinfo[t] # map {docid -> f(docid, t)}
idf = inverse_doc_frequency(len(d2f), N) # an unscaled float
result = self.family.IF.Bucket()
for docid, f in d2f.items():
lenweight = B_from1 + B * docid2len[docid] / meandoclen
tf = f * K1_plus1 / (f + K1 * lenweight)
result[docid] = tf * idf
L.append((result, 1))
return L
# Note about the above: the result is tf * idf. tf is
# small -- it can't be larger than k1+1 = 2.2. idf is
# formally unbounded, but is less than 14 for a term that
# appears in only 1 of a million documents. So the
# product is probably less than 32, or 5 bits before the
# radix point. If we did the scaled-int business on both
# of them, we'd be up to 25 bits. Add 64 of those and
# we'd be in overflow territory. That's pretty unlikely,
# so we *could* just store scaled_int(tf) in
# result[docid], and use scaled_int(idf) as an invariant
# weight across the whole result. But besides skating
# near the edge, it's not a speed cure, since the
# computation of tf would still be done at Python speed,
# and it's a lot more work than just multiplying by idf.
# The same function as _search_wids above, but with the inner scoring
# loop written in C (module okascore, function score()).
# Caution: okascore hardcodes the values of K1, B, and the scaled_int
# function.
def _c_search_wids(self, wids):
if not wids:
return []
N = float(self.documentCount()) # total # of docs
try:
doclen = self._totaldoclen()
except TypeError:
# _totaldoclen has not yet been upgraded
doclen = self._totaldoclen
meandoclen = doclen / N
# K1 = self.K1
# B = self.B
# K1_plus1 = K1 + 1.0
# B_from1 = 1.0 - B
# f(D, t) * (k1 + 1)
# TF(D, t) = -------------------------------------------
# f(D, t) + k1 * ((1-b) + b*len(D)/E(len(D)))
L = []
docid2len = self._docweight
for t in wids:
d2f = self._wordinfo[t] # map {docid -> f(docid, t)}
idf = inverse_doc_frequency(len(d2f), N) # an unscaled float
result = self.family.IF.Bucket()
items = list(d2f.items())
score(result, items, docid2len, idf, meandoclen)
L.append((result, 1))
return L
_search_wids = _python_search_wids if score is None else _c_search_wids
def query_weight(self, terms):
# Get the wids.
wids = []
for term in terms:
termwids = self._lexicon.termToWordIds(term)
wids.extend(termwids)
# The max score for term t is the maximum value of
# TF(D, t) * IDF(Q, t)
# We can compute IDF directly, and as noted in the comments above,
# TF(D, t) is bounded above by 1+K1.
N = float(len(self._docweight))
tfmax = 1.0 + self.K1
sum = 0
for t in self._remove_oov_wids(wids):
idf = inverse_doc_frequency(len(self._wordinfo[t]), N)
sum += idf * tfmax
return sum
def _get_frequencies(self, wids):
d = {}
dget = d.get
for wid in wids:
d[wid] = dget(wid, 0) + 1
return d, len(wids) | zope.index | /zope.index-6.0-cp38-cp38-macosx_11_0_arm64.whl/zope/index/text/okapiindex.py | okapiindex.py |
"""Lexicon
"""
import re
from BTrees.IOBTree import IOBTree
from BTrees.Length import Length
from BTrees.OIBTree import OIBTree
from persistent import Persistent
from zope.interface import implementer
from zope.index.text.interfaces import ILexicon
from zope.index.text.interfaces import IPipelineElement
from zope.index.text.interfaces import ISplitter
from zope.index.text.parsetree import QueryError
from zope.index.text.stopdict import get_stopdict
@implementer(ILexicon)
class Lexicon(Persistent):
"""
Implementation of :class:`zope.index.text.interfaces.ILexicon`.
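
    A small usage sketch (doctest-style; it uses the sample pipeline
    elements defined later in this module):

    >>> lexicon = Lexicon(Splitter(), CaseNormalizer(), StopWordRemover())
    >>> lexicon.sourceToWordIds('The quick brown fox')
    [1, 2, 3]
    >>> lexicon.termToWordIds('Quick')
    [1]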
"""
def __init__(self, *pipeline):
self._wids = OIBTree() # word -> wid
self._words = IOBTree() # wid -> word
# wid 0 is reserved for words that aren't in the lexicon (OOV -- out
# of vocabulary). This can happen, e.g., if a query contains a word
# we never saw before, and that isn't a known stopword (or otherwise
# filtered out). Returning a special wid value for OOV words is a
# way to let clients know when an OOV word appears.
self.wordCount = Length()
self._pipeline = pipeline
def wordCount(self):
"""Return the number of unique terms in the lexicon."""
# Overridden per instance: __init__ replaces this method with a
# BTrees.Length.Length instance attribute; this fallback only serves
# instances created before that attribute existed.
return len(self._wids)
def words(self):
return self._wids.keys()
def wids(self):
return self._words.keys()
def items(self):
return self._wids.items()
def sourceToWordIds(self, text):
if text is None:
text = ''
last = _text2list(text)
for element in self._pipeline:
last = element.process(last)
if not isinstance(self.wordCount, Length):
# Make sure wordCount is overridden with a BTrees.Length.Length
self.wordCount = Length(self.wordCount())
# Strategically unload the length value so that we get the most
# recent value written to the database to minimize conflicting wids
# Because length is independent, this will load the most
# recent value stored, regardless of whether MVCC is enabled
self.wordCount._p_deactivate()
return list(map(self._getWordIdCreate, last))
def termToWordIds(self, text):
last = _text2list(text)
for element in self._pipeline:
last = element.process(last)
wids = []
for word in last:
wids.append(self._wids.get(word, 0))
return wids
def parseTerms(self, text):
last = _text2list(text)
for element in self._pipeline:
process = getattr(element, "processGlob", element.process)
last = process(last)
return last
def isGlob(self, word):
return "*" in word or "?" in word
def get_word(self, wid):
return self._words[wid]
def get_wid(self, word):
return self._wids.get(word, 0)
def globToWordIds(self, pattern):
# Implement * and ? just as in the shell, except the pattern
# must not start with either of these
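        # For example, the glob "foo*bar?" is split into the literal prefix
        # "foo" and the remaining pattern "*bar?", which the loop below turns
        # into the regex "foo.*bar.$"; only lexicon words that start with
        # "foo" are then matched against that regex.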
prefix = ""
while pattern and pattern[0] not in "*?":
prefix += pattern[0]
pattern = pattern[1:]
if not pattern:
# There were no globbing characters in the pattern
wid = self._wids.get(prefix, 0)
if wid:
return [wid]
else:
return []
if not prefix:
# The pattern starts with a globbing character.
# Matching it would be too inefficient, so we raise an exception.
raise QueryError(
"pattern %r shouldn't start with glob character" % pattern)
pat = prefix
for c in pattern:
if c == "*":
pat += ".*"
elif c == "?":
pat += "."
else:
pat += re.escape(c)
pat += "$"
prog = re.compile(pat)
keys = self._wids.keys(prefix) # Keys starting at prefix
wids = []
for key in keys:
if not key.startswith(prefix):
break
if prog.match(key):
wids.append(self._wids[key])
return wids
def _getWordIdCreate(self, word):
wid = self._wids.get(word)
if wid is None:
wid = self._new_wid()
self._wids[word] = wid
self._words[wid] = word
return wid
def _new_wid(self):
count = self.wordCount
count.change(1)
while count() in self._words:
# just to be safe
count.change(1)
return count()
def _text2list(text):
# Helper: splitter input may be a string or a list of strings
try:
text + ""
except BaseException:
return text
else:
return [text]
# Sample pipeline elements
@implementer(ISplitter)
class Splitter:
"""
A simple :class:`zope.index.text.interfaces.ISplitter`.
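
    A doctest-style sketch of its behaviour:

    >>> Splitter().process(['Hello, world! 42'])
    ['Hello', 'world', '42']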
"""
rx = re.compile(r"(?u)\w+")
rxGlob = re.compile(r"(?u)\w+[\w*?]*") # See globToWordIds() above
def process(self, lst):
result = []
for s in lst:
result += self.rx.findall(s)
return result
def processGlob(self, lst):
result = []
for s in lst:
result += self.rxGlob.findall(s)
return result
@implementer(IPipelineElement)
class CaseNormalizer:
"""
A simple :class:`zope.index.text.interfaces.IPipelineElement`
to normalize to lower case.
"""
def process(self, lst):
return [w.lower() for w in lst]
@implementer(IPipelineElement)
class StopWordRemover:
"""
A simple :class:`zope.index.text.interfaces.IPipelineElement`
to remove stop words.
.. seealso:: :func:`.get_stopdict`
"""
dict = get_stopdict().copy()
def process(self, lst):
return [w for w in lst if w not in self.dict]
class StopWordAndSingleCharRemover(StopWordRemover):
"""
A simple :class:`zope.index.text.interfaces.IPipelineElement`
to remove stop words and words of a single character.
"""
dict = get_stopdict().copy()
for c in range(255):
dict[chr(c)] = None | zope.index | /zope.index-6.0-cp38-cp38-macosx_11_0_arm64.whl/zope/index/text/lexicon.py | lexicon.py |
import zope.interface
__all__ = [
'asReStructuredText',
'asStructuredText',
]
def asStructuredText(I, munge=0, rst=False):
""" Output structured text format. Note, this will whack any existing
'structured' format of the text.
If `rst=True`, then the output will quote all code as inline literals in
accordance with 'reStructuredText' markup principles.
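
    A minimal usage sketch (``IMyInterface`` stands in for any interface
    object)::

        text = asStructuredText(IMyInterface)
        rest = asReStructuredText(IMyInterface)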
"""
if rst:
inline_literal = lambda s: "``{}``".format(s)
else:
inline_literal = lambda s: s
r = [inline_literal(I.getName())]
outp = r.append
level = 1
if I.getDoc():
outp(_justify_and_indent(_trim_doc_string(I.getDoc()), level))
bases = [base
for base in I.__bases__
if base is not zope.interface.Interface
]
if bases:
outp(_justify_and_indent("This interface extends:", level, munge))
level += 1
for b in bases:
item = "o %s" % inline_literal(b.getName())
outp(_justify_and_indent(_trim_doc_string(item), level, munge))
level -= 1
namesAndDescriptions = sorted(I.namesAndDescriptions())
outp(_justify_and_indent("Attributes:", level, munge))
level += 1
for name, desc in namesAndDescriptions:
if not hasattr(desc, 'getSignatureString'): # ugh...
item = "{} -- {}".format(inline_literal(desc.getName()),
desc.getDoc() or 'no documentation')
outp(_justify_and_indent(_trim_doc_string(item), level, munge))
level -= 1
outp(_justify_and_indent("Methods:", level, munge))
level += 1
for name, desc in namesAndDescriptions:
if hasattr(desc, 'getSignatureString'): # ugh...
_call = "{}{}".format(desc.getName(), desc.getSignatureString())
item = "{} -- {}".format(inline_literal(_call),
desc.getDoc() or 'no documentation')
outp(_justify_and_indent(_trim_doc_string(item), level, munge))
return "\n\n".join(r) + "\n\n"
def asReStructuredText(I, munge=0):
""" Output reStructuredText format. Note, this will whack any existing
'structured' format of the text."""
return asStructuredText(I, munge=munge, rst=True)
def _trim_doc_string(text):
""" Trims a doc string to make it format
correctly with structured text. """
lines = text.replace('\r\n', '\n').split('\n')
nlines = [lines.pop(0)]
if lines:
min_indent = min([len(line) - len(line.lstrip())
for line in lines])
for line in lines:
nlines.append(line[min_indent:])
return '\n'.join(nlines)
def _justify_and_indent(text, level, munge=0, width=72):
""" indent and justify text, rejustify (munge) if specified """
indent = " " * level
if munge:
lines = []
line = indent
text = text.split()
for word in text:
line = ' '.join([line, word])
if len(line) > width:
lines.append(line)
line = indent
else:
lines.append(line)
return '\n'.join(lines)
else:
return indent + \
text.strip().replace("\r\n", "\n").replace("\n", "\n" + indent)
"""Interface Package Interfaces
"""
__docformat__ = 'restructuredtext'
from zope.interface.interface import Attribute
from zope.interface.interface import Interface
from zope.interface.declarations import implementer
__all__ = [
'ComponentLookupError',
'IAdapterRegistration',
'IAdapterRegistry',
'IAttribute',
'IComponentLookup',
'IComponentRegistry',
'IComponents',
'IDeclaration',
'IElement',
'IHandlerRegistration',
'IInterface',
'IInterfaceDeclaration',
'IMethod',
'Invalid',
'IObjectEvent',
'IRegistered',
'IRegistration',
'IRegistrationEvent',
'ISpecification',
'ISubscriptionAdapterRegistration',
'IUnregistered',
'IUtilityRegistration',
'ObjectEvent',
'Registered',
'Unregistered',
]
# pylint:disable=inherit-non-class,no-method-argument,no-self-argument
# pylint:disable=unexpected-special-method-signature
# pylint:disable=too-many-lines
class IElement(Interface):
"""
Objects that have basic documentation and tagged values.
Known derivatives include :class:`IAttribute` and its derivative
:class:`IMethod`; these have no notion of inheritance.
:class:`IInterface` is also a derivative, and it does have a
notion of inheritance, expressed through its ``__bases__`` and
ordered in its ``__iro__`` (both defined by
:class:`ISpecification`).
"""
# pylint:disable=arguments-differ
# Note that defining __doc__ as an Attribute hides the docstring
# from introspection. When changing it, also change it in the Sphinx
# ReST files.
__name__ = Attribute('__name__', 'The object name')
__doc__ = Attribute('__doc__', 'The object doc string')
###
# Tagged values.
#
# Direct values are established in this instance. Others may be
# inherited. Although ``IElement`` itself doesn't have a notion of
# inheritance, ``IInterface`` *does*. It might have been better to
# make ``IInterface`` define new methods
# ``getIndirectTaggedValue``, etc, to include inheritance instead
# of overriding ``getTaggedValue`` to do that, but that ship has sailed.
# So to keep things nice and symmetric, we define the ``Direct`` methods here.
###
def getTaggedValue(tag):
"""Returns the value associated with *tag*.
Raise a `KeyError` if the tag isn't set.
If the object has a notion of inheritance, this searches
through the inheritance hierarchy and returns the nearest result.
If there is no such notion, this looks only at this object.
.. versionchanged:: 4.7.0
This method should respect inheritance if present.
"""
def queryTaggedValue(tag, default=None):
"""
As for `getTaggedValue`, but instead of raising a `KeyError`, returns *default*.
.. versionchanged:: 4.7.0
This method should respect inheritance if present.
"""
def getTaggedValueTags():
"""
Returns a collection of all tags in no particular order.
If the object has a notion of inheritance, this
includes all the inherited tagged values. If there is
no such notion, this looks only at this object.
.. versionchanged:: 4.7.0
This method should respect inheritance if present.
"""
def setTaggedValue(tag, value):
"""
Associates *value* with *key* directly in this object.
"""
def getDirectTaggedValue(tag):
"""
As for `getTaggedValue`, but never includes inheritance.
.. versionadded:: 5.0.0
"""
def queryDirectTaggedValue(tag, default=None):
"""
As for `queryTaggedValue`, but never includes inheritance.
.. versionadded:: 5.0.0
"""
def getDirectTaggedValueTags():
"""
As for `getTaggedValueTags`, but includes only tags directly
set on this object.
.. versionadded:: 5.0.0
"""
class IAttribute(IElement):
"""Attribute descriptors"""
interface = Attribute('interface',
'Stores the interface instance in which the '
'attribute is located.')
class IMethod(IAttribute):
"""Method attributes"""
def getSignatureInfo():
"""Returns the signature information.
This method returns a dictionary with the following string keys:
- positional
A sequence of the names of positional arguments.
- required
A sequence of the names of required arguments.
- optional
A dictionary mapping argument names to their default values.
- varargs
The name of the varargs argument (or None).
- kwargs
The name of the kwargs argument (or None).
"""
def getSignatureString():
"""Return a signature string suitable for inclusion in documentation.
This method returns the function signature string. For example, if you
have ``def func(a, b, c=1, d='f')``, then the signature string is ``"(a, b,
c=1, d='f')"``.
"""
class ISpecification(Interface):
"""Object Behavioral specifications"""
# pylint:disable=arguments-differ
def providedBy(object): # pylint:disable=redefined-builtin
"""Test whether the interface is implemented by the object
Return true if the object asserts that it implements the
interface, including asserting that it implements an extended
interface.
"""
def implementedBy(class_):
"""Test whether the interface is implemented by instances of the class
Return true if the class asserts that its instances implement the
interface, including asserting that they implement an extended
interface.
"""
def isOrExtends(other):
"""Test whether the specification is or extends another
"""
def extends(other, strict=True):
"""Test whether a specification extends another
The specification extends other if it has other as a base
interface or if one of its bases extends other.
If strict is false, then the specification extends itself.
"""
def weakref(callback=None):
"""Return a weakref to the specification
This method is, regrettably, needed to allow weakrefs to be
computed to security-proxied specifications. While the
zope.interface package does not require zope.security or
zope.proxy, it has to be able to coexist with it.
"""
__bases__ = Attribute("""Base specifications
A tuple of specifications from which this specification is
directly derived.
""")
__sro__ = Attribute("""Specification-resolution order
A tuple of the specification and all of its ancestor
specifications from most specific to least specific. The specification
itself is the first element.
(This is similar to the method-resolution order for new-style classes.)
""")
__iro__ = Attribute("""Interface-resolution order
A tuple of the specification's ancestor interfaces from
most specific to least specific. The specification itself is
included if it is an interface.
(This is similar to the method-resolution order for new-style classes.)
""")
def get(name, default=None):
"""Look up the description for a name
If the named attribute is not defined, the default is
returned.
"""
class IInterface(ISpecification, IElement):
"""Interface objects
Interface objects describe the behavior of an object by containing
useful information about the object. This information includes:
- Prose documentation about the object. In Python terms, this
is called the "doc string" of the interface. In this element,
you describe how the object works in prose language and any
other useful information about the object.
- Descriptions of attributes. Attribute descriptions include
the name of the attribute and prose documentation describing
the attribute's usage.
- Descriptions of methods. Method descriptions can include:
- Prose "doc string" documentation about the method and its
usage.
- A description of the method's arguments: how many arguments
are expected, optional arguments and their default values,
the position of arguments in the signature, whether the
method accepts arbitrary arguments and whether the method
accepts arbitrary keyword arguments.
- Optional tagged data. Interface objects (and their attributes and
methods) can have optional, application specific tagged data
associated with them. Example uses for this are examples,
security assertions, pre/post conditions, and other possible
information you may want to associate with an Interface or its
attributes.
Not all of this information is mandatory. For example, you may
only want the methods of your interface to have prose
documentation and not describe the arguments of the method in
exact detail. Interface objects are flexible and let you give or
take any of these components.
Interfaces are created with the Python class statement using
either `zope.interface.Interface` or another interface, as in::
from zope.interface import Interface
class IMyInterface(Interface):
'''Interface documentation'''
def meth(arg1, arg2):
'''Documentation for meth'''
# Note that there is no self argument
class IMySubInterface(IMyInterface):
'''Interface documentation'''
def meth2():
'''Documentation for meth2'''
You use interfaces in two ways:
- You assert that your objects implement the interfaces.
There are several ways that you can declare that an object
provides an interface:
1. Call `zope.interface.implementer` on your class definition.
2. Call `zope.interface.directlyProvides` on your object.
3. Call `zope.interface.classImplements` to declare that instances
of a class implement an interface.
For example::
from zope.interface import classImplements
classImplements(some_class, some_interface)
This approach is useful when it is not an option to modify
the class source. Note that this doesn't affect what the
class itself implements, but only what its instances
implement.
- You query interface meta-data. See the IInterface methods and
attributes for details.
"""
# pylint:disable=arguments-differ
def names(all=False): # pylint:disable=redefined-builtin
"""Get the interface attribute names
Return a collection of the names of the attributes, including
methods, included in the interface definition.
Normally, only directly defined attributes are included. If
a true positional or keyword argument is given, then
attributes defined by base classes will be included.
"""
def namesAndDescriptions(all=False): # pylint:disable=redefined-builtin
"""Get the interface attribute names and descriptions
Return a collection of the names and descriptions of the
attributes, including methods, as name-value pairs, included
in the interface definition.
Normally, only directly defined attributes are included. If
a true positional or keyword argument is given, then
attributes defined by base classes will be included.
"""
def __getitem__(name):
"""Get the description for a name
If the named attribute is not defined, a `KeyError` is raised.
"""
def direct(name):
"""Get the description for the name if it was defined by the interface
If the interface doesn't define the name, returns None.
"""
def validateInvariants(obj, errors=None):
"""Validate invariants
Validate object to defined invariants. If errors is None,
raises first Invalid error; if errors is a list, appends all errors
to list, then raises Invalid with the errors as the first element
of the "args" tuple."""
def __contains__(name):
"""Test whether the name is defined by the interface"""
def __iter__():
"""Return an iterator over the names defined by the interface
The names iterated include all of the names defined by the
interface directly and indirectly by base interfaces.
"""
__module__ = Attribute("""The name of the module defining the interface""")
class IDeclaration(ISpecification):
"""Interface declaration
Declarations are used to express the interfaces implemented by
classes or provided by objects.
"""
def __contains__(interface):
"""Test whether an interface is in the specification
Return true if the given interface is one of the interfaces in
the specification and false otherwise.
"""
def __iter__():
"""Return an iterator for the interfaces in the specification
"""
def flattened():
"""Return an iterator of all included and extended interfaces
An iterator is returned for all interfaces either included in
or extended by interfaces included in the specifications
without duplicates. The interfaces are in "interface
resolution order". The interface resolution order is such that
base interfaces are listed after interfaces that extend them
and, otherwise, interfaces are included in the order that they
were defined in the specification.
"""
def __sub__(interfaces):
"""Create an interface specification with some interfaces excluded
The argument can be an interface or an interface
specifications. The interface or interfaces given in a
specification are subtracted from the interface specification.
Removing an interface that is not in the specification does
not raise an error. Doing so has no effect.
Removing an interface also removes sub-interfaces of the interface.
"""
def __add__(interfaces):
"""Create an interface specification with some interfaces added
The argument can be an interface or an interface
specifications. The interface or interfaces given in a
specification are added to the interface specification.
Adding an interface that is already in the specification does
not raise an error. Doing so has no effect.
"""
def __nonzero__():
"""Return a true value of the interface specification is non-empty
"""
class IInterfaceDeclaration(Interface):
"""
Declare and check the interfaces of objects.
The functions defined in this interface are used to declare the
interfaces that objects provide and to query the interfaces that
have been declared.
Interfaces can be declared for objects in two ways:
- Interfaces are declared for instances of the object's class
- Interfaces are declared for the object directly.
The interfaces declared for an object are, therefore, the union of
interfaces declared for the object directly and the interfaces
declared for instances of the object's class.
Note that we say that a class implements the interfaces provided
by its instances. An instance can also provide interfaces
directly. The interfaces provided by an object are the union of
the interfaces provided directly and the interfaces implemented by
the class.
This interface is implemented by :mod:`zope.interface`.
"""
# pylint:disable=arguments-differ
###
# Defining interfaces
###
Interface = Attribute("The base class used to create new interfaces")
def taggedValue(key, value):
"""
Attach a tagged value to an interface while defining the interface.
This is a way of executing :meth:`IElement.setTaggedValue` from
the definition of the interface. For example::
class IFoo(Interface):
taggedValue('key', 'value')
.. seealso:: `zope.interface.taggedValue`
"""
def invariant(checker_function):
"""
Attach an invariant checker function to an interface while defining it.
Invariants can later be validated against particular implementations by
calling :meth:`IInterface.validateInvariants`.
For example::
def check_range(ob):
if ob.max < ob.min:
raise ValueError("max value is less than min value")
class IRange(Interface):
min = Attribute("The min value")
max = Attribute("The max value")
invariant(check_range)
.. seealso:: `zope.interface.invariant`
"""
def interfacemethod(method):
"""
A decorator that transforms a method specification into an
implementation method.
This is used to override methods of ``Interface`` or provide new methods.
Definitions using this decorator will not appear in :meth:`IInterface.names()`.
It is possible to have an implementation method and a method specification
of the same name.
For example::
class IRange(Interface):
@interfacemethod
def __adapt__(self, obj):
if isinstance(obj, range):
# Return the builtin ``range`` as-is
return obj
return super(type(IRange), self).__adapt__(obj)
You can use ``super`` to call the parent class functionality. Note that
the zero-argument version (``super().__adapt__``) works on Python 3.6 and above, but
prior to that the two-argument version must be used, and the class must be explicitly
passed as the first argument.
.. versionadded:: 5.1.0
.. seealso:: `zope.interface.interfacemethod`
"""
###
# Querying interfaces
###
def providedBy(ob):
"""
Return the interfaces provided by an object.
This is the union of the interfaces directly provided by an
object and interfaces implemented by its class.
The value returned is an `IDeclaration`.
.. seealso:: `zope.interface.providedBy`
"""
def implementedBy(class_):
"""
Return the interfaces implemented for a class's instances.
The value returned is an `IDeclaration`.
.. seealso:: `zope.interface.implementedBy`
"""
###
# Declaring interfaces
###
def classImplements(class_, *interfaces):
"""
Declare additional interfaces implemented for instances of a class.
The arguments after the class are one or more interfaces or
interface specifications (`IDeclaration` objects).
The interfaces given (including the interfaces in the
specifications) are added to any interfaces previously
declared.
Consider the following example::
class C(A, B):
...
classImplements(C, I1, I2)
Instances of ``C`` provide ``I1``, ``I2``, and whatever interfaces
instances of ``A`` and ``B`` provide. This is equivalent to::
@implementer(I1, I2)
class C(A, B):
pass
.. seealso:: `zope.interface.classImplements`
.. seealso:: `zope.interface.implementer`
"""
def classImplementsFirst(cls, interface):
"""
See :func:`zope.interface.classImplementsFirst`.
"""
def implementer(*interfaces):
"""
Create a decorator for declaring interfaces implemented by a
factory.
A callable is returned that makes an implements declaration on
objects passed to it.
.. seealso:: :meth:`classImplements`
"""
def classImplementsOnly(class_, *interfaces):
"""
Declare the only interfaces implemented by instances of a class.
The arguments after the class are one or more interfaces or
interface specifications (`IDeclaration` objects).
The interfaces given (including the interfaces in the
specifications) replace any previous declarations.
Consider the following example::
class C(A, B):
...
classImplements(C, IA, IB, IC)
classImplementsOnly(C, I1, I2)
Instances of ``C`` provide only ``I1`` and ``I2``, regardless of
whatever interfaces instances of ``A`` and ``B`` implement.
.. seealso:: `zope.interface.classImplementsOnly`
"""
def implementer_only(*interfaces):
"""
Create a decorator for declaring the only interfaces implemented.
A callable is returned that makes an implements declaration on
objects passed to it.
.. seealso:: `zope.interface.implementer_only`
"""
def directlyProvidedBy(object): # pylint:disable=redefined-builtin
"""
Return the interfaces directly provided by the given object.
The value returned is an `IDeclaration`.
.. seealso:: `zope.interface.directlyProvidedBy`
"""
def directlyProvides(object, *interfaces): # pylint:disable=redefined-builtin
"""
Declare interfaces declared directly for an object.
The arguments after the object are one or more interfaces or
interface specifications (`IDeclaration` objects).
.. caution::
The interfaces given (including the interfaces in the
specifications) *replace* interfaces previously
declared for the object. See :meth:`alsoProvides` to add
additional interfaces.
Consider the following example::
class C(A, B):
...
ob = C()
directlyProvides(ob, I1, I2)
The object, ``ob`` provides ``I1``, ``I2``, and whatever interfaces
instances have been declared for instances of ``C``.
To remove directly provided interfaces, use `directlyProvidedBy` and
subtract the unwanted interfaces. For example::
directlyProvides(ob, directlyProvidedBy(ob)-I2)
removes I2 from the interfaces directly provided by
``ob``. The object, ``ob`` no longer directly provides ``I2``,
although it might still provide ``I2`` if its class
implements ``I2``.
To add directly provided interfaces, use `directlyProvidedBy` and
include additional interfaces. For example::
directlyProvides(ob, directlyProvidedBy(ob), I2)
adds I2 to the interfaces directly provided by ob.
.. seealso:: `zope.interface.directlyProvides`
"""
def alsoProvides(object, *interfaces): # pylint:disable=redefined-builtin
"""
Declare additional interfaces directly for an object.
For example::
alsoProvides(ob, I1)
is equivalent to::
directlyProvides(ob, directlyProvidedBy(ob), I1)
.. seealso:: `zope.interface.alsoProvides`
"""
def noLongerProvides(object, interface): # pylint:disable=redefined-builtin
"""
Remove an interface from the list of an object's directly provided
interfaces.
For example::
noLongerProvides(ob, I1)
is equivalent to::
directlyProvides(ob, directlyProvidedBy(ob) - I1)
with the exception that if ``I1`` is an interface that is
provided by ``ob`` through the class's implementation,
`ValueError` is raised.
.. seealso:: `zope.interface.noLongerProvides`
"""
def provider(*interfaces):
"""
Declare interfaces provided directly by a class.
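
        For example (a minimal sketch; ``IFooFactory`` is a hypothetical
        interface)::

            @provider(IFooFactory)
            class FooFactory:
                pass

        ``FooFactory`` itself (the class object) then provides
        ``IFooFactory``.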
.. seealso:: `zope.interface.provider`
"""
def moduleProvides(*interfaces):
"""
Declare interfaces provided by a module.
This function is used in a module definition.
The arguments are one or more interfaces or interface
specifications (`IDeclaration` objects).
The given interfaces (including the interfaces in the
specifications) are used to create the module's direct-object
interface specification. An error will be raised if the module
already has an interface specification. In other words, it is
an error to call this function more than once in a module
definition.
This function is provided for convenience. It provides a more
convenient way to call `directlyProvides` for a module. For example::
moduleProvides(I1)
is equivalent to::
directlyProvides(sys.modules[__name__], I1)
.. seealso:: `zope.interface.moduleProvides`
"""
def Declaration(*interfaces):
"""
Create an interface specification.
The arguments are one or more interfaces or interface
specifications (`IDeclaration` objects).
A new interface specification (`IDeclaration`) with the given
interfaces is returned.
.. seealso:: `zope.interface.Declaration`
"""
class IAdapterRegistry(Interface):
"""Provide an interface-based registry for adapters
This registry registers objects that are in some sense "from" a
sequence of specifications to an interface and a name.
No specific semantics are assumed for the registered objects;
however, the most common application will be to register factories
that adapt objects providing required specifications to a provided
interface.
"""
def register(required, provided, name, value):
"""Register a value
A value is registered for a *sequence* of required specifications, a
provided interface, and a name, which must be text.
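
        For example (a minimal sketch; ``IFoo``, ``IBar`` and ``factory``
        are hypothetical)::

            registry.register([IFoo], IBar, '', factory)
            registry.lookup([IFoo], IBar) is factory  # -> True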
"""
def registered(required, provided, name=''):
"""Return the component registered for the given interfaces and name
name must be text.
Unlike the lookup method, this method won't retrieve
components registered for more specific required interfaces or
less specific provided interfaces.
If no component was registered exactly for the given
interfaces and name, then None is returned.
"""
def lookup(required, provided, name='', default=None):
"""Lookup a value
A value is looked up based on a *sequence* of required
specifications, a provided interface, and a name, which must be
text.
"""
def queryMultiAdapter(objects, provided, name='', default=None):
"""Adapt a sequence of objects to a named, provided, interface
"""
def lookup1(required, provided, name='', default=None):
"""Lookup a value using a single required interface
A value is looked up based on a single required
specifications, a provided interface, and a name, which must be
text.
"""
def queryAdapter(object, provided, name='', default=None): # pylint:disable=redefined-builtin
"""Adapt an object using a registered adapter factory.
"""
def adapter_hook(provided, object, name='', default=None): # pylint:disable=redefined-builtin
"""Adapt an object using a registered adapter factory.
name must be text.
"""
def lookupAll(required, provided):
"""Find all adapters from the required to the provided interfaces
An iterable object is returned that provides name-value two-tuples.
"""
def names(required, provided): # pylint:disable=arguments-differ
"""Return the names for which there are registered objects
"""
def subscribe(required, provided, subscriber): # pylint:disable=arguments-differ
"""Register a subscriber
A subscriber is registered for a *sequence* of required
specifications, a provided interface, and a name.
Multiple subscribers may be registered for the same (or
equivalent) interfaces.
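
        For example (a minimal sketch; ``IFoo``, ``IBar`` and ``subscriber``
        are hypothetical)::

            registry.subscribe([IFoo], IBar, subscriber)
            subscriber in registry.subscriptions([IFoo], IBar)  # -> True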
.. versionchanged:: 5.1.1
Correct the method signature to remove the ``name`` parameter.
Subscribers have no names.
"""
def subscribed(required, provided, subscriber):
"""
Check whether the object *subscriber* is registered directly
with this object via a previous call to
``subscribe(required, provided, subscriber)``.
If the *subscriber*, or one equal to it, has been subscribed,
for the given *required* sequence and *provided* interface,
return that object. (This does not guarantee whether the *subscriber*
itself is returned, or an object equal to it.)
If it has not, return ``None``.
Unlike :meth:`subscriptions`, this method won't retrieve
components registered for more specific required interfaces or
less specific provided interfaces.
.. versionadded:: 5.3.0
"""
def subscriptions(required, provided):
"""
Get a sequence of subscribers.
Subscribers for a sequence of *required* interfaces, and a *provided*
interface are returned. This takes into account subscribers
registered with this object, as well as those registered with
base adapter registries in the resolution order, and interfaces that
extend *provided*.
.. versionchanged:: 5.1.1
Correct the method signature to remove the ``name`` parameter.
Subscribers have no names.
"""
def subscribers(objects, provided):
"""
Get a sequence of subscription **adapters**.
This is like :meth:`subscriptions`, but calls the returned
subscribers with *objects* (and optionally returns the results
of those calls), instead of returning the subscribers directly.
:param objects: A sequence of objects; they will be used to
determine the *required* argument to :meth:`subscriptions`.
:param provided: A single interface, or ``None``, to pass
as the *provided* parameter to :meth:`subscriptions`.
If an interface is given, the results of calling each returned
subscriber with the *objects* are collected and returned
from this method; each result should be an object implementing
the *provided* interface. If ``None``, the resulting subscribers
are still called, but the results are ignored.
:return: A sequence of the results of calling the subscribers
if *provided* is not ``None``. If there are no registered
subscribers, or *provided* is ``None``, this will be an empty
sequence.
.. versionchanged:: 5.1.1
Correct the method signature to remove the ``name`` parameter.
Subscribers have no names.
"""
# begin formerly in zope.component
class ComponentLookupError(LookupError):
"""A component could not be found."""
class Invalid(Exception):
"""A component doesn't satisfy a promise."""
class IObjectEvent(Interface):
"""An event related to an object.
The object that generated this event is not necessarily the object
referred to by location.
"""
object = Attribute("The subject of the event.")
@implementer(IObjectEvent)
class ObjectEvent:
def __init__(self, object): # pylint:disable=redefined-builtin
self.object = object
class IComponentLookup(Interface):
"""Component Manager for a Site
This object manages the components registered at a particular site. The
definition of a site is intentionally vague.
"""
adapters = Attribute(
"Adapter Registry to manage all registered adapters.")
utilities = Attribute(
"Adapter Registry to manage all registered utilities.")
def queryAdapter(object, interface, name='', default=None): # pylint:disable=redefined-builtin
"""Look for a named adapter to an interface for an object
If a matching adapter cannot be found, returns the default.
"""
def getAdapter(object, interface, name=''): # pylint:disable=redefined-builtin
"""Look for a named adapter to an interface for an object
If a matching adapter cannot be found, a `ComponentLookupError`
is raised.
"""
def queryMultiAdapter(objects, interface, name='', default=None):
"""Look for a multi-adapter to an interface for multiple objects
If a matching adapter cannot be found, returns the default.
"""
def getMultiAdapter(objects, interface, name=''):
"""Look for a multi-adapter to an interface for multiple objects
If a matching adapter cannot be found, a `ComponentLookupError`
is raised.
"""
def getAdapters(objects, provided):
"""Look for all matching adapters to a provided interface for objects
Return an iterable of name-adapter pairs for adapters that
provide the given interface.
"""
def subscribers(objects, provided):
"""Get subscribers
Subscribers are returned that provide the provided interface
and that depend on and are computed from the sequence of
required objects.
"""
def handle(*objects):
"""Call handlers for the given objects
Handlers registered for the given objects are called.
"""
def queryUtility(interface, name='', default=None):
"""Look up a utility that provides an interface.
If one is not found, returns default.
"""
def getUtilitiesFor(interface):
"""Look up the registered utilities that provide an interface.
Returns an iterable of name-utility pairs.
"""
def getAllUtilitiesRegisteredFor(interface):
"""Return all registered utilities for an interface
This includes overridden utilities.
An iterable of utility instances is returned. No names are
returned.
"""
class IRegistration(Interface):
"""A registration-information object
"""
registry = Attribute("The registry having the registration")
name = Attribute("The registration name")
info = Attribute("""Information about the registration
This is information deemed useful to people browsing the
configuration of a system. It could, for example, include
commentary or information about the source of the configuration.
""")
class IUtilityRegistration(IRegistration):
"""Information about the registration of a utility
"""
factory = Attribute("The factory used to create the utility. Optional.")
component = Attribute("The object registered")
provided = Attribute("The interface provided by the component")
class _IBaseAdapterRegistration(IRegistration):
"""Information about the registration of an adapter
"""
factory = Attribute("The factory used to create adapters")
required = Attribute("""The adapted interfaces
This is a sequence of interfaces adapted by the registered
factory. The factory will be called with a sequence of objects, as
positional arguments, that provide these interfaces.
""")
provided = Attribute("""The interface provided by the adapters.
This interface is implemented by the factory
""")
class IAdapterRegistration(_IBaseAdapterRegistration):
"""Information about the registration of an adapter
"""
class ISubscriptionAdapterRegistration(_IBaseAdapterRegistration):
"""Information about the registration of a subscription adapter
"""
class IHandlerRegistration(IRegistration):
handler = Attribute("An object called used to handle an event")
required = Attribute("""The handled interfaces
This is a sequence of interfaces handled by the registered
handler. The handler will be called with a sequence of objects, as
positional arguments, that provide these interfaces.
""")
class IRegistrationEvent(IObjectEvent):
"""An event that involves a registration"""
@implementer(IRegistrationEvent)
class RegistrationEvent(ObjectEvent):
"""There has been a change in a registration
"""
def __repr__(self):
return "{} event:\n{!r}".format(self.__class__.__name__, self.object)
class IRegistered(IRegistrationEvent):
"""A component or factory was registered
"""
@implementer(IRegistered)
class Registered(RegistrationEvent):
pass
class IUnregistered(IRegistrationEvent):
"""A component or factory was unregistered
"""
@implementer(IUnregistered)
class Unregistered(RegistrationEvent):
"""A component or factory was unregistered
"""
class IComponentRegistry(Interface):
"""Register components
"""
def registerUtility(component=None, provided=None, name='',
info='', factory=None):
"""Register a utility
:param factory:
Factory for the component to be registered.
:param component:
The registered component
:param provided:
This is the interface provided by the utility. If the
component provides a single interface, then this
argument is optional and the component-implemented
interface will be used.
:param name:
The utility name.
:param info:
An object that can be converted to a string to provide
information about the registration.
Only one of *component* and *factory* can be used.
A `IRegistered` event is generated with an `IUtilityRegistration`.
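
        For example (a minimal sketch; ``IMailer``, ``mailer`` and ``Mailer``
        are hypothetical)::

            registry.registerUtility(mailer, IMailer)
            registry.registerUtility(factory=Mailer, provided=IMailer,
                                     name='smtp')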
"""
def unregisterUtility(component=None, provided=None, name='',
factory=None):
"""Unregister a utility
:returns:
A boolean is returned indicating whether the registry was
changed. If the given *component* is None and there is no
component registered, or if the given *component* is not
None and is not registered, then the function returns
False, otherwise it returns True.
:param factory:
Factory for the component to be unregistered.
:param component:
The registered component. The given component can be
None, in which case any component registered to provide
the given provided interface with the given name is
unregistered.
:param provided:
This is the interface provided by the utility. If the
component is not None and provides a single interface,
then this argument is optional and the
component-implemented interface will be used.
:param name:
The utility name.
Only one of *component* and *factory* can be used.
An `IUnregistered` event is generated with an `IUtilityRegistration`.
"""
def registeredUtilities():
"""Return an iterable of `IUtilityRegistration` instances.
These registrations describe the current utility registrations
in the object.
"""
def registerAdapter(factory, required=None, provided=None, name='',
info=''):
"""Register an adapter factory
:param factory:
The object used to compute the adapter
:param required:
This is a sequence of specifications for objects to be
adapted. If omitted, then the value of the factory's
``__component_adapts__`` attribute will be used. The
``__component_adapts__`` attribute is
normally set in class definitions using
the `.adapter`
decorator. If the factory doesn't have a
``__component_adapts__`` attribute, then this
argument is required.
:param provided:
This is the interface provided by the adapter and
implemented by the factory. If the factory
implements a single interface, then this argument is
optional and the factory-implemented interface will be
used.
:param name:
The adapter name.
:param info:
An object that can be converted to a string to provide
information about the registration.
A `IRegistered` event is generated with an `IAdapterRegistration`.
"""
def unregisterAdapter(factory=None, required=None,
provided=None, name=''):
"""Unregister an adapter factory
:returns:
A boolean is returned indicating whether the registry was
changed. If the given component is None and there is no
component registered, or if the given component is not
None and is not registered, then the function returns
False, otherwise it returns True.
:param factory:
This is the object used to compute the adapter. The
factory can be None, in which case any factory
registered to implement the given provided interface
for the given required specifications with the given
name is unregistered.
:param required:
This is a sequence of specifications for objects to be
adapted. If the factory is not None and the required
argument is omitted, then the value of the factory's
__component_adapts__ attribute will be used. The
__component_adapts__ attribute is normally
set in class definitions using the adapts function, or for
callables using the adapter decorator. If the factory
is None or doesn't have a __component_adapts__
attribute, then this argument is required.
:param provided:
This is the interface provided by the adapter and
implemented by the factory. If the factory is not
None and implements a single interface, then this
argument is optional and the factory-implemented
interface will be used.
:param name:
The adapter name.
An `IUnregistered` event is generated with an `IAdapterRegistration`.
"""
def registeredAdapters():
"""Return an iterable of `IAdapterRegistration` instances.
These registrations describe the current adapter registrations
in the object.
"""
def registerSubscriptionAdapter(factory, required=None, provides=None,
name='', info=''):
"""Register a subscriber factory
:param factory:
The object used to compute the adapter
:param required:
This is a sequence of specifications for objects to be
adapted. If omitted, then the value of the factory's
``__component_adapts__`` attribute will be used. The
``__component_adapts__`` attribute is
normally set using the adapter
decorator. If the factory doesn't have a
``__component_adapts__`` attribute, then this
argument is required.
:param provided:
This is the interface provided by the adapter and
implemented by the factory. If the factory implements
a single interface, then this argument is optional and
the factory-implemented interface will be used.
:param name:
The adapter name.
Currently, only the empty string is accepted. Other
strings will be accepted in the future when support for
named subscribers is added.
:param info:
An object that can be converted to a string to provide
information about the registration.
A `IRegistered` event is generated with an
`ISubscriptionAdapterRegistration`.
"""
def unregisterSubscriptionAdapter(factory=None, required=None,
provides=None, name=''):
"""Unregister a subscriber factory.
:returns:
A boolean is returned indicating whether the registry was
changed. If the given component is None and there is no
component registered, or if the given component is not
None and is not registered, then the function returns
False, otherwise it returns True.
:param factory:
This is the object used to compute the adapter. The
factory can be None, in which case any factories
registered to implement the given provided interface
for the given required specifications with the given
name are unregistered.
:param required:
This is a sequence of specifications for objects to be
adapted. If omitted, then the value of the factory's
``__component_adapts__`` attribute will be used. The
``__component_adapts__`` attribute is
normally set using the adapter
decorator. If the factory doesn't have a
``__component_adapts__`` attribute, then this
argument is required.
:param provided:
This is the interface provided by the adapter and
implemented by the factory. If the factory is not
None and implements a single interface, then this argument
is optional and the factory-implemented interface will
be used.
:param name:
The adapter name.
Currently, only the empty string is accepted. Other
strings will be accepted in the future when support for
named subscribers is added.
An `IUnregistered` event is generated with an
`ISubscriptionAdapterRegistration`.
"""
def registeredSubscriptionAdapters():
"""Return an iterable of `ISubscriptionAdapterRegistration` instances.
These registrations describe the current subscription adapter
registrations in the object.
"""
def registerHandler(handler, required=None, name='', info=''):
"""Register a handler.
A handler is a subscriber that doesn't compute an adapter
but performs some function when called.
:param handler:
The object used to handle some event represented by
the objects passed to it.
:param required:
This is a sequence of specifications for objects to be
adapted. If omitted, then the value of the factory's
``__component_adapts__`` attribute will be used. The
``__component_adapts__`` attribute is
normally set using the adapter
decorator. If the factory doesn't have a
``__component_adapts__`` attribute, then this
argument is required.
:param name:
The handler name.
Currently, only the empty string is accepted. Other
strings will be accepted in the future when support for
named handlers is added.
:param info:
An object that can be converted to a string to provide
information about the registration.
A `IRegistered` event is generated with an `IHandlerRegistration`.
"""
def unregisterHandler(handler=None, required=None, name=''):
"""Unregister a handler.
A handler is a subscriber that doesn't compute an adapter
but performs some function when called.
:returns: A boolean is returned indicating whether the registry was
changed.
:param handler:
This is the object used to handle some event
represented by the objects passed to it. The handler
can be None, in which case any handlers registered for
the given required specifications with the given name are
unregistered.
:param required:
This is a sequence of specifications for objects to be
adapted. If omitted, then the value of the factory's
``__component_adapts__`` attribute will be used. The
``__component_adapts__`` attribute is
normally set using the adapter
decorator. If the factory doesn't have a
``__component_adapts__`` attribute, then this
argument is required.
:param name:
The handler name.
Currently, only the empty string is accepted. Other
strings will be accepted in the future when support for
named handlers is added.
An `IUnregistered` event is generated with an `IHandlerRegistration`.
"""
def registeredHandlers():
"""Return an iterable of `IHandlerRegistration` instances.
These registrations describe the current handler registrations
in the object.
"""
class IComponents(IComponentLookup, IComponentRegistry):
"""Component registration and access
"""
# end formerly in zope.component | zope.interface | /zope.interface-6.1a2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl/zope/interface/interfaces.py | interfaces.py |
"""Basic components support
"""
from collections import defaultdict
try:
from zope.event import notify
except ImportError: # pragma: no cover
def notify(*arg, **kw): pass
from zope.interface.interfaces import ISpecification
from zope.interface.interfaces import ComponentLookupError
from zope.interface.interfaces import IAdapterRegistration
from zope.interface.interfaces import IComponents
from zope.interface.interfaces import IHandlerRegistration
from zope.interface.interfaces import ISubscriptionAdapterRegistration
from zope.interface.interfaces import IUtilityRegistration
from zope.interface.interfaces import Registered
from zope.interface.interfaces import Unregistered
from zope.interface.interface import Interface
from zope.interface.declarations import implementedBy
from zope.interface.declarations import implementer
from zope.interface.declarations import implementer_only
from zope.interface.declarations import providedBy
from zope.interface.adapter import AdapterRegistry
__all__ = [
# Components is public API, but
# the *Registration classes are just implementations
# of public interfaces.
'Components',
]
class _UnhashableComponentCounter:
# defaultdict(int)-like object for unhashable components
def __init__(self, otherdict):
# [(component, count)]
self._data = [item for item in otherdict.items()]
def __getitem__(self, key):
for component, count in self._data:
if component == key:
return count
return 0
def __setitem__(self, component, count):
for i, data in enumerate(self._data):
if data[0] == component:
self._data[i] = component, count
return
self._data.append((component, count))
def __delitem__(self, component):
for i, data in enumerate(self._data):
if data[0] == component:
del self._data[i]
return
raise KeyError(component) # pragma: no cover
def _defaultdict_int():
return defaultdict(int)
class _UtilityRegistrations:
def __init__(self, utilities, utility_registrations):
# {provided -> {component: count}}
self._cache = defaultdict(_defaultdict_int)
self._utilities = utilities
self._utility_registrations = utility_registrations
self.__populate_cache()
def __populate_cache(self):
for ((p, _), data) in iter(self._utility_registrations.items()):
component = data[0]
self.__cache_utility(p, component)
def __cache_utility(self, provided, component):
try:
self._cache[provided][component] += 1
except TypeError:
# The component is not hashable, and we have a dict. Switch to a strategy
# that doesn't use hashing.
prov = self._cache[provided] = _UnhashableComponentCounter(self._cache[provided])
prov[component] += 1
def __uncache_utility(self, provided, component):
provided = self._cache[provided]
# It seems like this line could raise a TypeError if component isn't
# hashable and we haven't yet switched to _UnhashableComponentCounter. However,
# we can't actually get in that situation. In order to get here, we would
# have had to cache the utility already which would have switched
# the datastructure if needed.
count = provided[component]
count -= 1
if count == 0:
del provided[component]
else:
provided[component] = count
return count > 0
def _is_utility_subscribed(self, provided, component):
try:
return self._cache[provided][component] > 0
except TypeError:
# Not hashable and we're still using a dict
return False
def registerUtility(self, provided, name, component, info, factory):
subscribed = self._is_utility_subscribed(provided, component)
self._utility_registrations[(provided, name)] = component, info, factory
self._utilities.register((), provided, name, component)
if not subscribed:
self._utilities.subscribe((), provided, component)
self.__cache_utility(provided, component)
def unregisterUtility(self, provided, name, component):
del self._utility_registrations[(provided, name)]
self._utilities.unregister((), provided, name)
subscribed = self.__uncache_utility(provided, component)
if not subscribed:
self._utilities.unsubscribe((), provided, component)
@implementer(IComponents)
class Components:
_v_utility_registrations_cache = None
def __init__(self, name='', bases=()):
# __init__ is used for test cleanup as well as initialization.
# XXX add a separate API for test cleanup.
assert isinstance(name, str)
self.__name__ = name
self._init_registries()
self._init_registrations()
self.__bases__ = tuple(bases)
self._v_utility_registrations_cache = None
def __repr__(self):
return "<{} {}>".format(self.__class__.__name__, self.__name__)
def __reduce__(self):
# Mimic what a persistent.Persistent object does and elide
# _v_ attributes so that they don't get saved in ZODB.
# This allows us to store things that cannot be pickled in such
# attributes.
reduction = super().__reduce__()
# (callable, args, state, listiter, dictiter)
# We assume the state is always a dict; the last three items
# are technically optional and can be missing or None.
filtered_state = {k: v for k, v in reduction[2].items()
if not k.startswith('_v_')}
reduction = list(reduction)
reduction[2] = filtered_state
return tuple(reduction)
def _init_registries(self):
# Subclasses have never been required to call this method
# if they override it, merely to fill in these two attributes.
self.adapters = AdapterRegistry()
self.utilities = AdapterRegistry()
def _init_registrations(self):
self._utility_registrations = {}
self._adapter_registrations = {}
self._subscription_registrations = []
self._handler_registrations = []
@property
def _utility_registrations_cache(self):
# We use a _v_ attribute internally so that data aren't saved in ZODB,
# because this object cannot be pickled.
cache = self._v_utility_registrations_cache
if (cache is None
or cache._utilities is not self.utilities
or cache._utility_registrations is not self._utility_registrations):
cache = self._v_utility_registrations_cache = _UtilityRegistrations(
self.utilities,
self._utility_registrations)
return cache
def _getBases(self):
# Subclasses might override
return self.__dict__.get('__bases__', ())
def _setBases(self, bases):
# Subclasses might override
self.adapters.__bases__ = tuple([
base.adapters for base in bases])
self.utilities.__bases__ = tuple([
base.utilities for base in bases])
self.__dict__['__bases__'] = tuple(bases)
__bases__ = property(
lambda self: self._getBases(),
lambda self, bases: self._setBases(bases),
)
def registerUtility(self, component=None, provided=None, name='',
info='', event=True, factory=None):
if factory:
if component:
raise TypeError("Can't specify factory and component.")
component = factory()
if provided is None:
provided = _getUtilityProvided(component)
if name == '':
name = _getName(component)
reg = self._utility_registrations.get((provided, name))
if reg is not None:
if reg[:2] == (component, info):
# already registered
return
self.unregisterUtility(reg[0], provided, name)
self._utility_registrations_cache.registerUtility(
provided, name, component, info, factory)
if event:
notify(Registered(
UtilityRegistration(self, provided, name, component, info,
factory)
))
def unregisterUtility(self, component=None, provided=None, name='',
factory=None):
if factory:
if component:
raise TypeError("Can't specify factory and component.")
component = factory()
if provided is None:
if component is None:
raise TypeError("Must specify one of component, factory and "
"provided")
provided = _getUtilityProvided(component)
old = self._utility_registrations.get((provided, name))
if (old is None) or ((component is not None) and
(component != old[0])):
return False
if component is None:
component = old[0]
# Note that component is now the old thing registered
self._utility_registrations_cache.unregisterUtility(
provided, name, component)
notify(Unregistered(
UtilityRegistration(self, provided, name, component, *old[1:])
))
return True
def registeredUtilities(self):
for ((provided, name), data
) in iter(self._utility_registrations.items()):
yield UtilityRegistration(self, provided, name, *data)
def queryUtility(self, provided, name='', default=None):
return self.utilities.lookup((), provided, name, default)
def getUtility(self, provided, name=''):
utility = self.utilities.lookup((), provided, name)
if utility is None:
raise ComponentLookupError(provided, name)
return utility
def getUtilitiesFor(self, interface):
yield from self.utilities.lookupAll((), interface)
def getAllUtilitiesRegisteredFor(self, interface):
return self.utilities.subscriptions((), interface)
def registerAdapter(self, factory, required=None, provided=None,
name='', info='', event=True):
if provided is None:
provided = _getAdapterProvided(factory)
required = _getAdapterRequired(factory, required)
if name == '':
name = _getName(factory)
self._adapter_registrations[(required, provided, name)
] = factory, info
self.adapters.register(required, provided, name, factory)
if event:
notify(Registered(
AdapterRegistration(self, required, provided, name,
factory, info)
))
def unregisterAdapter(self, factory=None,
required=None, provided=None, name='',
):
if provided is None:
if factory is None:
raise TypeError("Must specify one of factory and provided")
provided = _getAdapterProvided(factory)
if (required is None) and (factory is None):
raise TypeError("Must specify one of factory and required")
required = _getAdapterRequired(factory, required)
old = self._adapter_registrations.get((required, provided, name))
if (old is None) or ((factory is not None) and
(factory != old[0])):
return False
del self._adapter_registrations[(required, provided, name)]
self.adapters.unregister(required, provided, name)
notify(Unregistered(
AdapterRegistration(self, required, provided, name,
*old)
))
return True
def registeredAdapters(self):
for ((required, provided, name), (component, info)
) in iter(self._adapter_registrations.items()):
yield AdapterRegistration(self, required, provided, name,
component, info)
def queryAdapter(self, object, interface, name='', default=None):
return self.adapters.queryAdapter(object, interface, name, default)
def getAdapter(self, object, interface, name=''):
adapter = self.adapters.queryAdapter(object, interface, name)
if adapter is None:
raise ComponentLookupError(object, interface, name)
return adapter
def queryMultiAdapter(self, objects, interface, name='',
default=None):
return self.adapters.queryMultiAdapter(
objects, interface, name, default)
def getMultiAdapter(self, objects, interface, name=''):
adapter = self.adapters.queryMultiAdapter(objects, interface, name)
if adapter is None:
raise ComponentLookupError(objects, interface, name)
return adapter
def getAdapters(self, objects, provided):
for name, factory in self.adapters.lookupAll(
list(map(providedBy, objects)),
provided):
adapter = factory(*objects)
if adapter is not None:
yield name, adapter
def registerSubscriptionAdapter(self,
factory, required=None, provided=None,
name='', info='',
event=True):
if name:
raise TypeError("Named subscribers are not yet supported")
if provided is None:
provided = _getAdapterProvided(factory)
required = _getAdapterRequired(factory, required)
self._subscription_registrations.append(
(required, provided, name, factory, info)
)
self.adapters.subscribe(required, provided, factory)
if event:
notify(Registered(
SubscriptionRegistration(self, required, provided, name,
factory, info)
))
def registeredSubscriptionAdapters(self):
for data in self._subscription_registrations:
yield SubscriptionRegistration(self, *data)
def unregisterSubscriptionAdapter(self, factory=None,
required=None, provided=None, name='',
):
if name:
raise TypeError("Named subscribers are not yet supported")
if provided is None:
if factory is None:
raise TypeError("Must specify one of factory and provided")
provided = _getAdapterProvided(factory)
if (required is None) and (factory is None):
raise TypeError("Must specify one of factory and required")
required = _getAdapterRequired(factory, required)
if factory is None:
new = [(r, p, n, f, i)
for (r, p, n, f, i)
in self._subscription_registrations
if not (r == required and p == provided)
]
else:
new = [(r, p, n, f, i)
for (r, p, n, f, i)
in self._subscription_registrations
if not (r == required and p == provided and f == factory)
]
if len(new) == len(self._subscription_registrations):
return False
self._subscription_registrations[:] = new
self.adapters.unsubscribe(required, provided, factory)
notify(Unregistered(
SubscriptionRegistration(self, required, provided, name,
factory, '')
))
return True
def subscribers(self, objects, provided):
return self.adapters.subscribers(objects, provided)
def registerHandler(self,
factory, required=None,
name='', info='',
event=True):
if name:
raise TypeError("Named handlers are not yet supported")
required = _getAdapterRequired(factory, required)
self._handler_registrations.append(
(required, name, factory, info)
)
self.adapters.subscribe(required, None, factory)
if event:
notify(Registered(
HandlerRegistration(self, required, name, factory, info)
))
def registeredHandlers(self):
for data in self._handler_registrations:
yield HandlerRegistration(self, *data)
def unregisterHandler(self, factory=None, required=None, name=''):
if name:
raise TypeError("Named subscribers are not yet supported")
if (required is None) and (factory is None):
raise TypeError("Must specify one of factory and required")
required = _getAdapterRequired(factory, required)
if factory is None:
new = [(r, n, f, i)
for (r, n, f, i)
in self._handler_registrations
if r != required
]
else:
new = [(r, n, f, i)
for (r, n, f, i)
in self._handler_registrations
if not (r == required and f == factory)
]
if len(new) == len(self._handler_registrations):
return False
self._handler_registrations[:] = new
self.adapters.unsubscribe(required, None, factory)
notify(Unregistered(
HandlerRegistration(self, required, name, factory, '')
))
return True
def handle(self, *objects):
self.adapters.subscribers(objects, None)
def rebuildUtilityRegistryFromLocalCache(self, rebuild=False):
"""
Emergency maintenance method to rebuild the ``.utilities``
registry from the local copy maintained in this object, or
detect the need to do so.
Most users will never need to call this, but it can be helpful
in the event of suspected corruption.
By default, this method only checks for corruption. To make it
actually rebuild the registry, pass `True` for *rebuild*.
:param bool rebuild: If set to `True` (not the default),
this method will actually register and subscribe utilities
in the registry as needed to synchronize with the local cache.
        :return: A dictionary of diagnostic data. The keys
           and values may change over time. When called with a false *rebuild*,
           the values for the keys ``"needed_registered"`` and ``"needed_subscribed"``
           will be non-zero if any corruption was detected, but the corruption
           will not be corrected.
.. versionadded:: 5.3.0
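
        For example, a maintenance script might first check for corruption
        and only rebuild when something was found (an illustrative sketch;
        ``components`` stands for an existing ``Components`` instance)::

            report = components.rebuildUtilityRegistryFromLocalCache()
            if report['needed_registered'] or report['needed_subscribed']:
                components.rebuildUtilityRegistryFromLocalCache(rebuild=True)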
"""
regs = dict(self._utility_registrations)
utils = self.utilities
needed_registered = 0
did_not_register = 0
needed_subscribed = 0
did_not_subscribe = 0
# Avoid the expensive change process during this; we'll call
# it once at the end if needed.
assert 'changed' not in utils.__dict__
utils.changed = lambda _: None
if rebuild:
register = utils.register
subscribe = utils.subscribe
else:
register = subscribe = lambda *args: None
try:
for (provided, name), (value, _info, _factory) in regs.items():
if utils.registered((), provided, name) != value:
register((), provided, name, value)
needed_registered += 1
else:
did_not_register += 1
if utils.subscribed((), provided, value) is None:
needed_subscribed += 1
subscribe((), provided, value)
else:
did_not_subscribe += 1
finally:
del utils.changed
if rebuild and (needed_subscribed or needed_registered):
utils.changed(utils)
return {
'needed_registered': needed_registered,
'did_not_register': did_not_register,
'needed_subscribed': needed_subscribed,
'did_not_subscribe': did_not_subscribe
}
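# An illustrative (commented-out) sketch of typical ``Components`` usage;
# ``IGreeter`` and ``Greeter`` are hypothetical names, not part of this
# module:
#
#   from zope.interface import Interface, implementer
#
#   class IGreeter(Interface):
#       def greet():
#           "Say hello."
#
#   @implementer(IGreeter)
#   class Greeter:
#       def greet(self):
#           return "Hello"
#
#   registry = Components('example')
#   registry.registerUtility(Greeter())  # provided and name are derived
#   assert registry.getUtility(IGreeter).greet() == "Hello"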
def _getName(component):
try:
return component.__component_name__
except AttributeError:
return ''
def _getUtilityProvided(component):
provided = list(providedBy(component))
if len(provided) == 1:
return provided[0]
raise TypeError(
"The utility doesn't provide a single interface "
"and no provided interface was specified.")
def _getAdapterProvided(factory):
provided = list(implementedBy(factory))
if len(provided) == 1:
return provided[0]
raise TypeError(
"The adapter factory doesn't implement a single interface "
"and no provided interface was specified.")
def _getAdapterRequired(factory, required):
if required is None:
try:
required = factory.__component_adapts__
except AttributeError:
raise TypeError(
"The adapter factory doesn't have a __component_adapts__ "
"attribute and no required specifications were specified"
)
elif ISpecification.providedBy(required):
raise TypeError("the required argument should be a list of "
"interfaces, not a single interface")
result = []
for r in required:
if r is None:
r = Interface
elif not ISpecification.providedBy(r):
if isinstance(r, type):
r = implementedBy(r)
else:
raise TypeError("Required specification must be a "
"specification or class, not %r" % type(r)
)
result.append(r)
return tuple(result)
@implementer(IUtilityRegistration)
class UtilityRegistration:
def __init__(self, registry, provided, name, component, doc, factory=None):
(self.registry, self.provided, self.name, self.component, self.info,
self.factory
) = registry, provided, name, component, doc, factory
def __repr__(self):
return '{}({!r}, {}, {!r}, {}, {!r}, {!r})'.format(
self.__class__.__name__,
self.registry,
getattr(self.provided, '__name__', None), self.name,
getattr(self.component, '__name__', repr(self.component)),
self.factory, self.info,
)
def __hash__(self):
return id(self)
def __eq__(self, other):
return repr(self) == repr(other)
def __ne__(self, other):
return repr(self) != repr(other)
def __lt__(self, other):
return repr(self) < repr(other)
def __le__(self, other):
return repr(self) <= repr(other)
def __gt__(self, other):
return repr(self) > repr(other)
def __ge__(self, other):
return repr(self) >= repr(other)
@implementer(IAdapterRegistration)
class AdapterRegistration:
def __init__(self, registry, required, provided, name, component, doc):
(self.registry, self.required, self.provided, self.name,
self.factory, self.info
) = registry, required, provided, name, component, doc
def __repr__(self):
return '{}({!r}, {}, {}, {!r}, {}, {!r})'.format(
self.__class__.__name__,
self.registry,
'[' + ", ".join([r.__name__ for r in self.required]) + ']',
getattr(self.provided, '__name__', None), self.name,
getattr(self.factory, '__name__', repr(self.factory)), self.info,
)
def __hash__(self):
return id(self)
def __eq__(self, other):
return repr(self) == repr(other)
def __ne__(self, other):
return repr(self) != repr(other)
def __lt__(self, other):
return repr(self) < repr(other)
def __le__(self, other):
return repr(self) <= repr(other)
def __gt__(self, other):
return repr(self) > repr(other)
def __ge__(self, other):
return repr(self) >= repr(other)
@implementer_only(ISubscriptionAdapterRegistration)
class SubscriptionRegistration(AdapterRegistration):
pass
@implementer_only(IHandlerRegistration)
class HandlerRegistration(AdapterRegistration):
def __init__(self, registry, required, name, handler, doc):
(self.registry, self.required, self.name, self.handler, self.info
) = registry, required, name, handler, doc
@property
def factory(self):
return self.handler
provided = None
def __repr__(self):
return '{}({!r}, {}, {!r}, {}, {!r})'.format(
self.__class__.__name__,
self.registry,
'[' + ", ".join([r.__name__ for r in self.required]) + ']',
self.name,
getattr(self.factory, '__name__', repr(self.factory)), self.info,
) | zope.interface | /zope.interface-6.1a2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl/zope/interface/registry.py | registry.py |
__docformat__ = 'restructuredtext'
__all__ = [
'ro',
'InconsistentResolutionOrderError',
'InconsistentResolutionOrderWarning',
]
__logger = None
def _logger():
global __logger # pylint:disable=global-statement
if __logger is None:
import logging
__logger = logging.getLogger(__name__)
return __logger
def _legacy_mergeOrderings(orderings):
"""Merge multiple orderings so that within-ordering order is preserved
Orderings are constrained in such a way that if an object appears
in two or more orderings, then the suffix that begins with the
object must be in both orderings.
For example:
    >>> _legacy_mergeOrderings([
... ['x', 'y', 'z'],
... ['q', 'z'],
... [1, 3, 5],
... ['z']
... ])
['x', 'y', 'q', 1, 3, 5, 'z']
"""
seen = set()
result = []
for ordering in reversed(orderings):
for o in reversed(ordering):
if o not in seen:
seen.add(o)
result.insert(0, o)
return result
def _legacy_flatten(begin):
result = [begin]
i = 0
for ob in iter(result):
i += 1
# The recursive calls can be avoided by inserting the base classes
# into the dynamically growing list directly after the currently
# considered object; the iterator makes sure this will keep working
# in the future, since it cannot rely on the length of the list
# by definition.
result[i:i] = ob.__bases__
return result
def _legacy_ro(ob):
return _legacy_mergeOrderings([_legacy_flatten(ob)])
###
# Compare base objects using identity, not equality. This matches what
# the CPython MRO algorithm does, and is *much* faster to boot: that,
# plus some other small tweaks makes the difference between 25s and 6s
# in loading 446 plone/zope interface.py modules (1925 InterfaceClass,
# 1200 Implements, 1100 ClassProvides objects)
###
class InconsistentResolutionOrderWarning(PendingDeprecationWarning):
"""
The warning issued when an invalid IRO is requested.
"""
class InconsistentResolutionOrderError(TypeError):
"""
The error raised when an invalid IRO is requested in strict mode.
"""
def __init__(self, c3, base_tree_remaining):
self.C = c3.leaf
base_tree = c3.base_tree
self.base_ros = {
base: base_tree[i + 1]
for i, base in enumerate(self.C.__bases__)
}
# Unfortunately, this doesn't necessarily directly match
# up to any transformation on C.__bases__, because
# if any were fully used up, they were removed already.
self.base_tree_remaining = base_tree_remaining
TypeError.__init__(self)
def __str__(self):
import pprint
return "{}: For object {!r}.\nBase ROs:\n{}\nConflict Location:\n{}".format(
self.__class__.__name__,
self.C,
pprint.pformat(self.base_ros),
pprint.pformat(self.base_tree_remaining),
)
class _NamedBool(int): # cannot actually inherit bool
def __new__(cls, val, name):
        inst = super().__new__(cls, val)
inst.__name__ = name
return inst
class _ClassBoolFromEnv:
"""
Non-data descriptor that reads a transformed environment variable
as a boolean, and caches the result in the class.
"""
def __get__(self, inst, klass):
import os
for cls in klass.__mro__:
my_name = None
for k in dir(klass):
if k in cls.__dict__ and cls.__dict__[k] is self:
my_name = k
break
if my_name is not None:
break
else: # pragma: no cover
raise RuntimeError("Unable to find self")
env_name = 'ZOPE_INTERFACE_' + my_name
val = os.environ.get(env_name, '') == '1'
val = _NamedBool(val, my_name)
setattr(klass, my_name, val)
setattr(klass, 'ORIG_' + my_name, self)
return val
class _StaticMRO:
# A previously resolved MRO, supplied by the caller.
# Used in place of calculating it.
had_inconsistency = None # We don't know...
def __init__(self, C, mro):
self.leaf = C
self.__mro = tuple(mro)
def mro(self):
return list(self.__mro)
class C3:
# Holds the shared state during computation of an MRO.
@staticmethod
def resolver(C, strict, base_mros):
strict = strict if strict is not None else C3.STRICT_IRO
factory = C3
if strict:
factory = _StrictC3
elif C3.TRACK_BAD_IRO:
factory = _TrackingC3
memo = {}
base_mros = base_mros or {}
for base, mro in base_mros.items():
assert base in C.__bases__
memo[base] = _StaticMRO(base, mro)
return factory(C, memo)
__mro = None
__legacy_ro = None
direct_inconsistency = False
def __init__(self, C, memo):
self.leaf = C
self.memo = memo
kind = self.__class__
base_resolvers = []
for base in C.__bases__:
if base not in memo:
resolver = kind(base, memo)
memo[base] = resolver
base_resolvers.append(memo[base])
self.base_tree = [
[C]
] + [
memo[base].mro() for base in C.__bases__
] + [
list(C.__bases__)
]
self.bases_had_inconsistency = any(base.had_inconsistency for base in base_resolvers)
if len(C.__bases__) == 1:
self.__mro = [C] + memo[C.__bases__[0]].mro()
@property
def had_inconsistency(self):
return self.direct_inconsistency or self.bases_had_inconsistency
@property
def legacy_ro(self):
if self.__legacy_ro is None:
self.__legacy_ro = tuple(_legacy_ro(self.leaf))
return list(self.__legacy_ro)
TRACK_BAD_IRO = _ClassBoolFromEnv()
STRICT_IRO = _ClassBoolFromEnv()
WARN_BAD_IRO = _ClassBoolFromEnv()
LOG_CHANGED_IRO = _ClassBoolFromEnv()
USE_LEGACY_IRO = _ClassBoolFromEnv()
BAD_IROS = ()
def _warn_iro(self):
if not self.WARN_BAD_IRO:
# For the initial release, one must opt-in to see the warning.
# In the future (2021?) seeing at least the first warning will
# be the default
return
import warnings
warnings.warn(
"An inconsistent resolution order is being requested. "
"(Interfaces should follow the Python class rules known as C3.) "
"For backwards compatibility, zope.interface will allow this, "
"making the best guess it can to produce as meaningful an order as possible. "
"In the future this might be an error. Set the warning filter to error, or set "
"the environment variable 'ZOPE_INTERFACE_TRACK_BAD_IRO' to '1' and examine "
"ro.C3.BAD_IROS to debug, or set 'ZOPE_INTERFACE_STRICT_IRO' to raise exceptions.",
InconsistentResolutionOrderWarning,
)
@staticmethod
def _can_choose_base(base, base_tree_remaining):
# From C3:
# nothead = [s for s in nonemptyseqs if cand in s[1:]]
for bases in base_tree_remaining:
if not bases or bases[0] is base:
continue
for b in bases:
if b is base:
return False
return True
@staticmethod
def _nonempty_bases_ignoring(base_tree, ignoring):
return list(filter(None, [
[b for b in bases if b is not ignoring]
for bases
in base_tree
]))
def _choose_next_base(self, base_tree_remaining):
"""
Return the next base.
The return value will either fit the C3 constraints or be our best
guess about what to do. If we cannot guess, this may raise an exception.
"""
base = self._find_next_C3_base(base_tree_remaining)
if base is not None:
return base
return self._guess_next_base(base_tree_remaining)
def _find_next_C3_base(self, base_tree_remaining):
"""
Return the next base that fits the constraints, or ``None`` if there isn't one.
"""
for bases in base_tree_remaining:
base = bases[0]
if self._can_choose_base(base, base_tree_remaining):
return base
return None
class _UseLegacyRO(Exception):
pass
def _guess_next_base(self, base_tree_remaining):
# Narf. We may have an inconsistent order (we won't know for
# sure until we check all the bases). Python cannot create
# classes like this:
#
# class B1:
# pass
# class B2(B1):
# pass
# class C(B1, B2): # -> TypeError; this is like saying C(B1, B2, B1).
# pass
#
# However, older versions of zope.interface were fine with this order.
# A good example is ``providedBy(IOError())``. Because of the way
# ``classImplements`` works, it winds up with ``__bases__`` ==
# ``[IEnvironmentError, IIOError, IOSError, <implementedBy Exception>]``
# (on Python 3). But ``IEnvironmentError`` is a base of both ``IIOError``
# and ``IOSError``. Previously, we would get a resolution order of
# ``[IIOError, IOSError, IEnvironmentError, IStandardError, IException, Interface]``
# but the standard Python algorithm would forbid creating that order entirely.
# Unlike Python's MRO, we attempt to resolve the issue. A few
# heuristics have been tried. One was:
#
# Strip off the first (highest priority) base of each direct
# base one at a time and seeing if we can come to an agreement
# with the other bases. (We're trying for a partial ordering
# here.) This often resolves cases (such as the IOSError case
# above), and frequently produces the same ordering as the
# legacy MRO did. If we looked at all the highest priority
# bases and couldn't find any partial ordering, then we strip
# them *all* out and begin the C3 step again. We take care not
# to promote a common root over all others.
#
# If we only did the first part, stripped off the first
# element of the first item, we could resolve simple cases.
# But it tended to fail badly. If we did the whole thing, it
# could be extremely painful from a performance perspective
# for deep/wide things like Zope's OFS.SimpleItem.Item. Plus,
# anytime you get ExtensionClass.Base into the mix, you're
# likely to wind up in trouble, because it messes with the MRO
# of classes. Sigh.
#
# So now, we fall back to the old linearization (fast to compute).
self._warn_iro()
self.direct_inconsistency = InconsistentResolutionOrderError(self, base_tree_remaining)
raise self._UseLegacyRO
def _merge(self):
# Returns a merged *list*.
result = self.__mro = []
base_tree_remaining = self.base_tree
base = None
while 1:
# Take last picked base out of the base tree wherever it is.
# This differs slightly from the standard Python MRO and is needed
# because we have no other step that prevents duplicates
# from coming in (e.g., in the inconsistent fallback path)
base_tree_remaining = self._nonempty_bases_ignoring(base_tree_remaining, base)
if not base_tree_remaining:
return result
try:
base = self._choose_next_base(base_tree_remaining)
except self._UseLegacyRO:
self.__mro = self.legacy_ro
return self.legacy_ro
result.append(base)
def mro(self):
if self.__mro is None:
self.__mro = tuple(self._merge())
return list(self.__mro)
class _StrictC3(C3):
__slots__ = ()
def _guess_next_base(self, base_tree_remaining):
raise InconsistentResolutionOrderError(self, base_tree_remaining)
class _TrackingC3(C3):
__slots__ = ()
def _guess_next_base(self, base_tree_remaining):
import traceback
bad_iros = C3.BAD_IROS
if self.leaf not in bad_iros:
if bad_iros == ():
import weakref
# This is a race condition, but it doesn't matter much.
bad_iros = C3.BAD_IROS = weakref.WeakKeyDictionary()
bad_iros[self.leaf] = t = (
InconsistentResolutionOrderError(self, base_tree_remaining),
traceback.format_stack()
)
_logger().warning("Tracking inconsistent IRO: %s", t[0])
return C3._guess_next_base(self, base_tree_remaining)
class _ROComparison:
# Exists to compute and print a pretty string comparison
# for differing ROs.
# Since we're used in a logging context, and may actually never be printed,
# this is a class so we can defer computing the diff until asked.
# Components we use to build up the comparison report
class Item:
prefix = ' '
def __init__(self, item):
self.item = item
def __str__(self):
return "{}{}".format(
self.prefix,
self.item,
)
class Deleted(Item):
prefix = '- '
class Inserted(Item):
prefix = '+ '
Empty = str
class ReplacedBy: # pragma: no cover
prefix = '- '
suffix = ''
def __init__(self, chunk, total_count):
self.chunk = chunk
self.total_count = total_count
def __iter__(self):
lines = [
self.prefix + str(item) + self.suffix
for item in self.chunk
]
while len(lines) < self.total_count:
lines.append('')
return iter(lines)
class Replacing(ReplacedBy):
prefix = "+ "
suffix = ''
_c3_report = None
_legacy_report = None
def __init__(self, c3, c3_ro, legacy_ro):
self.c3 = c3
self.c3_ro = c3_ro
self.legacy_ro = legacy_ro
def __move(self, from_, to_, chunk, operation):
for x in chunk:
to_.append(operation(x))
from_.append(self.Empty())
def _generate_report(self):
if self._c3_report is None:
import difflib
# The opcodes we get describe how to turn 'a' into 'b'. So
# the old one (legacy) needs to be first ('a')
matcher = difflib.SequenceMatcher(None, self.legacy_ro, self.c3_ro)
# The reports are equal length sequences. We're going for a
# side-by-side diff.
self._c3_report = c3_report = []
self._legacy_report = legacy_report = []
for opcode, leg1, leg2, c31, c32 in matcher.get_opcodes():
c3_chunk = self.c3_ro[c31:c32]
legacy_chunk = self.legacy_ro[leg1:leg2]
if opcode == 'equal':
# Guaranteed same length
c3_report.extend(self.Item(x) for x in c3_chunk)
legacy_report.extend(self.Item(x) for x in legacy_chunk)
if opcode == 'delete':
# Guaranteed same length
assert not c3_chunk
self.__move(c3_report, legacy_report, legacy_chunk, self.Deleted)
if opcode == 'insert':
# Guaranteed same length
assert not legacy_chunk
self.__move(legacy_report, c3_report, c3_chunk, self.Inserted)
if opcode == 'replace': # pragma: no cover (How do you make it output this?)
# Either side could be longer.
chunk_size = max(len(c3_chunk), len(legacy_chunk))
c3_report.extend(self.Replacing(c3_chunk, chunk_size))
legacy_report.extend(self.ReplacedBy(legacy_chunk, chunk_size))
return self._c3_report, self._legacy_report
@property
def _inconsistent_label(self):
inconsistent = []
if self.c3.direct_inconsistency:
inconsistent.append('direct')
if self.c3.bases_had_inconsistency:
inconsistent.append('bases')
return '+'.join(inconsistent) if inconsistent else 'no'
def __str__(self):
c3_report, legacy_report = self._generate_report()
assert len(c3_report) == len(legacy_report)
left_lines = [str(x) for x in legacy_report]
right_lines = [str(x) for x in c3_report]
# We have the same number of lines in the report; this is not
# necessarily the same as the number of items in either RO.
assert len(left_lines) == len(right_lines)
padding = ' ' * 2
max_left = max(len(x) for x in left_lines)
max_right = max(len(x) for x in right_lines)
left_title = 'Legacy RO (len={})'.format(len(self.legacy_ro))
right_title = 'C3 RO (len={}; inconsistent={})'.format(
len(self.c3_ro),
self._inconsistent_label,
)
lines = [
(padding + left_title.ljust(max_left) + padding + right_title.ljust(max_right)),
padding + '=' * (max_left + len(padding) + max_right)
]
lines += [
padding + left.ljust(max_left) + padding + right
for left, right in zip(left_lines, right_lines)
]
return '\n'.join(lines)
# Set to `Interface` once it is defined. This is used to
# avoid logging false positives about changed ROs.
_ROOT = None
def ro(C, strict=None, base_mros=None, log_changed_ro=None, use_legacy_ro=None):
"""
ro(C) -> list
Compute the precedence list (mro) according to C3.
:return: A fresh `list` object.
.. versionchanged:: 5.0.0
Add the *strict*, *log_changed_ro* and *use_legacy_ro*
keyword arguments. These are provisional and likely to be
removed in the future. They are most useful for testing.
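
    For example (an illustrative doctest), with a simple class hierarchy;
    the same applies to interfaces:

    >>> class A: pass
    >>> class B(A): pass
    >>> class C3Demo(B): pass
    >>> [c.__name__ for c in ro(C3Demo)]
    ['C3Demo', 'B', 'A', 'object']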
"""
# The ``base_mros`` argument is for internal optimization and
# not documented.
resolver = C3.resolver(C, strict, base_mros)
mro = resolver.mro()
log_changed = log_changed_ro if log_changed_ro is not None else resolver.LOG_CHANGED_IRO
use_legacy = use_legacy_ro if use_legacy_ro is not None else resolver.USE_LEGACY_IRO
if log_changed or use_legacy:
legacy_ro = resolver.legacy_ro
assert isinstance(legacy_ro, list)
assert isinstance(mro, list)
changed = legacy_ro != mro
if changed:
# Did only Interface move? The fix for issue #8 made that
# somewhat common. It's almost certainly not a problem, though,
# so allow ignoring it.
legacy_without_root = [x for x in legacy_ro if x is not _ROOT]
mro_without_root = [x for x in mro if x is not _ROOT]
changed = legacy_without_root != mro_without_root
if changed:
comparison = _ROComparison(resolver, mro, legacy_ro)
_logger().warning(
"Object %r has different legacy and C3 MROs:\n%s",
C, comparison
)
if resolver.had_inconsistency and legacy_ro == mro:
comparison = _ROComparison(resolver, mro, legacy_ro)
_logger().warning(
"Object %r had inconsistent IRO and used the legacy RO:\n%s"
"\nInconsistency entered at:\n%s",
C, comparison, resolver.direct_inconsistency
)
if use_legacy:
return legacy_ro
return mro
def is_consistent(C):
"""
Check if the resolution order for *C*, as computed by :func:`ro`, is consistent
according to C3.
"""
return not C3.resolver(C, False, None).had_inconsistency | zope.interface | /zope.interface-6.1a2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl/zope/interface/ro.py | ro.py |
"""Adapter management
"""
import itertools
import weakref
from zope.interface import implementer
from zope.interface import providedBy
from zope.interface import Interface
from zope.interface import ro
from zope.interface.interfaces import IAdapterRegistry
from zope.interface._compat import _normalize_name
from zope.interface._compat import _use_c_impl
__all__ = [
'AdapterRegistry',
'VerifyingAdapterRegistry',
]
# In the CPython implementation,
# ``tuple`` and ``list`` cooperate so that ``tuple([some list])``
# directly allocates and iterates at the C level without using a
# Python iterator. That's not the case for
# ``tuple(generator_expression)`` or ``tuple(map(func, it))``.
##
# 3.8
# ``tuple([t for t in range(10)])`` -> 610ns
# ``tuple(t for t in range(10))`` -> 696ns
# ``tuple(map(lambda t: t, range(10)))`` -> 881ns
##
# 2.7
# ``tuple([t for t in range(10)])`` -> 625ns
# ``tuple(t for t in range(10))`` -> 665ns
# ``tuple(map(lambda t: t, range(10)))`` -> 958ns
#
# All three have substantial variance.
##
# On PyPy, this is also the best option.
##
# PyPy 2.7.18-7.3.3
# ``tuple([t for t in range(10)])`` -> 128ns
# ``tuple(t for t in range(10))`` -> 175ns
# ``tuple(map(lambda t: t, range(10)))`` -> 153ns
##
# PyPy 3.7.9 7.3.3-beta
# ``tuple([t for t in range(10)])`` -> 82ns
# ``tuple(t for t in range(10))`` -> 177ns
# ``tuple(map(lambda t: t, range(10)))`` -> 168ns
#
class BaseAdapterRegistry:
"""
A basic implementation of the data storage and algorithms required
for a :class:`zope.interface.interfaces.IAdapterRegistry`.
Subclasses can set the following attributes to control how the data
is stored; in particular, these hooks can be helpful for ZODB
persistence. They can be class attributes that are the named (or similar) type, or
they can be methods that act as a constructor for an object that behaves
like the types defined here; this object will not assume that they are type
objects, but subclasses are free to do so:
_sequenceType = list
This is the type used for our two mutable top-level "byorder" sequences.
Must support mutation operations like ``append()`` and ``del seq[index]``.
These are usually small (< 10). Although at least one of them is
accessed when performing lookups or queries on this object, the other
is untouched. In many common scenarios, both are only required when
mutating registrations and subscriptions (like what
:meth:`zope.interface.interfaces.IComponents.registerUtility` does).
This use pattern makes it an ideal candidate to be a
:class:`~persistent.list.PersistentList`.
_leafSequenceType = tuple
This is the type used for the leaf sequences of subscribers.
It could be set to a ``PersistentList`` to avoid many unnecessary data
loads when subscribers aren't being used. Mutation operations are directed
through :meth:`_addValueToLeaf` and :meth:`_removeValueFromLeaf`; if you use
a mutable type, you'll need to override those.
_mappingType = dict
This is the mutable mapping type used for the keyed mappings.
A :class:`~persistent.mapping.PersistentMapping`
could be used to help reduce the number of data loads when the registry is large
and parts of it are rarely used. Further reductions in data loads can come from
using a :class:`~BTrees.OOBTree.OOBTree`, but care is required
to be sure that all required/provided
values are fully ordered (e.g., no required or provided values that are classes
can be used).
_providedType = dict
This is the mutable mapping type used for the ``_provided`` mapping.
This is separate from the generic mapping type because the values
are always integers, so one might choose to use a more optimized data
structure such as a :class:`~BTrees.OIBTree.OIBTree`.
The same caveats regarding key types
apply as for ``_mappingType``.
It is possible to also set these on an instance, but because of the need to
potentially also override :meth:`_addValueToLeaf` and :meth:`_removeValueFromLeaf`,
this may be less useful in a persistent scenario; using a subclass is recommended.
.. versionchanged:: 5.3.0
Add support for customizing the way internal data
structures are created.
.. versionchanged:: 5.3.0
Add methods :meth:`rebuild`, :meth:`allRegistrations`
and :meth:`allSubscriptions`.
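
    As an illustrative sketch only (not part of this package, and assuming
    the external ``persistent`` package is available), a ZODB-aware subclass
    might look like::

        from persistent import Persistent
        from persistent.list import PersistentList
        from persistent.mapping import PersistentMapping

        class PersistentAdapterRegistry(BaseAdapterRegistry, Persistent):
            _sequenceType = PersistentList
            _mappingType = PersistentMapping
            _providedType = PersistentMapping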
"""
# List of methods copied from lookup sub-objects:
_delegated = ('lookup', 'queryMultiAdapter', 'lookup1', 'queryAdapter',
'adapter_hook', 'lookupAll', 'names',
'subscriptions', 'subscribers')
# All registries maintain a generation that can be used by verifying
# registries
_generation = 0
def __init__(self, bases=()):
# The comments here could be improved. Possibly this bit needs
# explaining in a separate document, as the comments here can
# be quite confusing. /regebro
# {order -> {required -> {provided -> {name -> value}}}}
# Here "order" is actually an index in a list, "required" and
# "provided" are interfaces, and "required" is really a nested
# key. So, for example:
# for order == 0 (that is, self._adapters[0]), we have:
# {provided -> {name -> value}}
# but for order == 2 (that is, self._adapters[2]), we have:
# {r1 -> {r2 -> {provided -> {name -> value}}}}
#
self._adapters = self._sequenceType()
# {order -> {required -> {provided -> {name -> [value]}}}}
# where the remarks about adapters above apply
self._subscribers = self._sequenceType()
# Set, with a reference count, keeping track of the interfaces
# for which we have provided components:
self._provided = self._providedType()
# Create ``_v_lookup`` object to perform lookup. We make this a
        # separate object to make it easier to implement just the
# lookup functionality in C. This object keeps track of cache
# invalidation data in two kinds of registries.
# Invalidating registries have caches that are invalidated
        # when they or their base registries change. An invalidating
# registry can only have invalidating registries as bases.
# See LookupBaseFallback below for the pertinent logic.
        # Verifying registries can't rely on getting invalidation messages,
# so have to check the generations of base registries to determine
# if their cache data are current. See VerifyingBasePy below
# for the pertinent object.
self._createLookup()
# Setting the bases causes the registries described above
# to be initialized (self._setBases -> self.changed ->
# self._v_lookup.changed).
self.__bases__ = bases
def _setBases(self, bases):
"""
If subclasses need to track when ``__bases__`` changes, they
can override this method.
Subclasses must still call this method.
"""
self.__dict__['__bases__'] = bases
self.ro = ro.ro(self)
self.changed(self)
__bases__ = property(lambda self: self.__dict__['__bases__'],
lambda self, bases: self._setBases(bases),
)
def _createLookup(self):
self._v_lookup = self.LookupClass(self)
for name in self._delegated:
self.__dict__[name] = getattr(self._v_lookup, name)
# Hooks for subclasses to define the types of objects used in
# our data structures.
# These have to be documented in the docstring, instead of local
# comments, because Sphinx autodoc ignores the comment and just writes
# "alias of list"
_sequenceType = list
_leafSequenceType = tuple
_mappingType = dict
_providedType = dict
def _addValueToLeaf(self, existing_leaf_sequence, new_item):
"""
Add the value *new_item* to the *existing_leaf_sequence*, which may
be ``None``.
Subclasses that redefine `_leafSequenceType` should override this method.
:param existing_leaf_sequence:
If *existing_leaf_sequence* is not *None*, it will be an instance
of `_leafSequenceType`. (Unless the object has been unpickled
from an old pickle and the class definition has changed, in which case
it may be an instance of a previous definition, commonly a `tuple`.)
:return:
This method returns the new value to be stored. It may mutate the
sequence in place if it was not ``None`` and the type is mutable, but
it must also return it.
.. versionadded:: 5.3.0
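
        As an illustrative sketch, a subclass whose ``_leafSequenceType`` is
        a mutable, persistent list (``PersistentList`` is assumed to come
        from the external ``persistent`` package) might override this as::

            def _addValueToLeaf(self, existing_leaf_sequence, new_item):
                if existing_leaf_sequence is None:
                    existing_leaf_sequence = PersistentList()
                existing_leaf_sequence.append(new_item)
                return existing_leaf_sequence

        A matching override of :meth:`_removeValueFromLeaf` would mutate and
        return the same list.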
"""
if existing_leaf_sequence is None:
return (new_item,)
return existing_leaf_sequence + (new_item,)
def _removeValueFromLeaf(self, existing_leaf_sequence, to_remove):
"""
Remove the item *to_remove* from the (non-``None``, non-empty)
*existing_leaf_sequence* and return the mutated sequence.
If there is more than one item that is equal to *to_remove*
they must all be removed.
Subclasses that redefine `_leafSequenceType` should override
this method. Note that they can call this method to help
in their implementation; this implementation will always
return a new tuple constructed by iterating across
the *existing_leaf_sequence* and omitting items equal to *to_remove*.
:param existing_leaf_sequence:
As for `_addValueToLeaf`, probably an instance of
`_leafSequenceType` but possibly an older type; never `None`.
:return:
A version of *existing_leaf_sequence* with all items equal to
            *to_remove* removed. Must not return `None`. However, returning
            an empty object, even of another type such as the empty tuple
            ``()``, is explicitly allowed; such an object will never be stored.
.. versionadded:: 5.3.0
"""
return tuple([v for v in existing_leaf_sequence if v != to_remove])
def changed(self, originally_changed):
self._generation += 1
self._v_lookup.changed(originally_changed)
def register(self, required, provided, name, value):
if not isinstance(name, str):
raise ValueError('name is not a string')
if value is None:
self.unregister(required, provided, name, value)
return
required = tuple([_convert_None_to_Interface(r) for r in required])
name = _normalize_name(name)
order = len(required)
byorder = self._adapters
while len(byorder) <= order:
byorder.append(self._mappingType())
components = byorder[order]
key = required + (provided,)
for k in key:
d = components.get(k)
if d is None:
d = self._mappingType()
components[k] = d
components = d
if components.get(name) is value:
return
components[name] = value
n = self._provided.get(provided, 0) + 1
self._provided[provided] = n
if n == 1:
self._v_lookup.add_extendor(provided)
self.changed(self)
def _find_leaf(self, byorder, required, provided, name):
# Find the leaf value, if any, in the *byorder* list
# for the interface sequence *required* and the interface
# *provided*, given the already normalized *name*.
#
# If no such leaf value exists, returns ``None``
required = tuple([_convert_None_to_Interface(r) for r in required])
order = len(required)
if len(byorder) <= order:
return None
components = byorder[order]
key = required + (provided,)
for k in key:
d = components.get(k)
if d is None:
return None
components = d
return components.get(name)
def registered(self, required, provided, name=''):
return self._find_leaf(
self._adapters,
required,
provided,
_normalize_name(name)
)
@classmethod
def _allKeys(cls, components, i, parent_k=()):
if i == 0:
for k, v in components.items():
yield parent_k + (k,), v
else:
for k, v in components.items():
new_parent_k = parent_k + (k,)
yield from cls._allKeys(v, i - 1, new_parent_k)
def _all_entries(self, byorder):
# Recurse through the mapping levels of the `byorder` sequence,
# reconstructing a flattened sequence of ``(required, provided, name, value)``
# tuples that can be used to reconstruct the sequence with the appropriate
# registration methods.
#
# Locally reference the `byorder` data; it might be replaced while
# this method is running (see ``rebuild``).
for i, components in enumerate(byorder):
# We will have *i* levels of dictionaries to go before
# we get to the leaf.
for key, value in self._allKeys(components, i + 1):
assert len(key) == i + 2
required = key[:i]
provided = key[-2]
name = key[-1]
yield (required, provided, name, value)
def allRegistrations(self):
"""
Yields tuples ``(required, provided, name, value)`` for all
the registrations that this object holds.
These tuples could be passed as the arguments to the
:meth:`register` method on another adapter registry to
duplicate the registrations this object holds.
.. versionadded:: 5.3.0
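
        For example, to copy everything into a fresh registry (an
        illustrative sketch; ``source`` and ``target`` are placeholder
        names)::

            target = AdapterRegistry()
            for required, provided, name, value in source.allRegistrations():
                target.register(required, provided, name, value)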
"""
yield from self._all_entries(self._adapters)
def unregister(self, required, provided, name, value=None):
required = tuple([_convert_None_to_Interface(r) for r in required])
order = len(required)
byorder = self._adapters
if order >= len(byorder):
return False
components = byorder[order]
key = required + (provided,)
# Keep track of how we got to `components`:
lookups = []
for k in key:
d = components.get(k)
if d is None:
return
lookups.append((components, k))
components = d
old = components.get(name)
if old is None:
return
if (value is not None) and (old is not value):
return
del components[name]
if not components:
# Clean out empty containers, since we don't want our keys
# to reference global objects (interfaces) unnecessarily.
# This is often a problem when an interface is slated for
# removal; a hold-over entry in the registry can make it
# difficult to remove such interfaces.
for comp, k in reversed(lookups):
d = comp[k]
if d:
break
else:
del comp[k]
while byorder and not byorder[-1]:
del byorder[-1]
n = self._provided[provided] - 1
if n == 0:
del self._provided[provided]
self._v_lookup.remove_extendor(provided)
else:
self._provided[provided] = n
self.changed(self)
def subscribe(self, required, provided, value):
required = tuple([_convert_None_to_Interface(r) for r in required])
name = ''
order = len(required)
byorder = self._subscribers
while len(byorder) <= order:
byorder.append(self._mappingType())
components = byorder[order]
key = required + (provided,)
for k in key:
d = components.get(k)
if d is None:
d = self._mappingType()
components[k] = d
components = d
components[name] = self._addValueToLeaf(components.get(name), value)
if provided is not None:
n = self._provided.get(provided, 0) + 1
self._provided[provided] = n
if n == 1:
self._v_lookup.add_extendor(provided)
self.changed(self)
def subscribed(self, required, provided, subscriber):
subscribers = self._find_leaf(
self._subscribers,
required,
provided,
''
) or ()
return subscriber if subscriber in subscribers else None
def allSubscriptions(self):
"""
Yields tuples ``(required, provided, value)`` for all the
subscribers that this object holds.
These tuples could be passed as the arguments to the
:meth:`subscribe` method on another adapter registry to
duplicate the registrations this object holds.
.. versionadded:: 5.3.0
"""
for required, provided, _name, value in self._all_entries(self._subscribers):
for v in value:
yield (required, provided, v)
def unsubscribe(self, required, provided, value=None):
required = tuple([_convert_None_to_Interface(r) for r in required])
order = len(required)
byorder = self._subscribers
if order >= len(byorder):
return
components = byorder[order]
key = required + (provided,)
# Keep track of how we got to `components`:
lookups = []
for k in key:
d = components.get(k)
if d is None:
return
lookups.append((components, k))
components = d
old = components.get('')
if not old:
# this is belt-and-suspenders against the failure of cleanup below
return # pragma: no cover
len_old = len(old)
if value is None:
# Removing everything; note that the type of ``new`` won't
# necessarily match the ``_leafSequenceType``, but that's
# OK because we're about to delete the entire entry
# anyway.
new = ()
else:
new = self._removeValueFromLeaf(old, value)
# ``new`` may be the same object as ``old``, just mutated in place,
# so we cannot compare it to ``old`` to check for changes. Remove
# our reference to it now to avoid trying to do so below.
del old
if len(new) == len_old:
# No changes, so nothing could have been removed.
return
if new:
components[''] = new
else:
# Instead of setting components[u''] = new, we clean out
# empty containers, since we don't want our keys to
# reference global objects (interfaces) unnecessarily. This
# is often a problem when an interface is slated for
# removal; a hold-over entry in the registry can make it
# difficult to remove such interfaces.
del components['']
for comp, k in reversed(lookups):
d = comp[k]
if d:
break
else:
del comp[k]
while byorder and not byorder[-1]:
del byorder[-1]
if provided is not None:
n = self._provided[provided] + len(new) - len_old
if n == 0:
del self._provided[provided]
self._v_lookup.remove_extendor(provided)
else:
self._provided[provided] = n
self.changed(self)
def rebuild(self):
"""
Rebuild (and replace) all the internal data structures of this
object.
This is useful, especially for persistent implementations, if
you suspect an issue with reference counts keeping interfaces
alive even though they are no longer used.
It is also useful if you or a subclass change the data types
(``_mappingType`` and friends) that are to be used.
This method replaces all internal data structures with new objects;
it specifically does not re-use any storage.
.. versionadded:: 5.3.0
"""
# Grab the iterators, we're about to discard their data.
registrations = self.allRegistrations()
subscriptions = self.allSubscriptions()
def buffer(it):
            # The generator doesn't actually start running until we ask for
            # its next() value; by then __init__ below would already have
            # replaced the underlying attributes, so grab the first item
            # before that happens.
try:
first = next(it)
except StopIteration:
return iter(())
return itertools.chain((first,), it)
registrations = buffer(registrations)
subscriptions = buffer(subscriptions)
# Replace the base data structures as well as _v_lookup.
self.__init__(self.__bases__)
# Re-register everything previously registered and subscribed.
#
# XXX: This is going to call ``self.changed()`` a lot, all of
# which is unnecessary (because ``self.__init__`` just
# re-created those dependent objects and also called
        # ``self.changed()``). Is this a bottleneck that needs to be fixed?
# (We could do ``self.changed = lambda _: None`` before
# beginning and remove it after to disable the presumably expensive
# part of passing that notification to the change of objects.)
for args in registrations:
self.register(*args)
for args in subscriptions:
self.subscribe(*args)
# XXX hack to fake out twisted's use of a private api. We need to get them
# to use the new registered method.
def get(self, _): # pragma: no cover
class XXXTwistedFakeOut:
selfImplied = {}
return XXXTwistedFakeOut
_not_in_mapping = object()
@_use_c_impl
class LookupBase:
def __init__(self):
self._cache = {}
self._mcache = {}
self._scache = {}
def changed(self, ignored=None):
self._cache.clear()
self._mcache.clear()
self._scache.clear()
def _getcache(self, provided, name):
cache = self._cache.get(provided)
if cache is None:
cache = {}
self._cache[provided] = cache
if name:
c = cache.get(name)
if c is None:
c = {}
cache[name] = c
cache = c
return cache
def lookup(self, required, provided, name='', default=None):
if not isinstance(name, str):
raise ValueError('name is not a string')
cache = self._getcache(provided, name)
required = tuple(required)
if len(required) == 1:
result = cache.get(required[0], _not_in_mapping)
else:
result = cache.get(tuple(required), _not_in_mapping)
if result is _not_in_mapping:
result = self._uncached_lookup(required, provided, name)
if len(required) == 1:
cache[required[0]] = result
else:
cache[tuple(required)] = result
if result is None:
return default
return result
def lookup1(self, required, provided, name='', default=None):
if not isinstance(name, str):
raise ValueError('name is not a string')
cache = self._getcache(provided, name)
result = cache.get(required, _not_in_mapping)
if result is _not_in_mapping:
return self.lookup((required, ), provided, name, default)
if result is None:
return default
return result
def queryAdapter(self, object, provided, name='', default=None):
return self.adapter_hook(provided, object, name, default)
def adapter_hook(self, provided, object, name='', default=None):
if not isinstance(name, str):
raise ValueError('name is not a string')
required = providedBy(object)
cache = self._getcache(provided, name)
factory = cache.get(required, _not_in_mapping)
if factory is _not_in_mapping:
factory = self.lookup((required, ), provided, name)
if factory is not None:
if isinstance(object, super):
object = object.__self__
result = factory(object)
if result is not None:
return result
return default
def lookupAll(self, required, provided):
cache = self._mcache.get(provided)
if cache is None:
cache = {}
self._mcache[provided] = cache
required = tuple(required)
result = cache.get(required, _not_in_mapping)
if result is _not_in_mapping:
result = self._uncached_lookupAll(required, provided)
cache[required] = result
return result
def subscriptions(self, required, provided):
cache = self._scache.get(provided)
if cache is None:
cache = {}
self._scache[provided] = cache
required = tuple(required)
result = cache.get(required, _not_in_mapping)
if result is _not_in_mapping:
result = self._uncached_subscriptions(required, provided)
cache[required] = result
return result
@_use_c_impl
class VerifyingBase(LookupBaseFallback):
# Mixin for lookups against registries which "chain" upwards, and
# whose lookups invalidate their own caches whenever a parent registry
# bumps its own '_generation' counter. E.g., used by
# zope.component.persistentregistry
def changed(self, originally_changed):
LookupBaseFallback.changed(self, originally_changed)
self._verify_ro = self._registry.ro[1:]
self._verify_generations = [r._generation for r in self._verify_ro]
def _verify(self):
if ([r._generation for r in self._verify_ro]
!= self._verify_generations):
self.changed(None)
def _getcache(self, provided, name):
self._verify()
return LookupBaseFallback._getcache(self, provided, name)
def lookupAll(self, required, provided):
self._verify()
return LookupBaseFallback.lookupAll(self, required, provided)
def subscriptions(self, required, provided):
self._verify()
return LookupBaseFallback.subscriptions(self, required, provided)
class AdapterLookupBase:
def __init__(self, registry):
self._registry = registry
self._required = {}
self.init_extendors()
super().__init__()
def changed(self, ignored=None):
super().changed(None)
for r in self._required.keys():
r = r()
if r is not None:
r.unsubscribe(self)
self._required.clear()
# Extendors
# ---------
    # When given a target interface for an adapter lookup, we need to consider
# adapters for interfaces that extend the target interface. This is
# what the extendors dictionary is about. It tells us all of the
# interfaces that extend an interface for which there are adapters
# registered.
# We could separate this by order and name, thus reducing the
# number of provided interfaces to search at run time. The tradeoff,
# however, is that we have to store more information. For example,
# if the same interface is provided for multiple names and if the
# interface extends many interfaces, we'll have to keep track of
# a fair bit of information for each name. It's better to
# be space efficient here and be time efficient in the cache
# implementation.
# TODO: add invalidation when a provided interface changes, in case
# the interface's __iro__ has changed. This is unlikely enough that
# we'll take our chances for now.
def init_extendors(self):
self._extendors = {}
for p in self._registry._provided:
self.add_extendor(p)
def add_extendor(self, provided):
_extendors = self._extendors
for i in provided.__iro__:
extendors = _extendors.get(i, ())
_extendors[i] = (
[e for e in extendors if provided.isOrExtends(e)]
+
[provided]
+
[e for e in extendors if not provided.isOrExtends(e)]
)
def remove_extendor(self, provided):
_extendors = self._extendors
for i in provided.__iro__:
_extendors[i] = [e for e in _extendors.get(i, ())
if e != provided]
def _subscribe(self, *required):
_refs = self._required
for r in required:
ref = r.weakref()
if ref not in _refs:
r.subscribe(self)
_refs[ref] = 1
def _uncached_lookup(self, required, provided, name=''):
required = tuple(required)
result = None
order = len(required)
for registry in self._registry.ro:
byorder = registry._adapters
if order >= len(byorder):
continue
extendors = registry._v_lookup._extendors.get(provided)
if not extendors:
continue
components = byorder[order]
result = _lookup(components, required, extendors, name, 0,
order)
if result is not None:
break
self._subscribe(*required)
return result
def queryMultiAdapter(self, objects, provided, name='', default=None):
factory = self.lookup([providedBy(o) for o in objects], provided, name)
if factory is None:
return default
result = factory(*[o.__self__ if isinstance(o, super) else o for o in objects])
if result is None:
return default
return result
def _uncached_lookupAll(self, required, provided):
required = tuple(required)
order = len(required)
result = {}
for registry in reversed(self._registry.ro):
byorder = registry._adapters
if order >= len(byorder):
continue
extendors = registry._v_lookup._extendors.get(provided)
if not extendors:
continue
components = byorder[order]
_lookupAll(components, required, extendors, result, 0, order)
self._subscribe(*required)
return tuple(result.items())
def names(self, required, provided):
return [c[0] for c in self.lookupAll(required, provided)]
def _uncached_subscriptions(self, required, provided):
required = tuple(required)
order = len(required)
result = []
for registry in reversed(self._registry.ro):
byorder = registry._subscribers
if order >= len(byorder):
continue
if provided is None:
extendors = (provided, )
else:
extendors = registry._v_lookup._extendors.get(provided)
if extendors is None:
continue
_subscriptions(byorder[order], required, extendors, '',
result, 0, order)
self._subscribe(*required)
return result
def subscribers(self, objects, provided):
subscriptions = self.subscriptions([providedBy(o) for o in objects], provided)
if provided is None:
result = ()
for subscription in subscriptions:
subscription(*objects)
else:
result = []
for subscription in subscriptions:
subscriber = subscription(*objects)
if subscriber is not None:
result.append(subscriber)
return result
class AdapterLookup(AdapterLookupBase, LookupBase):
pass
@implementer(IAdapterRegistry)
class AdapterRegistry(BaseAdapterRegistry):
"""
A full implementation of ``IAdapterRegistry`` that adds support for
sub-registries.
"""
LookupClass = AdapterLookup
def __init__(self, bases=()):
# AdapterRegisties are invalidating registries, so
# we need to keep track of our invalidating subregistries.
self._v_subregistries = weakref.WeakKeyDictionary()
super().__init__(bases)
def _addSubregistry(self, r):
self._v_subregistries[r] = 1
def _removeSubregistry(self, r):
if r in self._v_subregistries:
del self._v_subregistries[r]
def _setBases(self, bases):
old = self.__dict__.get('__bases__', ())
for r in old:
if r not in bases:
r._removeSubregistry(self)
for r in bases:
if r not in old:
r._addSubregistry(self)
super()._setBases(bases)
def changed(self, originally_changed):
super().changed(originally_changed)
for sub in self._v_subregistries.keys():
sub.changed(originally_changed)
class VerifyingAdapterLookup(AdapterLookupBase, VerifyingBase):
pass
@implementer(IAdapterRegistry)
class VerifyingAdapterRegistry(BaseAdapterRegistry):
"""
The most commonly-used adapter registry.
"""
LookupClass = VerifyingAdapterLookup
def _convert_None_to_Interface(x):
if x is None:
return Interface
else:
return x
def _lookup(components, specs, provided, name, i, l):
    # This function is called very often.
    # The ``components.get`` lookups in the loops below execute hundreds of
    # thousands of times. Binding ``get`` to a local variable avoids the
    # repeated attribute lookup on ``components`` inside the loop.
components_get = components.get
if i < l:
for spec in specs[i].__sro__:
comps = components_get(spec)
if comps:
r = _lookup(comps, specs, provided, name, i+1, l)
if r is not None:
return r
else:
for iface in provided:
comps = components_get(iface)
if comps:
r = comps.get(name)
if r is not None:
return r
return None
def _lookupAll(components, specs, provided, result, i, l):
components_get = components.get # see _lookup above
if i < l:
for spec in reversed(specs[i].__sro__):
comps = components_get(spec)
if comps:
_lookupAll(comps, specs, provided, result, i+1, l)
else:
for iface in reversed(provided):
comps = components_get(iface)
if comps:
result.update(comps)
def _subscriptions(components, specs, provided, name, result, i, l):
components_get = components.get # see _lookup above
if i < l:
for spec in reversed(specs[i].__sro__):
comps = components_get(spec)
if comps:
_subscriptions(comps, specs, provided, name, result, i+1, l)
else:
for iface in reversed(provided):
comps = components_get(iface)
if comps:
comps = comps.get(name)
if comps:
result.extend(comps) | zope.interface | /zope.interface-6.1a2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl/zope/interface/adapter.py | adapter.py |
__docformat__ = 'restructuredtext'
import sys
from types import FunctionType
from types import MethodType
from types import ModuleType
import weakref
from zope.interface.interface import Interface
from zope.interface.interface import InterfaceClass
from zope.interface.interface import SpecificationBase
from zope.interface.interface import Specification
from zope.interface.interface import NameAndModuleComparisonMixin
from zope.interface._compat import _use_c_impl
__all__ = [
# None. The public APIs of this module are
# re-exported from zope.interface directly.
]
# pylint:disable=too-many-lines
# Registry of class-implementation specifications
BuiltinImplementationSpecifications = {}
def _next_super_class(ob):
# When ``ob`` is an instance of ``super``, return
# the next class in the MRO that we should actually be
# looking at. Watch out for diamond inheritance!
self_class = ob.__self_class__
class_that_invoked_super = ob.__thisclass__
complete_mro = self_class.__mro__
next_class = complete_mro[complete_mro.index(class_that_invoked_super) + 1]
return next_class
class named:
def __init__(self, name):
self.name = name
def __call__(self, ob):
ob.__component_name__ = self.name
return ob
class Declaration(Specification):
"""Interface declarations"""
__slots__ = ()
def __init__(self, *bases):
Specification.__init__(self, _normalizeargs(bases))
def __contains__(self, interface):
"""Test whether an interface is in the specification
"""
return self.extends(interface) and interface in self.interfaces()
def __iter__(self):
"""Return an iterator for the interfaces in the specification
"""
return self.interfaces()
def flattened(self):
"""Return an iterator of all included and extended interfaces
"""
return iter(self.__iro__)
def __sub__(self, other):
"""Remove interfaces from a specification
"""
return Declaration(*[
i for i in self.interfaces()
if not [
j
for j in other.interfaces()
if i.extends(j, 0) # non-strict extends
]
])
def __add__(self, other):
"""
Add two specifications or a specification and an interface
and produce a new declaration.
.. versionchanged:: 5.4.0
Now tries to preserve a consistent resolution order. Interfaces
being added to this object are added to the front of the resulting resolution
order if they already extend an interface in this object. Previously,
they were always added to the end of the order, which easily resulted in
invalid orders.
"""
before = []
result = list(self.interfaces())
seen = set(result)
for i in other.interfaces():
if i in seen:
continue
seen.add(i)
if any(i.extends(x) for x in result):
# It already extends us, e.g., is a subclass,
# so it needs to go at the front of the RO.
before.append(i)
else:
result.append(i)
return Declaration(*(before + result))
# XXX: Is __radd__ needed? No tests break if it's removed.
# If it is needed, does it need to handle the C3 ordering differently?
# I (JAM) don't *think* it does.
__radd__ = __add__
@staticmethod
def _add_interfaces_to_cls(interfaces, cls):
# Strip redundant interfaces already provided
# by the cls so we don't produce invalid
# resolution orders.
implemented_by_cls = implementedBy(cls)
interfaces = tuple([
iface
for iface in interfaces
if not implemented_by_cls.isOrExtends(iface)
])
return interfaces + (implemented_by_cls,)
@staticmethod
def _argument_names_for_repr(interfaces):
# These don't actually have to be interfaces, they could be other
# Specification objects like Implements. Also, the first
# one is typically/nominally the cls.
ordered_names = []
names = set()
for iface in interfaces:
duplicate_transform = repr
if isinstance(iface, InterfaceClass):
# Special case to get 'foo.bar.IFace'
# instead of '<InterfaceClass foo.bar.IFace>'
this_name = iface.__name__
duplicate_transform = str
elif isinstance(iface, type):
# Likewise for types. (Ignoring legacy old-style
# classes.)
this_name = iface.__name__
duplicate_transform = _implements_name
elif (isinstance(iface, Implements)
and not iface.declared
and iface.inherit in interfaces):
# If nothing is declared, there's no need to even print this;
                # it would just show as ``classImplements(Class)``, and the
                # ``Class`` name has typically already appeared earlier in
                # the argument list.
continue
else:
this_name = repr(iface)
already_seen = this_name in names
names.add(this_name)
if already_seen:
this_name = duplicate_transform(iface)
ordered_names.append(this_name)
return ', '.join(ordered_names)
class _ImmutableDeclaration(Declaration):
# A Declaration that is immutable. Used as a singleton to
# return empty answers for things like ``implementedBy``.
# We have to define the actual singleton after normalizeargs
# is defined, and that in turn is defined after InterfaceClass and
# Implements.
__slots__ = ()
__instance = None
def __new__(cls):
if _ImmutableDeclaration.__instance is None:
_ImmutableDeclaration.__instance = object.__new__(cls)
return _ImmutableDeclaration.__instance
def __reduce__(self):
return "_empty"
@property
def __bases__(self):
return ()
@__bases__.setter
def __bases__(self, new_bases):
# We expect the superclass constructor to set ``self.__bases__ = ()``.
# Rather than attempt to special case that in the constructor and allow
# setting __bases__ only at that time, it's easier to just allow setting
# the empty tuple at any time. That makes ``x.__bases__ = x.__bases__`` a nice
# no-op too. (Skipping the superclass constructor altogether is a recipe
# for maintenance headaches.)
if new_bases != ():
raise TypeError("Cannot set non-empty bases on shared empty Declaration.")
# As the immutable empty declaration, we cannot be changed.
# This means there's no logical reason for us to have dependents
# or subscriptions: we'll never notify them. So there's no need for
# us to keep track of any of that.
@property
def dependents(self):
return {}
changed = subscribe = unsubscribe = lambda self, _ignored: None
def interfaces(self):
# An empty iterator
return iter(())
def extends(self, interface, strict=True):
return interface is self._ROOT
def get(self, name, default=None):
return default
def weakref(self, callback=None):
# We're a singleton, we never go away. So there's no need to return
# distinct weakref objects here; their callbacks will never
# be called. Instead, we only need to return a callable that
# returns ourself. The easiest one is to return _ImmutableDeclaration
# itself; testing on Python 3.8 shows that's faster than a function that
# returns _empty. (Remember, one goal is to avoid allocating any
# object, and that includes a method.)
return _ImmutableDeclaration
@property
def _v_attrs(self):
# _v_attrs is not a public, documented property, but some client code
# uses it anyway as a convenient place to cache things. To keep the
# empty declaration truly immutable, we must ignore that. That includes
# ignoring assignments as well.
return {}
@_v_attrs.setter
def _v_attrs(self, new_attrs):
pass
##############################################################################
#
# Implementation specifications
#
# These specify interfaces implemented by instances of classes
class Implements(NameAndModuleComparisonMixin,
Declaration):
# Inherit from NameAndModuleComparisonMixin to be
# mutually comparable with InterfaceClass objects.
# (The two must be mutually comparable to be able to work in e.g., BTrees.)
# Instances of this class generally don't have a __module__ other than
# `zope.interface.declarations`, whereas they *do* have a __name__ that is the
# fully qualified name of the object they are representing.
# Note, though, that equality and hashing are still identity based. This
# accounts for things like nested objects that have the same name (typically
# only in tests) and is consistent with pickling. As far as comparisons to InterfaceClass
# goes, we'll never have equal name and module to those, so we're still consistent there.
# Instances of this class are essentially intended to be unique and are
# heavily cached (note how our __reduce__ handles this) so having identity
# based hash and eq should also work.
# We want equality and hashing to be based on identity. However, we can't actually
# implement __eq__/__ne__ to do this because sometimes we get wrapped in a proxy.
# We need to let the proxy types implement these methods so they can handle unwrapping
# and then rely on: (1) the interpreter automatically changing `implements == proxy` into
# `proxy == implements` (which will call proxy.__eq__ to do the unwrapping) and then
# (2) the default equality and hashing semantics being identity based.
# class whose specification should be used as additional base
inherit = None
# interfaces actually declared for a class
declared = ()
# Weak cache of {class: <implements>} for super objects.
# Created on demand. These are rare, as of 5.0 anyway. Using a class
# level default doesn't take space in instances. Using _v_attrs would be
# another place to store this without taking space unless needed.
_super_cache = None
__name__ = '?'
@classmethod
def named(cls, name, *bases):
# Implementation method: Produce an Implements interface with
# a fully fleshed out __name__ before calling the constructor, which
# sets bases to the given interfaces and which may pass this object to
# other objects (e.g., to adjust dependents). If they're sorting or comparing
# by name, this needs to be set.
inst = cls.__new__(cls)
inst.__name__ = name
inst.__init__(*bases)
return inst
def changed(self, originally_changed):
try:
del self._super_cache
except AttributeError:
pass
return super().changed(originally_changed)
def __repr__(self):
if self.inherit:
name = getattr(self.inherit, '__name__', None) or _implements_name(self.inherit)
else:
name = self.__name__
declared_names = self._argument_names_for_repr(self.declared)
if declared_names:
declared_names = ', ' + declared_names
return 'classImplements({}{})'.format(name, declared_names)
def __reduce__(self):
return implementedBy, (self.inherit, )
def _implements_name(ob):
# Return the __name__ attribute to be used by its __implemented__
# property.
# This must be stable for the "same" object across processes
# because it is used for sorting. It needn't be unique, though, in cases
# like nested classes named Foo created by different functions, because
# equality and hashing is still based on identity.
# It might be nice to use __qualname__ on Python 3, but that would produce
# different values between Py2 and Py3.
return (getattr(ob, '__module__', '?') or '?') + \
'.' + (getattr(ob, '__name__', '?') or '?')
def _implementedBy_super(sup):
# TODO: This is now simple enough we could probably implement
# in C if needed.
# If the class MRO is strictly linear, we could just
# follow the normal algorithm for the next class in the
# search order (e.g., just return
# ``implemented_by_next``). But when diamond inheritance
# or mixins + interface declarations are present, we have
# to consider the whole MRO and compute a new Implements
# that excludes the classes being skipped over but
# includes everything else.
implemented_by_self = implementedBy(sup.__self_class__)
cache = implemented_by_self._super_cache # pylint:disable=protected-access
if cache is None:
cache = implemented_by_self._super_cache = weakref.WeakKeyDictionary()
key = sup.__thisclass__
try:
return cache[key]
except KeyError:
pass
next_cls = _next_super_class(sup)
# For ``implementedBy(cls)``:
# .__bases__ is .declared + [implementedBy(b) for b in cls.__bases__]
# .inherit is cls
implemented_by_next = implementedBy(next_cls)
mro = sup.__self_class__.__mro__
ix_next_cls = mro.index(next_cls)
classes_to_keep = mro[ix_next_cls:]
new_bases = [implementedBy(c) for c in classes_to_keep]
new = Implements.named(
implemented_by_self.__name__ + ':' + implemented_by_next.__name__,
*new_bases
)
new.inherit = implemented_by_next.inherit
new.declared = implemented_by_next.declared
# I don't *think* that new needs to subscribe to ``implemented_by_self``;
# it auto-subscribed to its bases, and that should be good enough.
cache[key] = new
return new
@_use_c_impl
def implementedBy(cls): # pylint:disable=too-many-return-statements,too-many-branches
"""Return the interfaces implemented for a class' instances
The value returned is an `~zope.interface.interfaces.IDeclaration`.
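    For example, a minimal sketch (``IFoo`` and ``Foo`` are hypothetical
    names, not defined by this module)::
        @implementer(IFoo)
        class Foo:
            pass
        list(implementedBy(Foo))   # -> [IFoo]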
"""
try:
if isinstance(cls, super):
# Yes, this needs to be inside the try: block. Some objects
# like security proxies even break isinstance.
return _implementedBy_super(cls)
spec = cls.__dict__.get('__implemented__')
except AttributeError:
# we can't get the class dict. This is probably due to a
# security proxy. If this is the case, then probably no
# descriptor was installed for the class.
# We don't want to depend directly on zope.security in
# zope.interface, but we'll try to make reasonable
# accommodations in an indirect way.
# We'll check to see if there's an implements:
spec = getattr(cls, '__implemented__', None)
if spec is None:
            # There's no spec stored in the class. Maybe it's a builtin:
spec = BuiltinImplementationSpecifications.get(cls)
if spec is not None:
return spec
return _empty
if spec.__class__ == Implements:
# we defaulted to _empty or there was a spec. Good enough.
# Return it.
return spec
# TODO: need old style __implements__ compatibility?
# Hm, there's an __implemented__, but it's not a spec. Must be
# an old-style declaration. Just compute a spec for it
return Declaration(*_normalizeargs((spec, )))
if isinstance(spec, Implements):
return spec
if spec is None:
spec = BuiltinImplementationSpecifications.get(cls)
if spec is not None:
return spec
# TODO: need old style __implements__ compatibility?
spec_name = _implements_name(cls)
if spec is not None:
# old-style __implemented__ = foo declaration
spec = (spec, ) # tuplefy, as it might be just an int
spec = Implements.named(spec_name, *_normalizeargs(spec))
spec.inherit = None # old-style implies no inherit
del cls.__implemented__ # get rid of the old-style declaration
else:
try:
bases = cls.__bases__
except AttributeError:
if not callable(cls):
raise TypeError("ImplementedBy called for non-factory", cls)
bases = ()
spec = Implements.named(spec_name, *[implementedBy(c) for c in bases])
spec.inherit = cls
try:
cls.__implemented__ = spec
if not hasattr(cls, '__providedBy__'):
cls.__providedBy__ = objectSpecificationDescriptor
if isinstance(cls, type) and '__provides__' not in cls.__dict__:
# Make sure we get a __provides__ descriptor
cls.__provides__ = ClassProvides(
cls,
getattr(cls, '__class__', type(cls)),
)
except TypeError:
if not isinstance(cls, type):
raise TypeError("ImplementedBy called for non-type", cls)
BuiltinImplementationSpecifications[cls] = spec
return spec
def classImplementsOnly(cls, *interfaces):
"""
Declare the only interfaces implemented by instances of a class
The arguments after the class are one or more interfaces or interface
specifications (`~zope.interface.interfaces.IDeclaration` objects).
The interfaces given (including the interfaces in the specifications)
replace any previous declarations, *including* inherited definitions. If you
wish to preserve inherited declarations, you can pass ``implementedBy(cls)``
in *interfaces*. This can be used to alter the interface resolution order.
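    For example, a minimal sketch (``Foo`` and ``IBar`` are hypothetical
    names)::
        classImplementsOnly(Foo, IBar)
    After this call, instances of ``Foo`` are declared to provide only
    ``IBar`` (and whatever ``IBar`` extends), regardless of what the base
    classes of ``Foo`` declare.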
"""
spec = implementedBy(cls)
# Clear out everything inherited. It's important to
# also clear the bases right now so that we don't improperly discard
# interfaces that are already implemented by *old* bases that we're
# about to get rid of.
spec.declared = ()
spec.inherit = None
spec.__bases__ = ()
_classImplements_ordered(spec, interfaces, ())
def classImplements(cls, *interfaces):
"""
Declare additional interfaces implemented for instances of a class
The arguments after the class are one or more interfaces or
interface specifications (`~zope.interface.interfaces.IDeclaration` objects).
The interfaces given (including the interfaces in the specifications)
are added to any interfaces previously declared. An effort is made to
keep a consistent C3 resolution order, but this cannot be guaranteed.
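    For example, a minimal sketch (``Foo`` and ``IFoo`` are hypothetical
    names)::
        classImplements(Foo, IFoo)
    is equivalent to decorating the class definition of ``Foo`` with
    ``@implementer(IFoo)``.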
.. versionchanged:: 5.0.0
Each individual interface in *interfaces* may be added to either the
beginning or end of the list of interfaces declared for *cls*,
based on inheritance, in order to try to maintain a consistent
resolution order. Previously, all interfaces were added to the end.
.. versionchanged:: 5.1.0
If *cls* is already declared to implement an interface (or derived interface)
in *interfaces* through inheritance, the interface is ignored. Previously, it
would redundantly be made direct base of *cls*, which often produced inconsistent
interface resolution orders. Now, the order will be consistent, but may change.
Also, if the ``__bases__`` of the *cls* are later changed, the *cls* will no
longer be considered to implement such an interface (changing the ``__bases__`` of *cls*
has never been supported).
"""
spec = implementedBy(cls)
interfaces = tuple(_normalizeargs(interfaces))
before = []
after = []
# Take steps to try to avoid producing an invalid resolution
# order, while still allowing for BWC (in the past, we always
# appended)
for iface in interfaces:
for b in spec.declared:
if iface.extends(b):
before.append(iface)
break
else:
after.append(iface)
_classImplements_ordered(spec, tuple(before), tuple(after))
def classImplementsFirst(cls, iface):
"""
Declare that instances of *cls* additionally provide *iface*.
The second argument is an interface or interface specification.
It is added as the highest priority (first in the IRO) interface;
no attempt is made to keep a consistent resolution order.
.. versionadded:: 5.0.0
"""
spec = implementedBy(cls)
_classImplements_ordered(spec, (iface,), ())
def _classImplements_ordered(spec, before=(), after=()):
# Elide everything already inherited.
# Except, if it is the root, and we don't already declare anything else
# that would imply it, allow the root through. (TODO: When we disallow non-strict
# IRO, this part of the check can be removed because it's not possible to re-declare
# like that.)
before = [
x
for x in before
if not spec.isOrExtends(x) or (x is Interface and not spec.declared)
]
after = [
x
for x in after
if not spec.isOrExtends(x) or (x is Interface and not spec.declared)
]
# eliminate duplicates
new_declared = []
seen = set()
for l in before, spec.declared, after:
for b in l:
if b not in seen:
new_declared.append(b)
seen.add(b)
spec.declared = tuple(new_declared)
# compute the bases
bases = new_declared # guaranteed no dupes
if spec.inherit is not None:
for c in spec.inherit.__bases__:
b = implementedBy(c)
if b not in seen:
seen.add(b)
bases.append(b)
spec.__bases__ = tuple(bases)
def _implements_advice(cls):
interfaces, do_classImplements = cls.__dict__['__implements_advice_data__']
del cls.__implements_advice_data__
do_classImplements(cls, *interfaces)
return cls
class implementer:
"""
Declare the interfaces implemented by instances of a class.
This function is called as a class decorator.
The arguments are one or more interfaces or interface
specifications (`~zope.interface.interfaces.IDeclaration`
objects).
The interfaces given (including the interfaces in the
specifications) are added to any interfaces previously declared,
unless the interface is already implemented.
Previous declarations include declarations for base classes unless
implementsOnly was used.
This function is provided for convenience. It provides a more
convenient way to call `classImplements`. For example::
@implementer(I1)
class C(object):
pass
is equivalent to calling::
classImplements(C, I1)
after the class has been created.
.. seealso:: `classImplements`
The change history provided there applies to this function too.
"""
__slots__ = ('interfaces',)
def __init__(self, *interfaces):
self.interfaces = interfaces
def __call__(self, ob):
if isinstance(ob, type):
# This is the common branch for classes.
classImplements(ob, *self.interfaces)
return ob
spec_name = _implements_name(ob)
spec = Implements.named(spec_name, *self.interfaces)
try:
ob.__implemented__ = spec
except AttributeError:
raise TypeError("Can't declare implements", ob)
return ob
class implementer_only:
"""Declare the only interfaces implemented by instances of a class
This function is called as a class decorator.
The arguments are one or more interfaces or interface
specifications (`~zope.interface.interfaces.IDeclaration` objects).
Previous declarations including declarations for base classes
are overridden.
This function is provided for convenience. It provides a more
convenient way to call `classImplementsOnly`. For example::
@implementer_only(I1)
class C(object): pass
is equivalent to calling::
        classImplementsOnly(C, I1)
after the class has been created.
"""
def __init__(self, *interfaces):
self.interfaces = interfaces
def __call__(self, ob):
if isinstance(ob, (FunctionType, MethodType)):
# XXX Does this decorator make sense for anything but classes?
# I don't think so. There can be no inheritance of interfaces
# on a method or function....
raise ValueError('The implementer_only decorator is not '
'supported for methods or functions.')
# Assume it's a class:
classImplementsOnly(ob, *self.interfaces)
return ob
##############################################################################
#
# Instance declarations
class Provides(Declaration): # Really named ProvidesClass
"""Implement ``__provides__``, the instance-specific specification
When an object is pickled, we pickle the interfaces that it implements.
"""
def __init__(self, cls, *interfaces):
self.__args = (cls, ) + interfaces
self._cls = cls
Declaration.__init__(self, *self._add_interfaces_to_cls(interfaces, cls))
# Added to by ``moduleProvides``, et al
_v_module_names = ()
def __repr__(self):
# The typical way to create instances of this
# object is via calling ``directlyProvides(...)`` or ``alsoProvides()``,
# but that's not the only way. Proxies, for example,
# directly use the ``Provides(...)`` function (which is the
# more generic method, and what we pickle as). We're after the most
# readable, useful repr in the common case, so we use the most
# common name.
#
# We also cooperate with ``moduleProvides`` to attempt to do the
# right thing for that API. See it for details.
function_name = 'directlyProvides'
if self._cls is ModuleType and self._v_module_names:
# See notes in ``moduleProvides``/``directlyProvides``
providing_on_module = True
interfaces = self.__args[1:]
else:
providing_on_module = False
interfaces = (self._cls,) + self.__bases__
ordered_names = self._argument_names_for_repr(interfaces)
if providing_on_module:
mod_names = self._v_module_names
if len(mod_names) == 1:
mod_names = "sys.modules[%r]" % mod_names[0]
ordered_names = (
'{}, '.format(mod_names)
) + ordered_names
return "{}({})".format(
function_name,
ordered_names,
)
def __reduce__(self):
# This reduces to the Provides *function*, not
# this class.
return Provides, self.__args
__module__ = 'zope.interface'
def __get__(self, inst, cls):
"""Make sure that a class __provides__ doesn't leak to an instance
"""
if inst is None and cls is self._cls:
# We were accessed through a class, so we are the class'
# provides spec. Just return this object, but only if we are
# being called on the same class that we were defined for:
return self
raise AttributeError('__provides__')
ProvidesClass = Provides
# Registry of instance declarations
# This is a memory optimization to allow objects to share specifications.
InstanceDeclarations = weakref.WeakValueDictionary()
def Provides(*interfaces): # pylint:disable=function-redefined
"""Cache instance declarations
Instance declarations are shared among instances that have the same
declaration. The declarations are cached in a weak value dictionary.
"""
spec = InstanceDeclarations.get(interfaces)
if spec is None:
spec = ProvidesClass(*interfaces)
InstanceDeclarations[interfaces] = spec
return spec
Provides.__safe_for_unpickling__ = True
def directlyProvides(object, *interfaces): # pylint:disable=redefined-builtin
"""Declare interfaces declared directly for an object
The arguments after the object are one or more interfaces or interface
specifications (`~zope.interface.interfaces.IDeclaration` objects).
The interfaces given (including the interfaces in the specifications)
replace interfaces previously declared for the object.
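    For example, a minimal sketch (``IFoo`` is a hypothetical interface and
    ``ob`` an arbitrary instance)::
        directlyProvides(ob, IFoo)
        IFoo.providedBy(ob)   # -> True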
"""
cls = getattr(object, '__class__', None)
if cls is not None and getattr(cls, '__class__', None) is cls:
        # It's a metaclass (well, at least it could be an extension class).
# Note that we can't get here from the tests: there is no normal
# class which isn't descriptor aware.
if not isinstance(object, type):
raise TypeError("Attempt to make an interface declaration on a "
"non-descriptor-aware class")
interfaces = _normalizeargs(interfaces)
if cls is None:
cls = type(object)
if issubclass(cls, type):
# we have a class or type. We'll use a special descriptor
# that provides some extra caching
object.__provides__ = ClassProvides(object, cls, *interfaces)
else:
provides = object.__provides__ = Provides(cls, *interfaces)
# See notes in ``moduleProvides``.
if issubclass(cls, ModuleType) and hasattr(object, '__name__'):
provides._v_module_names += (object.__name__,)
def alsoProvides(object, *interfaces): # pylint:disable=redefined-builtin
"""Declare interfaces declared directly for an object
The arguments after the object are one or more interfaces or interface
specifications (`~zope.interface.interfaces.IDeclaration` objects).
The interfaces given (including the interfaces in the specifications) are
added to the interfaces previously declared for the object.
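    For example, a minimal sketch (``IFoo`` is a hypothetical interface and
    ``ob`` an arbitrary instance)::
        alsoProvides(ob, IFoo)
    is equivalent to::
        directlyProvides(ob, directlyProvidedBy(ob), IFoo)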
"""
directlyProvides(object, directlyProvidedBy(object), *interfaces)
def noLongerProvides(object, interface): # pylint:disable=redefined-builtin
""" Removes a directly provided interface from an object.
"""
directlyProvides(object, directlyProvidedBy(object) - interface)
if interface.providedBy(object):
raise ValueError("Can only remove directly provided interfaces.")
@_use_c_impl
class ClassProvidesBase(SpecificationBase):
__slots__ = (
'_cls',
'_implements',
)
def __get__(self, inst, cls):
# member slots are set by subclass
# pylint:disable=no-member
if cls is self._cls:
# We only work if called on the class we were defined for
if inst is None:
# We were accessed through a class, so we are the class'
# provides spec. Just return this object as is:
return self
return self._implements
raise AttributeError('__provides__')
class ClassProvides(Declaration, ClassProvidesBase):
"""Special descriptor for class ``__provides__``
The descriptor caches the implementedBy info, so that
we can get declarations for objects without instance-specific
interfaces a bit quicker.
"""
__slots__ = (
'__args',
)
def __init__(self, cls, metacls, *interfaces):
self._cls = cls
self._implements = implementedBy(cls)
self.__args = (cls, metacls, ) + interfaces
Declaration.__init__(self, *self._add_interfaces_to_cls(interfaces, metacls))
def __repr__(self):
# There are two common ways to get instances of this object:
# The most interesting way is calling ``@provider(..)`` as a decorator
# of a class; this is the same as calling ``directlyProvides(cls, ...)``.
#
# The other way is by default: anything that invokes ``implementedBy(x)``
# will wind up putting an instance in ``type(x).__provides__``; this includes
# the ``@implementer(...)`` decorator. Those instances won't have any
# interfaces.
#
# Thus, as our repr, we go with the ``directlyProvides()`` syntax.
interfaces = (self._cls, ) + self.__args[2:]
ordered_names = self._argument_names_for_repr(interfaces)
return "directlyProvides({})".format(ordered_names)
def __reduce__(self):
return self.__class__, self.__args
# Copy base-class method for speed
__get__ = ClassProvidesBase.__get__
def directlyProvidedBy(object): # pylint:disable=redefined-builtin
"""Return the interfaces directly provided by the given object
The value returned is an `~zope.interface.interfaces.IDeclaration`.
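    Only interfaces added with `directlyProvides` or `alsoProvides` are
    included; interfaces implemented by the object's class are not.
    For example, a minimal sketch (``IFoo`` is a hypothetical interface and
    ``ob`` an arbitrary instance)::
        alsoProvides(ob, IFoo)
        IFoo in directlyProvidedBy(ob)   # -> True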
"""
provides = getattr(object, "__provides__", None)
if (
provides is None # no spec
# We might have gotten the implements spec, as an
# optimization. If so, it's like having only one base, that we
# lop off to exclude class-supplied declarations:
or isinstance(provides, Implements)
):
return _empty
# Strip off the class part of the spec:
return Declaration(provides.__bases__[:-1])
class provider:
"""Declare interfaces provided directly by a class
    This function is called as a class decorator.
The arguments are one or more interfaces or interface specifications
(`~zope.interface.interfaces.IDeclaration` objects).
The given interfaces (including the interfaces in the specifications)
are used to create the class's direct-object interface specification.
    An error will be raised if the class already has a direct interface
    specification. In other words, it is an error to call this function more
than once in a class definition.
Note that the given interfaces have nothing to do with the interfaces
implemented by instances of the class.
This function is provided for convenience. It provides a more convenient
way to call `directlyProvides` for a class. For example::
@provider(I1)
class C:
pass
is equivalent to calling::
directlyProvides(C, I1)
after the class has been created.
"""
def __init__(self, *interfaces):
self.interfaces = interfaces
def __call__(self, ob):
directlyProvides(ob, *self.interfaces)
return ob
def moduleProvides(*interfaces):
"""Declare interfaces provided by a module
This function is used in a module definition.
The arguments are one or more interfaces or interface specifications
(`~zope.interface.interfaces.IDeclaration` objects).
The given interfaces (including the interfaces in the specifications) are
used to create the module's direct-object interface specification. An
error will be raised if the module already has an interface specification.
In other words, it is an error to call this function more than once in a
module definition.
This function is provided for convenience. It provides a more convenient
way to call directlyProvides. For example::
moduleProvides(I1)
is equivalent to::
directlyProvides(sys.modules[__name__], I1)
"""
frame = sys._getframe(1) # pylint:disable=protected-access
locals = frame.f_locals # pylint:disable=redefined-builtin
# Try to make sure we were called from a module body
if (locals is not frame.f_globals) or ('__name__' not in locals):
raise TypeError(
"moduleProvides can only be used from a module definition.")
if '__provides__' in locals:
raise TypeError(
"moduleProvides can only be used once in a module definition.")
# Note: This is cached based on the key ``(ModuleType, *interfaces)``;
# One consequence is that any module that provides the same interfaces
# gets the same ``__repr__``, meaning that you can't tell what module
# such a declaration came from. Adding the module name to ``_v_module_names``
# attempts to correct for this; it works in some common situations, but fails
# (1) after pickling (the data is lost) and (2) if declarations are
# actually shared and (3) if the alternate spelling of ``directlyProvides()``
# is used. Problem (3) is fixed by cooperating with ``directlyProvides``
# to maintain this information, and problem (2) is worked around by
# printing all the names, but (1) is unsolvable without introducing
# new classes or changing the stored data...but it doesn't actually matter,
# because ``ModuleType`` can't be pickled!
p = locals["__provides__"] = Provides(ModuleType,
*_normalizeargs(interfaces))
p._v_module_names += (locals['__name__'],)
##############################################################################
#
# Declaration querying support
# XXX: is this a fossil? Nobody calls it, no unit tests exercise it, no
# doctests import it, and the package __init__ doesn't import it.
# (Answer: Versions of zope.container prior to 4.4.0 called this,
# and zope.proxy.decorator up through at least 4.3.5 called this.)
def ObjectSpecification(direct, cls):
"""Provide object specifications
    These combine information for the object and for its classes.
"""
return Provides(cls, direct) # pragma: no cover fossil
@_use_c_impl
def getObjectSpecification(ob):
try:
provides = ob.__provides__
except AttributeError:
provides = None
if provides is not None:
if isinstance(provides, SpecificationBase):
return provides
try:
cls = ob.__class__
except AttributeError:
# We can't get the class, so just consider provides
return _empty
return implementedBy(cls)
@_use_c_impl
def providedBy(ob):
"""
Return the interfaces provided by *ob*.
If *ob* is a :class:`super` object, then only interfaces implemented
by the remainder of the classes in the method resolution order are
considered. Interfaces directly provided by the object underlying *ob*
are not.
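    For example, a minimal sketch (``IFoo`` and ``Foo`` are hypothetical
    names)::
        @implementer(IFoo)
        class Foo:
            pass
        list(providedBy(Foo()))   # -> [IFoo]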
"""
# Here we have either a special object, an old-style declaration
# or a descriptor
# Try to get __providedBy__
try:
if isinstance(ob, super): # Some objects raise errors on isinstance()
return implementedBy(ob)
r = ob.__providedBy__
except AttributeError:
# Not set yet. Fall back to lower-level thing that computes it
return getObjectSpecification(ob)
try:
# We might have gotten a descriptor from an instance of a
# class (like an ExtensionClass) that doesn't support
# descriptors. We'll make sure we got one by trying to get
# the only attribute, which all specs have.
r.extends
except AttributeError:
# The object's class doesn't understand descriptors.
# Sigh. We need to get an object descriptor, but we have to be
# careful. We want to use the instance's __provides__, if
# there is one, but only if it didn't come from the class.
try:
r = ob.__provides__
except AttributeError:
# No __provides__, so just fall back to implementedBy
return implementedBy(ob.__class__)
# We need to make sure we got the __provides__ from the
# instance. We'll do this by making sure we don't get the same
# thing from the class:
try:
cp = ob.__class__.__provides__
except AttributeError:
# The ob doesn't have a class or the class has no
# provides, assume we're done:
return r
if r is cp:
# Oops, we got the provides from the class. This means
            # the object doesn't have its own. We should use implementedBy
return implementedBy(ob.__class__)
return r
@_use_c_impl
class ObjectSpecificationDescriptor:
"""Implement the ``__providedBy__`` attribute
The ``__providedBy__`` attribute computes the interfaces provided by
an object. If an object has an ``__provides__`` attribute, that is returned.
Otherwise, `implementedBy` the *cls* is returned.
.. versionchanged:: 5.4.0
Both the default (C) implementation and the Python implementation
now let exceptions raised by accessing ``__provides__`` propagate.
Previously, the C version ignored all exceptions.
.. versionchanged:: 5.4.0
The Python implementation now matches the C implementation and lets
a ``__provides__`` of ``None`` override what the class is declared to
implement.
"""
def __get__(self, inst, cls):
"""Get an object specification for an object
"""
if inst is None:
return getObjectSpecification(cls)
try:
return inst.__provides__
except AttributeError:
return implementedBy(cls)
##############################################################################
def _normalizeargs(sequence, output=None):
"""Normalize declaration arguments
    The arguments may contain Declarations, tuples, or single
    interfaces.
Anything but individual interfaces or implements specs will be expanded.
"""
if output is None:
output = []
cls = sequence.__class__
if InterfaceClass in cls.__mro__ or Implements in cls.__mro__:
output.append(sequence)
else:
for v in sequence:
_normalizeargs(v, output)
return output
_empty = _ImmutableDeclaration()
objectSpecificationDescriptor = ObjectSpecificationDescriptor() | zope.interface | /zope.interface-6.1a2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl/zope/interface/declarations.py | declarations.py |
"""Interface object implementation
"""
# pylint:disable=protected-access
import sys
from types import MethodType
from types import FunctionType
import weakref
from zope.interface._compat import _use_c_impl
from zope.interface.exceptions import Invalid
from zope.interface.ro import ro as calculate_ro
from zope.interface import ro
__all__ = [
# Most of the public API from this module is directly exported
# from zope.interface. The only remaining public API intended to
# be imported from here should be those few things documented as
# such.
'InterfaceClass',
'Specification',
'adapter_hooks',
]
CO_VARARGS = 4
CO_VARKEYWORDS = 8
# Put in the attrs dict of an interface by ``taggedValue`` and ``invariants``
TAGGED_DATA = '__interface_tagged_values__'
# Put in the attrs dict of an interface by ``interfacemethod``
INTERFACE_METHODS = '__interface_methods__'
_decorator_non_return = object()
_marker = object()
def invariant(call):
f_locals = sys._getframe(1).f_locals
tags = f_locals.setdefault(TAGGED_DATA, {})
invariants = tags.setdefault('invariants', [])
invariants.append(call)
return _decorator_non_return
def taggedValue(key, value):
"""Attaches a tagged value to an interface at definition time."""
f_locals = sys._getframe(1).f_locals
tagged_values = f_locals.setdefault(TAGGED_DATA, {})
tagged_values[key] = value
return _decorator_non_return
class Element:
"""
Default implementation of `zope.interface.interfaces.IElement`.
"""
# We can't say this yet because we don't have enough
# infrastructure in place.
#
#implements(IElement)
def __init__(self, __name__, __doc__=''): # pylint:disable=redefined-builtin
if not __doc__ and __name__.find(' ') >= 0:
__doc__ = __name__
__name__ = None
self.__name__ = __name__
self.__doc__ = __doc__
# Tagged values are rare, especially on methods or attributes.
# Deferring the allocation can save substantial memory.
self.__tagged_values = None
def getName(self):
""" Returns the name of the object. """
return self.__name__
def getDoc(self):
""" Returns the documentation for the object. """
return self.__doc__
###
# Tagged values.
#
# Direct tagged values are set only in this instance. Others
# may be inherited (for those subclasses that have that concept).
###
def getTaggedValue(self, tag):
""" Returns the value associated with 'tag'. """
if not self.__tagged_values:
raise KeyError(tag)
return self.__tagged_values[tag]
def queryTaggedValue(self, tag, default=None):
""" Returns the value associated with 'tag'. """
return self.__tagged_values.get(tag, default) if self.__tagged_values else default
def getTaggedValueTags(self):
""" Returns a collection of all tags. """
return self.__tagged_values.keys() if self.__tagged_values else ()
def setTaggedValue(self, tag, value):
""" Associates 'value' with 'key'. """
if self.__tagged_values is None:
self.__tagged_values = {}
self.__tagged_values[tag] = value
queryDirectTaggedValue = queryTaggedValue
getDirectTaggedValue = getTaggedValue
getDirectTaggedValueTags = getTaggedValueTags
SpecificationBasePy = object # filled by _use_c_impl.
@_use_c_impl
class SpecificationBase:
# This object is the base of the inheritance hierarchy for ClassProvides:
#
# ClassProvides < ClassProvidesBase, Declaration
# Declaration < Specification < SpecificationBase
# ClassProvidesBase < SpecificationBase
#
# In order to have compatible instance layouts, we need to declare
# the storage used by Specification and Declaration here (and
# those classes must have ``__slots__ = ()``); fortunately this is
# not a waste of space because those are the only two inheritance
# trees. These all translate into tp_members in C.
__slots__ = (
# Things used here.
'_implied',
# Things used in Specification.
'_dependents',
'_bases',
'_v_attrs',
'__iro__',
'__sro__',
'__weakref__',
)
def providedBy(self, ob):
"""Is the interface implemented by an object
"""
spec = providedBy(ob)
return self in spec._implied
def implementedBy(self, cls):
"""Test whether the specification is implemented by a class or factory.
Raise TypeError if argument is neither a class nor a callable.
"""
spec = implementedBy(cls)
return self in spec._implied
def isOrExtends(self, interface):
"""Is the interface the same as or extend the given interface
"""
return interface in self._implied # pylint:disable=no-member
__call__ = isOrExtends
class NameAndModuleComparisonMixin:
# Internal use. Implement the basic sorting operators (but not (in)equality
# or hashing). Subclasses must provide ``__name__`` and ``__module__``
# attributes. Subclasses will be mutually comparable; but because equality
# and hashing semantics are missing from this class, take care in how
# you define those two attributes: If you stick with the default equality
# and hashing (identity based) you should make sure that all possible ``__name__``
# and ``__module__`` pairs are unique ACROSS ALL SUBCLASSES. (Actually, pretty
# much the same thing goes if you define equality and hashing to be based on
# those two attributes: they must still be consistent ACROSS ALL SUBCLASSES.)
# pylint:disable=assigning-non-slot
__slots__ = ()
def _compare(self, other):
"""
Compare *self* to *other* based on ``__name__`` and ``__module__``.
Return 0 if they are equal, return 1 if *self* is
greater than *other*, and return -1 if *self* is less than
*other*.
If *other* does not have ``__name__`` or ``__module__``, then
return ``NotImplemented``.
.. caution::
This allows comparison to things well outside the type hierarchy,
perhaps not symmetrically.
For example, ``class Foo(object)`` and ``class Foo(Interface)``
in the same file would compare equal, depending on the order of
operands. Writing code like this by hand would be unusual, but it could
happen with dynamic creation of types and interfaces.
None is treated as a pseudo interface that implies the loosest
        contract possible: no contract at all. For that reason, all interfaces
sort before None.
"""
if other is self:
return 0
if other is None:
return -1
n1 = (self.__name__, self.__module__)
try:
n2 = (other.__name__, other.__module__)
except AttributeError:
return NotImplemented
# This spelling works under Python3, which doesn't have cmp().
return (n1 > n2) - (n1 < n2)
def __lt__(self, other):
c = self._compare(other)
if c is NotImplemented:
return c
return c < 0
def __le__(self, other):
c = self._compare(other)
if c is NotImplemented:
return c
return c <= 0
def __gt__(self, other):
c = self._compare(other)
if c is NotImplemented:
return c
return c > 0
def __ge__(self, other):
c = self._compare(other)
if c is NotImplemented:
return c
return c >= 0
@_use_c_impl
class InterfaceBase(NameAndModuleComparisonMixin, SpecificationBasePy):
"""Base class that wants to be replaced with a C base :)
"""
__slots__ = (
'__name__',
'__ibmodule__',
'_v_cached_hash',
)
def __init__(self, name=None, module=None):
self.__name__ = name
self.__ibmodule__ = module
def _call_conform(self, conform):
raise NotImplementedError
@property
def __module_property__(self):
# This is for _InterfaceMetaClass
return self.__ibmodule__
def __call__(self, obj, alternate=_marker):
"""Adapt an object to the interface
"""
try:
conform = obj.__conform__
except AttributeError:
conform = None
if conform is not None:
adapter = self._call_conform(conform)
if adapter is not None:
return adapter
adapter = self.__adapt__(obj)
if adapter is not None:
return adapter
if alternate is not _marker:
return alternate
raise TypeError("Could not adapt", obj, self)
def __adapt__(self, obj):
"""Adapt an object to the receiver
"""
if self.providedBy(obj):
return obj
for hook in adapter_hooks:
adapter = hook(self, obj)
if adapter is not None:
return adapter
return None
def __hash__(self):
# pylint:disable=assigning-non-slot,attribute-defined-outside-init
try:
return self._v_cached_hash
except AttributeError:
self._v_cached_hash = hash((self.__name__, self.__module__))
return self._v_cached_hash
def __eq__(self, other):
c = self._compare(other)
if c is NotImplemented:
return c
return c == 0
def __ne__(self, other):
if other is self:
return False
c = self._compare(other)
if c is NotImplemented:
return c
return c != 0
adapter_hooks = _use_c_impl([], 'adapter_hooks')
class Specification(SpecificationBase):
"""Specifications
An interface specification is used to track interface declarations
and component registrations.
This class is a base class for both interfaces themselves and for
interface specifications (declarations).
Specifications are mutable. If you reassign their bases, their
relations with other specifications are adjusted accordingly.
"""
__slots__ = ()
# The root of all Specifications. This will be assigned `Interface`,
# once it is defined.
_ROOT = None
# Copy some base class methods for speed
isOrExtends = SpecificationBase.isOrExtends
providedBy = SpecificationBase.providedBy
def __init__(self, bases=()):
# There are many leaf interfaces with no dependents,
# and a few with very many. It's a heavily left-skewed
# distribution. In a survey of Plone and Zope related packages
# that loaded 2245 InterfaceClass objects and 2235 ClassProvides
# instances, there were a total of 7000 Specification objects created.
# 4700 had 0 dependents, 1400 had 1, 382 had 2 and so on. Only one
# for <type> had 1664. So there's savings to be had deferring
# the creation of dependents.
self._dependents = None # type: weakref.WeakKeyDictionary
self._bases = ()
self._implied = {}
self._v_attrs = None
self.__iro__ = ()
self.__sro__ = ()
self.__bases__ = tuple(bases)
@property
def dependents(self):
if self._dependents is None:
self._dependents = weakref.WeakKeyDictionary()
return self._dependents
def subscribe(self, dependent):
self._dependents[dependent] = self.dependents.get(dependent, 0) + 1
def unsubscribe(self, dependent):
try:
n = self._dependents[dependent]
except TypeError:
raise KeyError(dependent)
n -= 1
if not n:
del self.dependents[dependent]
else:
assert n > 0
self.dependents[dependent] = n
def __setBases(self, bases):
# Remove ourselves as a dependent of our old bases
for b in self.__bases__:
b.unsubscribe(self)
# Register ourselves as a dependent of our new bases
self._bases = bases
for b in bases:
b.subscribe(self)
self.changed(self)
__bases__ = property(
lambda self: self._bases,
__setBases,
)
# This method exists for tests to override the way we call
# ro.calculate_ro(), usually by adding extra kwargs. We don't
# want to have a mutable dictionary as a class member that we pass
# ourself because mutability is bad, and passing **kw is slower than
# calling the bound function.
_do_calculate_ro = calculate_ro
def _calculate_sro(self):
"""
Calculate and return the resolution order for this object, using its ``__bases__``.
Ensures that ``Interface`` is always the last (lowest priority) element.
"""
# We'd like to make Interface the lowest priority as a
# property of the resolution order algorithm. That almost
# works out naturally, but it fails when class inheritance has
# some bases that DO implement an interface, and some that DO
# NOT. In such a mixed scenario, you wind up with a set of
# bases to consider that look like this: [[..., Interface],
# [..., object], ...]. Depending on the order of inheritance,
# Interface can wind up before or after object, and that can
# happen at any point in the tree, meaning Interface can wind
# up somewhere in the middle of the order. Since Interface is
# treated as something that everything winds up implementing
# anyway (a catch-all for things like adapters), having it high up
# the order is bad. It's also bad to have it at the end, just before
# some concrete class: concrete classes should be HIGHER priority than
# interfaces (because there's only one class, but many implementations).
#
# One technically nice way to fix this would be to have
# ``implementedBy(object).__bases__ = (Interface,)``
#
# But: (1) That fails for old-style classes and (2) that causes
# everything to appear to *explicitly* implement Interface, when up
# to this point it's been an implicit virtual sort of relationship.
#
# So we force the issue by mutating the resolution order.
# Note that we let C3 use pre-computed __sro__ for our bases.
# This requires that by the time this method is invoked, our bases
# have settled their SROs. Thus, ``changed()`` must first
# update itself before telling its descendents of changes.
sro = self._do_calculate_ro(base_mros={
b: b.__sro__
for b in self.__bases__
})
root = self._ROOT
if root is not None and sro and sro[-1] is not root:
# In one dataset of 1823 Interface objects, 1117 ClassProvides objects,
# sro[-1] was root 4496 times, and only not root 118 times. So it's
# probably worth checking.
# Once we don't have to deal with old-style classes,
# we can add a check and only do this if base_count > 1,
# if we tweak the bootstrapping for ``<implementedBy object>``
sro = [
x
for x in sro
if x is not root
]
sro.append(root)
return sro
def changed(self, originally_changed):
"""
We, or something we depend on, have changed.
By the time this is called, the things we depend on,
such as our bases, should themselves be stable.
"""
self._v_attrs = None
implied = self._implied
implied.clear()
ancestors = self._calculate_sro()
self.__sro__ = tuple(ancestors)
self.__iro__ = tuple([ancestor for ancestor in ancestors
if isinstance(ancestor, InterfaceClass)
])
for ancestor in ancestors:
# We directly imply our ancestors:
implied[ancestor] = ()
# Now, advise our dependents of change
# (being careful not to create the WeakKeyDictionary if not needed):
for dependent in tuple(self._dependents.keys() if self._dependents else ()):
dependent.changed(originally_changed)
# Just in case something called get() at some point
# during that process and we have a cycle of some sort
# make sure we didn't cache incomplete results.
self._v_attrs = None
def interfaces(self):
"""Return an iterator for the interfaces in the specification.
"""
seen = {}
for base in self.__bases__:
for interface in base.interfaces():
if interface not in seen:
seen[interface] = 1
yield interface
def extends(self, interface, strict=True):
"""Does the specification extend the given interface?
Test whether an interface in the specification extends the
given interface
"""
return ((interface in self._implied)
and
((not strict) or (self != interface))
)
def weakref(self, callback=None):
return weakref.ref(self, callback)
def get(self, name, default=None):
"""Query for an attribute description
"""
attrs = self._v_attrs
if attrs is None:
attrs = self._v_attrs = {}
attr = attrs.get(name)
if attr is None:
for iface in self.__iro__:
attr = iface.direct(name)
if attr is not None:
attrs[name] = attr
break
return default if attr is None else attr
class _InterfaceMetaClass(type):
# Handling ``__module__`` on ``InterfaceClass`` is tricky. We need
# to be able to read it on a type and get the expected string. We
# also need to be able to set it on an instance and get the value
# we set. So far so good. But what gets tricky is that we'd like
# to store the value in the C structure (``InterfaceBase.__ibmodule__``) for
# direct access during equality, sorting, and hashing. "No
# problem, you think, I'll just use a property" (well, the C
# equivalents, ``PyMemberDef`` or ``PyGetSetDef``).
#
# Except there is a problem. When a subclass is created, the
# metaclass (``type``) always automatically puts the expected
# string in the class's dictionary under ``__module__``, thus
# overriding the property inherited from the superclass. Writing
# ``Subclass.__module__`` still works, but
# ``Subclass().__module__`` fails.
#
# There are multiple ways to work around this:
#
# (1) Define ``InterfaceBase.__getattribute__`` to watch for
# ``__module__`` and return the C storage.
#
# This works, but slows down *all* attribute access (except,
# ironically, to ``__module__``) by about 25% (40ns becomes 50ns)
# (when implemented in C). Since that includes methods like
# ``providedBy``, that's probably not acceptable.
#
# All the other methods involve modifying subclasses. This can be
# done either on the fly in some cases, as instances are
# constructed, or by using a metaclass. These next few can be done on the fly.
#
# (2) Make ``__module__`` a descriptor in each subclass dictionary.
# It can't be a straight up ``@property`` descriptor, though, because accessing
# it on the class returns a ``property`` object, not the desired string.
#
# (3) Implement a data descriptor (``__get__`` and ``__set__``)
# that is both a subclass of string, and also does the redirect of
# ``__module__`` to ``__ibmodule__`` and does the correct thing
    # when the ``instance`` argument to ``__get__`` is None (returns
    # the class's value.) (Why must it be a subclass of string? Because
    # when it's in the class's dict, it's defined on an *instance* of the
# metaclass; descriptors in an instance's dict aren't honored --- their
# ``__get__`` is never invoked --- so it must also *be* the value we want
# returned.)
#
# This works, preserves the ability to read and write
# ``__module__``, and eliminates any penalty accessing other
# attributes. But it slows down accessing ``__module__`` of
# instances by 200% (40ns to 124ns), requires editing class dicts on the fly
# (in InterfaceClass.__init__), thus slightly slowing down all interface creation,
# and is ugly.
#
# (4) As in the last step, but make it a non-data descriptor (no ``__set__``).
#
# If you then *also* store a copy of ``__ibmodule__`` in
# ``__module__`` in the instance's dict, reading works for both
# class and instance and is full speed for instances. But the cost
# is storage space, and you can't write to it anymore, not without
# things getting out of sync.
#
# (Actually, ``__module__`` was never meant to be writable. Doing
# so would break BTrees and normal dictionaries, as well as the
# repr, maybe more.)
#
# That leaves us with a metaclass. (Recall that a class is an
# instance of its metaclass, so properties/descriptors defined in
# the metaclass are used when accessing attributes on the
# instance/class. We'll use that to define ``__module__``.) Here
# we can have our cake and eat it too: no extra storage, and
# C-speed access to the underlying storage. The only substantial
# cost is that metaclasses tend to make people's heads hurt. (But
# still less than the descriptor-is-string, hopefully.)
__slots__ = ()
def __new__(cls, name, bases, attrs):
# Figure out what module defined the interface.
# This is copied from ``InterfaceClass.__init__``;
# reviewers aren't sure how AttributeError or KeyError
# could be raised.
__module__ = sys._getframe(1).f_globals['__name__']
# Get the C optimized __module__ accessor and give it
# to the new class.
moduledescr = InterfaceBase.__dict__['__module__']
if isinstance(moduledescr, str):
# We're working with the Python implementation,
# not the C version
moduledescr = InterfaceBase.__dict__['__module_property__']
attrs['__module__'] = moduledescr
kind = type.__new__(cls, name, bases, attrs)
kind.__module = __module__
return kind
@property
def __module__(cls):
return cls.__module
def __repr__(cls):
return "<class '{}.{}'>".format(
cls.__module,
cls.__name__,
)
_InterfaceClassBase = _InterfaceMetaClass(
'InterfaceClass',
# From least specific to most specific.
(InterfaceBase, Specification, Element),
{'__slots__': ()}
)
def interfacemethod(func):
"""
Convert a method specification to an actual method of the interface.
This is a decorator that functions like `staticmethod` et al.
The primary use of this decorator is to allow interface definitions to
define the ``__adapt__`` method, but other interface methods can be
overridden this way too.
.. seealso:: `zope.interface.interfaces.IInterfaceDeclaration.interfacemethod`
"""
f_locals = sys._getframe(1).f_locals
methods = f_locals.setdefault(INTERFACE_METHODS, {})
methods[func.__name__] = func
return _decorator_non_return
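# A minimal usage sketch (``IAdaptable`` and ``find_adapter`` are
# illustrative names, not part of this package)::
#
#   class IAdaptable(Interface):
#       @interfacemethod
#       def __adapt__(self, obj):
#           # Called for ``IAdaptable(obj)``; return an adapter or None.
#           return find_adapter(self, obj)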
class InterfaceClass(_InterfaceClassBase):
"""
Prototype (scarecrow) Interfaces Implementation.
Note that it is not possible to change the ``__name__`` or ``__module__``
after an instance of this object has been constructed.
"""
# We can't say this yet because we don't have enough
# infrastructure in place.
#
#implements(IInterface)
def __new__(cls, name=None, bases=(), attrs=None, __doc__=None, # pylint:disable=redefined-builtin
__module__=None):
assert isinstance(bases, tuple)
attrs = attrs or {}
needs_custom_class = attrs.pop(INTERFACE_METHODS, None)
if needs_custom_class:
needs_custom_class.update(
{'__classcell__': attrs.pop('__classcell__')}
if '__classcell__' in attrs
else {}
)
if '__adapt__' in needs_custom_class:
# We need to tell the C code to call this.
needs_custom_class['_CALL_CUSTOM_ADAPT'] = 1
if issubclass(cls, _InterfaceClassWithCustomMethods):
cls_bases = (cls,)
elif cls is InterfaceClass:
cls_bases = (_InterfaceClassWithCustomMethods,)
else:
cls_bases = (cls, _InterfaceClassWithCustomMethods)
cls = type(cls)( # pylint:disable=self-cls-assignment
name + "<WithCustomMethods>",
cls_bases,
needs_custom_class
)
return _InterfaceClassBase.__new__(cls)
def __init__(self, name, bases=(), attrs=None, __doc__=None, # pylint:disable=redefined-builtin
__module__=None):
# We don't call our metaclass parent directly
# pylint:disable=non-parent-init-called
# pylint:disable=super-init-not-called
if not all(isinstance(base, InterfaceClass) for base in bases):
raise TypeError('Expected base interfaces')
if attrs is None:
attrs = {}
if __module__ is None:
__module__ = attrs.get('__module__')
if isinstance(__module__, str):
del attrs['__module__']
else:
try:
# Figure out what module defined the interface.
# This is how cPython figures out the module of
# a class, but of course it does it in C. :-/
__module__ = sys._getframe(1).f_globals['__name__']
except (AttributeError, KeyError): # pragma: no cover
pass
InterfaceBase.__init__(self, name, __module__)
# These asserts assisted debugging the metaclass
# assert '__module__' not in self.__dict__
# assert self.__ibmodule__ is self.__module__ is __module__
d = attrs.get('__doc__')
if d is not None:
if not isinstance(d, Attribute):
if __doc__ is None:
__doc__ = d
del attrs['__doc__']
if __doc__ is None:
__doc__ = ''
Element.__init__(self, name, __doc__)
tagged_data = attrs.pop(TAGGED_DATA, None)
if tagged_data is not None:
for key, val in tagged_data.items():
self.setTaggedValue(key, val)
Specification.__init__(self, bases)
self.__attrs = self.__compute_attrs(attrs)
self.__identifier__ = "{}.{}".format(__module__, name)
def __compute_attrs(self, attrs):
# Make sure that all recorded attributes (and methods) are of type
# `Attribute` and `Method`
def update_value(aname, aval):
if isinstance(aval, Attribute):
aval.interface = self
if not aval.__name__:
aval.__name__ = aname
elif isinstance(aval, FunctionType):
aval = fromFunction(aval, self, name=aname)
else:
raise InvalidInterface("Concrete attribute, " + aname)
return aval
return {
aname: update_value(aname, aval)
for aname, aval in attrs.items()
if aname not in (
# __locals__: Python 3 sometimes adds this.
'__locals__',
# __qualname__: PEP 3155 (Python 3.3+)
'__qualname__',
# __annotations__: PEP 3107 (Python 3.0+)
'__annotations__',
)
and aval is not _decorator_non_return
}
def interfaces(self):
"""Return an iterator for the interfaces in the specification.
"""
yield self
def getBases(self):
return self.__bases__
def isEqualOrExtendedBy(self, other):
"""Same interface or extends?"""
return self == other or other.extends(self)
def names(self, all=False): # pylint:disable=redefined-builtin
"""Return the attribute names defined by the interface."""
if not all:
return self.__attrs.keys()
r = self.__attrs.copy()
for base in self.__bases__:
r.update(dict.fromkeys(base.names(all)))
return r.keys()
def __iter__(self):
return iter(self.names(all=True))
def namesAndDescriptions(self, all=False): # pylint:disable=redefined-builtin
"""Return attribute names and descriptions defined by interface."""
if not all:
return self.__attrs.items()
r = {}
for base in self.__bases__[::-1]:
r.update(dict(base.namesAndDescriptions(all)))
r.update(self.__attrs)
return r.items()
def getDescriptionFor(self, name):
"""Return the attribute description for the given name."""
r = self.get(name)
if r is not None:
return r
raise KeyError(name)
__getitem__ = getDescriptionFor
def __contains__(self, name):
return self.get(name) is not None
def direct(self, name):
return self.__attrs.get(name)
def queryDescriptionFor(self, name, default=None):
return self.get(name, default)
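# Quick illustration of the introspection API above (``IExample`` is a
# made-up interface)::
#
#   class IExample(Interface):
#       x = Attribute("An attribute")
#       def method(arg): "A method"
#
#   sorted(IExample.names())   # -> ['method', 'x']
#   IExample['x']              # -> the Attribute description object
#   'missing' in IExample      # -> False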
def validateInvariants(self, obj, errors=None):
"""validate object to defined invariants."""
for iface in self.__iro__:
for invariant in iface.queryDirectTaggedValue('invariants', ()):
try:
invariant(obj)
except Invalid as error:
if errors is not None:
errors.append(error)
else:
raise
if errors:
raise Invalid(errors)
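# Typical use of invariants (a sketch; it relies on the module-level
# ``invariant`` directive, which records callables under the 'invariants'
# tag read above, and ``candidate`` is any object with min/max)::
#
#   class IRange(Interface):
#       min = Attribute("Lower bound")
#       max = Attribute("Upper bound")
#
#       @invariant
#       def ordered(ob):
#           if ob.min > ob.max:
#               raise Invalid("min must not exceed max")
#
#   IRange.validateInvariants(candidate)  # raises Invalid if violated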
def queryTaggedValue(self, tag, default=None):
"""
Queries for the value associated with *tag*, returning it from the nearest
interface in the ``__iro__``.
If not found, returns *default*.
"""
for iface in self.__iro__:
value = iface.queryDirectTaggedValue(tag, _marker)
if value is not _marker:
return value
return default
def getTaggedValue(self, tag):
""" Returns the value associated with 'tag'. """
value = self.queryTaggedValue(tag, default=_marker)
if value is _marker:
raise KeyError(tag)
return value
def getTaggedValueTags(self):
""" Returns a list of all tags. """
keys = set()
for base in self.__iro__:
keys.update(base.getDirectTaggedValueTags())
return keys
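# Tagged-value lookup walks the ``__iro__``, so values set on a base
# interface are visible (but not "direct") on derived ones. Sketch::
#
#   class IBase(Interface):
#       pass
#   IBase.setTaggedValue('title', 'Base')
#
#   class IDerived(IBase):
#       pass
#
#   IDerived.queryTaggedValue('title')        # -> 'Base'
#   IDerived.queryDirectTaggedValue('title')  # -> None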
def __repr__(self):
try:
return self._v_repr
except AttributeError:
name = str(self)
r = "<{} {}>".format(self.__class__.__name__, name)
self._v_repr = r # pylint:disable=attribute-defined-outside-init
return r
def __str__(self):
name = self.__name__
m = self.__ibmodule__
if m:
name = '{}.{}'.format(m, name)
return name
def _call_conform(self, conform):
try:
return conform(self)
except TypeError: # pragma: no cover
# We got a TypeError. It might be an error raised by
# the __conform__ implementation, or *we* may have
# made the TypeError by calling an unbound method
# (object is a class). In the latter case, we behave
# as though there is no __conform__ method. We can
# detect this case by checking whether there is more
# than one traceback object in the traceback chain:
if sys.exc_info()[2].tb_next is not None:
# There is more than one entry in the chain, so
# reraise the error:
raise
# This clever trick is from Phillip Eby
return None # pragma: no cover
def __reduce__(self):
return self.__name__
Interface = InterfaceClass("Interface", __module__='zope.interface')
# Interface is the only member of its own SRO.
Interface._calculate_sro = lambda: (Interface,)
Interface.changed(Interface)
assert Interface.__sro__ == (Interface,)
Specification._ROOT = Interface
ro._ROOT = Interface
class _InterfaceClassWithCustomMethods(InterfaceClass):
"""
Marker class for interfaces with custom methods that override InterfaceClass methods.
"""
class Attribute(Element):
"""Attribute descriptions
"""
# We can't say this yet because we don't have enough
# infrastructure in place.
#
# implements(IAttribute)
interface = None
def _get_str_info(self):
"""Return extra data to put at the end of __str__."""
return ""
def __str__(self):
of = ''
if self.interface is not None:
of = self.interface.__module__ + '.' + self.interface.__name__ + '.'
# self.__name__ may be None during construction (e.g., debugging)
return of + (self.__name__ or '<unknown>') + self._get_str_info()
def __repr__(self):
return "<{}.{} object at 0x{:x} {}>".format(
type(self).__module__,
type(self).__name__,
id(self),
self
)
class Method(Attribute):
"""Method interfaces
The idea here is that you have objects that describe methods.
This provides an opportunity for rich meta-data.
"""
# We can't say this yet because we don't have enough
# infrastructure in place.
#
# implements(IMethod)
positional = required = ()
_optional = varargs = kwargs = None
def _get_optional(self):
if self._optional is None:
return {}
return self._optional
def _set_optional(self, opt):
self._optional = opt
def _del_optional(self):
self._optional = None
optional = property(_get_optional, _set_optional, _del_optional)
def __call__(self, *args, **kw):
raise BrokenImplementation(self.interface, self.__name__)
def getSignatureInfo(self):
return {'positional': self.positional,
'required': self.required,
'optional': self.optional,
'varargs': self.varargs,
'kwargs': self.kwargs,
}
def getSignatureString(self):
sig = []
for v in self.positional:
sig.append(v)
if v in self.optional.keys():
sig[-1] += "=" + repr(self.optional[v])
if self.varargs:
sig.append("*" + self.varargs)
if self.kwargs:
sig.append("**" + self.kwargs)
return "(%s)" % ", ".join(sig)
_get_str_info = getSignatureString
def fromFunction(func, interface=None, imlevel=0, name=None):
name = name or func.__name__
method = Method(name, func.__doc__)
defaults = getattr(func, '__defaults__', None) or ()
code = func.__code__
# Number of positional arguments
na = code.co_argcount - imlevel
names = code.co_varnames[imlevel:]
opt = {}
# Number of required arguments
defaults_count = len(defaults)
if not defaults_count:
# PyPy3 uses ``__defaults_count__`` for builtin methods
# like ``dict.pop``. Surprisingly, these don't have recorded
# ``__defaults__``
defaults_count = getattr(func, '__defaults_count__', 0)
nr = na - defaults_count
if nr < 0:
defaults = defaults[-nr:]
nr = 0
# Determine the optional arguments.
opt.update(dict(zip(names[nr:], defaults)))
method.positional = names[:na]
method.required = names[:nr]
method.optional = opt
argno = na
# Determine the function's variable argument's name (i.e. *args)
if code.co_flags & CO_VARARGS:
method.varargs = names[argno]
argno = argno + 1
else:
method.varargs = None
# Determine the function's keyword argument's name (i.e. **kw)
if code.co_flags & CO_VARKEYWORDS:
method.kwargs = names[argno]
else:
method.kwargs = None
method.interface = interface
for key, value in func.__dict__.items():
method.setTaggedValue(key, value)
return method
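# Worked example (editor's note): for ``def f(a, b, c=1, *args, **kw)``,
# ``fromFunction(f)`` yields a Method whose signature info is roughly:
#
#   positional = ('a', 'b', 'c')
#   required   = ('a', 'b')
#   optional   = {'c': 1}
#   varargs    = 'args'
#   kwargs     = 'kw'
#
# so ``getSignatureString()`` returns "(a, b, c=1, *args, **kw)".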
def fromMethod(meth, interface=None, name=None):
if isinstance(meth, MethodType):
func = meth.__func__
else:
func = meth
return fromFunction(func, interface, imlevel=1, name=name)
# Now we can create the interesting interfaces and wire them up:
def _wire():
from zope.interface.declarations import classImplements
# From least specific to most specific.
from zope.interface.interfaces import IElement
classImplements(Element, IElement)
from zope.interface.interfaces import IAttribute
classImplements(Attribute, IAttribute)
from zope.interface.interfaces import IMethod
classImplements(Method, IMethod)
from zope.interface.interfaces import ISpecification
classImplements(Specification, ISpecification)
from zope.interface.interfaces import IInterface
classImplements(InterfaceClass, IInterface)
# We import this here to deal with module dependencies.
# pylint:disable=wrong-import-position
from zope.interface.declarations import implementedBy
from zope.interface.declarations import providedBy
from zope.interface.exceptions import InvalidInterface
from zope.interface.exceptions import BrokenImplementation
# This ensures that ``Interface`` winds up in the flattened()
# list of the immutable declaration. It correctly overrides changed()
# as a no-op, so we bypass that.
from zope.interface.declarations import _empty
Specification.changed(_empty, _empty) | zope.interface | /zope.interface-6.1a2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl/zope/interface/interface.py | interface.py |
"""Verify interface implementations
"""
import inspect
import sys
from types import FunctionType
from types import MethodType
from zope.interface.exceptions import BrokenImplementation
from zope.interface.exceptions import BrokenMethodImplementation
from zope.interface.exceptions import DoesNotImplement
from zope.interface.exceptions import Invalid
from zope.interface.exceptions import MultipleInvalid
from zope.interface.interface import fromMethod, fromFunction, Method
__all__ = [
'verifyObject',
'verifyClass',
]
# This will be monkey-patched when running under Zope 2, so leave this
# here:
MethodTypes = (MethodType, )
def _verify(iface, candidate, tentative=False, vtype=None):
"""
Verify that *candidate* might correctly provide *iface*.
This involves:
- Making sure the candidate claims that it provides the
interface using ``iface.providedBy`` (unless *tentative* is `True`,
in which case this step is skipped). This means that the candidate's class
declares that it `implements <zope.interface.implementer>` the interface,
or the candidate itself declares that it `provides <zope.interface.provider>`
the interface
- Making sure the candidate defines all the necessary methods
- Making sure the methods have the correct signature (to the
extent possible)
- Making sure the candidate defines all the necessary attributes
:return bool: Returns a true value if everything that could be
checked passed.
:raises zope.interface.Invalid: If any of the previous
conditions does not hold.
.. versionchanged:: 5.0
If multiple methods or attributes are invalid, all such errors
are collected and reported. Previously, only the first error was reported.
As a special case, if only one such error is present, it is raised
alone, like before.
"""
if vtype == 'c':
tester = iface.implementedBy
else:
tester = iface.providedBy
excs = []
if not tentative and not tester(candidate):
excs.append(DoesNotImplement(iface, candidate))
for name, desc in iface.namesAndDescriptions(all=True):
try:
_verify_element(iface, name, desc, candidate, vtype)
except Invalid as e:
excs.append(e)
if excs:
if len(excs) == 1:
raise excs[0]
raise MultipleInvalid(iface, candidate, excs)
return True
def _verify_element(iface, name, desc, candidate, vtype):
# Here the `desc` is either an `Attribute` or `Method` instance
try:
attr = getattr(candidate, name)
except AttributeError:
if (not isinstance(desc, Method)) and vtype == 'c':
# We can't verify non-methods on classes, since the
# class may provide attrs in its __init__.
return
# TODO: This should use ``raise...from``
raise BrokenImplementation(iface, desc, candidate)
if not isinstance(desc, Method):
# If it's not a method, there's nothing else we can test
return
if inspect.ismethoddescriptor(attr) or inspect.isbuiltin(attr):
# The first case is what you get for things like ``dict.pop``
# on CPython (e.g., ``verifyClass(IFullMapping, dict)``). The
# second case is what you get for things like ``dict().pop`` on
# CPython (e.g., ``verifyObject(IFullMapping, dict())``).
# In neither case can we get a signature, so there's nothing
# to verify. Even the inspect module gives up and raises
# ValueError: no signature found. The ``__text_signature__`` attribute
# isn't typically populated either.
#
# Note that on PyPy 2 or 3 (up through 7.3 at least), these are
# not true for things like ``dict.pop`` (but might be true for C extensions?)
return
if isinstance(attr, FunctionType):
if isinstance(candidate, type) and vtype == 'c':
# This is an "unbound method".
# Only unwrap this if we're verifying implementedBy;
# otherwise we can unwrap @staticmethod on classes that directly
# provide an interface.
meth = fromFunction(attr, iface, name=name, imlevel=1)
else:
# Nope, just a normal function
meth = fromFunction(attr, iface, name=name)
elif (isinstance(attr, MethodTypes)
and type(attr.__func__) is FunctionType):
meth = fromMethod(attr, iface, name)
elif isinstance(attr, property) and vtype == 'c':
# Without an instance we cannot be sure it's not a
# callable.
# TODO: This should probably check inspect.isdatadescriptor(),
# a more general form than ``property``
return
else:
if not callable(attr):
raise BrokenMethodImplementation(desc, "implementation is not a method",
attr, iface, candidate)
# sigh, it's callable, but we don't know how to introspect it, so
# we have to give it a pass.
return
# Make sure that the required and implemented method signatures are
# the same.
mess = _incompat(desc.getSignatureInfo(), meth.getSignatureInfo())
if mess:
raise BrokenMethodImplementation(desc, mess, attr, iface, candidate)
def verifyClass(iface, candidate, tentative=False):
"""
Verify that the *candidate* might correctly provide *iface*.
"""
return _verify(iface, candidate, tentative, vtype='c')
def verifyObject(iface, candidate, tentative=False):
return _verify(iface, candidate, tentative, vtype='o')
verifyObject.__doc__ = _verify.__doc__
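# Typical usage (a minimal sketch; ``IFoo``/``Foo`` are illustrative names)::
#
#   from zope.interface import Interface, implementer
#   from zope.interface.verify import verifyClass, verifyObject
#
#   class IFoo(Interface):
#       def bar(x):
#           "Do something with x."
#
#   @implementer(IFoo)
#   class Foo:
#       def bar(self, x):
#           return x
#
#   verifyClass(IFoo, Foo)     # -> True, or raises zope.interface.Invalid
#   verifyObject(IFoo, Foo())  # also checks non-method attributes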
_MSG_TOO_MANY = 'implementation requires too many arguments'
def _incompat(required, implemented):
#if (required['positional'] !=
# implemented['positional'][:len(required['positional'])]
# and implemented['kwargs'] is None):
#    return 'implementation has different argument names'
if len(implemented['required']) > len(required['required']):
return _MSG_TOO_MANY
if ((len(implemented['positional']) < len(required['positional']))
and not implemented['varargs']):
return "implementation doesn't allow enough arguments"
if required['kwargs'] and not implemented['kwargs']:
return "implementation doesn't support keyword arguments"
if required['varargs'] and not implemented['varargs']:
return "implementation doesn't support variable arguments" | zope.interface | /zope.interface-6.1a2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl/zope/interface/verify.py | verify.py |
from types import FunctionType
__all__ = [
'determineMetaclass',
'getFrameInfo',
'isClassAdvisor',
'minimalBases',
]
import sys
def getFrameInfo(frame):
"""Return (kind,module,locals,globals) for a frame
'kind' is one of "exec", "module", "class", "function call", or "unknown".
"""
f_locals = frame.f_locals
f_globals = frame.f_globals
sameNamespace = f_locals is f_globals
hasModule = '__module__' in f_locals
hasName = '__name__' in f_globals
sameName = hasModule and hasName
sameName = sameName and f_globals['__name__']==f_locals['__module__']
module = hasName and sys.modules.get(f_globals['__name__']) or None
namespaceIsModule = module and module.__dict__ is f_globals
if not namespaceIsModule:
# some kind of funky exec
kind = "exec"
elif sameNamespace and not hasModule:
kind = "module"
elif sameName and not sameNamespace:
kind = "class"
elif not sameNamespace:
kind = "function call"
else: # pragma: no cover
# How can you have f_locals is f_globals, and have '__module__' set?
# This is probably module-level code, but with a '__module__' variable.
kind = "unknown"
return kind, module, f_locals, f_globals
def isClassAdvisor(ob):
"""True if 'ob' is a class advisor function"""
return isinstance(ob,FunctionType) and hasattr(ob,'previousMetaclass')
def determineMetaclass(bases, explicit_mc=None):
"""Determine metaclass from 1+ bases and optional explicit __metaclass__"""
meta = [getattr(b,'__class__',type(b)) for b in bases]
if explicit_mc is not None:
# The explicit metaclass needs to be verified for compatibility
# as well, and allowed to resolve the incompatible bases, if any
meta.append(explicit_mc)
if len(meta)==1:
# easy case
return meta[0]
candidates = minimalBases(meta) # minimal set of metaclasses
if len(candidates)>1:
# We could auto-combine, but for now we won't...
raise TypeError("Incompatible metatypes", bases)
# Just one, return it
return candidates[0]
def minimalBases(classes):
"""Reduce a list of base classes to its ordered minimum equivalent"""
candidates = []
for m in classes:
for n in classes:
if issubclass(n,m) and m is not n:
break
else:
# m has no subclasses in 'classes'
if m in candidates:
candidates.remove(m) # ensure that we're later in the list
candidates.append(m)
return candidates | zope.interface | /zope.interface-6.1a2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl/zope/interface/advice.py | advice.py |
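# Example (editor's sketch): ``minimalBases`` keeps only those classes that
# have no subclass elsewhere in the list, preserving order::
#
#   class A(type): pass
#   class B(A): pass
#
#   minimalBases([A, B, type])  # -> [B]
#
# ``determineMetaclass`` uses the same reduction to pick a single compatible
# metaclass for a set of base classes.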
__all__ = [
# Invalid tree
'Invalid',
'DoesNotImplement',
'BrokenImplementation',
'BrokenMethodImplementation',
'MultipleInvalid',
# Other
'BadImplements',
'InvalidInterface',
]
class Invalid(Exception):
"""A specification is violated
"""
class _TargetInvalid(Invalid):
# Internal use. Subclass this when you're describing
# a particular target object that's invalid according
# to a specific interface.
#
# For backwards compatibility, the *target* and *interface* are
# optional, and the signatures are inconsistent in their ordering.
#
# We deal with the inconsistency in ordering by defining the index
# of the two values in ``self.args``. *target* uses a marker object to
# distinguish "not given" from "given, but None", because the latter
# can be a value that gets passed to validation. For this reason, it must
# always be the last argument (we detect absence by the ``IndexError``).
_IX_INTERFACE = 0
_IX_TARGET = 1
# The exception to catch when indexing self.args indicating that
# an argument was not given. If all arguments are expected,
# a subclass should set this to ().
_NOT_GIVEN_CATCH = IndexError
_NOT_GIVEN = '<Not Given>'
def _get_arg_or_default(self, ix, default=None):
try:
return self.args[ix] # pylint:disable=unsubscriptable-object
except self._NOT_GIVEN_CATCH:
return default
@property
def interface(self):
return self._get_arg_or_default(self._IX_INTERFACE)
@property
def target(self):
return self._get_arg_or_default(self._IX_TARGET, self._NOT_GIVEN)
###
# str
#
# The ``__str__`` of self is implemented by concatenating (%s), in order,
# these properties (none of which should have leading or trailing
# whitespace):
#
# - self._str_subject
# Begin the message, including a description of the target.
# - self._str_description
# Provide a general description of the type of error, including
# the interface name if possible and relevant.
# - self._str_conjunction
# Join the description to the details. Defaults to ": ".
# - self._str_details
#     Provide details about this particular instance of the error.
# - self._str_trailer
# End the message. Usually just a period.
###
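# For example, ``str(DoesNotImplement(IFoo))`` (with no target given)
# composes to roughly:
#
#   "An object has failed to implement interface IFoo: "
#   "Does not declaratively implement the interface."
#
# where ``IFoo`` stands in for the interface's printed name.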
@property
def _str_subject(self):
target = self.target
if target is self._NOT_GIVEN:
return "An object"
return "The object {!r}".format(target)
@property
def _str_description(self):
return "has failed to implement interface %s" % (
self.interface or '<Unknown>'
)
_str_conjunction = ": "
_str_details = "<unknown>"
_str_trailer = '.'
def __str__(self):
return "{} {}{}{}{}".format(
self._str_subject,
self._str_description,
self._str_conjunction,
self._str_details,
self._str_trailer
)
class DoesNotImplement(_TargetInvalid):
"""
DoesNotImplement(interface[, target])
The *target* (optional) does not implement the *interface*.
.. versionchanged:: 5.0.0
Add the *target* argument and attribute, and change the resulting
string value of this object accordingly.
"""
_str_details = "Does not declaratively implement the interface"
class BrokenImplementation(_TargetInvalid):
"""
BrokenImplementation(interface, name[, target])
The *target* (optional) is missing the attribute *name*.
.. versionchanged:: 5.0.0
Add the *target* argument and attribute, and change the resulting
string value of this object accordingly.
The *name* can either be a simple string or a ``Attribute`` object.
"""
_IX_NAME = _TargetInvalid._IX_INTERFACE + 1
_IX_TARGET = _IX_NAME + 1
@property
def name(self):
return self.args[1] # pylint:disable=unsubscriptable-object
@property
def _str_details(self):
return "The %s attribute was not provided" % (
repr(self.name) if isinstance(self.name, str) else self.name
)
class BrokenMethodImplementation(_TargetInvalid):
"""
BrokenMethodImplementation(method, message[, implementation, interface, target])
The *target* (optional) has a *method* in *implementation* that violates
its contract in a way described by *message*.
.. versionchanged:: 5.0.0
Add the *interface* and *target* argument and attribute,
and change the resulting string value of this object accordingly.
The *method* can either be a simple string or a ``Method`` object.
.. versionchanged:: 5.0.0
If *implementation* is given, then the *message* will have the
string "implementation" replaced with a short but informative
representation of *implementation*.
"""
_IX_IMPL = 2
_IX_INTERFACE = _IX_IMPL + 1
_IX_TARGET = _IX_INTERFACE + 1
@property
def method(self):
return self.args[0] # pylint:disable=unsubscriptable-object
@property
def mess(self):
return self.args[1] # pylint:disable=unsubscriptable-object
@staticmethod
def __implementation_str(impl):
# It could be a callable or some arbitrary object, we don't
# know yet.
import inspect # Inspect is a heavy-weight dependency, lots of imports
try:
sig = inspect.signature
formatsig = str
except AttributeError:
sig = inspect.getargspec
f = inspect.formatargspec
formatsig = lambda sig: f(*sig) # pylint:disable=deprecated-method
try:
sig = sig(impl)
except (ValueError, TypeError):
# Unable to introspect. Darn.
# This could be a non-callable, or a particular builtin,
# or a bound method that doesn't even accept 'self', e.g.,
# ``Class.method = lambda: None; Class().method``
return repr(impl)
try:
name = impl.__qualname__
except AttributeError:
name = impl.__name__
return name + formatsig(sig)
@property
def _str_details(self):
impl = self._get_arg_or_default(self._IX_IMPL, self._NOT_GIVEN)
message = self.mess
if impl is not self._NOT_GIVEN and 'implementation' in message:
message = message.replace("implementation", '%r')
message = message % (self.__implementation_str(impl),)
return 'The contract of {} is violated because {}'.format(
repr(self.method) if isinstance(self.method, str) else self.method,
message,
)
class MultipleInvalid(_TargetInvalid):
"""
The *target* has failed to implement the *interface* in
multiple ways.
The failures are described by *exceptions*, a collection of
other `Invalid` instances.
.. versionadded:: 5.0
"""
_NOT_GIVEN_CATCH = ()
def __init__(self, interface, target, exceptions):
super().__init__(interface, target, tuple(exceptions))
@property
def exceptions(self):
return self.args[2] # pylint:disable=unsubscriptable-object
@property
def _str_details(self):
# It would be nice to use tabs here, but that
# is hard to represent in doctests.
return '\n ' + '\n '.join(
x._str_details.strip() if isinstance(x, _TargetInvalid) else str(x)
for x in self.exceptions
)
_str_conjunction = ':' # We don't want a trailing space, messes up doctests
_str_trailer = ''
class InvalidInterface(Exception):
"""The interface has invalid contents
"""
class BadImplements(TypeError):
"""An implementation assertion is invalid
because it doesn't contain an interface or a sequence of valid
implementation assertions.
""" | zope.interface | /zope.interface-6.1a2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl/zope/interface/exceptions.py | exceptions.py |
import os
import sys
def _normalize_name(name):
if isinstance(name, bytes):
name = str(name, 'ascii')
if isinstance(name, str):
return name
raise TypeError("name must be a string or ASCII-only bytes")
PYPY = hasattr(sys, 'pypy_version_info')
def _c_optimizations_required():
"""
Return a true value if the C optimizations are required.
This uses the ``PURE_PYTHON`` variable as documented in `_use_c_impl`.
"""
pure_env = os.environ.get('PURE_PYTHON')
require_c = pure_env == "0"
return require_c
def _c_optimizations_available():
"""
Return the C optimization module, if available, otherwise
a false value.
If the optimizations are required but not available, this
raises the ImportError.
This does not say whether they should be used or not.
"""
catch = () if _c_optimizations_required() else (ImportError,)
try:
from zope.interface import _zope_interface_coptimizations as c_opt
return c_opt
except catch: # pragma: no cover (only Jython doesn't build extensions)
return False
def _c_optimizations_ignored():
"""
The opposite of `_c_optimizations_required`.
"""
pure_env = os.environ.get('PURE_PYTHON')
return pure_env is not None and pure_env != "0"
def _should_attempt_c_optimizations():
"""
Return a true value if we should attempt to use the C optimizations.
This takes into account whether we're on PyPy and the value of the
``PURE_PYTHON`` environment variable, as defined in `_use_c_impl`.
"""
is_pypy = hasattr(sys, 'pypy_version_info')
if _c_optimizations_required():
return True
if is_pypy:
return False
return not _c_optimizations_ignored()
def _use_c_impl(py_impl, name=None, globs=None):
"""
Decorator. Given an object implemented in Python, with a name like
``Foo``, import the corresponding C implementation from
``zope.interface._zope_interface_coptimizations`` with the name
``Foo`` and use it instead.
If the ``PURE_PYTHON`` environment variable is set to any value
other than ``"0"``, or we're on PyPy, ignore the C implementation
and return the Python version. If the C implementation cannot be
imported, return the Python version. If ``PURE_PYTHON`` is set to
0, *require* the C implementation (let the ImportError propagate);
note that PyPy can import the C implementation in this case (and all
tests pass).
In all cases, the Python version is kept available in the module
globals with the name ``FooPy`` and the name ``FooFallback`` (both
conventions have been used; the C implementation of some functions
looks for the ``Fallback`` version, as do some of the Sphinx
documents).
Example::
@_use_c_impl
class Foo(object):
...
"""
name = name or py_impl.__name__
globs = globs or sys._getframe(1).f_globals
def find_impl():
if not _should_attempt_c_optimizations():
return py_impl
c_opt = _c_optimizations_available()
if not c_opt: # pragma: no cover (only Jython doesn't build extensions)
return py_impl
__traceback_info__ = c_opt
return getattr(c_opt, name)
c_impl = find_impl()
# Always make available by the FooPy name and FooFallback
# name (for testing and documentation)
globs[name + 'Py'] = py_impl
globs[name + 'Fallback'] = py_impl
return c_impl | zope.interface | /zope.interface-6.1a2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl/zope/interface/_compat.py | _compat.py |
from datetime import timedelta, date, datetime, time, tzinfo
from zope.interface import Interface, Attribute
from zope.interface import classImplements
class ITimeDeltaClass(Interface):
"""This is the timedelta class interface.
This is symbolic; this module does **not** make
`datetime.timedelta` provide this interface.
"""
min = Attribute("The most negative timedelta object")
max = Attribute("The most positive timedelta object")
resolution = Attribute(
"The smallest difference between non-equal timedelta objects")
class ITimeDelta(ITimeDeltaClass):
"""Represent the difference between two datetime objects.
Implemented by `datetime.timedelta`.
Supported operators:
- add, subtract timedelta
- unary plus, minus, abs
- compare to timedelta
- multiply, divide by int/long
In addition, `.datetime` supports subtraction of two `.datetime` objects
returning a `.timedelta`, and addition or subtraction of a `.datetime`
and a `.timedelta` giving a `.datetime`.
Representation: (days, seconds, microseconds).
"""
days = Attribute("Days between -999999999 and 999999999 inclusive")
seconds = Attribute("Seconds between 0 and 86399 inclusive")
microseconds = Attribute("Microseconds between 0 and 999999 inclusive")
class IDateClass(Interface):
"""This is the date class interface.
This is symbolic; this module does **not** make
`datetime.date` provide this interface.
"""
min = Attribute("The earliest representable date")
max = Attribute("The latest representable date")
resolution = Attribute(
"The smallest difference between non-equal date objects")
def today():
"""Return the current local time.
This is equivalent to ``date.fromtimestamp(time.time())``"""
def fromtimestamp(timestamp):
"""Return the local date from a POSIX timestamp (like time.time())
This may raise `ValueError`, if the timestamp is out of the range of
values supported by the platform C ``localtime()`` function. It's common
for this to be restricted to years from 1970 through 2038. Note that
on non-POSIX systems that include leap seconds in their notion of a
timestamp, leap seconds are ignored by `fromtimestamp`.
"""
def fromordinal(ordinal):
"""Return the date corresponding to the proleptic Gregorian ordinal.
January 1 of year 1 has ordinal 1. `ValueError` is raised unless
1 <= ordinal <= date.max.toordinal().
For any date *d*, ``date.fromordinal(d.toordinal()) == d``.
"""
class IDate(IDateClass):
"""Represents a date (year, month and day) in an idealized calendar.
Implemented by `datetime.date`.
Operators:
__repr__, __str__
__cmp__, __hash__
__add__, __radd__, __sub__ (add/radd only with timedelta arg)
"""
year = Attribute("Between MINYEAR and MAXYEAR inclusive.")
month = Attribute("Between 1 and 12 inclusive")
day = Attribute(
"Between 1 and the number of days in the given month of the given year.")
def replace(year, month, day):
"""Return a date with the same value.
Except for those members given new values by whichever keyword
arguments are specified. For example, if ``d == date(2002, 12, 31)``, then
``d.replace(day=26) == date(2002, 12, 26)``.
"""
def timetuple():
"""Return a 9-element tuple of the form returned by `time.localtime`.
The hours, minutes and seconds are 0, and the DST flag is -1.
``d.timetuple()`` is equivalent to
``(d.year, d.month, d.day, 0, 0, 0, d.weekday(), d.toordinal() -
date(d.year, 1, 1).toordinal() + 1, -1)``
"""
def toordinal():
"""Return the proleptic Gregorian ordinal of the date
January 1 of year 1 has ordinal 1. For any date object *d*,
``date.fromordinal(d.toordinal()) == d``.
"""
def weekday():
"""Return the day of the week as an integer.
Monday is 0 and Sunday is 6. For example,
``date(2002, 12, 4).weekday() == 2``, a Wednesday.
.. seealso:: `isoweekday`.
"""
def isoweekday():
"""Return the day of the week as an integer.
Monday is 1 and Sunday is 7. For example,
date(2002, 12, 4).isoweekday() == 3, a Wednesday.
.. seealso:: `weekday`, `isocalendar`.
"""
def isocalendar():
"""Return a 3-tuple, (ISO year, ISO week number, ISO weekday).
The ISO calendar is a widely used variant of the Gregorian calendar.
See http://www.phys.uu.nl/~vgent/calendar/isocalendar.htm for a good
explanation.
The ISO year consists of 52 or 53 full weeks, and where a week starts
on a Monday and ends on a Sunday. The first week of an ISO year is the
first (Gregorian) calendar week of a year containing a Thursday. This
is called week number 1, and the ISO year of that Thursday is the same
as its Gregorian year.
For example, 2004 begins on a Thursday, so the first week of ISO year
2004 begins on Monday, 29 Dec 2003 and ends on Sunday, 4 Jan 2004, so
that ``date(2003, 12, 29).isocalendar() == (2004, 1, 1)`` and
``date(2004, 1, 4).isocalendar() == (2004, 1, 7)``.
"""
def isoformat():
"""Return a string representing the date in ISO 8601 format.
This is 'YYYY-MM-DD'.
For example, ``date(2002, 12, 4).isoformat() == '2002-12-04'``.
"""
def __str__():
"""For a date *d*, ``str(d)`` is equivalent to ``d.isoformat()``."""
def ctime():
"""Return a string representing the date.
For example date(2002, 12, 4).ctime() == 'Wed Dec 4 00:00:00 2002'.
d.ctime() is equivalent to time.ctime(time.mktime(d.timetuple()))
on platforms where the native C ctime() function
(which `time.ctime` invokes, but which date.ctime() does not invoke)
conforms to the C standard.
"""
def strftime(format):
"""Return a string representing the date.
Controlled by an explicit format string. Format codes referring to
hours, minutes or seconds will see 0 values.
"""
class IDateTimeClass(Interface):
"""This is the datetime class interface.
This is symbolic; this module does **not** make
`datetime.datetime` provide this interface.
"""
min = Attribute("The earliest representable datetime")
max = Attribute("The latest representable datetime")
resolution = Attribute(
"The smallest possible difference between non-equal datetime objects")
def today():
"""Return the current local datetime, with tzinfo None.
This is equivalent to ``datetime.fromtimestamp(time.time())``.
.. seealso:: `now`, `fromtimestamp`.
"""
def now(tz=None):
"""Return the current local date and time.
If optional argument *tz* is None or not specified, this is like `today`,
but, if possible, supplies more precision than can be gotten from going
through a `time.time` timestamp (for example, this may be possible on
platforms supplying the C ``gettimeofday()`` function).
Else tz must be an instance of a class tzinfo subclass, and the current
date and time are converted to tz's time zone. In this case the result
is equivalent to tz.fromutc(datetime.utcnow().replace(tzinfo=tz)).
.. seealso:: `today`, `utcnow`.
"""
def utcnow():
"""Return the current UTC date and time, with tzinfo None.
This is like `now`, but returns the current UTC date and time, as a
naive datetime object.
.. seealso:: `now`.
"""
def fromtimestamp(timestamp, tz=None):
"""Return the local date and time corresponding to the POSIX timestamp.
Same as is returned by time.time(). If optional argument tz is None or
not specified, the timestamp is converted to the platform's local date
and time, and the returned datetime object is naive.
Else tz must be an instance of a class tzinfo subclass, and the
timestamp is converted to tz's time zone. In this case the result is
equivalent to
``tz.fromutc(datetime.utcfromtimestamp(timestamp).replace(tzinfo=tz))``.
fromtimestamp() may raise `ValueError`, if the timestamp is out of the
range of values supported by the platform C localtime() or gmtime()
functions. It's common for this to be restricted to years in 1970
through 2038. Note that on non-POSIX systems that include leap seconds
in their notion of a timestamp, leap seconds are ignored by
fromtimestamp(), and then it's possible to have two timestamps
differing by a second that yield identical datetime objects.
.. seealso:: `utcfromtimestamp`.
"""
def utcfromtimestamp(timestamp):
"""Return the UTC datetime from the POSIX timestamp with tzinfo None.
This may raise `ValueError`, if the timestamp is out of the range of
values supported by the platform C ``gmtime()`` function. It's common for
this to be restricted to years in 1970 through 2038.
.. seealso:: `fromtimestamp`.
"""
def fromordinal(ordinal):
"""Return the datetime from the proleptic Gregorian ordinal.
January 1 of year 1 has ordinal 1. `ValueError` is raised unless
1 <= ordinal <= datetime.max.toordinal().
The hour, minute, second and microsecond of the result are all 0, and
tzinfo is None.
"""
def combine(date, time):
"""Return a new datetime object.
Its date members are equal to the given date object's, and whose time
and tzinfo members are equal to the given time object's. For any
datetime object *d*, ``d == datetime.combine(d.date(), d.timetz())``.
If date is a datetime object, its time and tzinfo members are ignored.
"""
class IDateTime(IDate, IDateTimeClass):
"""Object contains all the information from a date object and a time object.
Implemented by `datetime.datetime`.
"""
year = Attribute("Year between MINYEAR and MAXYEAR inclusive")
month = Attribute("Month between 1 and 12 inclusive")
day = Attribute(
"Day between 1 and the number of days in the given month of the year")
hour = Attribute("Hour in range(24)")
minute = Attribute("Minute in range(60)")
second = Attribute("Second in range(60)")
microsecond = Attribute("Microsecond in range(1000000)")
tzinfo = Attribute(
"""The object passed as the tzinfo argument to the datetime constructor
or None if none was passed""")
def date():
"""Return date object with same year, month and day."""
def time():
"""Return time object with same hour, minute, second, microsecond.
tzinfo is None.
.. seealso:: Method :meth:`timetz`.
"""
def timetz():
"""Return time object with same hour, minute, second, microsecond,
and tzinfo.
.. seealso:: Method :meth:`time`.
"""
def replace(year, month, day, hour, minute, second, microsecond, tzinfo):
"""Return a datetime with the same members, except for those members
given new values by whichever keyword arguments are specified.
Note that ``tzinfo=None`` can be specified to create a naive datetime from
an aware datetime with no conversion of date and time members.
"""
def astimezone(tz):
"""Return a datetime object with new tzinfo member tz, adjusting the
date and time members so the result is the same UTC time as self, but
in tz's local time.
tz must be an instance of a tzinfo subclass, and its utcoffset() and
dst() methods must not return None. self must be aware (self.tzinfo
must not be None, and self.utcoffset() must not return None).
If self.tzinfo is tz, self.astimezone(tz) is equal to self: no
adjustment of date or time members is performed. Else the result is
local time in time zone tz, representing the same UTC time as self:
after astz = dt.astimezone(tz), astz - astz.utcoffset()
will usually have the same date and time members as dt - dt.utcoffset().
The discussion of class `datetime.tzinfo` explains the cases at Daylight Saving
Time transition boundaries where this cannot be achieved (an issue only
if tz models both standard and daylight time).
If you merely want to attach a time zone object *tz* to a datetime *dt*
without adjustment of date and time members, use ``dt.replace(tzinfo=tz)``.
If you merely want to remove the time zone object from an aware
datetime dt without conversion of date and time members, use
``dt.replace(tzinfo=None)``.
Note that the default `tzinfo.fromutc` method can be overridden in a
tzinfo subclass to effect the result returned by `astimezone`.
"""
def utcoffset():
"""Return the timezone offset in minutes east of UTC (negative west of
UTC)."""
def dst():
"""Return 0 if DST is not in effect, or the DST offset (in minutes
eastward) if DST is in effect.
"""
def tzname():
"""Return the timezone name."""
def timetuple():
"""Return a 9-element tuple of the form returned by `time.localtime`."""
def utctimetuple():
"""Return UTC time tuple compatilble with `time.gmtime`."""
def toordinal():
"""Return the proleptic Gregorian ordinal of the date.
The same as self.date().toordinal().
"""
def weekday():
"""Return the day of the week as an integer.
Monday is 0 and Sunday is 6. The same as self.date().weekday().
See also isoweekday().
"""
def isoweekday():
"""Return the day of the week as an integer.
Monday is 1 and Sunday is 7. The same as self.date().isoweekday.
.. seealso:: `weekday`, `isocalendar`.
"""
def isocalendar():
"""Return a 3-tuple, (ISO year, ISO week number, ISO weekday).
The same as self.date().isocalendar().
"""
def isoformat(sep='T'):
"""Return a string representing the date and time in ISO 8601 format.
YYYY-MM-DDTHH:MM:SS.mmmmmm or YYYY-MM-DDTHH:MM:SS if microsecond is 0
If `utcoffset` does not return None, a 6-character string is appended,
giving the UTC offset in (signed) hours and minutes:
YYYY-MM-DDTHH:MM:SS.mmmmmm+HH:MM or YYYY-MM-DDTHH:MM:SS+HH:MM
if microsecond is 0.
The optional argument sep (default 'T') is a one-character separator,
placed between the date and time portions of the result.
"""
def __str__():
"""For a datetime instance *d*, ``str(d)`` is equivalent to ``d.isoformat(' ')``.
"""
def ctime():
"""Return a string representing the date and time.
``datetime(2002, 12, 4, 20, 30, 40).ctime() == 'Wed Dec 4 20:30:40 2002'``.
``d.ctime()`` is equivalent to ``time.ctime(time.mktime(d.timetuple()))`` on
platforms where the native C ``ctime()`` function (which `time.ctime`
invokes, but which `datetime.ctime` does not invoke) conforms to the
C standard.
"""
def strftime(format):
"""Return a string representing the date and time.
This is controlled by an explicit format string.
"""
class ITimeClass(Interface):
"""This is the time class interface.
This is symbolic; this module does **not** make
`datetime.time` provide this interface.
"""
min = Attribute("The earliest representable time")
max = Attribute("The latest representable time")
resolution = Attribute(
"The smallest possible difference between non-equal time objects")
class ITime(ITimeClass):
"""Represent time with time zone.
Implemented by `datetime.time`.
Operators:
__repr__, __str__
__cmp__, __hash__
"""
hour = Attribute("Hour in range(24)")
minute = Attribute("Minute in range(60)")
second = Attribute("Second in range(60)")
microsecond = Attribute("Microsecond in range(1000000)")
tzinfo = Attribute(
"""The object passed as the tzinfo argument to the time constructor
or None if none was passed.""")
def replace(hour, minute, second, microsecond, tzinfo):
"""Return a time with the same value.
Except for those members given new values by whichever keyword
arguments are specified. Note that tzinfo=None can be specified
to create a naive time from an aware time, without conversion of the
time members.
"""
def isoformat():
"""Return a string representing the time in ISO 8601 format.
That is HH:MM:SS.mmmmmm or, if self.microsecond is 0, HH:MM:SS
If utcoffset() does not return None, a 6-character string is appended,
giving the UTC offset in (signed) hours and minutes:
HH:MM:SS.mmmmmm+HH:MM or, if self.microsecond is 0, HH:MM:SS+HH:MM
"""
def __str__():
"""For a time t, str(t) is equivalent to t.isoformat()."""
def strftime(format):
"""Return a string representing the time.
This is controlled by an explicit format string.
"""
def utcoffset():
"""Return the timezone offset in minutes east of UTC (negative west of
UTC).
If tzinfo is None, returns None, else returns
self.tzinfo.utcoffset(None), and raises an exception if the latter
doesn't return None or a timedelta object representing a whole number
of minutes with magnitude less than one day.
"""
def dst():
"""Return 0 if DST is not in effect, or the DST offset (in minutes
eastward) if DST is in effect.
If tzinfo is None, returns None, else returns self.tzinfo.dst(None),
and raises an exception if the latter doesn't return None, or a
timedelta object representing a whole number of minutes with
magnitude less than one day.
"""
def tzname():
"""Return the timezone name.
If tzinfo is None, returns None, else returns self.tzinfo.tzname(None),
or raises an exception if the latter doesn't return None or a string
object.
"""
class ITZInfo(Interface):
"""Time zone info class.
"""
def utcoffset(dt):
"""Return offset of local time from UTC, in minutes east of UTC.
If local time is west of UTC, this should be negative.
Note that this is intended to be the total offset from UTC;
for example, if a tzinfo object represents both time zone and DST
adjustments, utcoffset() should return their sum. If the UTC offset
isn't known, return None. Else the value returned must be a timedelta
object specifying a whole number of minutes in the range -1439 to 1439
inclusive (1440 = 24*60; the magnitude of the offset must be less
than one day).
"""
def dst(dt):
"""Return the daylight saving time (DST) adjustment, in minutes east
of UTC, or None if DST information isn't known.
"""
def tzname(dt):
"""Return the time zone name corresponding to the datetime object as
a string.
"""
def fromutc(dt):
"""Return an equivalent datetime in self's local time."""
classImplements(timedelta, ITimeDelta)
classImplements(date, IDate)
classImplements(datetime, IDateTime)
classImplements(time, ITime)
classImplements(tzinfo, ITZInfo)
## directlyProvides(timedelta, ITimeDeltaClass)
## directlyProvides(date, IDateClass)
## directlyProvides(datetime, IDateTimeClass)
## directlyProvides(time, ITimeClass) | zope.interface | /zope.interface-6.1a2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl/zope/interface/common/idatetime.py | idatetime.py |
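# With the declarations above in place, standard library instances provide
# the corresponding interfaces, for example::
#
#   >>> from datetime import date, datetime
#   >>> from zope.interface.common.idatetime import IDate, IDateTime
#   >>> IDate.providedBy(date.today())
#   True
#   >>> IDateTime.providedBy(datetime.now())
#   True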
"""Interfaces for standard python exceptions
"""
from zope.interface import Interface
from zope.interface import classImplements
class IException(Interface):
"Interface for `Exception`"
classImplements(Exception, IException)
class IStandardError(IException):
"Interface for `StandardError` (no longer existing.)"
class IWarning(IException):
"Interface for `Warning`"
classImplements(Warning, IWarning)
class ISyntaxError(IStandardError):
"Interface for `SyntaxError`"
classImplements(SyntaxError, ISyntaxError)
class ILookupError(IStandardError):
"Interface for `LookupError`"
classImplements(LookupError, ILookupError)
class IValueError(IStandardError):
"Interface for `ValueError`"
classImplements(ValueError, IValueError)
class IRuntimeError(IStandardError):
"Interface for `RuntimeError`"
classImplements(RuntimeError, IRuntimeError)
class IArithmeticError(IStandardError):
"Interface for `ArithmeticError`"
classImplements(ArithmeticError, IArithmeticError)
class IAssertionError(IStandardError):
"Interface for `AssertionError`"
classImplements(AssertionError, IAssertionError)
class IAttributeError(IStandardError):
"Interface for `AttributeError`"
classImplements(AttributeError, IAttributeError)
class IDeprecationWarning(IWarning):
"Interface for `DeprecationWarning`"
classImplements(DeprecationWarning, IDeprecationWarning)
class IEOFError(IStandardError):
"Interface for `EOFError`"
classImplements(EOFError, IEOFError)
class IEnvironmentError(IStandardError):
"Interface for `EnvironmentError`"
classImplements(EnvironmentError, IEnvironmentError)
class IFloatingPointError(IArithmeticError):
"Interface for `FloatingPointError`"
classImplements(FloatingPointError, IFloatingPointError)
class IIOError(IEnvironmentError):
"Interface for `IOError`"
classImplements(IOError, IIOError)
class IImportError(IStandardError):
"Interface for `ImportError`"
classImplements(ImportError, IImportError)
class IIndentationError(ISyntaxError):
"Interface for `IndentationError`"
classImplements(IndentationError, IIndentationError)
class IIndexError(ILookupError):
"Interface for `IndexError`"
classImplements(IndexError, IIndexError)
class IKeyError(ILookupError):
"Interface for `KeyError`"
classImplements(KeyError, IKeyError)
class IKeyboardInterrupt(IStandardError):
"Interface for `KeyboardInterrupt`"
classImplements(KeyboardInterrupt, IKeyboardInterrupt)
class IMemoryError(IStandardError):
"Interface for `MemoryError`"
classImplements(MemoryError, IMemoryError)
class INameError(IStandardError):
"Interface for `NameError`"
classImplements(NameError, INameError)
class INotImplementedError(IRuntimeError):
"Interface for `NotImplementedError`"
classImplements(NotImplementedError, INotImplementedError)
class IOSError(IEnvironmentError):
"Interface for `OSError`"
classImplements(OSError, IOSError)
class IOverflowError(IArithmeticError):
    "Interface for `OverflowError`"
classImplements(OverflowError, IOverflowError)
class IOverflowWarning(IWarning):
"""Deprecated, no standard class implements this.
This was the interface for ``OverflowWarning`` prior to Python 2.5,
but that class was removed for all versions after that.
"""
class IReferenceError(IStandardError):
"Interface for `ReferenceError`"
classImplements(ReferenceError, IReferenceError)
class IRuntimeWarning(IWarning):
"Interface for `RuntimeWarning`"
classImplements(RuntimeWarning, IRuntimeWarning)
class IStopIteration(IException):
"Interface for `StopIteration`"
classImplements(StopIteration, IStopIteration)
class ISyntaxWarning(IWarning):
"Interface for `SyntaxWarning`"
classImplements(SyntaxWarning, ISyntaxWarning)
class ISystemError(IStandardError):
"Interface for `SystemError`"
classImplements(SystemError, ISystemError)
class ISystemExit(IException):
"Interface for `SystemExit`"
classImplements(SystemExit, ISystemExit)
class ITabError(IIndentationError):
"Interface for `TabError`"
classImplements(TabError, ITabError)
class ITypeError(IStandardError):
"Interface for `TypeError`"
classImplements(TypeError, ITypeError)
class IUnboundLocalError(INameError):
"Interface for `UnboundLocalError`"
classImplements(UnboundLocalError, IUnboundLocalError)
class IUnicodeError(IValueError):
"Interface for `UnicodeError`"
classImplements(UnicodeError, IUnicodeError)
class IUserWarning(IWarning):
"Interface for `UserWarning`"
classImplements(UserWarning, IUserWarning)
class IZeroDivisionError(IArithmeticError):
"Interface for `ZeroDivisionError`"
classImplements(ZeroDivisionError, IZeroDivisionError) | zope.interface | /zope.interface-6.1a2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl/zope/interface/common/interfaces.py | interfaces.py |
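# With these declarations, instances of the built-in exceptions provide the
# corresponding interfaces, for example::
#
#   >>> IValueError.providedBy(ValueError())
#   True
#   >>> ILookupError.providedBy(KeyError('x'))  # KeyError extends LookupError
#   True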
from zope.interface import Interface
from zope.interface.common import collections
class IItemMapping(Interface):
"""Simplest readable mapping object
"""
def __getitem__(key):
"""Get a value for a key
A `KeyError` is raised if there is no value for the key.
"""
class IReadMapping(collections.IContainer, IItemMapping):
"""
Basic mapping interface.
.. versionchanged:: 5.0.0
Extend ``IContainer``
"""
def get(key, default=None):
"""Get a value for a key
The default is returned if there is no value for the key.
"""
def __contains__(key):
"""Tell if a key exists in the mapping."""
# Optional in IContainer, required by this interface.
class IWriteMapping(Interface):
"""Mapping methods for changing data"""
def __delitem__(key):
"""Delete a value from the mapping using the key."""
def __setitem__(key, value):
"""Set a new item in the mapping."""
class IEnumerableMapping(collections.ISized, IReadMapping):
"""
Mapping objects whose items can be enumerated.
.. versionchanged:: 5.0.0
Extend ``ISized``
"""
def keys():
"""Return the keys of the mapping object.
"""
def __iter__():
"""Return an iterator for the keys of the mapping object.
"""
def values():
"""Return the values of the mapping object.
"""
def items():
"""Return the items of the mapping object.
"""
class IMapping(IWriteMapping, IEnumerableMapping):
''' Simple mapping interface '''
class IIterableMapping(IEnumerableMapping):
"""A mapping that has distinct methods for iterating
without copying.
"""
class IClonableMapping(Interface):
"""Something that can produce a copy of itself.
This is available in `dict`.
"""
def copy():
"return copy of dict"
class IExtendedReadMapping(IIterableMapping):
"""
Something with a particular method equivalent to ``__contains__``.
On Python 2, `dict` provided the ``has_key`` method, but it was removed
in Python 3.
"""
class IExtendedWriteMapping(IWriteMapping):
"""Additional mutation methods.
These are all provided by `dict`.
"""
def clear():
"delete all items"
def update(d):
" Update D from E: for k in E.keys(): D[k] = E[k]"
def setdefault(key, default=None):
"D.setdefault(k[,d]) -> D.get(k,d), also set D[k]=d if k not in D"
def pop(k, default=None):
"""
pop(k[,default]) -> value
Remove specified key and return the corresponding value.
If key is not found, *default* is returned if given, otherwise
`KeyError` is raised. Note that *default* must not be passed by
name.
"""
def popitem():
"""remove and return some (key, value) pair as a
2-tuple; but raise KeyError if mapping is empty"""
class IFullMapping(
collections.IMutableMapping,
IExtendedReadMapping, IExtendedWriteMapping, IClonableMapping, IMapping,):
"""
Full mapping interface.
Most uses of this interface should instead use
:class:`~zope.interface.common.collections.IMutableMapping` (one of the
bases of this interface). The required methods are the same.
.. versionchanged:: 5.0.0
Extend ``IMutableMapping``
""" | zope.interface | /zope.interface-6.1a2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl/zope/interface/common/mapping.py | mapping.py |
from zope.interface import classImplements
from zope.interface.common import collections
from zope.interface.common import numbers
from zope.interface.common import io
__all__ = [
'IList',
'ITuple',
'ITextString',
'IByteString',
'INativeString',
'IBool',
'IDict',
'IFile',
]
# pylint:disable=no-self-argument
class IList(collections.IMutableSequence):
"""
Interface for :class:`list`
"""
extra_classes = (list,)
def sort(key=None, reverse=False):
"""
Sort the list in place and return None.
*key* and *reverse* must be passed by name only.
"""
class ITuple(collections.ISequence):
"""
Interface for :class:`tuple`
"""
extra_classes = (tuple,)
class ITextString(collections.ISequence):
"""
Interface for text ("unicode") strings.
This is :class:`str`
"""
extra_classes = (str,)
class IByteString(collections.IByteString):
"""
Interface for immutable byte strings.
On all Python versions this is :class:`bytes`.
Unlike :class:`zope.interface.common.collections.IByteString`
(the parent of this interface) this does *not* include
:class:`bytearray`.
"""
extra_classes = (bytes,)
class INativeString(ITextString):
"""
Interface for native strings.
On all Python versions, this is :class:`str`. It extends
:class:`ITextString`.
"""
"""
# We're not extending ABCInterface so extra_classes won't work
classImplements(str, INativeString)
class IBool(numbers.IIntegral):
"""
Interface for :class:`bool`
"""
extra_classes = (bool,)
class IDict(collections.IMutableMapping):
"""
Interface for :class:`dict`
"""
extra_classes = (dict,)
class IFile(io.IIOBase):
"""
Interface for :class:`file`.
It is recommended to use the interfaces from :mod:`zope.interface.common.io`
instead of this interface.
On Python 3, there is no single implementation of this interface;
depending on the arguments, the :func:`open` builtin can return
many different classes that implement different interfaces from
:mod:`zope.interface.common.io`.
"""
extra_classes = () | zope.interface | /zope.interface-6.1a2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl/zope/interface/common/builtins.py | builtins.py |
import itertools
from types import FunctionType
from zope.interface import classImplements
from zope.interface import Interface
from zope.interface.interface import fromFunction
from zope.interface.interface import InterfaceClass
from zope.interface.interface import _decorator_non_return
__all__ = [
# Nothing public here.
]
# pylint:disable=inherit-non-class,
# pylint:disable=no-self-argument,no-method-argument
# pylint:disable=unexpected-special-method-signature
class optional:
# Apply this decorator to a method definition to make it
# optional (remove it from the list of required names), overriding
# the definition inherited from the ABC.
def __init__(self, method):
self.__doc__ = method.__doc__
class ABCInterfaceClass(InterfaceClass):
"""
An interface that is automatically derived from a
:class:`abc.ABCMeta` type.
Internal use only.
The body of the interface definition *must* define
a property ``abc`` that is the ABC to base the interface on.
If ``abc`` is *not* in the interface definition, a regular
interface will be defined instead (but ``extra_classes`` is still
respected).
Use the ``@optional`` decorator on method definitions if
the ABC defines methods that are not actually required in all cases
because the Python language has multiple ways to implement a protocol.
For example, the ``iter()`` protocol can be implemented with
``__iter__`` or the pair ``__len__`` and ``__getitem__``.
When created, any existing classes that are registered to conform
to the ABC are declared to implement this interface. This is *not*
automatically updated as the ABC registry changes. If the body of the
interface definition defines ``extra_classes``, it should be a
tuple giving additional classes to declare implement the interface.
Note that this is not fully symmetric. For example, it is usually
the case that a subclass relationship carries the interface
declarations over::
>>> from zope.interface import Interface
>>> class I1(Interface):
... pass
...
>>> from zope.interface import implementer
>>> @implementer(I1)
... class Root(object):
... pass
...
>>> class Child(Root):
... pass
...
>>> child = Child()
>>> isinstance(child, Root)
True
>>> from zope.interface import providedBy
>>> list(providedBy(child))
[<InterfaceClass __main__.I1>]
However, that's not the case with ABCs and ABC interfaces. Just
because ``isinstance(A(), AnABC)`` and ``isinstance(B(), AnABC)``
are both true, that doesn't mean there's any class hierarchy
relationship between ``A`` and ``B``, or between either of them
and ``AnABC``. Thus, if ``AnABC`` implemented ``IAnABC``, it would
not follow that either ``A`` or ``B`` implements ``IAnABC`` (nor
their instances provide it)::
>>> class SizedClass(object):
... def __len__(self): return 1
...
>>> from collections.abc import Sized
>>> isinstance(SizedClass(), Sized)
True
>>> from zope.interface import classImplements
>>> classImplements(Sized, I1)
>>> list(providedBy(SizedClass()))
[]
Thus, to avoid conflicting assumptions, ABCs should not be
declared to implement their parallel ABC interface. Only concrete
classes specifically registered with the ABC should be declared to
do so.
.. versionadded:: 5.0.0
"""
# If we could figure out invalidation, and used some special
# Specification/Declaration instances, and override the method ``providedBy`` here,
# perhaps we could more closely integrate with ABC virtual inheritance?
def __init__(self, name, bases, attrs):
# go ahead and give us a name to ease debugging.
self.__name__ = name
extra_classes = attrs.pop('extra_classes', ())
ignored_classes = attrs.pop('ignored_classes', ())
if 'abc' not in attrs:
# Something like ``IList(ISequence)``: We're extending
# abc interfaces but not an ABC interface ourself.
InterfaceClass.__init__(self, name, bases, attrs)
ABCInterfaceClass.__register_classes(self, extra_classes, ignored_classes)
self.__class__ = InterfaceClass
return
based_on = attrs.pop('abc')
self.__abc = based_on
self.__extra_classes = tuple(extra_classes)
self.__ignored_classes = tuple(ignored_classes)
assert name[1:] == based_on.__name__, (name, based_on)
methods = {
# Passing the name is important in case of aliases,
# e.g., ``__ror__ = __or__``.
k: self.__method_from_function(v, k)
for k, v in vars(based_on).items()
if isinstance(v, FunctionType) and not self.__is_private_name(k)
and not self.__is_reverse_protocol_name(k)
}
methods['__doc__'] = self.__create_class_doc(attrs)
# Anything specified in the body takes precedence.
methods.update(attrs)
InterfaceClass.__init__(self, name, bases, methods)
self.__register_classes()
@staticmethod
def __optional_methods_to_docs(attrs):
optionals = {k: v for k, v in attrs.items() if isinstance(v, optional)}
for k in optionals:
attrs[k] = _decorator_non_return
if not optionals:
return ''
docs = "\n\nThe following methods are optional:\n - " + "\n-".join(
"{}\n{}".format(k, v.__doc__) for k, v in optionals.items()
)
return docs
def __create_class_doc(self, attrs):
based_on = self.__abc
def ref(c):
mod = c.__module__
name = c.__name__
if mod == str.__module__:
return "`%s`" % name
if mod == '_io':
mod = 'io'
return "`{}.{}`".format(mod, name)
implementations_doc = "\n - ".join(
ref(c)
for c in sorted(self.getRegisteredConformers(), key=ref)
)
if implementations_doc:
implementations_doc = "\n\nKnown implementations are:\n\n - " + implementations_doc
based_on_doc = (based_on.__doc__ or '')
based_on_doc = based_on_doc.splitlines()
based_on_doc = based_on_doc[0] if based_on_doc else ''
doc = """Interface for the ABC `{}.{}`.\n\n{}{}{}""".format(
based_on.__module__, based_on.__name__,
attrs.get('__doc__', based_on_doc),
self.__optional_methods_to_docs(attrs),
implementations_doc
)
return doc
@staticmethod
def __is_private_name(name):
if name.startswith('__') and name.endswith('__'):
return False
return name.startswith('_')
@staticmethod
def __is_reverse_protocol_name(name):
# The reverse names, like __rand__,
# aren't really part of the protocol. The interpreter has
# very complex behaviour around invoking those. PyPy
# doesn't always even expose them as attributes.
return name.startswith('__r') and name.endswith('__')
def __method_from_function(self, function, name):
method = fromFunction(function, self, name=name)
# Eliminate the leading *self*, which is implied in
# an interface, but explicit in an ABC.
method.positional = method.positional[1:]
return method
def __register_classes(self, conformers=None, ignored_classes=None):
# Make the concrete classes already present in our ABC's registry
# declare that they implement this interface.
conformers = conformers if conformers is not None else self.getRegisteredConformers()
ignored = ignored_classes if ignored_classes is not None else self.__ignored_classes
for cls in conformers:
if cls in ignored:
continue
classImplements(cls, self)
def getABC(self):
"""
Return the ABC this interface represents.
"""
return self.__abc
def getRegisteredConformers(self):
"""
Return an iterable of the classes that are known to conform to
the ABC this interface parallels.
"""
based_on = self.__abc
# The registry only contains things that aren't already
# known to be subclasses of the ABC. But the ABC is in charge
        # of checking that, so it's quite possible that registrations
# are in fact ignored, winding up just in the _abc_cache.
try:
registered = list(based_on._abc_registry) + list(based_on._abc_cache)
except AttributeError:
# Rewritten in C in CPython 3.7.
# These expose the underlying weakref.
from abc import _get_dump
data = _get_dump(based_on)
registry = data[0]
cache = data[1]
registered = [x() for x in itertools.chain(registry, cache)]
registered = [x for x in registered if x is not None]
return set(itertools.chain(registered, self.__extra_classes))
def _create_ABCInterface():
# It's a two-step process to create the root ABCInterface, because
# without specifying a corresponding ABC, using the normal constructor
# gets us a plain InterfaceClass object, and there is no ABC to associate with the
# root.
abc_name_bases_attrs = ('ABCInterface', (Interface,), {})
instance = ABCInterfaceClass.__new__(ABCInterfaceClass, *abc_name_bases_attrs)
InterfaceClass.__init__(instance, *abc_name_bases_attrs)
return instance
ABCInterface = _create_ABCInterface() | zope.interface | /zope.interface-6.1a2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl/zope/interface/common/__init__.py | __init__.py |
__docformat__ = 'restructuredtext'
from zope.interface import Interface
from zope.interface.common import collections
class IMinimalSequence(collections.IIterable):
"""Most basic sequence interface.
All sequences are iterable. This requires at least one of the
following:
- a `__getitem__()` method that takes a single argument; integer
values starting at 0 must be supported, and `IndexError` should
be raised for the first index for which there is no value, or
- an `__iter__()` method that returns an iterator as defined in
the Python documentation (http://docs.python.org/lib/typeiter.html).
"""
def __getitem__(index):
"""``x.__getitem__(index) <==> x[index]``
Declaring this interface does not specify whether `__getitem__`
supports slice objects."""
class IFiniteSequence(collections.ISized, IMinimalSequence):
"""
A sequence of bound size.
.. versionchanged:: 5.0.0
Extend ``ISized``
"""
class IReadSequence(collections.IContainer, IFiniteSequence):
"""
read interface shared by tuple and list
This interface is similar to
:class:`~zope.interface.common.collections.ISequence`, but
requires that all instances be totally ordered. Most users
should prefer ``ISequence``.
.. versionchanged:: 5.0.0
Extend ``IContainer``
"""
def __contains__(item):
"""``x.__contains__(item) <==> item in x``"""
# Optional in IContainer, required here.
def __lt__(other):
"""``x.__lt__(other) <==> x < other``"""
def __le__(other):
"""``x.__le__(other) <==> x <= other``"""
def __eq__(other):
"""``x.__eq__(other) <==> x == other``"""
def __ne__(other):
"""``x.__ne__(other) <==> x != other``"""
def __gt__(other):
"""``x.__gt__(other) <==> x > other``"""
def __ge__(other):
"""``x.__ge__(other) <==> x >= other``"""
def __add__(other):
"""``x.__add__(other) <==> x + other``"""
def __mul__(n):
"""``x.__mul__(n) <==> x * n``"""
def __rmul__(n):
"""``x.__rmul__(n) <==> n * x``"""
class IExtendedReadSequence(IReadSequence):
"""Full read interface for lists"""
def count(item):
"""Return number of occurrences of value"""
def index(item, *args):
"""index(value, [start, [stop]]) -> int
Return first index of *value*
"""
class IUniqueMemberWriteSequence(Interface):
"""The write contract for a sequence that may enforce unique members"""
def __setitem__(index, item):
"""``x.__setitem__(index, item) <==> x[index] = item``
Declaring this interface does not specify whether `__setitem__`
supports slice objects.
"""
def __delitem__(index):
"""``x.__delitem__(index) <==> del x[index]``
Declaring this interface does not specify whether `__delitem__`
supports slice objects.
"""
def __iadd__(y):
"""``x.__iadd__(y) <==> x += y``"""
def append(item):
"""Append item to end"""
def insert(index, item):
"""Insert item before index"""
def pop(index=-1):
"""Remove and return item at index (default last)"""
def remove(item):
"""Remove first occurrence of value"""
def reverse():
"""Reverse *IN PLACE*"""
def sort(cmpfunc=None):
"""Stable sort *IN PLACE*; `cmpfunc(x, y)` -> -1, 0, 1"""
def extend(iterable):
"""Extend list by appending elements from the iterable"""
class IWriteSequence(IUniqueMemberWriteSequence):
"""Full write contract for sequences"""
def __imul__(n):
"""``x.__imul__(n) <==> x *= n``"""
class ISequence(IReadSequence, IWriteSequence):
"""
Full sequence contract.
New code should prefer
:class:`~zope.interface.common.collections.IMutableSequence`.
Compared to that interface, which is implemented by :class:`list`
(:class:`~zope.interface.common.builtins.IList`), among others,
this interface is missing the following methods:
- clear
- count
- index
This interface adds the following methods:
- sort
""" | zope.interface | /zope.interface-6.1a2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl/zope/interface/common/sequence.py | sequence.py |
from abc import ABCMeta
from collections import abc
from collections import OrderedDict
from collections import UserList
from collections import UserDict
from collections import UserString
from zope.interface.common import ABCInterface
from zope.interface.common import optional
# pylint:disable=inherit-non-class,
# pylint:disable=no-self-argument,no-method-argument
# pylint:disable=unexpected-special-method-signature
# pylint:disable=no-value-for-parameter
def _new_in_ver(name, ver,
bases_if_missing=(ABCMeta,),
register_if_missing=()):
if ver:
return getattr(abc, name)
# TODO: It's a shame to have to repeat the bases when
# the ABC is missing. Can we DRY that?
missing = ABCMeta(name, bases_if_missing, {
'__doc__': "The ABC %s is not defined in this version of Python." % (
name
),
})
for c in register_if_missing:
missing.register(c)
return missing
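# Illustrative sketch of the fallback path (not exercised by the definitions
# below): if ``abc.Reversible`` were missing, ``_new_in_ver('Reversible',
# False, (abc.Iterable,))`` would create a placeholder ABC deriving from
# ``abc.Iterable`` so that the corresponding interface could still be defined.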
__all__ = [
'IAsyncGenerator',
'IAsyncIterable',
'IAsyncIterator',
'IAwaitable',
'ICollection',
'IContainer',
'ICoroutine',
'IGenerator',
'IHashable',
'IItemsView',
'IIterable',
'IIterator',
'IKeysView',
'IMapping',
'IMappingView',
'IMutableMapping',
'IMutableSequence',
'IMutableSet',
'IReversible',
'ISequence',
'ISet',
'ISized',
'IValuesView',
]
class IContainer(ABCInterface):
abc = abc.Container
@optional
def __contains__(other):
"""
Optional method. If not provided, the interpreter will use
``__iter__`` or the old ``__getitem__`` protocol
to implement ``in``.
"""
class IHashable(ABCInterface):
abc = abc.Hashable
class IIterable(ABCInterface):
abc = abc.Iterable
@optional
def __iter__():
"""
Optional method. If not provided, the interpreter will
implement `iter` using the old ``__getitem__`` protocol.
"""
class IIterator(IIterable):
abc = abc.Iterator
class IReversible(IIterable):
abc = _new_in_ver('Reversible', True, (IIterable.getABC(),))
@optional
def __reversed__():
"""
Optional method. If this isn't present, the interpreter
will use ``__len__`` and ``__getitem__`` to implement the
`reversed` builtin.
"""
class IGenerator(IIterator):
# New in Python 3.5
abc = _new_in_ver('Generator', True, (IIterator.getABC(),))
class ISized(ABCInterface):
abc = abc.Sized
# ICallable is not defined because there's no standard signature.
class ICollection(ISized,
IIterable,
IContainer):
abc = _new_in_ver('Collection', True,
(ISized.getABC(), IIterable.getABC(), IContainer.getABC()))
class ISequence(IReversible,
ICollection):
abc = abc.Sequence
extra_classes = (UserString,)
# On Python 2, basestring is registered as an ISequence, and
# its subclass str is an IByteString. If we also register str as
# an ISequence, that tends to lead to inconsistent resolution order.
ignored_classes = (basestring,) if str is bytes else () # pylint:disable=undefined-variable
@optional
def __reversed__():
"""
Optional method. If this isn't present, the interpreter
will use ``__len__`` and ``__getitem__`` to implement the
`reversed` builtin.
"""
@optional
def __iter__():
"""
Optional method. If not provided, the interpreter will
implement `iter` using the old ``__getitem__`` protocol.
"""
class IMutableSequence(ISequence):
abc = abc.MutableSequence
extra_classes = (UserList,)
class IByteString(ISequence):
"""
This unifies `bytes` and `bytearray`.
"""
abc = _new_in_ver('ByteString', True,
(ISequence.getABC(),),
(bytes, bytearray))
class ISet(ICollection):
abc = abc.Set
class IMutableSet(ISet):
abc = abc.MutableSet
class IMapping(ICollection):
abc = abc.Mapping
extra_classes = (dict,)
# OrderedDict is a subclass of dict. On CPython 2,
# it winds up registered as a IMutableMapping, which
# produces an inconsistent IRO if we also try to register it
# here.
ignored_classes = (OrderedDict,)
class IMutableMapping(IMapping):
abc = abc.MutableMapping
extra_classes = (dict, UserDict,)
ignored_classes = (OrderedDict,)
class IMappingView(ISized):
abc = abc.MappingView
class IItemsView(IMappingView, ISet):
abc = abc.ItemsView
class IKeysView(IMappingView, ISet):
abc = abc.KeysView
class IValuesView(IMappingView, ICollection):
abc = abc.ValuesView
@optional
def __contains__(other):
"""
Optional method. If not provided, the interpreter will use
``__iter__`` or the old ``__len__`` and ``__getitem__`` protocol
to implement ``in``.
"""
class IAwaitable(ABCInterface):
abc = _new_in_ver('Awaitable', True)
class ICoroutine(IAwaitable):
abc = _new_in_ver('Coroutine', True)
class IAsyncIterable(ABCInterface):
abc = _new_in_ver('AsyncIterable', True)
class IAsyncIterator(IAsyncIterable):
abc = _new_in_ver('AsyncIterator', True)
class IAsyncGenerator(IAsyncIterator):
abc = _new_in_ver('AsyncGenerator', True) | zope.interface | /zope.interface-6.1a2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl/zope/interface/common/collections.py | collections.py |
=========
Changes
=========
5.0 (2023-02-21)
================
- Add support for Python 3.9, 3.10, 3.11.
- Drop support for Python 2.7, 3.5, 3.6.
- Drop support for deprecated ``python setup.py test``.
4.4.0 (2021-03-19)
==================
- Fixed deprecation warning for ``zope.site.hooks`` in tests.
- Add support for Python 3.7 and 3.8.
- Drop support for Python 3.4.
- Fix incorrect import of
``zope.interface.interfaces.IComponentLookup`` in tests.
4.3.0 (2017-07-26)
==================
- Add support for Python 3.6.
- Drop support for Python 3.3.
4.2.0 (2016-12-08)
==================
- Raise more informative KeyError subclasses from the utility when intids
or objects cannot be found. This distinguishes them from errors
raised by normal dictionaries or BTrees, and is useful in unit
testing or when persisting intids or sharing them among processes
for later or concurrent use.
- Propagate ``POSKeyError`` from ``queryId`` instead of returning the
default object. This exception indicates a corrupt database, not a
missing object. The ``queryObject`` function already behaved this way.
- Stop depending on ZODB for anything except testing.
- Add support for Python 3.5 and PyPy3 5.2.
- Drop support for Python 2.6.
4.1.0 (2014-12-27)
==================
- Add support for PyPy (PyPy3 blocked on PyPy3-compatible ``zodbpickle``).
- Add support for Python 3.4.
4.0.0 (2014-12-20)
==================
- Add support for testing on Travis.
4.0.0a1 (2013-02-22)
====================
- Add support for Python 3.3.
- Replace deprecated ``zope.interface.implements`` usage with equivalent
``zope.interface.implementer`` decorator.
- Drop support for Python 2.4 and 2.5.
- Bug fix: ensure that the IntId utility never generates ids greater
than the maxint of the BTree family being used.
3.7.2 (2009-12-27)
==================
- Use the zope.component API in favor of ztapi.
- Remove ``zope.app.testing`` dependency.
3.7.1 (2009-05-18)
==================
- Remove dependencies on ``zope.container``. Instead import
``Object*Event`` classes from ``zope.lifecycleevent`` and import
``IContained`` from ``zope.location``. In order to be able to do
this, depend on ``zope.lifecycleevent``>=3.5.2 and
``zope.location``>=3.5.4.
- Remove a dependency on ``zope.container.contained.Contained``
(this is a dumb base class that defines __parent__ and __name__
as None and declares that the class implements IContained).
3.7.0 (2009-02-01)
==================
- Split out this package from ``zope.app.intid``. The latter now
  only contains browser views and compatibility imports, while the
  whole IntId functionality has moved here.
| zope.intid | /zope.intid-5.0.tar.gz/zope.intid-5.0/CHANGES.rst | CHANGES.rst |
================
``zope.intid``
================
.. image:: https://img.shields.io/pypi/v/zope.intid.svg
:target: https://pypi.org/project/zope.intid/
:alt: Latest release
.. image:: https://img.shields.io/pypi/pyversions/zope.intid.svg
:target: https://pypi.org/project/zope.intid/
:alt: Supported Python versions
.. image:: https://github.com/zopefoundation/zope.intid/actions/workflows/tests.yml/badge.svg
:target: https://github.com/zopefoundation/zope.intid/actions/workflows/tests.yml
.. image:: https://readthedocs.org/projects/zopeintid/badge/?version=latest
:target: http://zopeintid.readthedocs.io/en/latest/?badge=latest
:alt: Documentation Status
.. image:: https://coveralls.io/repos/github/zopefoundation/zope.intid/badge.svg?branch=master
:target: https://coveralls.io/github/zopefoundation/zope.intid?branch=master
:alt: Code Coverage
This package provides an API to create integer ids for any object. Later
objects can be looked up by their id as well. This functionality is commonly
used in situations where dealing with objects is undesirable, such as in
search indices or any code that needs an easy hash of an object.
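
A rough sketch of the intended usage (this assumes an ``IntIds`` utility has
already been registered and that ``obj`` can be adapted to
``IKeyReference``)::

    from zope.component import getUtility
    from zope.intid.interfaces import IIntIds

    intids = getUtility(IIntIds)
    uid = intids.register(obj)     # assigns (or reuses) an integer id
    intids.getObject(uid)          # looks the object up again
    intids.getId(obj) == uid       # True
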
Documentation is hosted at http://zopeintid.readthedocs.io
| zope.intid | /zope.intid-5.0.tar.gz/zope.intid-5.0/README.rst | README.rst |
from zope.interface import Attribute
from zope.interface import Interface
from zope.interface import implementer
class IntIdMissingError(KeyError):
"""
Raised when ``getId`` cannot find an intid.
"""
class ObjectMissingError(KeyError):
"""
Raised when ``getObject`` cannot find an object.
"""
class IntIdsCorruptedError(KeyError):
"""
Raised when internal corruption is detected in the utility.
Users should not need to catch this because this situation should
not happen.
"""
class IIntIdsQuery(Interface):
"Query for IDs and objects"
def getObject(uid):
"""Return an object by its unique id"""
def getId(ob):
"""Get a unique id of an object.
"""
def queryObject(uid, default=None):
"""Return an object by its unique id
Return the default if the uid isn't registered
"""
def queryId(ob, default=None):
"""Get a unique id of an object.
Return the default if the object isn't registered
"""
def __iter__():
"""Return an iteration on the ids"""
class IIntIdsSet(Interface):
"Register and unregister objects."
def register(ob):
"""Register an object and returns a unique id generated for it.
The object *must* be adaptable to
:class:`~zope.keyreference.interfaces.IKeyReference`.
If the object is already registered, its id is returned anyway.
"""
def unregister(ob):
"""Remove the object from the indexes.
IntIdMissingError is raised if ob is not registered previously.
"""
class IIntIdsManage(Interface):
"""Some methods used by the view."""
def __len__():
"""Return the number of objects indexed."""
def items():
"""Return a list of (id, object) pairs."""
class IIntIds(IIntIdsSet, IIntIdsQuery, IIntIdsManage):
"""A utility that assigns unique ids to objects.
Allows to query object by id and id by object.
"""
class IIntIdEvent(Interface):
"""Generic base interface for IntId-related events"""
object = Attribute("The object related to this event")
original_event = Attribute("The ObjectEvent related to this event")
class IIntIdRemovedEvent(IIntIdEvent):
"""A unique id will be removed
The event is published before the unique id is removed
from the utility so that the indexing objects can unindex the object.
"""
@implementer(IIntIdRemovedEvent)
class IntIdRemovedEvent:
"""The event which is published before the unique id is removed
from the utility so that the catalogs can unindex the object.
"""
def __init__(self, object, event):
self.object = object
self.original_event = event
class IIntIdAddedEvent(IIntIdEvent):
"""A unique id has been added
The event gets sent when an object is registered in a
unique id utility.
"""
idmap = Attribute(
"The dictionary that holds an (utility -> id) mapping of created ids")
@implementer(IIntIdAddedEvent)
class IntIdAddedEvent:
"""The event which gets sent when an object is registered in a
unique id utility.
"""
def __init__(self, object, event, idmap=None):
self.object = object
self.original_event = event
self.idmap = idmap | zope.intid | /zope.intid-5.0.tar.gz/zope.intid-5.0/src/zope/intid/interfaces.py | interfaces.py |
import random
import BTrees
from persistent import Persistent
from zope.component import adapter
from zope.component import getAllUtilitiesRegisteredFor
from zope.component import handle
from zope.event import notify
from zope.interface import implementer
from zope.keyreference.interfaces import IKeyReference
from zope.keyreference.interfaces import NotYet
from zope.lifecycleevent.interfaces import IObjectAddedEvent
from zope.lifecycleevent.interfaces import IObjectRemovedEvent
from zope.location.interfaces import IContained
from zope.location.interfaces import ILocation
from zope.security.proxy import removeSecurityProxy
from zope.intid.interfaces import IIntIdEvent
from zope.intid.interfaces import IIntIds
from zope.intid.interfaces import IntIdAddedEvent
from zope.intid.interfaces import IntIdMissingError
from zope.intid.interfaces import IntIdRemovedEvent
from zope.intid.interfaces import IntIdsCorruptedError
from zope.intid.interfaces import ObjectMissingError
try:
# POSKeyError is a subclass of KeyError; in the cases where we
# catch KeyError for an item missing from a BTree, we still
# want to propagate this exception that indicates a corrupt database
# (as opposed to a corrupt IntIds)
from ZODB.POSException import POSKeyError as _POSKeyError
except ImportError: # pragma: no cover
# In practice, ZODB will probably be installed. But if not,
# then POSKeyError can never be generated, so use a unique
# exception that we'll never catch.
class _POSKeyError(BaseException):
pass
@implementer(IIntIds, IContained)
class IntIds(Persistent):
"""This utility provides a two way mapping between objects and
integer ids.
IKeyReferences to objects are stored in the indexes.
"""
__parent__ = __name__ = None
_v_nextid = None
_randrange = random.randrange
family = BTrees.family32
def __init__(self, family=None):
if family is not None:
self.family = family
self.ids = self.family.OI.BTree()
self.refs = self.family.IO.BTree()
def __len__(self):
return len(self.ids)
def items(self):
return list(self.refs.items())
def __iter__(self):
return self.refs.iterkeys()
def getObject(self, id):
try:
return self.refs[id]()
except _POSKeyError:
raise
except KeyError:
raise ObjectMissingError(id)
def queryObject(self, id, default=None):
r = self.refs.get(id)
if r is not None:
return r()
return default
def getId(self, ob):
try:
key = IKeyReference(ob)
except (NotYet, TypeError, ValueError):
raise IntIdMissingError(ob)
try:
return self.ids[key]
except _POSKeyError:
raise
except KeyError:
raise IntIdMissingError(ob)
def queryId(self, ob, default=None):
try:
return self.getId(ob)
except _POSKeyError:
raise
except KeyError:
return default
def _generateId(self):
"""Generate an id which is not yet taken.
This tries to allocate sequential ids so they fall into the
same BTree bucket, and randomizes if it stumbles upon a
used one.
"""
nextid = getattr(self, '_v_nextid', None)
while True:
if nextid is None:
nextid = self._randrange(0, self.family.maxint)
uid = nextid
if uid not in self.refs:
nextid += 1
if nextid > self.family.maxint:
nextid = None
self._v_nextid = nextid
return uid
nextid = None
def register(self, ob):
# Note that we'll still need to keep this proxy removal.
ob = removeSecurityProxy(ob)
key = IKeyReference(ob)
if key in self.ids:
return self.ids[key]
uid = self._generateId()
self.refs[uid] = key
self.ids[key] = uid
return uid
def unregister(self, ob):
# Note that we'll still need to keep this proxy removal.
ob = removeSecurityProxy(ob)
key = IKeyReference(ob, None)
if key is None:
return
try:
uid = self.ids[key]
except _POSKeyError:
raise
except KeyError:
raise IntIdMissingError(ob)
try:
del self.refs[uid]
except _POSKeyError:
raise
except KeyError:
# It was in self.ids, but not self.refs. Something is corrupted.
# We've always let this KeyError propagate, before cleaning up
# self.ids, meaning that getId(ob) will continue to work, but
# getObject(uid) will not.
raise IntIdsCorruptedError(ob, uid)
del self.ids[key]
def _utilities_and_key(ob):
utilities = tuple(getAllUtilitiesRegisteredFor(IIntIds))
return utilities, IKeyReference(ob, None) if utilities else None
@adapter(ILocation, IObjectRemovedEvent)
def removeIntIdSubscriber(ob, event):
"""A subscriber to ObjectRemovedEvent
Removes the unique ids registered for the object in all the unique
id utilities.
"""
utilities, key = _utilities_and_key(ob)
if not utilities or key is None:
# Unregister only objects that adapt to key reference
return
# Notify the catalogs that this object is about to be removed.
notify(IntIdRemovedEvent(ob, event))
for utility in utilities:
try:
utility.unregister(key)
except KeyError:
            # Silently ignore all kinds of corruption here
pass
@adapter(ILocation, IObjectAddedEvent)
def addIntIdSubscriber(ob, event):
"""A subscriber to ObjectAddedEvent
Registers the object added in all unique id utilities and fires
an event for the catalogs.
"""
utilities, key = _utilities_and_key(ob)
if not utilities or key is None:
# Register only objects that adapt to key reference
return
idmap = {}
for utility in utilities:
idmap[utility] = utility.register(key)
# Notify the catalogs that this object was added.
notify(IntIdAddedEvent(ob, event, idmap))
@adapter(IIntIdEvent)
def intIdEventNotify(event):
"""Event subscriber to dispatch IntIdEvent to interested adapters."""
handle(event.object, event) | zope.intid | /zope.intid-5.0.tar.gz/zope.intid-5.0/src/zope/intid/__init__.py | __init__.py |
=====================
What is zope.intid?
=====================
This package provides an API to create integer ids for any object. Later
objects can be looked up by their id as well. This functionality is commonly
used in situations where dealing with objects is undesirable, such as in
search indices or any code that needs an easy hash of an object.
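
A compact sketch of the id lifecycle (assuming an ``IIntIds`` utility is
registered and that ``obj`` is adaptable to ``IKeyReference``)::

    from zope.component import getUtility
    from zope.intid.interfaces import IIntIds

    intids = getUtility(IIntIds)
    uid = intids.queryId(obj)      # None if obj was never registered
    if uid is None:
        uid = intids.register(obj)
    intids.unregister(obj)         # IntIdMissingError if never registered
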
.. toctree::
:maxdepth: 1
api
.. toctree::
:maxdepth: 2
changelog
Development
===========
zope.intid is hosted at GitHub:
https://github.com/zopefoundation/zope.intid/
Project URLs
============
* http://pypi.python.org/pypi/zope.intid (PyPI entry and downloads)
====================
Indices and tables
====================
* :ref:`genindex`
* :ref:`modindex`
* :ref:`search`
| zope.intid | /zope.intid-5.0.tar.gz/zope.intid-5.0/docs/index.rst | index.rst |
zope.introspector
*****************
What is zope.introspector?
==========================
`zope.introspector` helps developers to get information about objects
in their Zope/Python runtime environment.
It provides an easy to use API that enables developers to create
'object descriptors' for any object and is usable in almost every Zope
environment, namely Zope 2, Zope 3 and Plone. Although
`zope.introspector` is mainly tested with Python 2.4, also Python 2.5
installs should work.
`zope.introspector` is extensible. That means, that you can write your
own descriptors for certain types of objects or aspects
thereof. Please see the detailed documentation in
'src/zope/introspector' to learn more about that.
The package does not provide viewing components. Instead you can use
packages, that are built on top of `zope.introspector`. These provide
viewing components, that apply to more specific frameworks like Plone
or Grok.
Installing zope.introspector
============================
`zope.introspector` is provided as a Python egg on the Cheeseshop (PyPI)
and is set up via `zc.buildout`_.
.. _zc.buildout: http://cheeseshop.python.org/pypi/zc.buildout
You may have setuptools already installed for your system Python. In
that case, you may need to upgrade it first because buildout requires
a very recent version::
$ sudo easy_install -U setuptools
If this command fails because easy_install is not available, there is
a good chance you do not have setuptools available for your system
Python. If so, there is no problem because setuptools will be
installed locally by buildout.
Because `zope.introspector` is a developer tool, you normally use it
by including the package in the `setup.py` file of your own
package. There will most probably be a section called `install_requires`
where you add 'zope.introspector' like this::
...
install_requires=['setuptools',
# Add extra requirements here
'zope.introspector',
...
],
In `zc.buildout` based package setups you can 'activate' usage of
`zope.introspector` afterwards simply by (re)running `bin/buildout`.
| zope.introspector | /zope.introspector-0.1.1.tar.gz/zope.introspector-0.1.1/README.txt | README.txt |
"""Helper functions for zope.introspector.
"""
import types
import inspect
import pkg_resources
from zope.interface import implementedBy
from zope.security.proxy import isinstance, removeSecurityProxy
from martian.scan import resolve as ext_resolve
def resolve(obj_or_dotted_name):
"""Get an object denoted by a dotted name.
"""
if not isinstance(obj_or_dotted_name, basestring):
return obj_or_dotted_name
return ext_resolve(obj_or_dotted_name)
def is_namespace_package(dotted_name):
"""Tell, whether a dotted name denotes a namespace package.
"""
return dotted_name in pkg_resources._namespace_packages.keys()
def get_package_items(dotted_name):
"""Get the items of a package, that is modules, subpackages, etc.
Delivers names of subpackages, modules, .txt, .rst and .zcml files.
Supports also namespace packages.
Supports also zipped eggs.
"""
if is_namespace_package(dotted_name):
return get_namespace_package_items(dotted_name)
resources = pkg_resources.resource_listdir(dotted_name, '')
result = []
for res in resources:
if res.startswith('.'):
# Ignore hidden files and directories.
continue
if pkg_resources.resource_isdir(dotted_name, res):
if pkg_resources.resource_exists(
dotted_name, res + '/__init__.py'):
result.append(res)
continue
if not '.' in res:
continue
name, ext = res.rsplit('.', 1)
if name == '__init__':
continue
if ext.lower() == 'py':
result.append(name)
if ext.lower() in ['txt', 'rst', 'zcml']:
result.append(res)
return result
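# Illustrative sketch: for a package shipping ``interfaces.py`` and a
# ``README.txt``, get_package_items() would include 'interfaces' and
# 'README.txt' in its result (plus sub-packages and any .zcml files).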
def get_namespace_package_items(dotted_name):
"""Get subpackages of a namespace package.
"""
ws = pkg_resources.working_set
pkg_names = []
for entry in ws.entry_keys.values():
pkg_names.extend(entry)
result = []
for name in pkg_names:
if not name.startswith(dotted_name):
continue
name = name.split(dotted_name)[1]
if '.' in name:
name = name.split('.')[1]
result.append(name)
result = list(set(result)) # make entries unique
return result
def get_function_signature(func):
"""Return the signature of a function or method."""
if not isinstance(func, (types.FunctionType, types.MethodType)):
raise TypeError("func must be a function or method")
args, varargs, varkw, defaults = inspect.getargspec(func)
placeholder = object()
sig = '('
    # By filling up the defaults tuple, we now have equal indices for
    # args and defaults.
if defaults is not None:
defaults = (placeholder,)*(len(args)-len(defaults)) + defaults
else:
defaults = (placeholder,)*len(args)
str_args = []
for name, default in zip(args, defaults):
# Neglect self, since it is always there and not part of the
# signature. This way the implementation and interface
# signatures should match.
if name == 'self' and type(func) == types.MethodType:
continue
# Make sure the name is a string
if isinstance(name, (tuple, list)):
name = '(' + ', '.join(name) + ')'
elif not isinstance(name, str):
name = repr(name)
if default is placeholder:
str_args.append(name)
else:
str_args.append(name + '=' + repr(default))
if varargs:
str_args.append('*'+varargs)
if varkw:
str_args.append('**'+varkw)
sig += ', '.join(str_args)
return sig + ')'
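# Illustrative example: for ``def f(a, b=1, *args, **kw): pass`` this
# returns the string '(a, b=1, *args, **kw)'.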
def get_attributes(obj, public_only=True):
"""Return a list of attribute names.
If `public_only` is set to `False` also attributes, whose names
start with an underscore ('_') are returned.
Taken from ``zope.app.apidoc`` with some modifications.
"""
attrs = []
for attr in dir(obj):
if attr.startswith('_') and public_only is True:
continue
try:
getattr(obj, attr)
except:
continue
attrs.append(attr)
return sorted(attrs)
_marker = object()
def get_python_path(obj):
"""Return the path of the object in standard Python notation.
This method should try very hard to return a string, even if it is not a
valid Python path.
Taken from ``zope.app.apidoc``.
"""
if obj is None:
return None
# Even for methods `im_class` and `__module__` is not allowed to be
# accessed (which is probably not a bad idea). So, we remove the security
# proxies for this check.
naked = removeSecurityProxy(obj)
if hasattr(naked, "im_class"):
naked = naked.im_class
module = getattr(naked, '__module__', _marker)
if module is _marker:
return naked.__name__
return '%s.%s' %(module, naked.__name__)
def get_interface_for_attribute(name, interfaces=_marker, klass=_marker,
as_path=True):
"""Determine the interface in which an attribute is defined.
Taken from ``zope.app.apidoc``.
"""
if (interfaces is _marker) and (klass is _marker):
raise ValueError("need to specify interfaces or klass")
if (interfaces is not _marker) and (klass is not _marker):
raise ValueError("must specify only one of interfaces and klass")
if interfaces is _marker:
direct_interfaces = list(implementedBy(klass))
interfaces = {}
for interface in direct_interfaces:
interfaces[interface] = 1
for base in interface.getBases():
interfaces[base] = 1
interfaces = interfaces.keys()
for interface in interfaces:
if name in interface.names():
if as_path:
return get_python_path(interface)
return interface
return None | zope.introspector | /zope.introspector-0.1.1.tar.gz/zope.introspector-0.1.1/src/zope/introspector/util.py | util.py |
"""Interfaces for zope.introspector.
"""
from zope import interface
class IInfos(interface.Interface):
"""Give all IInfo adapters relevant for this object.
"""
def infos():
"""Return the applicable infos.
"""
class IInfo(interface.Interface):
"""Introspection information about an aspect of an object.
"""
class IObjectInfo(IInfo):
"""Information about simple types.
"""
def getType():
"""Get the type of the object handled here.
"""
def isModule(self):
"""Returns true of the object is a module.
"""
def isClass(self):
"""Returns true of the object is a class.
"""
def getDottedName(self):
"""Returns the dotted name of the object.
"""
def getFile(self):
"""Returns the source file where the object is defined.
"""
def getAttributes(self):
"""Return all attributes of the object.
"""
def getMethods(self):
"""Returns all methods of the object.
"""
class IModuleInfo(IInfo):
"""Information about modules.
"""
pass
class IPackageInfo(IInfo):
"""Information about packages.
"""
def getPackageFiles():
"""Get the package files contained in a package.
"""
class ITypeInfo(IInfo):
"""Information about types.
"""
pass
class IUtilityInfo(interface.Interface):
"""Information about utilities available to an object.
"""
def getAllUtilities():
"""Get all utilities available to an object.
"""
class IRegistryInfo(interface.Interface):
"""Keeps information about the Component registry.
"""
def getAllRegistrations(registry):
""" Returns a list of everything registered in the component registry.
"""
def getAllUtilities(registry):
""" Returns a list of all utilities registered in the
        component registry.
"""
def getAllAdapters(registry):
""" Returns a list of all adapters registered in the component
        registry.
"""
def getAllHandlers(registry):
""" Returns a list of all handlers registered in the component
        registry.
"""
    def getAllSubscriptionAdapters(registry):
        """ Returns a list of all subscription adapters registered in the
        component registry.
"""
def getRegistrationsForInterface(searchString, types):
""" Searches the component registry for any interface with
searchString in the name...
Returns a list of component objects.
"""
def getAllInterfaces():
""" Returns a dictionary with all interfaces...
{'zope':
{'app':
{'apidoc': [...],
'applicationcontrol': [...],
},
'component': [...],
},
'docutils': [...],
}
"""
class IRegistrySearch(interface.Interface):
""" Adapter interface that takes care of doing searches in
different types of registry registrations.
"""
def __init__(registration):
""" Registers the registration in the adapter...
"""
def searchRegistration(string, registry, caseSensitive):
""" Implements the search...
returns True or False
"""
def getInterfaces():
""" Returns a list with the interfaces involved in this registration
"""
def getObject():
""" Returns the registration
"""
class IViewInfo(IInfo):
"""The representation of an object that has views associated.
"""
def getViews(layer=None):
"""Get the views for context object.
Optional layer argument retrieves views registered for this layer.
Returns iterator (view name, view factory) tuples.
"""
def getAllViews():
"""Get all views for context objects, for any layer that is in a skin.
Returns iterator of (skin name, (skin) layer, view name,
view factory) tuples.
The default layer will be returned with u'' as the skin name.
"""
class IDocString(interface.Interface):
"""Objects that have a docstring.
"""
def getDocString(header_only=True):
"""Get the docstring.
        If `header_only` is `True`, return only the part up to the first
        blank line. Otherwise return the whole docstring.
""" | zope.introspector | /zope.introspector-0.1.1.tar.gz/zope.introspector-0.1.1/src/zope/introspector/interfaces.py | interfaces.py |
from zope.interface import implements
from zope.component import adapts
from zope.introspector.interfaces import IRegistrySearch
from zope.component.interfaces import (IAdapterRegistration,
IHandlerRegistration,
IUtilityRegistration,
ISubscriptionAdapterRegistration)
import grokcore.component as grok
class AdapterSearch(grok.Adapter):
grok.implements(IRegistrySearch)
grok.context(IAdapterRegistration)
def __init__(self, registration):
self.registration = registration
def searchRegistration(self, string, caseSensitive = False, registry='base'):
        if registry != getattr(self.registration.registry, '__name__'):
return False
if string in getattr(self.registration.provided, '__name__', ''):
return True
elif string in self.registration.name:
return True
elif string in getattr(self.registration.factory, '__name__', ''):
return True
# elif string in self.registration.info:
# return True
else:
for each in self.registration.required:
if string in getattr(each, '__name__', ''):
return True
return False
def getInterfaces(self):
interfaces = []
for each in list(
self.registration.required) + [self.registration.provided]:
module = getattr(each, '__module__')
name = getattr(each, '__name__')
if module:
name = '%s.%s' % (module,name)
interfaces.append(name)
return interfaces
def getObject(self):
return self.registration
class SubscriptionSearch(AdapterSearch):
grok.implements(IRegistrySearch)
grok.context(ISubscriptionAdapterRegistration)
class HandlerSearch(grok.Adapter):
grok.implements(IRegistrySearch)
grok.context(IHandlerRegistration)
def __init__(self, registration):
self.registration = registration
def searchRegistration(self, string, caseSensitive = False, registry='base'):
        if registry != getattr(self.registration.registry, '__name__'):
return False
if string in self.registration.name:
return True
elif string in getattr(self.registration.factory, '__name__',''):
return True
# elif string in self.registration.info:
# return True
else:
for each in self.registration.required:
if string in getattr(each, '__name__',''):
return True
return False
def getInterfaces(self):
interfaces = []
for each in list(
self.registration.required) + [self.registration.factory]:
module = getattr(each, '__module__')
name = getattr(each, '__name__')
if module:
name = '%s.%s' % (module,name)
interfaces.append(name)
return interfaces
def getObject(self):
return self.registration
class UtilitySearch(grok.Adapter):
grok.implements(IRegistrySearch)
grok.context(IUtilityRegistration)
def __init__(self, registration):
self.registration = registration
def searchRegistration(self, string, caseSensitive = False, registry='base'):
        if registry != getattr(self.registration.registry, '__name__'):
return False
if string in getattr(self.registration.provided, '__name__',''):
return True
elif string in self.registration.name:
return True
# elif string in self.registration.info:
# return True
return False
def getInterfaces(self):
interfaces = []
module = getattr(self.registration.provided, '__module__')
name = getattr(self.registration.provided, '__name__')
if module:
name = '%s.%s' % (module,name)
interfaces.append(name)
return interfaces
def getObject(self):
return self.registration | zope.introspector | /zope.introspector-0.1.1.tar.gz/zope.introspector-0.1.1/src/zope/introspector/adapters.py | adapters.py |
from zope.interface import implements
from zope.introspector.interfaces import IRegistryInfo, IRegistrySearch
from zope.component import globalregistry, getSiteManager
from zope.interface.adapter import AdapterRegistry
from zope.component.registry import (AdapterRegistration,
HandlerRegistration,
UtilityRegistration)
import grokcore.component as grok
class RegistryInfoUtility(grok.GlobalUtility):
""" Give information about the component registry.
Implements the IRegistryInfo interface.
"""
implements(IRegistryInfo)
context = None
def getAllRegistrations(self, registry='base'):
""" See zope.introspector.interfaces for documentation.
"""
adapters = self.getAllAdapters(registry)
handlers = self.getAllHandlers(registry)
utils = self.getAllUtilities(registry)
subsriptionAdapters = self.getAllSubscriptionAdapters(registry)
return adapters + handlers + utils + subsriptionAdapters
def getAllUtilities(self, registry=None, context=None):
        context = context or self.context
smlist = [getSiteManager(context)]
seen = []
result = []
while smlist:
sm = smlist.pop()
if sm in seen:
continue
seen.append(sm)
smlist += list(sm.__bases__)
for u in sm.registeredUtilities():
if registry and not (registry == u.registry.__name__):
continue
result.append(u)
return result
def getAllAdapters(self, registry='base'):
""" See zope.introspector.interfaces for documentation.
"""
def f(item):
            return registry == getattr(item.registry, '__name__')
return filter(f, globalregistry.base.registeredAdapters())
def getAllHandlers(self, registry='base'):
""" See zope.introspector.interfaces for documentation.
"""
def f(item):
            return registry == getattr(item.registry, '__name__')
return filter(f, globalregistry.base.registeredHandlers())
def getAllSubscriptionAdapters(self, registry='base'):
""" See zope.introspector.interfaces for documentation.
"""
def f(item):
            return registry == getattr(item.registry, '__name__')
return filter(f, globalregistry.base.registeredSubscriptionAdapters())
def getRegistrationsForInterface(self, searchString='', types=['all']):
""" See zope.introspector.interfaces for documentation.
"""
interfaces = []
searchInterfaces = []
if 'all' in types:
searchInterfaces = self.getAllRegistrations()
if 'adapters' in types:
searchInterfaces.extend(self.getAllAdapters())
if 'utilities' in types:
searchInterfaces.extend(self.getAllUtilities())
if 'handlers' in types:
searchInterfaces.extend(self.getAllHandlers())
if 'subscriptionAdapters' in types:
searchInterfaces.extend(self.getAllSubscriptionAdapters())
if searchString == '*':
interfaces = searchInterfaces
else:
#Search using adapters
for eachRegistration in searchInterfaces:
if IRegistrySearch(eachRegistration).searchRegistration(
searchString):
interfaces.append(eachRegistration)
return interfaces
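    # Example (sketch): getRegistrationsForInterface('IIntIds',
    # types=['utilities']) returns the utility registrations whose
    # provided-interface name or registration name contains 'IIntIds'.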
def getAllInterfaces(self):
""" See zope.introspector.interfaces for documentation.
"""
registrations = {}
for eachRegistration in self.getAllRegistrations():
reg = IRegistrySearch(eachRegistration)
interfacePaths = reg.getInterfaces()
for eachInterface in interfacePaths:
registrations = self._dicter(registrations,
eachInterface.split('.'),
reg.getObject())
return registrations
def _dicter(self, dictionary, modPath, item):
key = modPath[0]
if key in dictionary:
# has key enter that dictionary and continue looking for the end
if len(modPath) == 1:
dictionary[key].append(item)
else:
self._dicter(dictionary[key], modPath[1:], item)
else:
# No key found,
# create a dictionary and add.
dictionary[key] = self._createDict(modPath[1:], item)
return dictionary
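    # For example, _dicter({}, ['zope', 'intid', 'IIntIds'], reg) returns
    # {'zope': {'intid': {'IIntIds': [reg]}}}; calling it again with the
    # same path appends further registrations to the innermost list.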
def _createDict(self, path, item):
if not path:
return [item]
return {path[0]:self._createDict(path[1:], item)} | zope.introspector | /zope.introspector-0.1.1.tar.gz/zope.introspector-0.1.1/src/zope/introspector/registry.py | registry.py |
zope.introspector
*****************
An introspector for Zope.
:Test-Layer: nonunit
The `zope.introspector` package provides an extensible framework
for retrieving 'data' on 'entities'. It makes use of
grokcore.component for registration of adapters and utilities.
'Entity' in that respect means everything that is describable by a
name in Python, or everything that can be passed to a method. In other
words: if you can pass something to a callable, then the introspector
should be able to give you some information about it.
'Data' in that respect means a container holding a set of data
describing the given entity. The container might contain primitive
values (like numbers or strings) as well as more complex objects,
callables etc.
In plain words: Given a certain object you get a dataset describing
it.
Support for modification of objects (for instance for debugging
purposes) is still not implemented. This package also does not include
viewing components to display the results.
Inspecting Objects
===================
Because many objects have many different aspects that can be examined,
we provide a set of 'examiners', each one responsible for a certain
aspect.
Currently, the following introspectors are available
* ``ObjectInfo`` and relatives
Gives you information about simple and built-in types like strings,
classes, packages and functions. See `objectinfo.txt` to learn more
about that.
* ``UtilityInfo`` and relatives
Gives you information about the utilities that are available for a
certain objects. See `utilityinfo.txt` to learn more about that.
Code objects
------------
Code objects are those that provide information about packages,
classes and other pieces of code. Before information about packages can
be retrieved, the package's components have to be grokked::

    >>> import grokcore.component as grok
    >>> grok.testing.grok('zope.introspector')
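
Once the package is grokked, an info component for a package can be looked
up as a named ``IInfo`` adapter. The following is only a sketch (it is not
executed here; ``Package`` is assumed to be importable from the module of
this package that defines it)::

    from zope.component import getAdapter
    from zope.introspector.interfaces import IInfo

    pkg = Package('zope.introspector')
    info = getAdapter(pkg, IInfo, name='package')
    info.getDottedName()      # -> 'zope.introspector'
    info.getPackageFiles()    # e.g. ['README.txt', ...]
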
Writing your own introspector
=============================
Writing an introspector means providing a component (the ``Info``
component) that delivers information about certain kinds of objects,
and another component (the ``DescriptionProvider`` component) that
decides for an arbitrary object whether it can be described by your
new ``Info`` component.
Step 1: Writing an ``Info`` component
-------------------------------------
An Info component can be a simple object. We define a class whose
instances should be described afterwards::
>>> class Mammoth(object):
... def __init__(self, name='Fred'):
... self.name=name
An accompanying ``Info`` component could now look like this::
>>> class MammothInfo(object):
... def __init__(self, obj):
... self.obj = obj
... def getName(self):
... return self.obj.name
Apparently this class gives us interesting information about
mammoths::
>>> fred = Mammoth()
>>> fred.name
'Fred'
The trick now is to make this ``Info`` available in the framework when
a ``Mammoth`` object should be described. This is currently not the
case. We generally look up infos for objects using a utility
providing the ``IObjectDescriptionProvider`` interface::
>>> from zope.component import getUtility
>>> from zope.introspector.interfaces import IObjectDescriptionProvider
>>> info_provider = getUtility(IObjectDescriptionProvider)
When we ask this provider for infos about fred, we will get one of the
default ``Info`` components::
>>> info_provider.getDescription(fred)
<zope.introspector.objectinfo.ObjectInfo object at 0x...>
Instead of this ``ObjectInfo`` we want to get our new ``MammothInfo``
returned. To let this happen, we first have to register it by writing
a ``DescriptionProvider``.
Step 2: Writing an ``DescriptionProvider``
------------------------------------------
``DescriptionProviders`` are built by inheriting from
``zope.introspector.DescriptionProvider``. They provide a
``canHandle()`` and a ``getDescription()`` method::
>>> from zope.introspector import DescriptionProvider
>>> class MammothDescriptionProvider(DescriptionProvider):
... def canHandle(self, obj, *args, **kw):
... if isinstance(obj, Mammoth):
... return True
... return False
... def getDescription(self, obj, *args, **kw):
... return MammothInfo(obj)
If we ask this class whether it can handle a ``Mammoth`` instance, it
will agree::
>>> mdp = MammothDescriptionProvider()
>>> mdp.canHandle(fred)
True
For other objects it should fail::
>>> mdp.canHandle(object())
False
We can also get a description::
>>> mdp.getDescription(fred)
<MammothInfo object at 0x...>
This is all very well, but how can the framework know that we have a
ready-to-use description provider for mammoths? The
``zope.introspector`` package uses grokkers from the ``martian``
package to find description providers on startup. Before grokking a
module with a description provider, the latter will be unknown to the
framework::
>>> info_provider.getDescription(fred)
<zope.introspector.objectinfo.ObjectInfo object at 0x...>
This means, that we have to grok all modules and classes, that contain
description providers::
>>> import grokcore.component as grok
>>> grok.testing.grok('zope.introspector')
>>> grok.testing.grok_component('MammothDescriptionProvider',
... MammothDescriptionProvider)
True
If we now repeat our request to the global info provider, we get the
descriptor we want::
>>> info_provider.getDescription(fred)
<MammothInfo object at 0x...>
We remove the MammothInfo handler to clean up the registry::
>>> import zope.introspector.descriptionprovider as zid
>>> zid.descriptor_registry = [x for x in zid.descriptor_registry
... if not x['handler'] is MammothDescriptionProvider]
| zope.introspector | /zope.introspector-0.1.1.tar.gz/zope.introspector-0.1.1/src/zope/introspector/README.txt | README.txt |
import inspect
import types
import pkg_resources
import grokcore.component as grok
from pkg_resources import DistributionNotFound
from grokcore.component.interfaces import IContext
from martian.scan import module_info_from_dotted_name
from martian.util import isclass
from zope.interface import implements, implementedBy
from zope.introspector.interfaces import IInfo, IDocString
from zope.introspector.util import (resolve, get_package_items,
is_namespace_package, get_attributes,
get_function_signature,
get_interface_for_attribute)
import os
class Code(object):
implements(IContext)
def __init__(self, dotted_name):
self.dotted_name = dotted_name
class PackageOrModule(Code):
def __init__(self, dotted_name):
super(PackageOrModule, self).__init__(dotted_name)
self._module_info = module_info_from_dotted_name(dotted_name)
self._module = self._module_info.getModule()
def getModuleInfo(self):
return self._module_info
class Package(PackageOrModule):
def getPath(self):
return os.path.dirname(self._module_info.path)
def __getitem__(self, name):
sub_module = None
try:
sub_module = module_info_from_dotted_name(
self.dotted_name + '.' + name)
except ImportError:
# No module of that name found. The name might denote
# something different like a file or be really trash.
pass
if sub_module is None:
file = File(self.dotted_name, name)
# if the file exists, use it, otherwise it's a KeyError - no
# file is here
if file.exists():
return file
else:
raise KeyError
if sub_module.isPackage():
return Package(sub_module.dotted_name)
return Module(sub_module.dotted_name)
class PackageInfo(grok.Adapter):
grok.context(Package)
grok.provides(IInfo)
grok.name('package')
def isNamespacePackage(self):
return is_namespace_package(self.context.dotted_name)
def getDottedName(self):
return self.context.dotted_name
def getPath(self):
return self.context.getPath()
def getPackageFiles(self):
result = [x for x in get_package_items(self.context.dotted_name)
if '.' in x and x.rsplit('.', 1)[-1] in ['txt', 'rst']]
return sorted(result)
def getZCMLFiles(self):
result = [x for x in get_package_items(self.context.dotted_name)
if '.' in x and x.rsplit('.', 1)[-1] in ['zcml']]
return sorted(result)
def _filterSubItems(self, filter=lambda x: True):
for name in get_package_items(self.context.dotted_name):
try:
info = module_info_from_dotted_name(
self.context.dotted_name + '.' + name)
if filter and filter(info):
yield info
except ImportError:
pass
except AttributeError:
# This is thrown sometimes by martian.scan if an
# object lacks a __file__ attribute and needs further
# investigation.
pass
def getSubPackages(self):
return sorted(self._filterSubItems(lambda x: x.isPackage()))
def getModules(self):
return sorted(self._filterSubItems(lambda x: not x.isPackage()))
def getEggInfo(self):
try:
info = pkg_resources.get_distribution(self.context.dotted_name)
except DistributionNotFound:
return None
        version = info.has_version() and info.version or None
return dict(
name=info.project_name,
version=version,
py_version=info.py_version,
location=info.location)
class Module(PackageOrModule):
def getPath(self):
return self._module_info.path
def __getitem__(self, name):
module = self._module_info.getModule()
obj = getattr(module, name, None)
if obj is None:
raise KeyError
sub_dotted_name = self.dotted_name + '.' + name
if isclass(obj):
return Class(sub_dotted_name)
elif type(obj) is types.FunctionType:
return Function(sub_dotted_name)
else:
return Instance(sub_dotted_name)
class ModuleInfo(grok.Adapter):
grok.context(Module)
grok.provides(IInfo)
grok.name('module')
def getDottedName(self):
return self.context.dotted_name
def getPath(self):
return self.context.getPath()
def _standardFilter(self, item):
"""Filter out, what we don't consider a module item.
"""
if getattr(item, '__module__', None) != self.context._module.__name__:
return False
return hasattr(item, '__name__')
def getMembers(self, filter_func=lambda x:True):
members = inspect.getmembers(
self.context._module,
predicate=lambda x: filter_func(x) and self._standardFilter(x))
return [self.context[x[0]] for x in members]
def getClasses(self):
return self.getMembers(filter_func=isclass)
def getFunctions(self):
filter_func = lambda x: inspect.isfunction(x) or inspect.ismethod(x)
return self.getMembers(filter_func=filter_func)
class File(Code):
def __init__(self, dotted_name, name):
super(File, self).__init__(dotted_name)
self.name = name
module_info = module_info_from_dotted_name(self.dotted_name)
self.path = module_info.getResourcePath(self.name)
def exists(self):
"""Check whether the file is a file we want to consider."""
return (os.path.isfile(self.path) and
os.path.splitext(self.path)[1].lower() in [
'.rst', '.txt', '.zcml'])
class FileInfo(grok.Adapter):
grok.context(File)
grok.provides(IInfo)
grok.name('file')
def getDottedName(self):
return self.context.dotted_name
def getName(self):
return self.context.name
def getPath(self):
return self.context.path
class Class(Code):
def __init__(self, dotted_name):
super(Class, self).__init__(dotted_name)
self._klass = resolve(dotted_name)
# Setup interfaces that are implemented by this class.
self._interfaces = tuple(implementedBy(self._klass))
self._all_ifaces = tuple(implementedBy(self._klass).flattened())
class ClassInfo(grok.Adapter):
grok.context(Class)
grok.provides(IInfo)
grok.name('class')
def _iterAllAttributes(self):
for name in get_attributes(self.context._klass):
iface = get_interface_for_attribute(
name, self.context._all_ifaces, as_path=False)
yield name, getattr(self.context._klass, name), iface
def getBases(self):
return (Class('%s.%s' % (x.__module__, x.__name__))
for x in self.context._klass.__bases__)
def getInterfaces(self):
return self.context._interfaces
def getAttributes(self):
return [(name, obj, iface)
for name, obj, iface in self._iterAllAttributes()
if not (inspect.ismethod(obj)
or inspect.ismethoddescriptor(obj))]
def getMethods(self):
return [(name, obj, iface)
for name, obj, iface in self._iterAllAttributes()
if inspect.ismethod(obj)]
def getMethodDescriptors(self):
return [(name, obj, iface)
for name, obj, iface in self._iterAllAttributes()
if inspect.ismethoddescriptor(obj)]
class Function(Code):
def __init__(self, dotted_name):
super(Function, self).__init__(dotted_name)
self.func = resolve(self.dotted_name)
def getSignature(self):
return get_function_signature(self.func)
class FunctionInfo(grok.Adapter):
grok.context(Function)
grok.provides(IInfo)
grok.name('function')
def getSignature(self):
return self.context.getSignature()
class Instance(Code):
pass
class DocString(grok.Adapter):
grok.context(Code)
grok.provides(IDocString)
def getDocString(self, heading_only=True):
try:
obj = resolve(self.context.dotted_name)
except ImportError:
return u''
except AttributeError:
return u''
docstring = getattr(obj, '__doc__', None)
if docstring is None:
return u''
lines = docstring.strip().split('\n')
if len(lines) and heading_only:
# Find first empty line to separate heading from trailing text.
headlines = []
for line in lines:
if line.strip() == "":
break
headlines.append(line)
lines = headlines
# Get rid of possible CVS id.
lines = [line for line in lines if not line.startswith('$Id')]
        return '\n'.join(lines)
 | zope.introspector | /zope.introspector-0.1.1.tar.gz/src/zope/introspector/code.py | code.py
"""Representations of simple objects.
"""
import os
import inspect
import types
import grokcore.component as grok
from zope.interface import Interface
from zope.introspector.interfaces import (IObjectInfo, IModuleInfo,
IPackageInfo, ITypeInfo)
class ObjectInfo(grok.Adapter):
grok.implements(IObjectInfo)
grok.context(Interface)
grok.name('object')
dotted_name = None
def __init__(self, obj):
self.obj = obj
def getType(self):
return type(self.obj)
def isModule(self):
return inspect.ismodule(self.obj)
def isClass(self):
return inspect.isclass(self.obj)
def getDottedName(self):
if self.isClass():
class_ = self.obj
else:
class_ = self.obj.__class__
return class_.__module__ + '.' + class_.__name__
def getFile(self):
try:
return inspect.getsourcefile(self.obj.__class__)
except TypeError:
try:
return inspect.getsourcefile(self.getType())
except TypeError:
# This is probably a built-in or dynamically created type
return 'builtin'
def getAttributes(self):
attributes = []
for id, value in inspect.getmembers(self.obj.__class__):
if inspect.ismethod(value):
continue
attributes.append({'id': id,
'value': value,
})
return attributes
def getMethods(self):
methods = []
for id, value in inspect.getmembers(self.obj.__class__):
if inspect.ismethod(value):
try:
methods.append({'id': id,
'args':inspect.getargspec(value),
'comment': inspect.getcomments(value),
'doc': inspect.getdoc(value),
})
                except Exception:
                    # inspect.getargspec() fails for some callables,
                    # e.g. C-implemented methods.
                    pass
return methods
class ModuleInfo(ObjectInfo):
grok.implements(IModuleInfo)
grok.provides(IObjectInfo)
grok.context(types.ModuleType)
grok.name('module')
def getDottedName(self):
return self.obj.__name__
class PackageInfo(ModuleInfo):
grok.implements(IPackageInfo)
grok.provides(IPackageInfo)
grok.name('package')
def getPackageFiles(self, filter=None):
pkg_file_path = os.path.dirname(self.obj.__file__)
return sorted([x for x in os.listdir(pkg_file_path)
if os.path.isfile(os.path.join(pkg_file_path, x))
and (x.endswith('.txt') or x.endswith('.rst'))])
class TypeInfo(ObjectInfo):
grok.implements(ITypeInfo)
grok.provides(IObjectInfo)
grok.context(types.TypeType)
    grok.name('type')
 | zope.introspector | /zope.introspector-0.1.1.tar.gz/src/zope/introspector/objectinfo.py | objectinfo.py
zope.introspectorui
*******************
What is zope.introspectorui?
============================
`zope.introspectorui` is a set of views for the information objects provided
by zope.introspector.
Installing zope.introspectorui
==============================
`zope.introspectorui` is provided as a Python egg on the cheeseshop and is set
up via `zc.buildout`_
.. _zc.buildout: http://cheeseshop.python.org/pypi/zc.buildout
You may have setuptools already installed for your system Python. In
that case, you may need to upgrade it first because buildout requires
a very recent version::
$ sudo easy_install -U setuptools
If this command fails because easy_install is not available, there is
a good chance you do not have setuptools available for your system
Python. If so, there is no problem because setuptools will be
installed locally by buildout.
Because `zope.introspectorui` is a developer tool, you normally use it
by including the package in the `setup.py` file of your own
package. There will most probably be a section called `install_requires`
where you add 'zope.introspectorui' like this::
...
install_requires=['setuptools',
# Add extra requirements here
'zope.introspectorui',
...
],
In `zc.buildout` based package setups you can 'activate' usage of
`zope.introspectorui` afterwards simply by (re)running `bin/buildout`.
| zope.introspectorui | /zope.introspectorui-0.2.tar.gz/zope.introspectorui-0.2/README.txt | README.txt |
zope.introspectorui
*******************
UI components for the zope.introspector
:Test-Layer: functional
``zope.introspectorui`` provides UI components, mainly views, to
display the information dug up by ``zope.introspector``.
Instances
=========
We create a class and an instance of that class that we can look up
afterwards::
>>> from zope import interface
>>> class Test(object):
... interface.implements(interface.Interface)
>>> test_instance = Test()
We look up the object info for the test object. The object info is
provided by ``zope.introspector``::
>>> from zope import component
>>> from zope.introspector.interfaces import IObjectInfo
>>> object_info = component.getAdapter(test_instance, IObjectInfo,
... name='object')
Now we want to get some view for the object info obtained. This is the
job of ``zope.introspectorui``. We get a view for the instance defined
above::
>>> from zope.publisher.browser import TestRequest
>>> request = TestRequest()
>>> view = component.getMultiAdapter((object_info, request),
... name='index.html')
We can render the view::
>>> print view()
<table>...
...Type:...Test...
...Class:...__builtin__.Test...
...File:...builtin...
Packages
========
Packages also have information objects, so we adapt this package and
render its view::
>>> import zope.introspectorui
>>> from zope.introspector.interfaces import IPackageInfo
>>> package_info = component.getAdapter(zope.introspectorui, IPackageInfo,
... name='package')
>>> view = component.getMultiAdapter((package_info, request),
... name='index.html')
>>> print view()
<h1>...Package: <span>zope.introspectorui</span>...
| zope.introspectorui | /zope.introspectorui-0.2.tar.gz/zope.introspectorui-0.2/src/zope/introspectorui/README.txt | README.txt |
"""Views for code-related infos.
"""
import grokcore.view as grok
try:
from zope.location.location import located
except ImportError:
# Zope 2.10 compatibility:
from zope.location.interfaces import ILocation
from zope.location.location import LocationProxy, locate
def located(object, parent, name=None):
"""Locate an object in another and return it.
If the object does not provide ILocation a LocationProxy is returned.
"""
if ILocation.providedBy(object):
if parent is not object.__parent__ or name != object.__name__:
locate(object, parent, name)
return object
return LocationProxy(object, parent, name)
from zope.introspector.code import (PackageInfo, FileInfo, ModuleInfo,
ClassInfo, Function)
from zope.introspector.interfaces import IDocString
from zope.introspector.util import get_function_signature
from zope.introspectorui.interfaces import IBreadcrumbProvider, ICodeView
class Module(grok.View):
grok.implements(ICodeView)
grok.context(ModuleInfo)
grok.name('index')
def update(self):
self.docstring = self.getDocString(heading_only=False)
self.classes = self.getClassURLs()
self.functions = self.getFunctions()
def getDocString(self, item=None, heading_only=True):
if item is None:
item = self.context.context
return IDocString(item).getDocString(heading_only=heading_only)
def getItemURLs(self, items):
module = self.context.context
result = []
for item in items:
name = item.dotted_name.split('.')[-1]
obj = located(module[name], module, name)
result.append(dict(name=name, url=self.url(obj),
doc=self.getDocString(obj)))
return sorted(result, key=lambda x: x['name'])
def getClassURLs(self):
classes = self.context.getClasses()
return self.getItemURLs(classes)
def getFunctionURLs(self):
functions = self.context.getFunctions()
return self.getItemURLs(functions)
def getFunctions(self):
functions = self.context.getFunctions()
result = []
for func in functions:
name = func.dotted_name.split('.')[-1]
signature = func.getSignature()
result.append(dict(name=name,
signature=signature,
fullname=name+signature,
doc=self.getDocString(func,
heading_only=False)))
return sorted(result, key=lambda x: x['fullname'])
def getBreadcrumbs(self):
return IBreadcrumbProvider(self).getBreadcrumbs()
class Package(grok.View):
grok.implements(ICodeView)
grok.context(PackageInfo)
grok.name('index')
def update(self):
self.docstring = self.getDocString(heading_only=False)
self.files = self.getTextFileUrls()
self.zcmlfiles = self.getZCMLFileUrls()
self.subpkgs = self.getSubPackageUrls()
self.modules = self.getModuleUrls()
def getDocString(self, item=None, heading_only=True):
if item is None:
item = self.context.context
return IDocString(item).getDocString(heading_only=heading_only)
def _getFileUrls(self, filenames):
result = []
package = self.context.context
for name in filenames:
file = located(package[name], package, name)
result.append(dict(name=name, url=self.url(file)))
return sorted(result, key=lambda x: x['name'])
def getTextFileUrls(self):
filenames = self.context.getPackageFiles()
return self._getFileUrls(filenames)
def getZCMLFileUrls(self):
filenames = self.context.getZCMLFiles()
return self._getFileUrls(filenames)
def _getItemUrls(self, mod_infos):
result = []
package = self.context.context
for info in mod_infos:
mod = located(package[info.name], package, info.name)
result.append(dict(name=info.name, url=self.url(mod),
doc=self.getDocString(item=mod)))
return sorted(result, key=lambda x: x['name'])
def getSubPackageUrls(self):
mod_infos = self.context.getSubPackages()
return self._getItemUrls(mod_infos)
def getModuleUrls(self):
mod_infos = self.context.getModules()
return self._getItemUrls(mod_infos)
def getBreadcrumbs(self):
return IBreadcrumbProvider(self).getBreadcrumbs()
class Class(grok.View):
grok.implements(ICodeView)
grok.context(ClassInfo)
grok.name('index')
def update(self):
self.docstring = self.getDocString(heading_only=False)
self.bases = self.getBases()
self.attributes = self.getAttributes()
self.methods = self.getMethods()
def getDocString(self, item=None, heading_only=True):
if item is None:
item = self.context.context
return IDocString(item).getDocString(heading_only=heading_only)
def _locate(self, obj):
from zope.introspector.code import Package
root = self.context.context
while not isinstance(root, Package) or isinstance(
root.__parent__, Package):
root = root.__parent__
top_pkg_name = obj.dotted_name.split('.')[0]
result = located(Package(top_pkg_name),
root.__parent__,
top_pkg_name)
for part in obj.dotted_name.split('.')[1:]:
result = located(result[part], result, part)
return result
def getBases(self):
bases = list(self.context.getBases())
result = []
for base in bases:
url = None
try:
url = self.url(self._locate(base))
except AttributeError:
# martian.scan cannot handle builtins
continue
result.append(dict(name=base.dotted_name,
url=url,
doc=self.getDocString(item=base)))
return result
def getAttributes(self):
return sorted([x[0] for x in self.context.getAttributes()])
def getMethods(self):
result = []
for name, obj, iface in self.context.getMethods():
dotted_name = self.context.context.dotted_name + '.' + name
item = Function(dotted_name)
signature = get_function_signature(obj)
if signature == '()':
signature = '(self)'
else:
signature = '(self, ' + signature[1:]
result.append(dict(name=name + signature,
doc=self.getDocString(item=item)))
return sorted(result, key=lambda x: x['name'])
def getBreadcrumbs(self):
return IBreadcrumbProvider(self).getBreadcrumbs()
class File(grok.View):
grok.implements(ICodeView)
grok.context(FileInfo)
grok.name('index')
def getBreadcrumbs(self):
return IBreadcrumbProvider(self).getBreadcrumbs()
def getRaw(self):
        return open(self.context.getPath(), 'r').read()
 | zope.introspectorui | /zope.introspectorui-0.2.tar.gz/src/zope/introspectorui/code.py | code.py
=========
Changes
=========
6.0 (2023-04-25)
================
- Drop support for Python 2.7, 3.5, 3.6.
- Add support for Python 3.11.
- Make the tests compatible with ``zope.testing >= 5``.
5.0.0 (2022-03-25)
==================
- Remove ``__cmp__`` methods. Since the implementation of the rich
comparison methods (``__eq__``, etc) in 4.0a1, the interpreter won't
call ``__cmp__``, even on Python 2. See `issue 10
<https://github.com/zopefoundation/zope.keyreference/issues/10>`_.
- Add support for Python 3.8, 3.9, and 3.10.
- Drop support for Python 3.4.
4.2.0 (2018-10-26)
==================
- Add support for Python 3.5, 3.6, and 3.7.
- Drop support for Python 2.6 and 3.3.
4.1.0 (2014-12-27)
==================
- Add support for PyPy (PyPy3 blocked on PyPy3-compatible ``zodbpickle``).
- Add support for Python 3.4.
4.0.0 (2014-12-20)
==================
- Add support for testing on Travis.
4.0.0a2 (2013-02-25)
====================
- Ensure that the ``SimpleKeyReference`` implementation (used for testing)
also implements rich comparison properly.
4.0.0a1 (2013-02-22)
====================
- Add support for Python 3.3.
- Replace deprecated ``zope.component.adapts`` usage with equivalent
``zope.component.adapter`` decorator.
- Replace deprecated ``zope.interface.implements`` usage with equivalent
``zope.interface.implementer`` decorator.
- Drop support for Python 2.4 and 2.5.
3.6.4 (2011-11-30)
==================
- Fix tests broken by removal of ``zope.testing`` from test dependencies:
avoid the ``ZODB3`` module that needs it.
3.6.3 (2011-11-29)
==================
- Prefer the standard library's doctest module to the one from ``zope.testing``.
3.6.2 (2009-09-15)
==================
- Make the tests pass with ZODB3.9, which changed the repr() of the persistent
classes.
3.6.1 (2009-02-01)
==================
- Load key references pickled by the old ``zope.app.keyreference`` even
  if it is not installed anymore (so we don't break if one updates a
  project that doesn't directly depend on ``zope.app.keyreference``).
3.6.0 (2009-01-31)
==================
- Rename ``zope.app.keyreference`` to ``zope.keyreference``.
| zope.keyreference | /zope.keyreference-6.0.tar.gz/zope.keyreference-6.0/CHANGES.rst | CHANGES.rst |
===================
zope.keyreference
===================
.. image:: https://img.shields.io/pypi/v/zope.keyreference.svg
:target: https://pypi.org/project/zope.keyreference/
:alt: Latest Version
.. image:: https://img.shields.io/pypi/pyversions/zope.keyreference.svg
:target: https://pypi.org/project/zope.keyreference/
:alt: Supported Python versions
.. image:: https://github.com/zopefoundation/zope.keyreference/actions/workflows/tests.yml/badge.svg
:target: https://github.com/zopefoundation/zope.keyreference/actions/workflows/tests.yml
:alt: Build Status
.. image:: https://coveralls.io/repos/github/zopefoundation/zope.keyreference/badge.svg
:target: https://coveralls.io/github/zopefoundation/zope.keyreference
:alt: Code Coverage
.. image:: https://readthedocs.org/projects/zopekeyreference/badge/?version=latest
:target: https://zopekeyreference.readthedocs.io/en/latest/?badge=latest
:alt: Documentation Status
Object references that support stable comparison and hashes.
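A minimal usage sketch (assuming ``obj`` is a persistent object that has
already been added to a ZODB connection, so it has an oid)::

    from zope.keyreference.persistent import KeyReferenceToPersistent

    ref = KeyReferenceToPersistent(obj)  # raises NotYet if no connection is found
    assert ref() is obj                  # calling the reference returns the object
    key = hash(ref)                      # hash is derived from (database name, oid)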
Documentation can be found at https://zopekeyreference.readthedocs.io
| zope.keyreference | /zope.keyreference-6.0.tar.gz/zope.keyreference-6.0/README.rst | README.rst |
import zope.interface
from ZODB.ConflictResolution import PersistentReference
from ZODB.interfaces import IConnection
import zope.keyreference.interfaces
@zope.interface.implementer(zope.keyreference.interfaces.IKeyReference)
class KeyReferenceToPersistent:
"""An IKeyReference for persistent objects which is comparable.
These references compare by database name and _p_oids of the objects they
reference.
"""
key_type_id = 'zope.app.keyreference.persistent'
def __init__(self, object):
if not getattr(object, '_p_oid', None):
connection = IConnection(object, None)
if connection is None:
raise zope.keyreference.interfaces.NotYet(object)
connection.add(object)
self.object = object
def __call__(self):
return self.object
def __hash__(self):
if isinstance(self.object, PersistentReference):
# we are doing conflict resolution.
database_name = self.object.database_name
if database_name is None:
# we can't hash
raise ValueError('database name unavailable at this time')
oid = self.object.oid
else:
database_name = self.object._p_jar.db().database_name
oid = self.object._p_oid
return hash((database_name, oid))
def _get_cmp_keys(self, other):
if self.key_type_id == other.key_type_id:
# While it makes subclassing this class inconvenient,
# comparing the object's type is faster than doing an
# isinstance check. The intent of using type instead
# of isinstance is to avoid loading state just to
# determine if we're in conflict resolution.
if isinstance(self.object, PersistentReference):
# We are doing conflict resolution.
assert isinstance(other.object, PersistentReference), (
'other object claims to be '
'zope.app.keyreference.persistent but, during conflict '
'resolution, object is not a PersistentReference')
self_name = self.object.database_name
other_name = other.object.database_name
if (self_name is None) ^ (other_name is None):
# one of the two database_names are None during conflict
# resolution. At this time the database_name is
# inaccessible, not unset (it is the same database as the
# object being resolved). If they were both None, we
# would know they are from the same database, so we can
# compare the oids. If neither were None, we would be
# able to reliably compare. However, in this case,
# one is None and the other is not, so we can't know how
# they would sort outside of conflict resolution. Give
# up.
raise ValueError('cannot sort reliably')
self_oid = self.object.oid
other_oid = other.object.oid
else:
self_name = self.object._p_jar.db().database_name
self_oid = self.object._p_oid
other_name = other.object._p_jar.db().database_name
other_oid = other.object._p_oid
return (self_name, self_oid), (other_name, other_oid)
return self.key_type_id, other.key_type_id
def __eq__(self, other):
a, b = self._get_cmp_keys(other)
return a == b
def __lt__(self, other):
a, b = self._get_cmp_keys(other)
return a < b
def __ne__(self, other):
a, b = self._get_cmp_keys(other)
return a != b
def __gt__(self, other):
a, b = self._get_cmp_keys(other)
return a > b
def __le__(self, other):
a, b = self._get_cmp_keys(other)
return a <= b
def __ge__(self, other):
a, b = self._get_cmp_keys(other)
return a >= b
@zope.interface.implementer(IConnection)
def connectionOfPersistent(ob):
"""An adapter which gets a ZODB connection of a persistent object.
We are assuming the object has a parent if it has been created in
this transaction.
    Returns None if it is impossible to get a connection.
"""
cur = ob
while not getattr(cur, '_p_jar', None):
cur = getattr(cur, '__parent__', None)
if cur is None:
return None
return cur._p_jar
# BBB: If zope.app.keyreference is not installed, we still want old key
# references to be available, so fake a module that keeps those old
# pickles loadable.
try:
import zope.app.keyreference
except ImportError:
import sys
from types import ModuleType as module
z_a_k = module('zope.app.keyreference')
sys.modules['zope.app.keyreference'] = z_a_k
z_a_k_p = module('zope.app.keyreference.persistent')
z_a_k_p.KeyReferenceToPersistent = KeyReferenceToPersistent
    sys.modules['zope.app.keyreference.persistent'] = z_a_k_p
 | zope.keyreference | /zope.keyreference-6.0.tar.gz/src/zope/keyreference/persistent.py | persistent.py
============================
What is zope.keyreference?
============================
Object references that support stable comparison and hashes.
.. toctree::
:maxdepth: 1
persistent
reference
.. toctree::
:maxdepth: 2
changelog
Development
===========
zope.keyreference is hosted at GitHub:
https://github.com/zopefoundation/zope.keyreference/
Project URLs
============
* https://pypi.org/project/zope.keyreference/ (PyPI entry and downloads)
====================
Indices and tables
====================
* :ref:`genindex`
* :ref:`modindex`
* :ref:`search`
| zope.keyreference | /zope.keyreference-6.0.tar.gz/zope.keyreference-6.0/docs/index.rst | index.rst |
===============================
Zope 3 Controlled Package Index
===============================
This package has been developed to support the maintenance of a stable set of
Zope project distributions. It manages the controlled packages configuration
file and supports the generation of buildout configuration files that can be
used by developers.
Another use of this package is for testing new distributions against the
index. Here is the workflow for testing a new package against the stable set:
1. Install the correct version of this package.
(a) Download the version of this package that manages the stable set that
you are interested in. Currently only the trunk exists, which manages
the Zope 3.4 release::
$ svn co svn://svn.zope.org/repos/main/zope.release/trunk zope3.4
$ cd zope3.4
(b) Bootstrap the checkout::
$ python ./bootstrap.py
(c) Run buildout to create the scripts::
$ ./bin/buildout
(d) Run the ``buildout.cfg`` generation script to build a configuration
file that can be used for testing:
$ ./bin/generate-buildout
2. From the generated configuration file, you can now build a testing
environment.
(a) Enter the test directory and create a buildout:
$ cd test
$ python ../bootstrap.py
$ ./bin/buildout
(b) Run all the tests to verify that all tests are initially passing:
$ ./bin/test -vpc1
3. Modify the ``buildout.cfg`` to look for the new distribution to be
   tested:
   (a) Comment out the "index" option. This needs to be done so that the
       new package is picked up.
(b) Change the version number of the package of interest in the "versions"
section.
Alternative:
(a) Check out the new distribution from SVN.
(b) Add a "develop path/to/my/package" line in the "buildout" section of
``buildout.cfg``.
4. Run the tests, making sure that they all pass.
5. Modify ``controlled-packages.cfg`` by adding the new version of the package
   to the package's version list (see the example after this list).
6. Generate all files again and upload them:
$ cd ..
$ ./bin/generate-buildout
$ ./bin/generate-versions
$ ./bin/upload
   Once the files are uploaded, a crontab job, running every minute, will
   detect the changes in ``controlled-packages.cfg`` and will generate the new
   controlled package pages.
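For illustration, the edit in step 5 usually just appends the new release to
the package's version list; the package name and the version numbers below are
made up::

  [some.package]
  versions = 1.0.0
             1.0.1
             1.0.2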
Note: I think the process is still a tiny bit too long. I will probably write
a script that makes testing a new version of a package easier, but let's see
whether this process is workable first.
| zope.kgs | /zope.kgs-1.2.0.tar.gz/zope.kgs-1.2.0/README.txt | README.txt |
"""KGS configuration file parser."""
import datetime
import dateutil.parser
import os.path
import urllib2
import ConfigParser
from zc.buildout.buildout import _update, _isurl
MAIN_SECTION = 'KGS'
EXTENDS_OPTION = 'extends'
def _open(base, filename, seen):
"""Open a configuration file and return the result as a dictionary,
Recursively open other files based on options found.
Note: Shamelessly copied from zc.buildout!
"""
if _isurl(filename):
fp = urllib2.urlopen(filename)
base = filename[:filename.rfind('/')]
elif _isurl(base):
if os.path.isabs(filename):
fp = open(filename)
base = os.path.dirname(filename)
else:
filename = base + '/' + filename
fp = urllib2.urlopen(filename)
base = filename[:filename.rfind('/')]
else:
filename = os.path.join(base, filename)
fp = open(filename)
base = os.path.dirname(filename)
if filename in seen:
raise ValueError("Recursive file include", seen, filename)
seen.append(filename)
result = {}
parser = ConfigParser.RawConfigParser()
parser.optionxform = lambda s: s
parser.readfp(fp)
extends = None
for section in parser.sections():
options = dict(parser.items(section))
if section == MAIN_SECTION:
extends = options.pop(EXTENDS_OPTION, extends)
result[section] = options
if extends:
extends = extends.split()
extends.reverse()
for fname in extends:
result = _update(_open(base, fname, seen), result)
seen.pop()
return result
def _getAbsolutePath(section, basePath, name, default):
path = section.get(name, default)
if path:
if not os.path.isabs(path):
path = os.path.join(basePath, path)
if path and not os.path.exists(path):
path = None
return path
class Package(object):
def __init__(self, name, versions, tested, testExtras):
self.name = name
self.versions = versions
self.tested = tested
self.testExtras = testExtras
def __repr__(self):
return '<%s %r>' %(self.__class__.__name__, self.name)
class KGS(object):
name = u'noname'
version = u'unknown'
date = None
changelog = None
announcement = None
files = ()
packages = ()
def __init__(self, path):
self.path = path
self._extract()
def _extract(self):
basePath = os.path.dirname(self.path)
result = _open(basePath, self.path, [])
if MAIN_SECTION in result:
section = result[MAIN_SECTION]
# Get name and version.
self.name = section.get('name', self.name)
self.version = section.get('version', self.version)
# Get the changelog.
self.changelog = _getAbsolutePath(
section, basePath, 'changelog', self.changelog)
# Get the announcement.
self.announcement = _getAbsolutePath(
section, basePath, 'announcement', self.announcement)
# Get the date.
dateStr = section.get('date')
if dateStr:
self.date = dateutil.parser.parse(dateStr).date()
# Get the release files.
files = section.get('files')
if files:
files = files.split()
for path in files:
if not os.path.isabs(path):
path = os.path.join(basePath, path)
if path and os.path.exists(path):
self.files += (path,)
del result[MAIN_SECTION]
self.packages = []
sections = result.keys()
sections.sort()
for section in sections:
self.packages.append(
Package(section,
result[section]['versions'].split(),
ConfigParser.ConfigParser._boolean_states[
result[section]['tested']],
result[section].get('test-extras')
)
)
def __repr__(self):
        return '<%s %r>' %(self.__class__.__name__, self.name)
 | zope.kgs | /zope.kgs-1.2.0.tar.gz/src/zope/kgs/kgs.py | kgs.py
===============
Known Good Sets
===============
This package provides a set of scripts and tools to manage Good-Known-Sets, or
short KGSs. A KGS is a set of package distributions that are known to work
well together. You can verify this, for example, by running all the tests of
all the packages at once.
Let me show you what a typical controlled packages configuration file looks
like:
>>> import tempfile
>>> cfgFile = tempfile.mktemp('-cp.cfg')
>>> open(cfgFile, 'w').write('''\
... [DEFAULT]
... tested = true
...
... [KGS]
... name = zope-dev
... version = 1.2.0
... date = 2009-01-01
... changelog = CHANGES.txt
... announcement = ANNOUNCEMENT.txt
... files =
... zope-dev-1.2.0.tgz
... zope-dev-1.2.0.zip
... zope-dev-1.2.0.exe
...
... [packageA]
... versions = 1.0.0
... 1.0.1
...
... [packageB]
... versions = 1.2.3
... test-extras = test
...
... [packageC]
... # Do not test this package.
... tested = false
... versions = 4.3.1
... ''')
As you can see, this file uses an INI-style format. The "DEFAULT" section is
special, as it inserts the specified options into all other sections as
defaults. The "KGS" section specifies global information about the KGS, such
as its name. Since this section references several external files, we should
quickly create those.
>>> import os
>>> dir = os.path.dirname(cfgFile)
>>> open(os.path.join(dir, 'CHANGES.txt'), 'w').write('''\
... =======
... Changes
... =======
...
... packageA
... ========
...
... Version 1.0.0
... -------------
...
... * Initial Release
... ''')
>>> open(os.path.join(dir, 'ANNOUNCEMENT.txt'), 'w').write('''\
... =======================
... zope-dev 1.2.0 Released
... =======================
...
... The announcement text!
... ''')
>>> open(os.path.join(dir, 'zope-dev-1.2.0.tgz'), 'w').write('tgz')
>>> open(os.path.join(dir, 'zope-dev-1.2.0.exe'), 'w').write('exe')
All other sections refer to package names. Currently each package section
supports two options. The "versions" option lists all versions that are known
to work in the KGS. Those versions should *always* only be bug fixes to the
first listed version. The second option, "tested", specifies whether the
package should be part of the KGS test suite. By default, we want all packages
to be tested, but some packages require very specific test setups that cannot
be easily reproduced [1]_, so we turn off those tests.
You can also stack controlled package configurations on top of each
other. Base configurations can be specified using the `extends` option:
>>> import tempfile
>>> cfgFile2 = tempfile.mktemp('-cp.cfg')
>>> open(cfgFile2, 'w').write('''\
... [DEFAULT]
... tested = true
...
... [KGS]
... name = grok-dev
... version = 0.1.0
... extends = %s
...
... [packageA]
... versions = 1.0.2
...
... [packageD]
... versions = 2.2.3
... 2.2.4
... ''' %cfgFile)
As you can see, you can completely override another package's version
specification as well.
Generating the configuration file and managing it is actually the hard
part. Let's now see what we can do with it.
.. [1] This is usually due to bugs in setuptools or buildout, such as PYC
   files not containing the correct reference to their PY file.
Generate Versions
-----------------
One of the easiest scripts is the version generation. This script will
generate a "versions" section that is compatible with buildout.
>>> versionsFile = tempfile.mktemp('-versions.cfg')
>>> from zope.kgs import version
>>> version.main((cfgFile, versionsFile))
>>> print open(versionsFile, 'r').read()
[versions]
packageA = 1.0.1
packageB = 1.2.3
packageC = 4.3.1
Let's now ensure that the versions also work for the extended configuration:
>>> versionsFile2 = tempfile.mktemp('-versions.cfg')
>>> version.main((cfgFile2, versionsFile2))
>>> print open(versionsFile2, 'r').read()
[versions]
packageA = 1.0.2
packageB = 1.2.3
packageC = 4.3.1
packageD = 2.2.4
Generate Buildout
-----------------
In order to be able to test the KGS, you can also generate a full buildout
file that will create and install a testrunner over all packages for you:
>>> buildoutFile = tempfile.mktemp('-buildout.cfg')
>>> from zope.kgs import buildout
>>> buildout.main((cfgFile, buildoutFile))
>>> print open(buildoutFile, 'r').read()
[buildout]
parts = test
versions = versions
<BLANKLINE>
[test]
recipe = zc.recipe.testrunner
eggs = packageA
packageB [test]
<BLANKLINE>
[versions]
packageA = 1.0.1
packageB = 1.2.3
packageC = 4.3.1
<BLANKLINE>
Let's make sure that the buildout generation also honors the extensions:
>>> buildoutFile2 = tempfile.mktemp('-buildout.cfg')
>>> buildout.main((cfgFile2, buildoutFile2))
>>> print open(buildoutFile2, 'r').read()
[buildout]
parts = test
versions = versions
<BLANKLINE>
[test]
recipe = zc.recipe.testrunner
eggs = packageA
packageB [test]
packageD
<BLANKLINE>
[versions]
packageA = 1.0.2
packageB = 1.2.3
packageC = 4.3.1
packageD = 2.2.4
<BLANKLINE>
Flat Links Pages
----------------
We can also create a flat links page that can be used in the
`dependency_links` argument in your `setup.py` file. Since this module
accesses the original PyPI to ask for the download locations and filenames, we
have to create a controlled packages configuration file that contains real
packages with real version numbers:
>>> cfgFileReal = tempfile.mktemp('-cp.cfg')
>>> open(cfgFileReal, 'w').write('''\
... [DEFAULT]
... tested = true
...
... [KGS]
... name = zope-dev
... version = 3.4.0b2
...
... [PIL]
... versions = 1.1.6
...
... [zope.component]
... versions = 3.4.0
...
... [zope.interface]
... versions = 3.4.0
... 3.4.1
...
... [z3c.formdemo]
... versions = 1.1.0
... ''')
Let's now create the links page:
>>> linksFile = tempfile.mktemp('-links.html')
>>> from zope.kgs import link
>>> link.main((cfgFileReal, linksFile))
>>> print open(linksFile, 'r').read()
<html>
<head>
<title>Links for the "zope-dev" KGS (version 3.4.0b2)</title>
</head>
<body>
<h1>Links for the "zope-dev" KGS (version 3.4.0b2)</h1>
<a href="http://pypi.python.org/packages/2.4/z/z3c.formdemo/z3c.formdemo-1.1.0-py2.4.egg#md5=9d605bd559ea33ac57ce11f5c80fa3d3">z3c.formdemo-1.1.0-py2.4.egg</a><br/>
<a href="http://pypi.python.org/packages/source/z/z3c.formdemo/z3c.formdemo-1.1.0.tar.gz#md5=f224a49cea737112284f74b859e3eed0">z3c.formdemo-1.1.0.tar.gz</a><br/>
<a href="http://pypi.python.org/packages/2.4/z/zope.component/zope.component-3.4.0-py2.4.egg#md5=c0763e94912e4a8ac1e321a068c916ba">zope.component-3.4.0-py2.4.egg</a><br/>
<a href="http://pypi.python.org/packages/source/z/zope.component/zope.component-3.4.0.tar.gz#md5=94afb57dfe605d7235ff562d1eaa3bed">zope.component-3.4.0.tar.gz</a><br/>
<a href="http://pypi.python.org/packages/source/z/zope.interface/zope.interface-3.4.0.tar.gz#md5=0be9fd80b7bb6bee520e56eba7d29c90">zope.interface-3.4.0.tar.gz</a><br/>
<a href="http://pypi.python.org/packages/2.4/z/zope.interface/zope.interface-3.4.0-py2.4-win32.egg#md5=3fa5e992271375eac597622d8e2fd5ec">zope.interface-3.4.0-py2.4-win32.egg</a><br/>
<a href="http://pypi.python.org/packages/source/z/zope.interface/zope.interface-3.4.1.tar.gz#md5=b085f4a774adab688e037ad32fbbf08e">zope.interface-3.4.1.tar.gz</a><br/>
</body>
</html>
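As an illustration only (the project name and URL below are made up), such a
links page would typically be referenced through the ``dependency_links``
argument of a package's ``setup.py``, as mentioned above::

  from setuptools import setup

  setup(
      name='my.package',
      install_requires=['zope.component', 'zope.interface'],
      # Point setuptools at the generated flat links page:
      dependency_links=['http://example.org/zope-dev/3.4.0b2/links.html'],
  )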
PPIX Support
------------
You can also use the KGS to limit the available packages in a package index
generated by ``zc.mirrorcheeseshopslashsimple``. This script also uses PyPI to
look up distribution files, so we have to use the real configuration file
again.
Let's create the pages:
>>> indexDir = tempfile.mkdtemp('-ppix')
>>> from zope.kgs import ppix
>>> ppix.main((cfgFileReal, indexDir))
The index contains one directory per package. So let's have a look:
>>> import os
>>> sorted(os.listdir(indexDir))
['PIL', 'z3c.formdemo', 'zope.component', 'zope.interface']
Each directory contains a single "index.html" file with the download links:
>>> pkgDir = os.path.join(indexDir, 'zope.component')
>>> sorted(os.listdir(pkgDir))
['index.html']
>>> pkgIndex = os.path.join(pkgDir, 'index.html')
>>> print open(pkgIndex, 'r').read()
<html>
<head>
<title>Links for "zope.component"</title>
</head>
<body>
<h1>Links for "zope.component"</h1>
<a href="http://pypi.python.org/packages/2.4/z/zope.component/zope.component-3.4.0-py2.4.egg#md5=c0763e94912e4a8ac1e321a068c916ba">zope.component-3.4.0-py2.4.egg</a><br/>
<a href="http://pypi.python.org/packages/source/z/zope.component/zope.component-3.4.0.tar.gz#md5=94afb57dfe605d7235ff562d1eaa3bed">zope.component-3.4.0.tar.gz</a><br/>
</body>
</html>
PIL is an interesting case, because it does not upload its distribution files
yet, at least not for version 1.1.6:
>>> pkgIndex = os.path.join(indexDir, 'PIL', 'index.html')
>>> print open(pkgIndex, 'r').read()
<html><head><title>Links for PIL</title></head><body><h1>Links for PIL</h1><a href='http://www.pythonware.com/products/pil' rel="homepage">1.1.5 home_page</a><br/>
<a href='http://effbot.org/zone/pil-changes-115.htm' rel="download">1.1.5 download_url</a><br/>
<a href='http://www.pythonware.com/products/pil' rel="homepage">1.1.5a2 home_page</a><br/>
<a href='http://effbot.org/zone/pil-changes-115.htm' rel="download">1.1.5a2 download_url</a><br/>
<a href='http://www.pythonware.com/products/pil' rel="homepage">1.1.5a1 home_page</a><br/>
<a href='http://effbot.org/zone/pil-changes-115.htm' rel="download">1.1.5a1 download_url</a><br/>
<a href='http://www.pythonware.com/products/pil/' rel="homepage">1.1.4 home_page</a><br/>
<a href='http://www.pythonware.com/products/pil/' rel="homepage">1.1.3 home_page</a><br/>
<a href='http://www.pythonware.com/downloads/Imaging-1.1.3.tar.gz' rel="download">1.1.3 download_url</a><br/>
<a href='http://www.pythonware.com/products/pil' rel="homepage">1.1.6 home_page</a><br/>
<a href='http://effbot.org/downloads/#Imaging' rel="download">1.1.6 download_url</a><br/>
</body></html>
Optionally, you can also specify the `-i` option to generate an overview:
>>> ppix.main(('-i', cfgFileReal, indexDir))
>>> sorted(os.listdir(indexDir))
['PIL', 'index.html', 'z3c.formdemo', 'zope.component', 'zope.interface']
Let's now look at the file:
>>> indexPage = os.path.join(indexDir, 'index.html')
>>> print open(indexPage, 'r').read()
<html>
<head>
<title>Simple Index for the "zope-dev" KGS (version 3.4.0b2)</title>
</head>
<body>
<h1>Simple Index for the "zope-dev" KGS (version 3.4.0b2)</h1>
<a href="PIL">PIL</a><br/>
<a href="z3c.formdemo">z3c.formdemo</a><br/>
<a href="zope.component">zope.component</a><br/>
<a href="zope.interface">zope.interface</a><br/>
</body>
</html>
Allowing existing package pages to be overwritten and making the main index
page an optional feature makes it possible to use this script for two use
cases: (1) Merge the constraints into a PPIX index created by
``zc.mirrorcheeseshopslashsimple``, and (2) create a standalone index which
only provides the packages of the KGS.
Getting the Latest Versions
---------------------------
When updating the KGS, it is often useful to know which packages have new
releases.
>>> from zope.kgs import latest
>>> latest.main((cfgFileReal,))
z3c.formdemo: 1.1.1, 1.1.2, 1.2.0, 1.3.0, 1.3.0b1, 1.4.0, ...
However, it is often desired only to show new minor versions; in this case, we
can pass an option to exclude all versions that have a different major
version:
>>> latest.main(('-m', cfgFileReal))
z3c.formdemo: 1.1.1, 1.1.2
Sometimes you're only interested in changes that apply to a single package,
and you don't want to wait for the script to query all of the others:
>>> latest.main(('-m', cfgFileReal, 'zope.app.server'))
>>> latest.main(('-m', cfgFileReal, 'z3c.formdemo'))
z3c.formdemo: 1.1.1, 1.1.2
Extracting Change Information
-----------------------------
When releasing a version of the KGS, it is desirable to produce a list of
changes since the last release. Changes are commonly compared to an older
version.
>>> cfgFileRealOrig = tempfile.mktemp('-cp.cfg')
>>> open(cfgFileRealOrig, 'w').write('''\
... [DEFAULT]
... tested = true
...
... [KGS]
... name = zope-dev
... version = 3.4.0b1
...
... [PIL]
... versions = 1.1.6
...
... [zope.component]
... versions = 3.4.0
...
... [zope.interface]
... versions = 3.4.0
... ''')
Let's now produce the changes:
>>> from zope.kgs import change
>>> change.main((cfgFileReal, cfgFileRealOrig))
Processing ('PIL', '1.1.6')
Processing ('z3c.formdemo', '1.1.0')
Processing ('zope.component', '3.4.0')
Processing ('zope.interface', '3.4.1')
===
PIL
===
<BLANKLINE>
No changes or information not found.
<BLANKLINE>
============
z3c.formdemo
============
<BLANKLINE>
1.1.0 (unknown)
---------------
<BLANKLINE>
- Feature: New "SQL Message" demo shows how ``z3c.form`` can be used with
non-object data. Specificically, this small application demonstrates using a
Gadfly database using pure SQL calls without any ORM.
<BLANKLINE>
- Feature: New "Address Book" demo that demonstrates more complex use cases,
such as subforms, composite widgets, and mappings/lists
<BLANKLINE>
<BLANKLINE>
==============
zope.component
==============
<BLANKLINE>
3.4.0 (2007-09-29)
------------------
<BLANKLINE>
No further changes since 3.4.0a1.
<BLANKLINE>
<BLANKLINE>
==============
zope.interface
==============
<BLANKLINE>
3.4.1 (unknown)
---------------
<BLANKLINE>
Fixed a setup bug that prevented installation from source on systems
without setuptools.
<BLANKLINE>
3.4.0 (unknown)
---------------
<BLANKLINE>
Final release for 3.4.0.
<BLANKLINE>
<BLANKLINE>
You can also create the changes without an original file, in which case only
the versions listed in the current KGS are considered.
>>> change.main((cfgFileReal,))
Processing ('PIL', '1.1.6')
Processing ('z3c.formdemo', '1.1.0')
Processing ('zope.component', '3.4.0')
Processing ('zope.interface', '3.4.1')
===
PIL
===
<BLANKLINE>
No changes or information not found.
<BLANKLINE>
============
z3c.formdemo
============
<BLANKLINE>
1.1.0 (unknown)
---------------
<BLANKLINE>
- Feature: New "SQL Message" demo shows how ``z3c.form`` can be used with
non-object data. Specificically, this small application demonstrates using a
Gadfly database using pure SQL calls without any ORM.
<BLANKLINE>
- Feature: New "Address Book" demo that demonstrates more complex use cases,
such as subforms, composite widgets, and mappings/lists
<BLANKLINE>
<BLANKLINE>
==============
zope.component
==============
<BLANKLINE>
3.4.0 (2007-09-29)
------------------
<BLANKLINE>
No further changes since 3.4.0a1.
<BLANKLINE>
<BLANKLINE>
==============
zope.interface
==============
<BLANKLINE>
3.4.1 (unknown)
---------------
<BLANKLINE>
Fixed a setup bug that prevented installation from source on systems
without setuptools.
<BLANKLINE>
3.4.0 (unknown)
---------------
<BLANKLINE>
Final release for 3.4.0.
<BLANKLINE>
<BLANKLINE>
The Site Generator
------------------
The easiest way to publish the KGS is via a directory published by a Web
server. Whenever a new `controlled-packages.cfg` file is uploaded, a script is
run that generates all the files. I usually set up a crontab job to do
this. The site generator script acts upon a directory, in which it assumes a
`controlled-packages.cfg` file was placed:
>>> siteDir = tempfile.mkdtemp()
>>> cfgFileSite = os.path.join(siteDir, 'controlled-packages.cfg')
>>> import shutil
>>> shutil.copy(cfgFileReal, cfgFileSite)
>>> from zope.kgs import site
>>> site.main(['-s', siteDir])
Let's have a look at the generated files:
>>> from pprint import pprint
>>> pprint(sorted(os.listdir(siteDir)))
['3.4.0b2', 'index.html', 'intro.html', 'resources']
>>> sorted(os.listdir(os.path.join(siteDir, '3.4.0b2')))
['ANNOUNCEMENT.html', 'CHANGES.html',
'buildout.cfg', 'controlled-packages.cfg', 'index', 'index.html',
'links.html', 'minimal', 'versions.cfg']
>>> sorted(os.listdir(os.path.join(siteDir, '3.4.0b2', 'minimal')))
['PIL', 'index.html', 'z3c.formdemo', 'zope.component', 'zope.interface']
If you try to generate the site again without placing a new controlled
packages config file in the site directory, it will simply return:
>>> site.main(['-s', siteDir])
Basic Parser API
----------------
The ``kgs.py`` module provides a simple class that parses the KGS
configuration file and provides all data in an object-oriented manner.
>>> from zope.kgs import kgs
The class is simply instantiated using the path to the config file:
>>> myKGS = kgs.KGS(cfgFile)
>>> myKGS
<KGS 'zope-dev'>
The name, version, and date of the KGS are available via:
>>> myKGS.name
'zope-dev'
>>> myKGS.version
'1.2.0'
>>> myKGS.date
datetime.date(2009, 1, 1)
When the changelog and/or announcement files are available, the KGS references
the absolute path:
>>> myKGS.changelog
'.../CHANGES.txt'
>>> myKGS.announcement
'.../ANNOUNCEMENT.txt'
The same is true for other release-related files:
>>> myKGS.files
('.../zope-dev-1.2.0.tgz',
'.../zope-dev-1.2.0.exe')
The packages are available under `packages`:
>>> myKGS.packages
[<Package 'packageA'>, <Package 'packageB'>, <Package 'packageC'>]
Each package is also an object:
>>> pkgA = myKGS.packages[0]
>>> pkgA
<Package 'packageA'>
>>> pkgA.name
'packageA'
>>> pkgA.versions
['1.0.0', '1.0.1']
>>> pkgA.tested
True
As we have seen in the scripts above, the KGS class also supports the
`extends` option. Thus, let's load the KGS for the second config file:
>>> myKGS2 = kgs.KGS(cfgFile2)
>>> myKGS2
<KGS 'grok-dev'>
>>> myKGS2.name
'grok-dev'
>>> myKGS2.packages
[<Package 'packageA'>,
<Package 'packageB'>,
<Package 'packageC'>,
<Package 'packageD'>]
| zope.kgs | /zope.kgs-1.2.0.tar.gz/zope.kgs-1.2.0/src/zope/kgs/README.txt | README.txt |
"""Generate a change log for the packages of a KGS.

Usage: %s current-packages.cfg [orig-packages.cfg]
"""
import logging
import os
import pickle
import re
import sys
import xml.parsers.expat
import xmlrpclib
import pkg_resources
import zope.kgs.kgs
SERVER_URL = "http://pypi.python.org/pypi"
def loadCache(fn):
if os.path.exists(fn):
return pickle.load(open(fn))
return {}
def saveCache(fn, cache):
pickle.dump(cache, open(fn, 'w'))
# version_line finds a version number and an optional date
version_line = re.compile(
r"(version\s*|)([0-9.][0-9a-zA-Z.]*)(\s*[(]([0-9a-z?-]+)[)])?",
re.IGNORECASE)
# decoration_line matches lines to ignore
decoration_line = re.compile(r"---|===")
# define logger for output
logger = logging.getLogger('info')
def parseReleases(lines):
"""Parse the list of releases from a CHANGES.txt file.
Yields (version, release_date, [line]) for each release listed in the
change log.
"""
if isinstance(lines, basestring):
lines = lines.split('\n')
version = None
release_date = None
changes = None
for line in lines:
line = line.rstrip()
mo = version_line.match(line)
if mo is not None:
if changes is not None:
yield version, release_date, changes
changes = []
version = mo.group(2)
release_date = mo.group(4)
continue
elif decoration_line.match(line) is not None:
continue
elif changes is None :
continue
elif line.startswith('Detailed Documentation'):
yield version, release_date, changes
break
changes.append(line)
# include the last list of changes
if version is not None and changes is not None:
yield version, release_date, changes
def extractChanges(text, firstVersion, lastVersion):
"""Parse the changes out of a CHANGES.txt in the given range.
For each release, yields (version, release_date, change text).
"""
first = pkg_resources.parse_version(firstVersion)
last = pkg_resources.parse_version(lastVersion)
for version, release_date, changes in parseReleases(text):
        v = pkg_resources.parse_version(version)
if first <= v <= last:
yield version, release_date, '\n'.join(changes)
def generateChanges(currentPath, origPath):
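    """Collect changelog entries for every package in the current KGS.

    Returns a list of ``(package name, [(version, release date, text)])``
    tuples, covering the versions between the original KGS (if given) and
    the current one.
    """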
kgs = zope.kgs.kgs.KGS(currentPath)
server = xmlrpclib.Server(SERVER_URL)
origVersions = {}
if origPath:
origKgs = zope.kgs.kgs.KGS(origPath)
for package in origKgs.packages:
origVersions[package.name] = package.versions[-1]
changes = []
cache = loadCache('descriptions.dat')
for package in kgs.packages:
key = package.name, package.versions[-1]
logger.info('Processing ' + str(key))
if key in cache:
description = cache[key]
else:
# Extract release data from server.
try:
data = server.release_data(package.name, package.versions[-1])
except xml.parsers.expat.ExpatError, err:
logger.warn('XML-RPC Error: ' + err.message)
continue
cache[key] = description = data['description']
if description is None:
logger.warn('No description found: ' + str(key))
continue
saveCache('descriptions.dat', cache)
firstVersion = origVersions.get(
package.name, package.versions[0])
lastVersion = package.versions[-1]
versions = list(
extractChanges(description, firstVersion, lastVersion))
changes.append((package.name, versions))
return changes
def printChanges(changes, output):
for name, versions in changes:
print >> output, '=' * len(name)
print >> output, name
print >> output, '=' * len(name)
print >> output
if not versions:
print >> output, 'No changes or information not found.'
for version, release_date, text in versions:
s = '%s (%s)' % (version, release_date or 'unknown')
print >> output, s
print >> output, '-' * len(s)
print >> output
print >> output, text.strip()
print >> output
print >> output
def main(args=None):
if args is None:
args = sys.argv[1:]
if len(args) < 1 or args[0] in ('-h', '--help'):
print __doc__ % sys.argv[0]
sys.exit(1)
logger.setLevel(1)
handler = logging.StreamHandler(sys.stdout)
logger.addHandler(handler)
currentPackageConfigPath = os.path.abspath(args[0])
origPackageConfigPath = None
if len(args) > 1:
origPackageConfigPath = os.path.abspath(args[1])
changes = generateChanges(currentPackageConfigPath, origPackageConfigPath)
printChanges(changes, sys.stdout)
    logger.removeHandler(handler)
 | zope.kgs | /zope.kgs-1.2.0.tar.gz/src/zope/kgs/change.py | change.py
"""Generates a full KGS site with all bells and whistles."""
import datetime
import docutils.core
import logging
import optparse
import os
import pkg_resources
import re
import shutil
import sys
import time
from zope.kgs import version, buildout, ppix, link, kgs, template
TIMESTAMP_FILENAME = 'cf-timestamp'
FEATURES = [
('controlled-packages.cfg', u'Controlled Packages'),
('versions.cfg', u'Versions'),
('buildout.cfg', u'Buildout Configuration'),
('links.html', u'Package Links'),
('minimal', u'Minimal Index'),
('index', u'Index'),
]
formatter = logging.Formatter('%(levelname)s - %(message)s')
handler = logging.StreamHandler(sys.stdout)
handler.setFormatter(formatter)
logger = logging.getLogger('info')
logger.addHandler(handler)
logger.setLevel(logging.ERROR)
def _getRenderedFilename(version, filename):
if not filename:
return
return '%s/%s' % (version,
os.path.split(filename)[-1].split('.')[0] + '.html')
def _getRenderedTxt(filename):
if not filename:
return ""
f = open(filename)
parts = docutils.core.publish_parts(source=f.read(), writer_name='html')
return parts['html_body']
def generateData(src):
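    """Collect the template data from the per-version directories below ``src``.

    Every subdirectory containing a ``controlled-packages.cfg`` file
    contributes one entry to ``versions``; the newest one is also exposed as
    ``latest``.
    """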
versions = []
for filename in os.listdir(src):
path = os.path.join(src, filename)
if not (os.path.isdir(path) and
os.path.exists(os.path.join(path, 'controlled-packages.cfg'))):
continue
kgsPath = os.path.join(path, 'controlled-packages.cfg')
set = kgs.KGS(kgsPath)
features = []
for (filename, title) in FEATURES:
if filename in os.listdir(path):
features.append({'url': '%s/%s' % (set.version, filename),
'title': title})
files = []
for filepath in set.files:
filename = os.path.split(filepath)[-1]
files.append({
'url': set.version + '/' + filename,
'name': filename
})
versionData = {
'name': set.version,
'date': set.date and str(set.date) or None,
'features': features,
'changelog': {
'url':_getRenderedFilename(set.version, set.changelog),
'html': _getRenderedTxt(set.changelog)},
'announcement': {
'url':_getRenderedFilename(set.version, set.announcement),
'html': _getRenderedTxt(set.announcement)},
'files': files,
}
versions.append(versionData)
versions.sort(key=lambda x: pkg_resources.parse_version(x['name']),
reverse=True)
return {'versions': versions,
'latest': versions[0],
'title': set.name,
'siteRoot':''}
def generateSite(siteDir, templateDir, force=False, offline=False,
noLinks=False, noIndex=False, noMinimalIndex=False):
# Create some important variables
kgsPath = os.path.join(siteDir, 'controlled-packages.cfg')
# If the `controlled-packages.cfg` file is not found,
if not os.path.exists(kgsPath):
logger.info("The site is up-to-date. No new file "
"`controlled-packages.cfg` was found.")
return
set = kgs.KGS(kgsPath)
ver = set.version
logger.info(
"Building site for version %s using config: %s" % (ver, kgsPath))
# Create a directory for the new version
versionDir = os.path.join(siteDir, ver)
if os.path.exists(versionDir):
if force:
logger.info('Recreating directory %s.' %versionDir)
shutil.rmtree(versionDir)
os.mkdir(versionDir)
else:
os.mkdir(versionDir)
# Copy the KGS config file, changelog, announcement, and release files to
# the version directory
shutil.move(kgsPath, versionDir)
if set.changelog:
shutil.move(set.changelog, versionDir)
if set.announcement:
shutil.move(set.announcement, versionDir)
for filepath in set.files:
shutil.move(filepath, versionDir)
# Recreate the KGS Path
kgsPath = os.path.join(versionDir, 'controlled-packages.cfg')
# Insert date into KGS, if it is not set.
if not set.date:
text = open(kgsPath, 'r').read()
        pos = re.search(r'\[KGS\]\n(?:.+\n)*', text).end()
text = text[:pos] + 'date = %s\n' %datetime.date.today() + text[pos:]
open(kgsPath, 'w').write(text)
# Recreate the KGS
set = kgs.KGS(kgsPath)
# Create the buildout config file
buildoutPath = os.path.join(versionDir, 'buildout.cfg')
logger.info("Generating buildout config: %s" % buildoutPath)
buildout.generateBuildout(kgsPath, buildoutPath)
# Create a versions config file and version it
versionsPath = os.path.join(versionDir, 'versions.cfg')
logger.info("Generating version config file: %s" % versionsPath)
version.generateVersions(kgsPath, versionsPath)
# Create a links config file and version it
if not noLinks:
linksPath = os.path.join(versionDir, 'links.html')
logger.info("generating links")
link.generateLinks(kgsPath, linksPath, offline=offline)
# Update the full index (which is assumed to live in the site directory)
if not noIndex:
logger.info("updating the index")
idxDir = os.path.join(versionDir, 'index')
if not os.path.exists(idxDir):
os.mkdir(idxDir)
ppix.generatePackagePages(kgsPath, idxDir, offline=offline)
ppix.generateIndexPage(kgsPath, idxDir)
# Update the minimal index
if not noMinimalIndex:
logger.info("updating the minimal index")
midxDir = os.path.join(versionDir, 'minimal')
if not os.path.exists(midxDir):
os.mkdir(midxDir)
ppix.generatePackagePages(kgsPath, midxDir, offline=offline)
ppix.generateIndexPage(kgsPath, midxDir)
# Generate Web Site
logger.info("Generating Web Site")
template.generateSite(templateDir, siteDir, generateData(siteDir))
logger.info("finished generating site.")
parser = optparse.OptionParser()
parser.add_option(
"-q","--quiet", action="store_true",
dest="quiet", default=False,
help="When specified, no messages are displayed.")
parser.add_option(
"-v","--verbose", action="store_true",
dest="verbose", default=False,
help="When specified, debug information is created.")
parser.add_option(
"-s","--site-dir", action="store",
type="string", dest="siteDir", metavar="DIR",
help="The directory where the site should be generated")
parser.add_option(
"-t","--template-dir", action="store",
type="string", dest="templateDir", metavar="DIR",
default=os.path.join(os.path.dirname(__file__), 'templates'),
help="The directory where the site templates are located.")
parser.add_option(
"-w","--website-only", action="store_true",
dest="websiteOnly", default=False,
help="When specified, only the Web site is (re-)generated.")
parser.add_option(
"-f","--force", action="store_true", dest="force", default=False,
help=("Force the site to rebuild even if it is already at the "
"latest version."))
parser.add_option(
"-o","--offline", action="store_true", dest="offlineMode", default=False,
help=("Run in offline mode. Doesn't really do much, good for "
"developing templates."))
parser.add_option(
"--no-index", action="store_true", dest="noIndex", default=False,
help=("When set, no index is created."))
parser.add_option(
"--no-minimal-index", action="store_true", dest="noMinimalIndex",
default=False,
help=("When set, no minimal index is created."))
parser.add_option(
"--no-links", action="store_true", dest="noLinks", default=False,
help=("When set, no links file is created."))
def main(args=None):
if args is None:
args = sys.argv[1:]
if not args:
args = ['-h']
options, args = parser.parse_args(args)
if options.verbose:
logger.setLevel(logging.INFO)
if options.quiet:
logger.setLevel(logging.FATAL)
if not options.siteDir:
logger.error("You must specify the site directory with the -s option.")
sys.exit(1)
siteDir = os.path.abspath(options.siteDir)
templateDir = os.path.abspath(options.templateDir)
if options.websiteOnly:
# Generate Web Site
logger.info("Generating Web Site")
template.generateSite(templateDir, siteDir, generateData(siteDir))
logger.info("finished generating site.")
else:
generateSite(
siteDir, templateDir, options.force, options.offlineMode,
options.noLinks, options.noIndex, options.noMinimalIndex) | zope.kgs | /zope.kgs-1.2.0.tar.gz/zope.kgs-1.2.0/src/zope/kgs/site.py | site.py |
"""Helper components for the Web site generation.
"""
import os
import shutil
import copy
import zope.pagetemplate.pagetemplatefile
class Template(zope.pagetemplate.pagetemplatefile.PageTemplateFile):
def __init__(self, path, data, templates):
super(Template, self).__init__(path)
self.templates = templates
self.data = data
def pt_getContext(self, args=(), options=None, **ignore):
rval = self.data.copy()
rval.update(
{'args': args,
'nothing': None,
'self': self,
'templates': self.templates,
})
rval.update(self.pt_getEngine().getBaseNames())
return rval
class DirectoryContext(object):
def __init__(self, path, data, root=None):
self.path = path
self.data = data
self.root = root or self
def __getitem__(self, name):
path = os.path.join(self.path, name)
if os.path.exists(path):
return Template(path, self.data, self.root)
return None
def generateSite(src, dst, data, templates=None):
if templates is None:
templates = DirectoryContext(src, data)
for filename in os.listdir(src):
srcPath = os.path.join(src, filename)
dstPath = os.path.join(dst, filename)
if filename.startswith('.'):
continue
elif srcPath.endswith('.pt'):
continue
elif srcPath.endswith('.html'):
html = Template(srcPath, data, templates)()
open(dstPath, 'w').write(html)
elif filename == 'VERSION':
for version in data['versions']:
versionDir = os.path.join(dst, version['name'])
newData = copy.deepcopy(data)
newData['version'] = version
newData['siteRoot'] = '../%s' % newData['siteRoot']
generateSite(srcPath, versionDir, newData, templates)
elif os.path.isdir(srcPath):
if not os.path.exists(dstPath):
os.mkdir(dstPath)
newData = copy.deepcopy(data)
newData['siteRoot'] = '../%s' % newData['siteRoot']
generateSite(srcPath, dstPath, newData, templates)
else:
shutil.copyfile(srcPath, dstPath) | zope.kgs | /zope.kgs-1.2.0.tar.gz/zope.kgs-1.2.0/src/zope/kgs/template.py | template.py |
import os
import sys
import urllib
import urllib2
import xmlrpclib
import zope.kgs.kgs
TEMPLATE = ('<html>\n<head>\n<title>%(title)s</title>\n</head>\n'
'<body>\n<h1>%(title)s</h1>\n%(body)s\n</body>\n'
'</html>')
LINK_TEMPLATE = '<a href="%(url)s#md5=%(md5_digest)s">%(filename)s</a><br/>'
SIMPLE_LINK_TEMPLATE = '<a href="%(url)s">%(name)s</a><br/>'
SIMPLE_BASE_URL = "http://pypi.python.org/simple/"
def generatePackagePage(package, destDir, server, offline=False):
packagePath = os.path.join(destDir, package.name)
links = []
if not offline:
for version in package.versions:
dist_links = server.package_urls(package.name, version)
for link in dist_links:
links.append(LINK_TEMPLATE %link)
if not os.path.exists(packagePath):
os.mkdir(packagePath)
if links or offline:
open(os.path.join(packagePath, 'index.html'), 'w').write(
TEMPLATE %{'title': 'Links for "%s"' %package.name,
'body': '\n'.join(links)})
else:
# A small fallback, in case PyPI does not maintain the release
# files.
page = urllib2.urlopen(SIMPLE_BASE_URL + package.name + '/').read()
open(os.path.join(packagePath, 'index.html'), 'w').write(page)
def generatePackagePages(packageConfigPath, destDir, offline=False):
kgs = zope.kgs.kgs.KGS(packageConfigPath)
server = xmlrpclib.Server('http://pypi.python.org/pypi')
for package in kgs.packages:
generatePackagePage(package, destDir, server, offline=offline)
def generateIndexPage(packageConfigPath, destDir):
kgs = zope.kgs.kgs.KGS(packageConfigPath)
links = []
for pkg in kgs.packages:
links.append(
SIMPLE_LINK_TEMPLATE %{
'url': urllib.quote(pkg.name), 'name': pkg.name}
)
open(os.path.join(destDir, 'index.html'), 'w').write(
TEMPLATE %{
'title': 'Simple Index for the "%s" KGS (version %s)' %(kgs.name,
kgs.version),
'body': '\n'.join(links)})
def main(args=None):
if args is None:
args = sys.argv[1:]
if len(args) < 1:
print __doc__ % sys.argv[0]
sys.exit(1)
createIndex = False
if args[0] == '-i':
createIndex = True
args = args[1:]
packageConfigPath = os.path.abspath(args[0])
destDir = os.path.join(
os.path.dirname(packageConfigPath), 'index')
if len(args) == 2:
destDir = args[1]
if not os.path.exists(destDir):
os.mkdir(destDir)
generatePackagePages(packageConfigPath, destDir)
if createIndex:
generateIndexPage(packageConfigPath, destDir) | zope.kgs | /zope.kgs-1.2.0.tar.gz/zope.kgs-1.2.0/src/zope/kgs/ppix.py | ppix.py |
=============
Quick Start
=============
.. module:: zope.lifecycleevent
This document describes the various event types defined by this
package and provides some basic examples of using them to inform parts
of the system about object changes.
All events have three components: an *interface* defining the event's
structure, a default *implementation* of that interface (the *event
object*), and a high-level *convenience function* (defined by the
:class:`~.IZopeLifecycleEvent` interface) for easily sending that
event in a single function call.
.. note:: The convenience functions are simple wrappers for
constructing an event object and sending it via
:func:`zope.event.notify`. Here we will only discuss using these
functions; for more information on the advanced usage of when and
how to construct and send event objects manually, see
:doc:`manual`.
.. note:: This document will not discuss actually *handling* these
events (setting up *subscribers* for them). For information on
that topic, see :doc:`handling`.
We will go through the events in approximate order of how they would
be used to follow the life-cycle of an object.
Creation
========
The first event is :class:`~.IObjectCreatedEvent`, implemented by
:class:`~.ObjectCreatedEvent`, which is used to communicate that a single object
has been created. It can be sent with the
:func:`zope.lifecycleevent.created` function.
For example:
>>> from zope.lifecycleevent import created
>>> obj = {}
>>> created(obj)
Copying
=======
Copying an object is a special case of creating one. It can happen at
any time and is implemented with :class:`~.IObjectCopiedEvent`,
:class:`~.ObjectCopiedEvent`, or the API
:func:`zope.lifecycleevent.copied`.
>>> from zope.lifecycleevent import copied
>>> import pickle
>>> copy = pickle.loads(pickle.dumps(obj))
>>> copied(copy, obj)
.. note::
Handlers for :class:`~.IObjectCreatedEvent` can expect to
receive events for :class:`~.IObjectCopiedEvent` as well.
.. _addition:
Addition
========
After objects are created, it is common to *add* them somewhere for
storage or access. This can be accomplished with the
:class:`~.IObjectAddedEvent` and its implementation
:class:`~.ObjectAddedEvent`, or the API
:func:`zope.lifecycleevent.added`.
>>> from zope.lifecycleevent import ObjectAddedEvent
>>> from zope.lifecycleevent import added
>>> container = {}
>>> container['name'] = obj
>>> added(obj, container, 'name')
If the object being added has a non-None ``__name__`` or ``__parent__``
attribute, we can omit those values when we call ``added`` and the
attributes will be used.
>>> class Location(object):
... __parent__ = None
... __name__ = None
>>> location = Location()
>>> location.__name__ = "location"
>>> location.__parent__ = container
>>> container[location.__name__] = location
>>> added(location)
.. tip::
The interface :class:`zope.location.interfaces.ILocation`
defines these attributes (although we don't require the object to
implement that interface), and containers that implement
:class:`zope.container.interfaces.IWriteContainer` are expected to
set them (such containers will also automatically send the
:class:`~.IObjectAddedEvent`).
Modification
============
One of the most common types of events used from this package is the
:class:`~.IObjectModifiedEvent` (implemented by
:class:`~.ObjectModifiedEvent`) that represents object modification.
In the simplest case, it may be enough to simply notify interested
parties that the object has changed. Like the other events, this can
be done manually or through the convenience API
(:func:`zope.lifecycleevent.modified`):
>>> obj['key'] = 42
>>> from zope.lifecycleevent import modified
>>> modified(obj)
Providing Additional Information
--------------------------------
Some event consumers like indexes (catalogs) and caches may need more
information to update themselves in an efficient manner. The necessary
information can be provided as optional "modification descriptions" of
the :class:`~.ObjectModifiedEvent` (or again, via the
:func:`~zope.lifecycleevent.modified` function).
This package doesn't strictly define what a "modification description"
must be. The most common (and thus most interoperable) descriptions
are based on interfaces.
We could simply pass an interface itself to say "something about the
way this object implements the interface changed":
>>> from zope.interface import Interface, Attribute, implementer
>>> class IFile(Interface):
... data = Attribute("The data of the file.")
... name = Attribute("The name of the file.")
>>> @implementer(IFile)
... class File(object):
... data = ''
... name = ''
>>> file = File()
>>> created(file)
>>> file.data = "123"
>>> modified(file, IFile)
Attributes
~~~~~~~~~~
We can also be more specific in a case like this where we know exactly
what attribute of the interface we modified. There is a helper class
:class:`zope.lifecycleevent.Attributes` that assists:
>>> from zope.lifecycleevent import Attributes
>>> file.data = "abc"
>>> modified(file, Attributes(IFile, "data"))
If we modify multiple attributes of an interface at the same time, we
can include that information in a single ``Attributes`` object:
>>> file.data = "123"
>>> file.name = "123.txt"
>>> modified(file, Attributes(IFile, "data", "name"))
Sometimes we may change attributes from multiple interfaces at the
same time. We can also represent this by including more than one
``Attributes`` instance:
>>> import time
>>> class IModified(Interface):
... lastModified = Attribute("The timestamp when the object was modified.")
>>> @implementer(IModified)
... class ModifiedFile(File):
... lastModified = 0
>>> file = ModifiedFile()
>>> created(file)
>>> file.data = "abc"
>>> file.lastModified = time.time()
>>> modified(file,
... Attributes(IFile, "data"),
... Attributes(IModified, "lastModified"))
Sequences
~~~~~~~~~
When an object is a sequence or container, we can specify
the individual indexes or keys that we changed using
:class:`zope.lifecycleevent.Sequence`.
First we'll need to define a sequence and create an instance:
>>> from zope.interface.common.sequence import ISequence
>>> class IFileList(ISequence):
... "A sequence of IFile objects."
>>> @implementer(IFileList)
... class FileList(list):
... pass
>>> files = FileList()
>>> created(files)
Now we can modify the sequence by adding an object to it:
>>> files.append(File())
>>> from zope.lifecycleevent import Sequence
>>> modified(files, Sequence(IFileList, len(files) - 1))
We can also replace an existing object:
>>> files[0] = File()
>>> modified(files, Sequence(IFileList, 0))
Of course ``Attributes`` and ``Sequences`` can be combined in any
order and length necessary to describe the modifications fully.
Modification Descriptions
~~~~~~~~~~~~~~~~~~~~~~~~~
Although this package does not require any particular definition or
implementation of modification descriptions, it provides the two that
we've already seen: :class:`~zope.lifecycleevent.Attributes` and
:class:`~zope.lifecycleevent.Sequence`. Both of these classes
implement the marker interface
:class:`~zope.lifecycleevent.interfaces.IModificationDescription`. If
you implement custom modification descriptions, consider implementing
this marker interface.
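
For example, a custom description might record the values an attribute
had before the change.  The ``PreviousValues`` class below is purely
illustrative and is not part of this package:

>>> from zope.interface import implementer
>>> from zope.lifecycleevent.interfaces import IModificationDescription
>>> @implementer(IModificationDescription)
... class PreviousValues(object):
...     "A hypothetical description recording values before the change."
...     def __init__(self, **previous):
...         self.previous = previous
>>> old_name = file.name
>>> file.name = "42.txt"
>>> modified(file, Attributes(IFile, "name"), PreviousValues(name=old_name))
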
Movement
========
Sometimes objects move from one place to another. This can be
described with the interface :class:`~.IObjectMovedEvent`, its
implementation :class:`~.ObjectMovedEvent` or the API
:func:`zope.lifecycleevent.moved`.
Objects may move within a single container by changing their name:
>>> from zope.lifecycleevent import moved
>>> container['new name'] = obj
>>> del container['name']
>>> moved(obj,
... oldParent=container, oldName='name',
... newParent=container, newName='new name')
Or they may move to a new container (under the same name, or a
different name):
>>> container2 = {}
>>> container2['new name'] = obj
>>> del container['new name']
>>> moved(obj,
... oldParent=container, oldName='new name',
... newParent=container2, newName='new name')
Unlike :ref:`addition <addition>`, any ``__name__`` and ``__parent__``
attributes on the object are ignored and must be provided explicitly.
.. tip::
Much like the addition of objects,
:class:`zope.container.interfaces.IWriteContainer` implementations
are expected to update the ``__name__`` and ``__parent__``
attributes automatically, and to automatically send the appropriate
movement event.
Removal
=======
Finally, objects can be removed from the system altogether with
:class:`IObjectRemovedEvent`, :class:`ObjectRemovedEvent` and
:func:`zope.lifecycleevent.removed`.
>>> from zope.lifecycleevent import removed
>>> del container2['new name']
>>> removed(obj, container2, 'new name')
.. note::
This is a special case of movement where the new parent and
new name are always ``None``. Handlers for
:class:`~.IObjectMovedEvent` can expect to receive events for
:class:`~.IObjectRemovedEvent` as well.
If the object being removed provides the ``__name__`` or
``__parent__`` attribute, those arguments can be omitted and the
attributes will be used instead.
>>> location = container['location']
>>> del container[location.__name__]
>>> removed(location)
.. tip::
Once again, :class:`~zope.container.interfaces.IWriteContainer`
implementations will send the correct event automatically.
| zope.lifecycleevent | /zope.lifecycleevent-5.0-py3-none-any.whl/zope/lifecycleevent/README.rst | README.rst |
=================
Handling Events
=================
This document provides information on how to handle the life cycle
events defined and sent by this package.
Background information on handling events is found in
:mod:`zope.event's documentation <zope.event>`.
Class Based Handling
====================
:mod:`zope.event` includes `a simple framework`_ for dispatching
events based on the class of the event. This could be used to provide
handlers for each of the event classes defined by this package
(:class:`ObjectCreatedEvent`, etc). However, it doesn't allow
configuring handlers based on the kind of *object* the event contains.
To do that, we need another level of dispatching.
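
For example, a class-based handler that fires for every created object,
regardless of its type, might look like the following sketch (it uses the
``handler`` decorator from :mod:`zope.event.classhandler`; the function
name here is made up)::

    from zope.event.classhandler import handler
    from zope.lifecycleevent import ObjectCreatedEvent

    @handler(ObjectCreatedEvent)
    def on_any_created(event):
        # Runs for every ObjectCreatedEvent that is sent; dispatch is
        # based only on the event's class, not on the created object.
        print("created", event.object)

Such a handler cannot distinguish a file from a folder; to discriminate on
the kind of *object* we need that extra level of dispatching.
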
Fortunately, that level of dispatching already exists within
:mod:`zope.component`.
.. _a simple framework: https://zopeevent.readthedocs.io/en/latest/classhandler.html
Component Based Handling
========================
:mod:`zope.component` includes an `event dispatching framework`_ that
lets us dispatch events based not just on the kind of the event, but
also on the kind of object the event contains.
All of the events defined by this package are implementations of
:class:`zope.interface.interfaces.IObjectEvent`. :mod:`zope.component`
`includes special support`_ for these kinds of events. That document
walks through a generic example in Python code. Here we will show an
example specific to life cycle events using the type of configuration
that is more likely to be used in a real application.
For this to work, it's important that :mod:`zope.component` is configured
correctly. Usually this is done with ZCML executed at startup time (we
will be using strings in this documentation, but usually this resides
in files, most often named ``configure.zcml``):
>>> from zope.configuration import xmlconfig
>>> _ = xmlconfig.string("""
... <configure xmlns="http://namespaces.zope.org/zope">
... <include package="zope.component" />
... </configure>
... """)
First we will define an object we're interested in getting events for:
>>> from zope.interface import Interface, Attribute, implementer
>>> class IFile(Interface):
... data = Attribute("The data of the file.")
... name = Attribute("The name of the file.")
>>> @implementer(IFile)
... class File(object):
... data = ''
... name = ''
Next, we will write our subscriber. Normally, ``zope.event``
subscribers take just one argument, the event object. But when we use
the automatic dispatching that ``zope.component`` provides, our
function will receive *two* arguments: the object of the event, and
the event. We can use the decorators that ``zope.component`` supplies
to annotate the function with the kinds of arguments it wants to
handle. Alternatively, we could specify that information when we
register the handler with zope.component (we'll see an example of that
later).
>>> from zope.component import adapter
>>> from zope.lifecycleevent import IObjectCreatedEvent
>>> @adapter(IFile, IObjectCreatedEvent)
... def on_file_created(file, event):
... print("A file of type '%s' was created" % (file.__class__.__name__))
Finally, we will register our handler with zope.component. This is
also usually done with ZCML executed at startup time:
>>> _ = xmlconfig.string("""
... <configure xmlns="http://namespaces.zope.org/zope">
... <include package="zope.component" file="meta.zcml" />
... <subscriber handler="__main__.on_file_created"/>
... </configure>
... """)
Now we can send an event noting that a file was created, and our handler
will be called:
>>> from zope.lifecycleevent import created
>>> file = File()
>>> created(file)
A file of type 'File' was created
Other types of objects don't trigger our handler:
>>> created(object)
The hierarchy is respected, so if we define a subclass of ``File`` and
indeed, even a sub-interface of ``IFile``, our handler will be
invoked.
>>> class SubFile(File): pass
>>> created(SubFile())
A file of type 'SubFile' was created
>>> class ISubFile(IFile): pass
>>> @implementer(ISubFile)
... class IndependentSubFile(object):
... data = name = ''
>>> created(IndependentSubFile())
A file of type 'IndependentSubFile' was created
We can further register a handler just for the subinterface we
created. Here we'll also demonstrate supplying this information in
ZCML.
>>> def generic_object_event(obj, event):
... print("Got '%s' for an object of type '%s'" % (event.__class__.__name__, obj.__class__.__name__))
>>> _ = xmlconfig.string("""
... <configure xmlns="http://namespaces.zope.org/zope">
... <include package="zope.component" file="meta.zcml" />
... <subscriber handler="__main__.generic_object_event"
... for="__main__.ISubFile zope.lifecycleevent.IObjectCreatedEvent" />
... </configure>
... """)
Now both handlers will be called for implementations of ``ISubFile``,
but still only the original implementation will be called for base ``IFiles``.
>>> created(IndependentSubFile())
A file of type 'IndependentSubFile' was created
Got 'ObjectCreatedEvent' for an object of type 'IndependentSubFile'
>>> created(File())
A file of type 'File' was created
Projects That Rely on Dispatched Events
---------------------------------------
Handlers for life cycle events are commonly registered with
``zope.component`` as a means for keeping projects uncoupled. This
section provides a partial list of such projects for reference.
As mentioned in :doc:`quickstart`, the containers provided by
`zope.container`_ generally automatically send the correct life
cycle events.
At a low-level, there are utilities that assign integer IDs to objects
as they are created such as `zope.intid`_ and `zc.intid`_.
``zc.intid``, in particular, `documents the way it uses events`_.
``zope.catalog`` can `automatically index documents`_ as part of
handling life cycle events.
Containers and Sublocations
---------------------------
The events :class:`~ObjectAddedEvent` and :class:`~ObjectRemovedEvent`
usually need to be (eventually) sent in pairs for any given object.
That is, when an added event is sent for an object, for symmetry
eventually a removed event should be sent too. This makes sure that
proper cleanup can happen.
Sometimes one object can be said to contain other objects. This is
obvious in the case of lists, dictionaries and the container objects
provided by `zope.container`_, but the same can sometimes be said for
other types of objects too that reference objects in their own
attributes.
What happens when a life cycle event for such an object is sent? By
default, *nothing*. This may leave the system in an inconsistent
state.
For example, lets create a container and add some objects to
it. First we'll set up a generic event handler so we can see the
events that go out.
>>> _ = xmlconfig.string("""
... <configure xmlns="http://namespaces.zope.org/zope">
... <include package="zope.component" file="meta.zcml" />
... <subscriber handler="__main__.generic_object_event"
... for="* zope.interface.interfaces.IObjectEvent" />
... </configure>
... """)
Got...
>>> from zope.lifecycleevent import added
>>> container = {}
>>> created(container)
Got 'ObjectCreatedEvent' for an object of type 'dict'
>>> object1 = object()
>>> container['object1'] = object1
>>> added(object1, container, 'object1')
Got 'ObjectAddedEvent' for an object of type 'object'
We can see that we got an "added" event for the object we stored in
the container. What happens when we remove the container?
>>> from zope.lifecycleevent import removed
>>> tmp = container
>>> del container
>>> removed(tmp, '', '')
Got 'ObjectRemovedEvent' for an object of type 'dict'
>>> del tmp
We only got an event for the container, not the objects it contained!
If the handlers that fired when we added "object1" had done anything
that needed to be *undone* for symmetry when "object1" was removed
(e.g., if it had been indexed and needed to be unindexed) the system
is now corrupt because those handlers never got the
``ObjectRemovedEvent`` for "object1".
The solution to this problem comes from `zope.container`_. It defines
the concept of :class:`~zope.container.interfaces.ISublocations`: a
way for any given object to inform other objects about the objects it
contains (and it provides a :class:`default implementation of
ISublocations <zope.container.contained.ContainerSublocations>` for
containers). It also provides :func:`a function
<zope.container.contained.dispatchToSublocations>` that will send
events that happen to the *parent* object for all the *child* objects
it contains.
In this way, it's possible for any arbitrary life cycle event to be
propagated automatically to the children of the object it concerns,
without any specific caller of ``remove``, say, needing to have any
specific knowledge about containment relationships.
For this to work, two things must be done:
1. Configure `zope.container`_. This too is usually done in ZCML with
``<include package="zope.container"/>``.
2. Provide an adapter to :class:`~.ISublocations` when some object can
   contain other objects that need events (a minimal sketch is shown
   below).
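
For illustration, here is a minimal, hypothetical sketch of such an
adapter.  The ``IFileHolder`` interface and its ``attachments`` attribute
are invented for this example; the only thing ``ISublocations`` actually
requires is the ``sublocations()`` method::

    from zope.component import adapter
    from zope.interface import Attribute, Interface, implementer
    from zope.container.interfaces import ISublocations

    class IFileHolder(Interface):
        "Hypothetical: an object keeping other objects in ``attachments``."
        attachments = Attribute("A sequence of contained objects.")

    @adapter(IFileHolder)
    @implementer(ISublocations)
    class FileHolderSublocations(object):

        def __init__(self, holder):
            self.holder = holder

        def sublocations(self):
            # Yield every child that should also receive the parent's
            # events.
            for child in self.holder.attachments:
                yield child

Once such an adapter is registered (for example with an ``adapter`` ZCML
directive), the dispatching machinery described above can find it and
re-send the parent's event for each child.
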
.. _zope.intid: https://zopeintid.readthedocs.io/en/latest/
.. _zc.intid: https://zcintid.readthedocs.io/en/latest/
.. _documents the way it uses events: https://zcintid.readthedocs.io/en/latest/subscribers.html
.. _automatically index documents: https://zopecatalog.readthedocs.io/en/latest/events.html
.. _zope.container: https://zopecontainer.readthedocs.io/en/latest/
.. _event dispatching framework: https://zopecomponent.readthedocs.io/en/latest/event.html
.. _includes special support : https://zopecomponent.readthedocs.io/en/latest/event.html#object-events
| zope.lifecycleevent | /zope.lifecycleevent-5.0-py3-none-any.whl/zope/lifecycleevent/handling.rst | handling.rst |
"""Event-related interfaces
"""
__docformat__ = 'restructuredtext'
from zope.interface import Attribute
from zope.interface import Interface
from zope.interface import interfaces
class IZopeLifecycleEvent(Interface):
"""
High-level functions for sending events.
These are implemented by the :mod:`zope.lifecycleevent` module.
"""
def created(object):
"""Send an :class:`~.IObjectCreatedEvent` for ``object``."""
def modified(object, *descriptions):
"""Send an :class:`~.IObjectModifiedEvent` for ``object``.
``descriptions`` is a sequence of interfaces or fields which were
updated. The :class:`IAttributes` and :class:`ISequence` helpers
can be used.
"""
def copied(object, original):
"""Send an :class:`~.IObjectCopiedEvent` for ``object``.
``original`` is the object the copy was created from.
"""
def moved(object, oldParent, oldName, newParent, newName):
"""Send an :class:`~.IObjectMovedEvent` for ``object``.
``oldParent`` is the container ``object`` was removed from.
``oldName`` was the name used to store ``object`` in ``oldParent``.
``newParent`` is the container ``object`` was added to.
``newName`` is the name used to store ``object`` in ``newParent``.
Note that ``newParent`` and ``oldParent`` may be the same if the names
are different, and vice versa.
"""
def added(object, newParent=None, newName=None):
"""Send an :class:`~.IObjectAddedEvent` for ``object``.
``newParent`` is the container ``object`` was added to.
``newName`` is the name used to store ``object`` in the container.
If either of these is not provided or is ``None``, they will
be taken from the values of ``object.__parent__`` or
``object.__name__``, respectively.
"""
def removed(object, oldParent=None, oldName=None):
"""Send an :class:`~.IObjectRemovedEvent` for ``object``.
``oldParent`` is the container ``object`` was removed from.
        ``oldName`` was the name used to store ``object`` in ``oldParent``.
If either of these is not provided or is ``None``, they will
be taken from the values of ``object.__parent__`` or
``object.__name__``, respectively.
"""
class IObjectCreatedEvent(interfaces.IObjectEvent):
"""An object has been created.
The ``object`` attribute will commonly have a value of ``None``
for its ``__name__`` and ``__parent__`` values (if it has those attributes
at all).
"""
class IObjectCopiedEvent(IObjectCreatedEvent):
"""An object has been copied."""
original = Attribute("The original from which the copy was made.")
class IObjectModifiedEvent(interfaces.IObjectEvent):
"""An object has been modified"""
descriptions = Attribute("""The supplied modification descriptions.
These may be interfaces or implementations of
:class:`IModificationDescription` such as :class:`~.Attributes` or
:class:`~.Sequence`""")
class IModificationDescription(Interface):
"""Marker interface for descriptions of object modifications.
Can be used as a parameter of an IObjectModifiedEvent."""
class IAttributes(IModificationDescription):
"""Describes the attributes of an interface."""
interface = Attribute("The involved interface.")
attributes = Attribute("A sequence of modified attributes.")
class ISequence(IModificationDescription):
"""Describes the modified keys of a sequence-like interface."""
interface = Attribute("The involved interface.")
keys = Attribute("A sequence of modified keys.")
##############################################################################
# Moving Objects
class IObjectMovedEvent(interfaces.IObjectEvent):
"""An object has been moved."""
oldParent = Attribute("The old location parent for the object.")
oldName = Attribute("The old location name for the object.")
newParent = Attribute("The new location parent for the object.")
newName = Attribute("The new location name for the object.")
##############################################################################
# Adding objects
class IObjectAddedEvent(IObjectMovedEvent):
"""An object has been added to a container."""
##############################################################################
# Removing objects
class IObjectRemovedEvent(IObjectMovedEvent):
"""An object has been removed from a container.""" | zope.lifecycleevent | /zope.lifecycleevent-5.0-py3-none-any.whl/zope/lifecycleevent/interfaces.py | interfaces.py |
=============================
Creating and Sending Events
=============================
As discussed in :doc:`quickstart`, most uses of
``zope.lifecycleevent`` will be satisfied with the high level API
described by
:class:`~zope.lifecycleevent.interfaces.IZopeLifecycleEvent`, but it is
possible to create and send events manually, both those defined here
and your own subclasses.
Provided Events
===============
All of the functions described in :doc:`quickstart` are very simple
wrappers that create an event object defined by this package and then
use :func:`zope.event.notify` to send it. You can do the same, as
shown below, but there is usually little reason to do so.
>>> from zope.event import notify
>>> from zope.lifecycleevent import ObjectCreatedEvent
>>> from zope.lifecycleevent import ObjectCopiedEvent
>>> from zope.lifecycleevent import ObjectModifiedEvent
>>> from zope.lifecycleevent import ObjectMovedEvent
>>> from zope.lifecycleevent import ObjectRemovedEvent
>>> obj = object()
>>> notify(ObjectCreatedEvent(obj))
>>> notify(ObjectCopiedEvent(object(), obj))
>>> notify(ObjectMovedEvent(obj,
... None, 'oldName',
... None, 'newName'))
>>> notify(ObjectModifiedEvent(obj, "description 1", "description 2"))
>>> notify(ObjectRemovedEvent(obj, "oldParent", "oldName"))
Subclassing Events
==================
It can sometimes be helpful to subclass one of the provided event
classes. If you then want to send a notification of that class, you
must manually construct and notify it.
One reason to create a subclass is to be able to add additional
attributes to the event object, perhaps changing the constructor
signature in the process. Another reason to create a subclass is to be
able to easily subscribe to all events that are *just* of that class.
The class :class:`zope.container.contained.ContainerModifiedEvent` is
used for this reason.
For example, in an application with distinct users, we might want to
let subscribers know which user created the object. We might also want
to be able to distinguish between objects that are created by a user
and those that are automatically created as part of system operation
or administration. The following subclass lets us do both.
>>> class ObjectCreatedByEvent(ObjectCreatedEvent):
... "A created event that tells you who created the object."
... def __init__(self, object, created_by):
... super(ObjectCreatedByEvent, self).__init__(object)
... self.created_by = created_by
>>> obj = object()
>>> notify(ObjectCreatedByEvent(obj, "Black Night"))
| zope.lifecycleevent | /zope.lifecycleevent-5.0-py3-none-any.whl/zope/lifecycleevent/manual.rst | manual.rst |
__docformat__ = 'restructuredtext'
from zope.event import notify
from zope.interface import implementer
from zope.interface import moduleProvides
from zope.interface.interfaces import ObjectEvent
from zope.lifecycleevent.interfaces import IAttributes
from zope.lifecycleevent.interfaces import IObjectAddedEvent
from zope.lifecycleevent.interfaces import IObjectCopiedEvent
from zope.lifecycleevent.interfaces import IObjectCreatedEvent
from zope.lifecycleevent.interfaces import IObjectModifiedEvent
from zope.lifecycleevent.interfaces import IObjectMovedEvent
from zope.lifecycleevent.interfaces import IObjectRemovedEvent
from zope.lifecycleevent.interfaces import ISequence
from zope.lifecycleevent.interfaces import IZopeLifecycleEvent
moduleProvides(IZopeLifecycleEvent)
@implementer(IObjectCreatedEvent)
class ObjectCreatedEvent(ObjectEvent):
"""An object has been created"""
def created(object):
"See :meth:`.IZopeLifecycleEvent.created`"
notify(ObjectCreatedEvent(object))
@implementer(IAttributes)
class Attributes:
"""Describes modified attributes of an interface."""
def __init__(self, interface, *attributes):
self.interface = interface
self.attributes = attributes
@implementer(ISequence)
class Sequence:
"""Describes modified keys of an interface."""
def __init__(self, interface, *keys):
self.interface = interface
self.keys = keys
@implementer(IObjectModifiedEvent)
class ObjectModifiedEvent(ObjectEvent):
"""An object has been modified"""
def __init__(self, object, *descriptions):
"""Init with a list of modification descriptions."""
super().__init__(object)
self.descriptions = descriptions
def modified(object, *descriptions):
"See :meth:`.IZopeLifecycleEvent.modified`"
notify(ObjectModifiedEvent(object, *descriptions))
@implementer(IObjectCopiedEvent)
class ObjectCopiedEvent(ObjectCreatedEvent):
"""An object has been copied"""
def __init__(self, object, original):
super().__init__(object)
self.original = original
def copied(object, original):
"See :meth:`.IZopeLifecycleEvent.copied`"
notify(ObjectCopiedEvent(object, original))
@implementer(IObjectMovedEvent)
class ObjectMovedEvent(ObjectEvent):
"""An object has been moved"""
def __init__(self, object, oldParent, oldName, newParent, newName):
ObjectEvent.__init__(self, object)
self.oldParent = oldParent
self.oldName = oldName
self.newParent = newParent
self.newName = newName
def moved(object, oldParent, oldName, newParent, newName):
"See :meth:`.IZopeLifecycleEvent.moved`"
notify(ObjectMovedEvent(object, oldParent, oldName, newParent, newName))
@implementer(IObjectAddedEvent)
class ObjectAddedEvent(ObjectMovedEvent):
"""An object has been added to a container.
If ``newParent`` or ``newName`` is not provided or is ``None``,
they will be taken from the values of ``object.__parent__`` or
``object.__name__``, respectively.
"""
def __init__(self, object, newParent=None, newName=None):
if newParent is None:
newParent = object.__parent__
if newName is None:
newName = object.__name__
ObjectMovedEvent.__init__(self, object, None, None, newParent, newName)
def added(object, newParent=None, newName=None):
"See :meth:`.IZopeLifecycleEvent.added`"
notify(ObjectAddedEvent(object, newParent, newName))
@implementer(IObjectRemovedEvent)
class ObjectRemovedEvent(ObjectMovedEvent):
"""An object has been removed from a container.
If ``oldParent`` or ``oldName`` is not provided or is ``None``,
they will be taken from the values of ``object.__parent__`` or
``object.__name__``, respectively.
"""
def __init__(self, object, oldParent=None, oldName=None):
if oldParent is None:
oldParent = object.__parent__
if oldName is None:
oldName = object.__name__
ObjectMovedEvent.__init__(self, object, oldParent, oldName, None, None)
def removed(object, oldParent=None, oldName=None):
"See :meth:`.IZopeLifecycleEvent.removed`"
notify(ObjectRemovedEvent(object, oldParent, oldName))
def _copy_docs():
for func_name, func_value in IZopeLifecycleEvent.namesAndDescriptions():
func = globals()[func_name]
func.__doc__ = func_value.__doc__
_copy_docs()
del _copy_docs | zope.lifecycleevent | /zope.lifecycleevent-5.0-py3-none-any.whl/zope/lifecycleevent/__init__.py | __init__.py |
=========
Changes
=========
5.0 (2023-05-25)
================
- Drop support for Python 2.7, 3.5, 3.6.
4.3 (2022-11-29)
================
- Add support for Python 3.8, 3.9, 3.10, 3.11.
- Drop support for Python 3.4.
4.2 (2018-10-09)
================
- Add support for Python 3.7.
4.1.0 (2017-08-03)
==================
- Drop support for Python 2.6, 3.2 and 3.3.
- Add a page to the docs on hacking ``zope.location``.
- Note additional documentation dependencies.
- Add support for Python 3.5 and 3.6.
- Remove internal ``_compat`` implementation module.
4.0.3 (2014-03-19)
==================
- Add Python 3.4 support.
- Update ``bootstrap.py`` to version 2.2.
4.0.2 (2013-03-11)
==================
- Change ``LocationProxy``'s ``__setattr__()`` to behave correctly when
  dealing with the pure Python version of the ``ProxyBase`` class. Also
  added a test suite that fully tests the pure Python proxy version of
  the ``LocationProxy`` class.
4.0.1 (2013-02-19)
==================
- Add Python 3.3 support.
4.0.0 (2012-06-07)
==================
- Remove backward-compatibility imports:
- ``zope.copy.clone`` (aliased as ``zope.location.pickling.locationCopy``)
- ``zope.copy.CopyPersistent`` (aliased as
``zope.location.pickling.CopyPersistent``).
- ``zope.site.interfaces.IPossibleSite`` (aliased as
``zope.location.interfaces.IPossibleSite``).
- Add Python 3.2 support.
- Make ``zope.component`` dependency optional. Use the ``component`` extra
to force its installation (or just require it directly). If
``zope.component`` is not present, this package defines the ``ISite``
interface itself, and omits adapter registrations from its ZCML.
- Add support for PyPy.
- Add support for continuous integration using ``tox`` and ``jenkins``.
- Bring unit test coverage to 100%.
- Add Sphinx documentation: moved doctest examples to API reference.
- Add 'setup.py docs' alias (installs ``Sphinx`` and dependencies).
- Add 'setup.py dev' alias (runs ``setup.py develop`` plus installs
``nose`` and ``coverage``).
- Replace deprecated ``zope.component.adapts`` usage with equivalent
``zope.component.adapter`` decorator.
- Replace deprecated ``zope.interface.implements`` usage with equivalent
``zope.interface.implementer`` decorator.
- Drop support for Python 2.4 and 2.5.
3.9.1 (2011-08-22)
==================
- Add zcml extra as well as a test for configure.zcml.
3.9.0 (2009-12-29)
==================
- Move LocationCopyHook related tests to zope.copy and remove a test
dependency on that package.
3.8.2 (2009-12-23)
==================
- Fix a typo in the configure.zcml.
3.8.1 (2009-12-23)
==================
- Remove dependency on zope.copy: the LocationCopyHook adapter is registered
only if zope.copy is available.
- Use the standard Python doctest module instead of zope.testing.doctest, which
has been deprecated.
3.8.0 (2009-12-22)
==================
- Adjust to testing output caused by new zope.schema.
3.7.1 (2009-11-18)
==================
- Move the IPossibleSite and ISite interfaces to zope.component as they are
dealing with zope.component's concept of a site, but not with location.
3.7.0 (2009-09-29)
==================
- Add getParent() to ILocationInfo and moved the actual implementation here
from zope.traversal.api, analogous to getParents().
- Actually remove deprecated PathPersistent class from
zope.location.pickling.
- Move ITraverser back to zope.traversing where it belongs conceptually. The
interface had been moved to zope.location to invert the package
interdependency but is no longer used here.
3.6.0 (2009-08-27)
==================
- New feature release: deprecate locationCopy, CopyPersistent and
PathPersistent from zope.location.pickling. These changes were already part
  of the 3.5.3 release, which was erroneously numbered as a bugfix release.
- Remove dependency on zope.deferredimport, directly import deprecated modules
without using it.
3.5.5 (2009-08-15)
==================
- Add zope.deferredimport as a dependency as it's used directly by
zope.location.pickling.
3.5.4 (2009-05-17)
==================
- Add ``IContained`` interface to ``zope.location.interfaces`` module.
This interface was moved from ``zope.container`` (after
``zope.container`` 3.8.2); consumers of ``IContained`` may now
depend on zope.location rather than zope.container to reduce
dependency cycles.
3.5.3 (2009-02-09)
==================
- Use the new zope.copy package for implementing location copying. Thus
  there are changes in the ``zope.location.pickling`` module:
  * The ``locationCopy`` and ``CopyPersistent`` helpers were removed in
    favor of their equivalents in zope.copy. Deprecated
    backward-compatibility imports are provided.
* The module now provides a ``zope.copy.interfaces.ICopyHook`` adapter
for ``ILocation`` objects that replaces the old CopyPersistent
functionality of checking for the need to clone objects based on
their location.
3.5.2 (2009-02-04)
==================
- Split RootPhysicallyLocatable adapter back from LocationPhysicallyLocatable,
because the IRoot object may not always provide ILocation and the code
  for the root object is also simpler. It's basically a copy of the
RootPhysicallyLocatable adapter from zope.traversing version 3.5.0 and
below with ``getParents`` method added (returns an empty list).
3.5.1 (2009-02-02)
==================
- Improve test coverage.
- The new ``getParents`` method was extracted from ``zope.traversing``
and added to ILocationInfo interface in the previous release. Custom
ILocationInfo implementations should make sure they have this method
as well. That method is already used in ``zope.traversing.api.getParents``
function.
- Make ``getName`` of LocationPhysicallyLocatable always return empty
string for the IRoot object, like RootPhysicallyLocatable from
``zope.traversing`` did. So, now LocationPhysicallyLocatable is
fully compatible with RootPhysicallyLocatable, making the latter one
obsolete.
- Change package mailing list address to zope-dev at zope.org instead
of retired zope3-dev at zope.org.
3.5.0 (2009-01-31)
==================
- Reverse the dependency between zope.location and zope.traversing. This
  also causes the dependencies on various other packages to go away.
3.4.0 (2007-10-02)
==================
- Initial release independent of the main Zope tree.
| zope.location | /zope.location-5.0.tar.gz/zope.location-5.0/CHANGES.rst | CHANGES.rst |
===================
``zope.location``
===================
.. image:: https://img.shields.io/pypi/v/zope.location.svg
:target: https://pypi.python.org/pypi/zope.location/
:alt: Latest release
.. image:: https://img.shields.io/pypi/pyversions/zope.location.svg
:target: https://pypi.org/project/zope.location/
:alt: Supported Python versions
.. image:: https://github.com/zopefoundation/zope.location/actions/workflows/tests.yml/badge.svg
:target: https://github.com/zopefoundation/zope.location/actions/workflows/tests.yml
.. image:: https://coveralls.io/repos/github/zopefoundation/zope.location/badge.svg?branch=master
:target: https://coveralls.io/github/zopefoundation/zope.location?branch=master
.. image:: https://readthedocs.org/projects/zopelocation/badge/?version=latest
:target: http://zopelocation.readthedocs.org/en/latest/
:alt: Documentation Status
In Zope 3, "locations" are special objects that have a structural
location, indicated with ``__name__`` and ``__parent__`` attributes.
See `zope.container <https://zopecontainer.readthedocs.io/en/latest>`_
for a useful extension of this concept to "containers."
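
A minimal, illustrative sketch using the ``Location`` mix-in and the
``inside`` helper provided by this package::

    from zope.location.location import Location, inside

    parent = Location()
    child = Location()
    child.__parent__ = parent
    child.__name__ = u'child'

    assert inside(child, parent)  # child is located somewhere inside parent
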
Documentation is hosted at https://zopelocation.readthedocs.io/en/latest/
| zope.location | /zope.location-5.0.tar.gz/zope.location-5.0/README.rst | README.rst |
"""Location framework interfaces
"""
__docformat__ = 'restructuredtext'
from zope.interface import Attribute
from zope.interface import Interface
from zope.schema import TextLine
class ILocation(Interface):
    """Objects that can be located in a hierarchy.
    Given a parent and a name, an object can be located within that parent. The
locatable object's `__name__` and `__parent__` attributes store this
information.
Located objects form a hierarchy that can be used to build file-system-like
    structures. For example, in Zope `ILocation` is used to build URLs and to
    support security machinery.
    To retrieve an object from its parent using its name, the `ISublocations`
interface provides the `sublocations` method to iterate over all objects
located within the parent. The object searched for can be found by reading
each sublocation's __name__ attribute.
"""
__parent__ = Attribute("The parent in the location hierarchy.")
__name__ = TextLine(
title=("The name within the parent"),
description=("The object can be looked up from the parent's "
"sublocations using this name."),
required=False,
default=None)
# The IContained interface was moved from zope.container to here in
# zope.container 3.8.2 to break dependency cycles. It is not actually
# used within this package, but is depended upon by external
# consumers.
class IContained(ILocation):
"""Objects contained in containers."""
class ILocationInfo(Interface):
"""Provides supplemental information for located objects.
Requires that the object has been given a location in a hierarchy.
"""
def getRoot():
"""Return the root object of the hierarchy."""
def getPath():
"""Return the physical path to the object as a string.
Uses '/' as the path segment separator.
"""
def getParent():
"""Returns the container the object was traversed via.
Returns None if the object is a containment root.
Raises TypeError if the object doesn't have enough context to get the
parent.
"""
def getParents():
"""Returns a list starting with the object's parent followed by
each of its parents.
Raises a TypeError if the object is not connected to a containment
root.
"""
def getName():
"""Return the last segment of the physical path."""
def getNearestSite():
"""Return the site the object is contained in
If the object is a site, the object itself is returned.
"""
class ISublocations(Interface):
"""Provide access to sublocations of an object.
All objects with the same parent object are called the ``sublocations`` of
that parent.
"""
def sublocations():
"""Return an iterable of the object's sublocations."""
class IRoot(Interface):
"""Marker interface to designate root objects within a location hierarchy.
"""
class LocationError(KeyError, LookupError):
"""There is no object for a given location."""
# Soft dependency on zope.component.
#
# Also, these interfaces used to be defined here directly, so this provides
# backward-compatibility
try:
from zope.component.interfaces import ISite
except ImportError: # pragma: no cover
class ISite(Interface):
pass | zope.location | /zope.location-5.0.tar.gz/zope.location-5.0/src/zope/location/interfaces.py | interfaces.py |
"""Classes to support implenting IContained
"""
__docformat__ = 'restructuredtext'
from zope.interface import implementer
from zope.location.interfaces import ILocationInfo
from zope.location.interfaces import IRoot
from zope.location.interfaces import ISite # zope.component, if present
@implementer(ILocationInfo)
class LocationPhysicallyLocatable:
"""Provide location information for location objects
"""
def __init__(self, context):
self.context = context
def getRoot(self):
"""See ILocationInfo.
"""
context = self.context
max = 9999
while context is not None:
if IRoot.providedBy(context):
return context
context = context.__parent__
max -= 1
if max < 1:
                raise TypeError("Maximum location depth exceeded, "
                                "probably due to a location cycle.")
raise TypeError("Not enough context to determine location root")
def getPath(self):
"""See ILocationInfo.
"""
path = []
context = self.context
max = 9999
while context is not None:
if IRoot.providedBy(context):
if path:
path.append('')
path.reverse()
return '/'.join(path)
return '/'
path.append(context.__name__)
context = context.__parent__
max -= 1
if max < 1:
                raise TypeError("Maximum location depth exceeded, "
                                "probably due to a location cycle.")
raise TypeError("Not enough context to determine location root")
def getParent(self):
"""See ILocationInfo.
"""
parent = getattr(self.context, '__parent__', None)
if parent is not None:
return parent
raise TypeError('Not enough context information to get parent',
self.context)
def getParents(self):
"""See ILocationInfo.
"""
# XXX Merge this implementation with getPath. This was refactored
# from zope.traversing.
parents = []
w = self.context
while True:
w = getattr(w, '__parent__', None)
if w is None:
break
parents.append(w)
if parents and IRoot.providedBy(parents[-1]):
return parents
raise TypeError("Not enough context information to get all parents")
def getName(self):
"""See ILocationInfo
"""
return self.context.__name__
def getNearestSite(self):
"""See ILocationInfo
"""
if ISite.providedBy(self.context):
return self.context
for parent in self.getParents():
if ISite.providedBy(parent):
return parent
return self.getRoot()
@implementer(ILocationInfo)
class RootPhysicallyLocatable:
"""Provide location information for the root object
This adapter is very simple, because there's no places to search
for parents and nearest sites, so we are only working with context
object, knowing that its the root object already.
"""
def __init__(self, context):
self.context = context
def getRoot(self):
"""See ILocationInfo
"""
return self.context
def getPath(self):
"""See ILocationInfo
"""
return '/'
def getParent(self):
"""See ILocationInfo.
"""
return None
def getParents(self):
"""See ILocationInfo
"""
return []
def getName(self):
"""See ILocationInfo
"""
return ''
def getNearestSite(self):
"""See ILocationInfo
"""
return self.context | zope.location | /zope.location-5.0.tar.gz/zope.location-5.0/src/zope/location/traversing.py | traversing.py |
"""Location support
"""
__docformat__ = 'restructuredtext'
from zope.interface import implementer
from zope.proxy import ProxyBase
from zope.proxy import getProxiedObject
from zope.proxy import non_overridable
from zope.proxy.decorator import DecoratorSpecificationDescriptor
from zope.location.interfaces import ILocation
@implementer(ILocation)
class Location:
"""Mix-in that implements ILocation.
It provides the `__parent__` and `__name__` attributes.
"""
__parent__ = None
__name__ = None
def locate(obj, parent, name=None):
"""Update a location's coordinates."""
obj.__parent__ = parent
obj.__name__ = name
def located(obj, parent, name=None):
"""Ensure and return the location of an object.
Updates the location's coordinates.
"""
location = ILocation(obj)
locate(location, parent, name)
return location
def LocationIterator(object):
"""Iterate over an object and all of its parents."""
while object is not None:
yield object
object = getattr(object, '__parent__', None)
def inside(l1, l2):
"""Test whether l1 is a successor of l2.
l1 is a successor of l2 if l2 is in the chain of parents of l1 or l2
is l1.
"""
while l1 is not None:
if l1 is l2:
return True
l1 = getattr(l1, '__parent__', None)
return False
class ClassAndInstanceDescr:
def __init__(self, *args):
self.funcs = args
def __get__(self, inst, cls):
if inst is None:
return self.funcs[1](cls)
return self.funcs[0](inst)
@implementer(ILocation)
class LocationProxy(ProxyBase):
"""Location-object proxy
This is a non-picklable proxy that can be put around objects that
don't implement `ILocation`.
"""
__slots__ = ('__parent__', '__name__')
__safe_for_unpickling__ = True
__doc__ = ClassAndInstanceDescr(
lambda inst: getProxiedObject(inst).__doc__,
lambda cls, __doc__=__doc__: __doc__,
)
    def __new__(cls, ob, container=None, name=None):
        return ProxyBase.__new__(cls, ob)
def __init__(self, ob, container=None, name=None):
ProxyBase.__init__(self, ob)
self.__parent__ = container
self.__name__ = name
def __getattribute__(self, name):
if name in LocationProxy.__dict__:
return object.__getattribute__(self, name)
return ProxyBase.__getattribute__(self, name)
def __setattr__(self, name, value):
if name in self.__slots__ + getattr(ProxyBase, '__slots__', ()):
# ('_wrapped', '__parent__', '__name__'):
try:
return object.__setattr__(self, name, value)
except TypeError: # pragma NO COVER C Optimization
return ProxyBase.__setattr__(self, name, value)
return ProxyBase.__setattr__(self, name, value)
@non_overridable
def __reduce__(self, proto=None):
raise TypeError("Not picklable")
__reduce_ex__ = __reduce__
__providedBy__ = DecoratorSpecificationDescriptor() | zope.location | /zope.location-5.0.tar.gz/zope.location-5.0/src/zope/location/location.py | location.py |
:mod:`zope.location` API
========================
:mod:`zope.location.interfaces`
-------------------------------
.. automodule:: zope.location.interfaces
.. autointerface:: ILocation
:members:
:member-order: bysource
.. autointerface:: IContained
:members:
:member-order: bysource
.. autointerface:: ILocationInfo
:members:
:member-order: bysource
.. autointerface:: ISublocations
:members:
:member-order: bysource
.. autointerface:: IRoot
:members:
:member-order: bysource
.. autoexception:: LocationError
:members:
:member-order: bysource
:mod:`zope.location.location`
-----------------------------
.. automodule:: zope.location.location
.. autoclass:: Location
:members:
:member-order: bysource
.. autofunction:: locate
.. autofunction:: located
.. autofunction:: LocationIterator
.. autofunction:: inside
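
A short, illustrative sketch of the helpers above:

.. doctest::

   >>> from zope.location.location import Location, locate, inside
   >>> from zope.location.location import LocationIterator
   >>> parent = Location()
   >>> child = Location()
   >>> locate(child, parent, u'child')
   >>> print(child.__name__)
   child
   >>> inside(child, parent)
   True
   >>> inside(parent, child)
   False
   >>> list(LocationIterator(child)) == [child, parent]
   True
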
.. autoclass:: LocationProxy
:members:
:member-order: bysource
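
   A minimal, illustrative usage sketch:

   .. doctest::

      >>> from zope.location.interfaces import ILocation
      >>> from zope.location.location import LocationProxy
      >>> class Thing(object):
      ...     "An object that does not itself provide ILocation."
      >>> thing = Thing()
      >>> proxy = LocationProxy(thing, None, u'thing')
      >>> print(proxy.__name__)
      thing
      >>> ILocation.providedBy(proxy)
      True
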
:mod:`zope.location.traversing`
-------------------------------
.. automodule:: zope.location.traversing
.. autoclass:: LocationPhysicallyLocatable
.. doctest::
>>> from zope.interface.verify import verifyObject
>>> from zope.location.interfaces import ILocationInfo
>>> from zope.location.location import Location
>>> from zope.location.traversing import LocationPhysicallyLocatable
>>> info = LocationPhysicallyLocatable(Location())
>>> verifyObject(ILocationInfo, info)
True
.. automethod:: getRoot
.. doctest::
>>> from zope.interface import directlyProvides
>>> from zope.location.interfaces import IRoot
>>> from zope.location.location import Location
>>> from zope.location.traversing import LocationPhysicallyLocatable
>>> root = Location()
>>> directlyProvides(root, IRoot)
>>> LocationPhysicallyLocatable(root).getRoot() is root
True
>>> o1 = Location(); o1.__parent__ = root
>>> LocationPhysicallyLocatable(o1).getRoot() is root
True
>>> o2 = Location(); o2.__parent__ = o1
>>> LocationPhysicallyLocatable(o2).getRoot() is root
True
    We'll get a TypeError if we try to get the location of a
rootless object:
.. doctest::
>>> o1.__parent__ = None
>>> LocationPhysicallyLocatable(o1).getRoot()
Traceback (most recent call last):
...
TypeError: Not enough context to determine location root
>>> LocationPhysicallyLocatable(o2).getRoot()
Traceback (most recent call last):
...
TypeError: Not enough context to determine location root
If we screw up and create a location cycle, it will be caught:
.. doctest::
>>> o1.__parent__ = o2
>>> LocationPhysicallyLocatable(o1).getRoot()
Traceback (most recent call last):
...
       TypeError: Maximum location depth exceeded, probably due to a location cycle.
.. automethod:: getPath
.. doctest::
>>> from zope.interface import directlyProvides
>>> from zope.location.interfaces import IRoot
>>> from zope.location.location import Location
>>> from zope.location.traversing import LocationPhysicallyLocatable
>>> root = Location()
>>> directlyProvides(root, IRoot)
>>> print(LocationPhysicallyLocatable(root).getPath())
/
>>> o1 = Location(); o1.__parent__ = root; o1.__name__ = 'o1'
>>> print(LocationPhysicallyLocatable(o1).getPath())
/o1
>>> o2 = Location(); o2.__parent__ = o1; o2.__name__ = u'o2'
>>> print(LocationPhysicallyLocatable(o2).getPath())
/o1/o2
It is an error to get the path of a rootless location:
.. doctest::
>>> o1.__parent__ = None
>>> LocationPhysicallyLocatable(o1).getPath()
Traceback (most recent call last):
...
TypeError: Not enough context to determine location root
>>> LocationPhysicallyLocatable(o2).getPath()
Traceback (most recent call last):
...
TypeError: Not enough context to determine location root
If we screw up and create a location cycle, it will be caught:
.. doctest::
>>> o1.__parent__ = o2
>>> LocationPhysicallyLocatable(o1).getPath()
Traceback (most recent call last):
...
       TypeError: Maximum location depth exceeded, probably due to a location cycle.
.. automethod:: getParent
.. doctest::
>>> from zope.interface import directlyProvides
>>> from zope.location.interfaces import IRoot
>>> from zope.location.location import Location
>>> from zope.location.traversing import LocationPhysicallyLocatable
>>> root = Location()
>>> directlyProvides(root, IRoot)
>>> o1 = Location()
>>> o2 = Location()
>>> LocationPhysicallyLocatable(o2).getParent() # doctest: +ELLIPSIS
Traceback (most recent call last):
TypeError: ('Not enough context information to get parent', <zope.location.location.Location object at 0x...>)
>>> o1.__parent__ = root
>>> LocationPhysicallyLocatable(o1).getParent() == root
True
>>> o2.__parent__ = o1
>>> LocationPhysicallyLocatable(o2).getParent() == o1
True
.. automethod:: getParents
.. doctest::
>>> from zope.interface import directlyProvides
>>> from zope.interface import noLongerProvides
>>> from zope.location.interfaces import IRoot
>>> from zope.location.location import Location
>>> from zope.location.traversing import LocationPhysicallyLocatable
>>> root = Location()
>>> directlyProvides(root, IRoot)
>>> o1 = Location()
>>> o2 = Location()
>>> o1.__parent__ = root
>>> o2.__parent__ = o1
>>> LocationPhysicallyLocatable(o2).getParents() == [o1, root]
True
If the last parent is not an IRoot object, a TypeError will be
raised, as stated before.
>>> noLongerProvides(root, IRoot)
>>> LocationPhysicallyLocatable(o2).getParents()
Traceback (most recent call last):
...
TypeError: Not enough context information to get all parents
.. automethod:: getName
.. doctest::
>>> from zope.location.location import Location
>>> from zope.location.traversing import LocationPhysicallyLocatable
>>> o1 = Location(); o1.__name__ = u'o1'
>>> print(LocationPhysicallyLocatable(o1).getName())
o1
.. automethod:: getNearestSite
.. doctest::
>>> from zope.interface import directlyProvides
>>> from zope.component.interfaces import ISite
>>> from zope.location.interfaces import IRoot
>>> from zope.location.location import Location
>>> from zope.location.traversing import LocationPhysicallyLocatable
>>> o1 = Location()
>>> o1.__name__ = 'o1'
>>> LocationPhysicallyLocatable(o1).getNearestSite()
Traceback (most recent call last):
...
TypeError: Not enough context information to get all parents
>>> root = Location()
>>> directlyProvides(root, IRoot)
>>> o1 = Location()
>>> o1.__name__ = 'o1'
>>> o1.__parent__ = root
>>> LocationPhysicallyLocatable(o1).getNearestSite() is root
True
>>> directlyProvides(o1, ISite)
>>> LocationPhysicallyLocatable(o1).getNearestSite() is o1
True
>>> o2 = Location()
>>> o2.__parent__ = o1
>>> LocationPhysicallyLocatable(o2).getNearestSite() is o1
True
.. autoclass:: RootPhysicallyLocatable
.. doctest::
>>> from zope.interface.verify import verifyObject
>>> from zope.location.interfaces import ILocationInfo
>>> from zope.location.traversing import RootPhysicallyLocatable
>>> info = RootPhysicallyLocatable(None)
>>> verifyObject(ILocationInfo, info)
True
.. automethod:: getRoot
No need to search for root when our context is already root :)
.. doctest::
>>> from zope.location.traversing import RootPhysicallyLocatable
>>> o1 = object()
>>> RootPhysicallyLocatable(o1).getRoot() is o1
True
.. automethod:: getPath
Root object is at the top of the tree, so always return ``/``.
.. doctest::
>>> from zope.location.traversing import RootPhysicallyLocatable
>>> o1 = object()
>>> print(RootPhysicallyLocatable(o1).getPath())
/
.. automethod:: getParent
Returns None if the object is a containment root.
Raises TypeError if the object doesn't have enough context to get the
parent.
.. doctest::
>>> from zope.location.traversing import RootPhysicallyLocatable
>>> o1 = object()
>>> RootPhysicallyLocatable(o1).getParent() is None
True
.. automethod:: getParents
There's no parents for the root object, return empty list.
.. doctest::
>>> from zope.location.traversing import RootPhysicallyLocatable
>>> o1 = object()
>>> RootPhysicallyLocatable(o1).getParents()
[]
.. automethod:: getName
Always return empty unicode string for the root object
.. doctest::
>>> from zope.location.traversing import RootPhysicallyLocatable
>>> o1 = object()
>>> RootPhysicallyLocatable(o1).getName() == u''
True
.. automethod:: getNearestSite
Return the object itself as the nearest site, because there's nowhere
else to look. It's also usual that the root is the site as well.
.. doctest::
>>> from zope.location.traversing import RootPhysicallyLocatable
>>> o1 = object()
>>> RootPhysicallyLocatable(o1).getNearestSite() is o1
True
| zope.location | /zope.location-5.0.tar.gz/zope.location-5.0/docs/api.rst | api.rst |
Hacking on :mod:`zope.location`
===============================
Getting the Code
################
The main repository for :mod:`zope.location` is in the Zope Foundation
Github repository:
https://github.com/zopefoundation/zope.location
You can get a read-only checkout from there:
.. code-block:: sh
$ git clone https://github.com/zopefoundation/zope.location.git
or fork it and get a writeable checkout of your fork:
.. code-block:: sh
$ git clone [email protected]:jrandom/zope.location.git
The project also mirrors the trunk from the Github repository as a
Bazaar branch on Launchpad:
https://code.launchpad.net/zope.location
You can branch the trunk from there using Bazaar:
.. code-block:: sh
$ bzr branch lp:zope.location
Working in a ``virtualenv``
###########################
Installing
----------
If you use the ``virtualenv`` package to create lightweight Python
development environments, you can run the tests using nothing more
than the ``python`` binary in a virtualenv. First, create a scratch
environment:
.. code-block:: sh
$ /path/to/virtualenv --no-site-packages /tmp/hack-zope.location
Next, get this package registered as a "development egg" in the
environment:
.. code-block:: sh
$ /tmp/hack-zope.location/bin/python setup.py develop
Running the tests
-----------------
Then, you can run the tests using the built-in ``setuptools`` testrunner:
.. code-block:: sh
$ /tmp/hack-zope.location/bin/python setup.py test -q
...............................................................................
----------------------------------------------------------------------
Ran 83 tests in 0.037s
OK
If you have the :mod:`nose` package installed in the virtualenv, you can
use its testrunner too:
.. code-block:: sh
$ /tmp/hack-zope.location/bin/nosetests
.......................................................................................
----------------------------------------------------------------------
Ran 87 tests in 0.037s
OK
If you have the :mod:`coverage` package installed in the virtualenv,
you can see how well the tests cover the code:
.. code-block:: sh
$ /tmp/hack-zope.location/bin/easy_install nose coverage
...
$ /tmp/hack-zope.location/bin/nosetests --with-coverage
.......................................................................................
Name Stmts Miss Cover Missing
--------------------------------------------------------
zope.location 5 0 100%
zope.location._compat 2 0 100%
zope.location.interfaces 23 0 100%
zope.location.location 61 0 100%
zope.location.pickling 14 0 100%
zope.location.traversing 80 0 100%
--------------------------------------------------------
TOTAL 185 0 100%
----------------------------------------------------------------------
Ran 87 tests in 0.315s
OK
Building the documentation
--------------------------
:mod:`zope.location` uses the nifty :mod:`Sphinx` documentation system
for building its docs. Using the same virtualenv you set up to run the
tests, you can build the docs:
.. code-block:: sh
$ /tmp/hack-zope.location/bin/easy_install \
Sphinx repoze.sphinx.autointerface zope.component
...
$ cd docs
$ PATH=/tmp/hack-zope.location/bin:$PATH make html
sphinx-build -b html -d _build/doctrees . _build/html
...
build succeeded.
Build finished. The HTML pages are in _build/html.
You can also test the code snippets in the documentation:
.. code-block:: sh
$ PATH=/tmp/hack-zope.location/bin:$PATH make doctest
sphinx-build -b doctest -d _build/doctrees . _build/doctest
...
running tests...
...
Doctest summary
===============
187 tests
0 failures in tests
0 failures in setup code
0 failures in cleanup code
build succeeded.
Testing of doctests in the sources finished, look at the results in _build/doctest/output.txt.
Using :mod:`zc.buildout`
########################
Setting up the buildout
-----------------------
:mod:`zope.location` ships with its own :file:`buildout.cfg` file and
:file:`bootstrap.py` for setting up a development buildout:
.. code-block:: sh
$ /path/to/python2.7 bootstrap.py
...
Generated script '.../bin/buildout'
$ bin/buildout
Develop: '/home/jrandom/projects/Zope/zope.location/.'
...
Got coverage 3.7.1
Running the tests
-----------------
You can now run the tests:
.. code-block:: sh
$ bin/test --all
Running zope.testing.testrunner.layer.UnitTests tests:
Set up zope.testing.testrunner.layer.UnitTests in 0.000 seconds.
Ran 79 tests with 0 failures and 0 errors in 0.000 seconds.
Tearing down left over layers:
Tear down zope.testing.testrunner.layer.UnitTests in 0.000 seconds.
Using :mod:`tox`
################
Running Tests on Multiple Python Versions
-----------------------------------------
`tox <http://tox.testrun.org/latest/>`_ is a Python-based test automation
tool designed to run tests against multiple Python versions. It creates
a ``virtualenv`` for each configured version, installs the current package
and configured dependencies into each ``virtualenv``, and then runs the
configured commands.
:mod:`zope.location` configures the following :mod:`tox` environments via
its ``tox.ini`` file:
- The ``py26``, ``py27``, ``py33``, ``py34``, and ``pypy`` environments
  each build a ``virtualenv`` with the corresponding interpreter,
  install :mod:`zope.location` and its dependencies, and run the tests
  via ``python setup.py test -q``.
- The ``coverage`` environment builds a ``virtualenv`` with ``python2.6``,
installs :mod:`zope.location`, installs
:mod:`nose` and :mod:`coverage`, and runs ``nosetests`` with statement
coverage.
- The ``docs`` environment builds a virtualenv with ``python2.6``, installs
:mod:`zope.location`, installs ``Sphinx`` and
dependencies, and then builds the docs and exercises the doctest snippets.
This example requires that you have a working ``python2.6`` on your path,
as well as having ``tox`` installed:
.. code-block:: sh
$ tox -e py26
GLOB sdist-make: /home/jrandom/projects/Zope/Z3/zope.location/setup.py
py26 create: /home/jrandom/projects/Zope/Z3/zope.location/.tox/py26
py26 installdeps: zope.configuration, zope.copy, zope.interface, zope.proxy, zope.schema
py26 inst: /home/jrandom/projects/Zope/Z3/zope.location/.tox/dist/zope.location-4.0.4.dev0.zip
py26 runtests: PYTHONHASHSEED='3489368878'
py26 runtests: commands[0] | python setup.py test -q
running test
...
...................................................................................
----------------------------------------------------------------------
Ran 83 tests in 0.066s
OK
___________________________________ summary ____________________________________
py26: commands succeeded
congratulations :)
Running ``tox`` with no arguments runs all the configured environments,
including building the docs and testing their snippets:
.. code-block:: sh
$ tox
GLOB sdist-make: .../zope.location/setup.py
py26 sdist-reinst: .../zope.location/.tox/dist/zope.location-4.0.2dev.zip
...
Doctest summary
===============
187 tests
0 failures in tests
0 failures in setup code
0 failures in cleanup code
build succeeded.
___________________________________ summary ____________________________________
py26: commands succeeded
py27: commands succeeded
py32: commands succeeded
py33: commands succeeded
py34: commands succeeded
pypy: commands succeeded
coverage: commands succeeded
docs: commands succeeded
congratulations :)
Contributing to :mod:`zope.location`
####################################
Submitting a Bug Report
-----------------------
:mod:`zope.location` tracks its bugs on Github:
https://github.com/zopefoundation/zope.location/issues
Please submit bug reports and feature requests there.
Sharing Your Changes
--------------------
.. note::
Please ensure that all tests are passing before you submit your code.
If possible, your submission should include new tests for new features
or bug fixes, although it is possible that you may have tested your
new code by updating existing tests.
If you have made a change you would like to share, the best route is to fork
the Github repository, check out your fork, make your changes on a branch
in your fork, and push it. You can then submit a pull request from your
branch:
https://github.com/zopefoundation/zope.location/pulls
If you branched the code from Launchpad using Bazaar, you have another
option: you can "push" your branch to Launchpad:
.. code-block:: sh
$ bzr push lp:~jrandom/zope.location/cool_feature
After pushing your branch, you can link it to a bug report on Github,
or request that the maintainers merge your branch using the Launchpad
"merge request" feature.
| zope.location | /zope.location-5.0.tar.gz/zope.location-5.0/docs/hacking.rst | hacking.rst |
Using :mod:`zope.location`
==========================
:class:`~zope.location.location.Location`
-----------------------------------------
The ``Location`` base class is a mix-in that defines ``__parent__`` and
``__name__`` attributes.
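As a minimal illustration, a freshly created ``Location`` provides
``ILocation`` and starts out with both attributes set to ``None``:

.. doctest::

   >>> from zope.location.interfaces import ILocation
   >>> from zope.location.location import Location
   >>> loc = Location()
   >>> ILocation.providedBy(loc)
   True
   >>> loc.__parent__ is None
   True
   >>> loc.__name__ is None
   True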
Usage within an Object field:
.. doctest::
>>> from zope.interface import implementer, Interface
>>> from zope.schema import Object
>>> from zope.schema.fieldproperty import FieldProperty
>>> from zope.location.interfaces import ILocation
>>> from zope.location.location import Location
>>> class IA(Interface):
... location = Object(schema=ILocation, required=False, default=None)
>>> @implementer(IA)
... class A(object):
... location = FieldProperty(IA['location'])
>>> a = A()
>>> a.location = Location()
>>> loc = Location(); loc.__name__ = u'foo'
>>> a.location = loc
>>> loc = Location(); loc.__name__ = None
>>> a.location = loc
>>> loc = Location(); loc.__name__ = b'foo'
>>> a.location = loc
Traceback (most recent call last):
...
SchemaNotCorrectlyImplemented: ([WrongType('foo', <type 'unicode'>, '__name__')], 'location')
:func:`~zope.location.location.inside`
--------------------------------------
The ``inside`` function tells whether ``l1`` is inside ``l2``; ``l1`` is
inside ``l2`` if ``l2`` is an ancestor of ``l1``.
.. doctest::
>>> o1 = Location()
>>> o2 = Location(); o2.__parent__ = o1
>>> o3 = Location(); o3.__parent__ = o2
>>> o4 = Location(); o4.__parent__ = o3
>>> from zope.location.location import inside
>>> inside(o1, o1)
True
>>> inside(o2, o1)
True
>>> inside(o3, o1)
True
>>> inside(o4, o1)
True
>>> inside(o1, o4)
False
>>> inside(o1, None)
False
:class:`~zope.location.location.LocationProxy`
----------------------------------------------
``LocationProxy`` is a non-picklable proxy that can be put around
objects that don't implement ``ILocation``.
.. doctest::
>>> from zope.location.location import LocationProxy
>>> l = [1, 2, 3]
>>> ILocation.providedBy(l)
False
>>> p = LocationProxy(l, "Dad", "p")
>>> p
[1, 2, 3]
>>> ILocation.providedBy(p)
True
>>> p.__parent__
'Dad'
>>> p.__name__
'p'
>>> import pickle
>>> p2 = pickle.dumps(p)
Traceback (most recent call last):
...
TypeError: Not picklable
Proxies should get their doc strings from the object they proxy:
.. doctest::
>>> p.__doc__ == l.__doc__
True
If we get a "located class" somehow, its doc string will be available
through the proxy as well:
.. doctest::
>>> class LocalClass(object):
... """This is class that can be located"""
>>> p = LocationProxy(LocalClass)
>>> p.__doc__ == LocalClass.__doc__
True
:func:`~zope.location.location.LocationIterator`
-------------------------------------------------
This function allows us to iterate over object and all its parents.
.. doctest::
>>> from zope.location.location import LocationIterator
>>> o1 = Location()
>>> o2 = Location()
>>> o3 = Location()
>>> o3.__parent__ = o2
>>> o2.__parent__ = o1
>>> iter = LocationIterator(o3)
>>> next(iter) is o3
True
>>> next(iter) is o2
True
>>> next(iter) is o1
True
>>> next(iter)
Traceback (most recent call last):
...
StopIteration
:func:`~zope.location.location.located`
---------------------------------------
``located`` locates an object in another and returns it:
.. doctest::
>>> from zope.location.location import located
>>> a = Location()
>>> parent = Location()
>>> a_located = located(a, parent, 'a')
>>> a_located is a
True
>>> a_located.__parent__ is parent
True
>>> a_located.__name__
'a'
If we locate the object again, nothing special happens:
.. doctest::
>>> a_located_2 = located(a_located, parent, 'a')
>>> a_located_2 is a_located
True
If the object does not provide ILocation an adapter can be provided:
.. doctest::
>>> import zope.interface
>>> import zope.component
>>> sm = zope.component.getGlobalSiteManager()
>>> sm.registerAdapter(LocationProxy, required=(zope.interface.Interface,))
>>> l = [1, 2, 3]
>>> parent = Location()
>>> l_located = located(l, parent, 'l')
>>> l_located.__parent__ is parent
True
>>> l_located.__name__
'l'
>>> l_located is l
False
>>> type(l_located)
<class 'zope.location.location.LocationProxy'>
>>> l_located_2 = located(l_located, parent, 'l')
>>> l_located_2 is l_located
True
When changing the name, we still do not get a different proxied object:
.. doctest::
>>> l_located_3 = located(l_located, parent, 'new-name')
>>> l_located_3 is l_located_2
True
>>> sm.unregisterAdapter(LocationProxy, required=(zope.interface.Interface,))
True
| zope.location | /zope.location-5.0.tar.gz/zope.location-5.0/docs/narr.rst | narr.rst |
=======
Changes
=======
2.1.0 (2020-04-15)
==================
- Fix DeprecationWarnings for ObjectEvent.
- Add support for Python 3.7 and 3.8.
- Drop support for Python 3.3 and 3.4.
2.0.0 (2018-01-23)
==================
- Python 3 compatibility.
- Note: The browser views and related code were removed. You need to provide
those in application-level code now.
- Package the zcml files.
- Updated dependencies.
- Revived from svn.zope.org
1.2.2 (2011-01-31)
==================
- Consolidate duplicate evolution code.
- Split generations config into its own zcml file.
1.2.1 (2010-01-20)
==================
- Bug fix: the generation added in 1.2 did not properly clean up
expired tokens, and could leave the token utility in an inconsistent
state.
1.2 (2009-11-23)
================
- Bug fix: tokens were stored in a manner that prevented them from
being cleaned up properly in the utility's _principal_ids mapping.
Make zope.locking.tokens.Token orderable to fix this, as tokens
are stored as keys in BTrees.
- Add a zope.app.generations Schema Manager to clean up any lingering
tokens due to this bug. Token utilities not accessible through the
component registry can be cleaned up manually with
zope.locking.generations.fix_token_utility.
- TokenUtility's register method will now add the token to the utility's
database connection if the token provides IPersistent.
- Clean up the tests and docs and move some common code to testing.py.
- Fix some missing imports.
1.1
===
(series for Zope 3.4; eggs)
1.1b
====
- converted to use eggs
1.0
===
(series for Zope 3.3; no dependencies on Zope eggs)
1.0b
====
Initial non-dev release
| zope.locking | /zope.locking-2.1.0.tar.gz/zope.locking-2.1.0/CHANGES.rst | CHANGES.rst |
=======================================================================
Advisory exclusive locks, shared locks, and freezes (locked to no-one).
=======================================================================
The zope.locking package provides three main features:
- advisory exclusive locks for individual objects;
- advisory shared locks for individual objects; and
- frozen objects (locked to no one).
Locks and freezes by themselves are advisory tokens and inherently
meaningless. They must be given meaning by other software, such as a security
policy.
This package approaches these features primarily from the perspective of a
system API, largely free of policy; and then provides a set of adapters for
more common interaction with users, with some access policy. We will first
look at the system API, and then explain the policy and suggested use of the
provided adapters.
| zope.locking | /zope.locking-2.1.0.tar.gz/zope.locking-2.1.0/README.rst | README.rst |
Zope Public License (ZPL) Version 2.1
A copyright notice accompanies this license document that identifies the
copyright holders.
This license has been certified as open source. It has also been designated as
GPL compatible by the Free Software Foundation (FSF).
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
1. Redistributions in source code must retain the accompanying copyright
notice, this list of conditions, and the following disclaimer.
2. Redistributions in binary form must reproduce the accompanying copyright
notice, this list of conditions, and the following disclaimer in the
documentation and/or other materials provided with the distribution.
3. Names of the copyright holders must not be used to endorse or promote
products derived from this software without prior written permission from the
copyright holders.
4. The right to distribute this software or to use it for any purpose does not
give you the right to use Servicemarks (sm) or Trademarks (tm) of the
copyright
holders. Use of them is covered by separate agreement with the copyright
holders.
5. If any files are modified, you must cause the modified files to carry
prominent notices stating that you changed the files and the date of any
change.
Disclaimer
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS ``AS IS'' AND ANY EXPRESSED
OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
EVENT SHALL THE COPYRIGHT HOLDERS BE LIABLE FOR ANY DIRECT, INDIRECT,
INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE,
EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
| zope.locking | /zope.locking-2.1.0.tar.gz/zope.locking-2.1.0/LICENSE.rst | LICENSE.rst |
==========
System API
==========
The central approach for the package is that locks and freeze tokens must be
created and then registered by a token utility. The tokens will not work
until they have been registered. This gives the ability to definitively know,
and thus manipulate, all active tokens in a system.
The first object we'll introduce, then, is the TokenUtility: the utility that
is responsible for the registration and the retrieving of tokens.
>>> from zope import component, interface
>>> from zope.locking import interfaces, utility, tokens
>>> util = utility.TokenUtility()
>>> from zope.interface.verify import verifyObject
>>> verifyObject(interfaces.ITokenUtility, util)
True
The utility only has a few methods--`get`, `iterForPrincipalId`,
`__iter__`, and `register`--which we will look at below. It is expected to be
persistent, and the included implementation is in fact persistent.Persistent,
and expects to be installed as a local utility. The utility needs a
connection to the database before it can register persistent tokens.
>>> from zope.locking.testing import Demo
>>> lock = tokens.ExclusiveLock(Demo(), 'Fantomas')
>>> util.register(lock)
Traceback (most recent call last):
...
AttributeError: 'NoneType' object has no attribute 'add'
>>> conn = get_connection()
>>> conn.add(util)
If the token provides IPersistent, the utility will add it to its connection.
>>> lock._p_jar is None
True
>>> lock = util.register(lock)
>>> lock._p_jar is util._p_jar
True
>>> lock.end()
>>> lock = util.register(lock)
The standard token utility can accept tokens for any object that is adaptable
to IKeyReference.
>>> import datetime
>>> import pytz
>>> before_creation = datetime.datetime.now(pytz.utc)
>>> demo = Demo()
Now, with an instance of the demo class, it is possible to register lock and
freeze tokens for demo instances with the token utility.
As mentioned above, the general pattern for making a lock or freeze token is
to create it--at which point most of its methods and attributes are
unusable--and then to register it with the token utility. After registration,
the lock is effective and in place.
The TokenUtility can actually be used with anything that implements
zope.locking.interfaces.IAbstractToken, but we'll look at the four tokens that
come with the zope.locking package: an exclusive lock, a shared lock, a
permanent freeze, and an endable freeze.
Exclusive Locks
===============
Exclusive locks are tokens that are owned by a single principal. No principal
may be added or removed: the lock token must be ended and another started for
another principal to get the benefits of the lock (whatever they have been
configured to be).
Here's an example of creating and registering an exclusive lock: the principal
with an id of 'john' locks the demo object.
>>> lock = tokens.ExclusiveLock(demo, 'john')
>>> res = util.register(lock)
>>> res is lock
True
The lock token is now in effect. Registering the token (the lock) fired an
ITokenStartedEvent, which we'll look at now.
(Note that this example uses an events list to look at events that have fired.
This is simply a list whose `append` method has been added as a subscriber
to the zope.event.subscribers list. It's included as a global when this file
is run as a test.)
>>> from zope.component.eventtesting import events
>>> ev = events[-1]
>>> verifyObject(interfaces.ITokenStartedEvent, ev)
True
>>> ev.object is lock
True
Now that the lock token is created and registered, the token utility knows
about it. The utility's `get` method simply returns the active token for an
object or None--it never returns an ended token, and in fact none of the
utility methods do.
>>> util.get(demo) is lock
True
>>> util.get(Demo()) is None
True
Note that `get` accepts alternate defaults, like a dictionary.get:
>>> util.get(Demo(), util) is util
True
The `iterForPrincipalId` method returns an iterator of active locks for the
given principal id.
>>> list(util.iterForPrincipalId('john')) == [lock]
True
>>> list(util.iterForPrincipalId('mary')) == []
True
The util's `__iter__` method simply iterates over all active (non-ended)
tokens.
>>> list(util) == [lock]
True
The token utility disallows registration of multiple active tokens for the
same object.
>>> util.register(tokens.ExclusiveLock(demo, 'mary'))
... # doctest: +ELLIPSIS
Traceback (most recent call last):
...
zope.locking.interfaces.RegistrationError: ...
>>> util.register(tokens.SharedLock(demo, ('mary', 'jane')))
... # doctest: +ELLIPSIS
Traceback (most recent call last):
...
zope.locking.interfaces.RegistrationError: ...
>>> util.register(tokens.Freeze(demo))
... # doctest: +ELLIPSIS
Traceback (most recent call last):
...
zope.locking.interfaces.RegistrationError: ...
It's also worth looking at the lock token itself. The registered lock token
implements IExclusiveLock.
>>> verifyObject(interfaces.IExclusiveLock, lock)
True
It provides a number of capabilities. Arguably the most important attribute is
whether the token is in effect or not: `ended`. This token is active, so it
has not yet ended:
>>> lock.ended is None
True
When it does end, the ended attribute is a datetime in UTC of when the token
ended. We'll demonstrate that below.
Later, the `started`, `expiration`, `duration`, and `remaining_duration`
attributes will be important; for now we merely note their existence.
>>> before_creation <= lock.started <= datetime.datetime.now(pytz.utc)
True
>>> lock.expiration is None # == forever
True
>>> lock.duration is None # == forever
True
>>> lock.remaining_duration is None # == forever
True
The `end` method and the related ending and expiration attributes are all part
of the IEndable interface--an interface that not all tokens must implement,
as we will also discuss later.
>>> interfaces.IEndable.providedBy(lock)
True
The `context` and `__parent__` attributes point to the locked object--demo in
our case. `context` is the intended standard API for obtaining the object,
but `__parent__` is important for the Zope 3 security set up, as discussed
towards the end of this document.
>>> lock.context is demo
True
>>> lock.__parent__ is demo # important for security
True
Registering the lock with the token utility set the utility attribute and
initialized the started attribute to the datetime that the lock began. The
utility attribute should never be set by any code other than the token
utility.
>>> lock.utility is util
True
Tokens always provide a `principal_ids` attribute that provides an iterable of
the principals that are part of a token. In our case, this is an exclusive
lock for 'john', so the value is simple.
>>> sorted(lock.principal_ids)
['john']
The only method on a basic token like the exclusive lock is `end`. Calling it
without arguments permanently and explicitly ends the life of the token.
>>> lock.end()
Like registering a token, ending a token fires an event.
>>> ev = events[-1]
>>> verifyObject(interfaces.ITokenEndedEvent, ev)
True
>>> ev.object is lock
True
It affects attributes on the token. Again, the most important of these is
ended, which is now the datetime of ending.
>>> lock.ended >= lock.started
True
>>> lock.remaining_duration == datetime.timedelta()
True
It also affects queries of the token utility.
>>> util.get(demo) is None
True
>>> list(util.iterForPrincipalId('john')) == []
True
>>> list(util) == []
True
Don't try to end an already-ended token.
>>> lock.end()
Traceback (most recent call last):
...
zope.locking.interfaces.EndedError
The other way of ending a token is with an expiration datetime. As we'll see,
one of the most important caveats about working with timeouts is that a token
that expires because of a timeout does not fire any expiration event. It
simply starts providing the `expiration` value for the `ended` attribute.
>>> one = datetime.timedelta(hours=1)
>>> two = datetime.timedelta(hours=2)
>>> three = datetime.timedelta(hours=3)
>>> four = datetime.timedelta(hours=4)
>>> lock = util.register(tokens.ExclusiveLock(demo, 'john', three))
>>> lock.duration
datetime.timedelta(seconds=10800)
>>> three >= lock.remaining_duration >= two
True
>>> lock.ended is None
True
>>> util.get(demo) is lock
True
>>> list(util.iterForPrincipalId('john')) == [lock]
True
>>> list(util) == [lock]
True
The expiration time of an endable token is always the creation date plus the
timeout.
>>> lock.expiration == lock.started + lock.duration
True
>>> ((before_creation + three) <=
... (lock.expiration) <= # this value is the expiration date
... (before_creation + four))
True
Expirations can be changed while a lock is still active, using any of
the `expiration`, `remaining_duration` or `duration` attributes. All changes
fire events. First we'll change the expiration attribute.
>>> lock.expiration = lock.started + one
>>> lock.expiration == lock.started + one
True
>>> lock.duration == one
True
>>> ev = events[-1]
>>> verifyObject(interfaces.IExpirationChangedEvent, ev)
True
>>> ev.object is lock
True
>>> ev.old == lock.started + three
True
Next we'll change the duration attribute.
>>> lock.duration = four
>>> lock.duration
datetime.timedelta(seconds=14400)
>>> four >= lock.remaining_duration >= three
True
>>> ev = events[-1]
>>> verifyObject(interfaces.IExpirationChangedEvent, ev)
True
>>> ev.object is lock
True
>>> ev.old == lock.started + one
True
Now we'll hack our code to make it think that it is two hours later, and then
check and modify the remaining_duration attribute.
>>> def hackNow():
... return (datetime.datetime.now(pytz.utc) +
... datetime.timedelta(hours=2))
...
>>> import zope.locking.utils
>>> oldNow = zope.locking.utils.now
>>> zope.locking.utils.now = hackNow # make code think it's 2 hours later
>>> lock.duration
datetime.timedelta(seconds=14400)
>>> two >= lock.remaining_duration >= one
True
>>> lock.remaining_duration -= one
>>> one >= lock.remaining_duration >= datetime.timedelta()
True
>>> three + datetime.timedelta(minutes=1) >= lock.duration >= three
True
>>> ev = events[-1]
>>> verifyObject(interfaces.IExpirationChangedEvent, ev)
True
>>> ev.object is lock
True
>>> ev.old == lock.started + four
True
Now, we'll hack our code to make it think that it's a day later. It is very
important to remember that a lock ending with a timeout ends silently--that
is, no event is fired.
>>> def hackNow():
... return (
... datetime.datetime.now(pytz.utc) + datetime.timedelta(days=1))
...
>>> zope.locking.utils.now = hackNow # make code think it is a day later
>>> lock.ended == lock.expiration
True
>>> util.get(demo) is None
True
>>> util.get(demo, util) is util # alternate default works
True
>>> lock.remaining_duration == datetime.timedelta()
True
>>> lock.end()
Traceback (most recent call last):
...
zope.locking.interfaces.EndedError
Once a lock has ended, the timeout can no longer be changed.
>>> lock.duration = datetime.timedelta(days=2)
Traceback (most recent call last):
...
zope.locking.interfaces.EndedError
We'll undo the hacks, and also end the lock (that is no longer ended once
the hack is finished).
>>> zope.locking.utils.now = oldNow # undo the hack
>>> lock.end()
Make sure to register tokens. Creating a lock but not registering it puts it
in a state that is not fully initialized.
>>> lock = tokens.ExclusiveLock(demo, 'john')
>>> lock.started # doctest: +ELLIPSIS
Traceback (most recent call last):
...
zope.locking.interfaces.UnregisteredError: ...
>>> lock.ended # doctest: +ELLIPSIS
Traceback (most recent call last):
...
zope.locking.interfaces.UnregisteredError: ...
Shared Locks
============
Shared locks are very similar to exclusive locks, but take an iterable of one
or more principals at creation, and can have principals added or removed while
they are active.
In this example, also notice a convenient characteristic of the TokenUtility
`register` method: it also returns the token, so creation, registration, and
variable assignment can be chained, if desired.
>>> lock = util.register(tokens.SharedLock(demo, ('john', 'mary')))
>>> ev = events[-1]
>>> verifyObject(interfaces.ITokenStartedEvent, ev)
True
>>> ev.object is lock
True
Here, principals with ids of 'john' and 'mary' have locked the demo object.
The returned token implements ISharedLock and provides a superset of the
IExclusiveLock capabilities. These next operations should all look familiar
from the discussion of the ExclusiveLock tokens above.
>>> verifyObject(interfaces.ISharedLock, lock)
True
>>> lock.context is demo
True
>>> lock.__parent__ is demo # important for security
True
>>> lock.utility is util
True
>>> sorted(lock.principal_ids)
['john', 'mary']
>>> lock.ended is None
True
>>> before_creation <= lock.started <= datetime.datetime.now(pytz.utc)
True
>>> lock.expiration is None
True
>>> lock.duration is None
True
>>> lock.remaining_duration is None
True
>>> lock.end()
>>> lock.ended >= lock.started
True
As mentioned, though, the SharedLock capabilities are a superset of the
ExclusiveLock ones. There are two extra methods: `add` and `remove`. These
are able to add and remove principal ids as shared owners of the lock token.
>>> lock = util.register(tokens.SharedLock(demo, ('john',)))
>>> sorted(lock.principal_ids)
['john']
>>> lock.add(('mary',))
>>> sorted(lock.principal_ids)
['john', 'mary']
>>> lock.add(('alice',))
>>> sorted(lock.principal_ids)
['alice', 'john', 'mary']
>>> lock.remove(('john',))
>>> sorted(lock.principal_ids)
['alice', 'mary']
>>> lock.remove(('mary',))
>>> sorted(lock.principal_ids)
['alice']
Adding and removing principals fires appropriate events, as you might expect.
>>> lock.add(('mary',))
>>> sorted(lock.principal_ids)
['alice', 'mary']
>>> ev = events[-1]
>>> verifyObject(interfaces.IPrincipalsChangedEvent, ev)
True
>>> ev.object is lock
True
>>> sorted(ev.old)
['alice']
>>> lock.remove(('alice',))
>>> sorted(lock.principal_ids)
['mary']
>>> ev = events[-1]
>>> verifyObject(interfaces.IPrincipalsChangedEvent, ev)
True
>>> ev.object is lock
True
>>> sorted(ev.old)
['alice', 'mary']
Removing all participants from a lock ends the lock.
>>> lock.remove(('mary',))
>>> sorted(lock.principal_ids)
[]
>>> lock.ended >= lock.started
True
>>> ev = events[-1]
>>> verifyObject(interfaces.IPrincipalsChangedEvent, ev)
True
>>> ev.object is lock
True
>>> sorted(ev.old)
['mary']
>>> ev = events[-2]
>>> verifyObject(interfaces.ITokenEndedEvent, ev)
True
>>> ev.object is lock
True
As you might expect, trying to add (or remove!) users from an ended lock is
an error.
>>> lock.add(('john',))
Traceback (most recent call last):
...
zope.locking.interfaces.EndedError
>>> lock.remove(('john',))
Traceback (most recent call last):
...
zope.locking.interfaces.EndedError
The token utility keeps track of shared lock tokens the same as exclusive lock
tokens. Here's a quick summary in code.
>>> lock = util.register(tokens.SharedLock(demo, ('john', 'mary')))
>>> util.get(demo) is lock
True
>>> list(util.iterForPrincipalId('john')) == [lock]
True
>>> list(util.iterForPrincipalId('mary')) == [lock]
True
>>> list(util) == [lock]
True
>>> util.register(tokens.ExclusiveLock(demo, 'mary'))
... # doctest: +ELLIPSIS
Traceback (most recent call last):
...
zope.locking.interfaces.RegistrationError: ...
>>> util.register(tokens.SharedLock(demo, ('mary', 'jane')))
... # doctest: +ELLIPSIS
Traceback (most recent call last):
...
zope.locking.interfaces.RegistrationError: ...
>>> util.register(tokens.Freeze(demo))
... # doctest: +ELLIPSIS
Traceback (most recent call last):
...
zope.locking.interfaces.RegistrationError: ...
>>> lock.end()
Timed expirations work the same as with exclusive locks. We won't repeat that
here, though look in the annoying.txt document in this package for the actual
repeated tests.
EndableFreezes
==============
An endable freeze token is similar to a lock token except that it grants the
'lock' to no one.
>>> token = util.register(tokens.EndableFreeze(demo))
>>> verifyObject(interfaces.IEndableFreeze, token)
True
>>> ev = events[-1]
>>> verifyObject(interfaces.ITokenStartedEvent, ev)
True
>>> ev.object is token
True
>>> sorted(token.principal_ids)
[]
>>> token.end()
Endable freezes are otherwise identical to exclusive locks. See annoying.txt
for the comprehensive copy-and-paste tests duplicating the exclusive lock
tests. Notice that an EndableFreeze will never be a part of an iterable of
tokens by principal: by definition, a freeze is associated with no principals.
Freezes
=======
Freezes are similar to EndableFreezes, except they are not endable. They are
intended to be used by system level operations that should permanently disable
certain changes, such as changes to the content of an archived object version.
Creating them is the same...
>>> token = util.register(tokens.Freeze(demo))
>>> verifyObject(interfaces.IFreeze, token)
True
>>> ev = events[-1]
>>> verifyObject(interfaces.ITokenStartedEvent, ev)
True
>>> ev.object is token
True
>>> sorted(token.principal_ids)
[]
But they can't go away...
>>> token.end()
Traceback (most recent call last):
...
AttributeError: 'Freeze' object has no attribute 'end'
They also do not have expirations, duration, remaining durations, or ended
dates. They are permanent, unless you go into the database to muck with
implementation-specific data structures.
There is no API way to end a Freeze. We'll need to make a new object for the
rest of our demonstrations, and this token will exist through the
remaining examples.
>>> old_demo = demo
>>> demo = Demo()
===============================
User API, Adapters and Security
===============================
The API discussed so far makes few concessions to some of the common use cases
for locking. Here are some particular needs as yet unfulfilled by the
discussion so far.
- It should be possible to allow and deny per object whether users may
create and register tokens for the object.
- It should often be easier to register an endable token than a permanent
token.
- All users should be able to unlock or modify some aspects of their own
tokens, or remove their own participation in shared tokens; but it should be
possible to restrict access to ending tokens that users do not own (often
called "breaking locks").
In the context of the Zope 3 security model, the first two needs are intended
to be addressed by the ITokenBroker interface, and associated adapter; the last
need is intended to be addressed by the ITokenHandler, and associated
adapters.
TokenBrokers
============
Token brokers adapt an object, which is the object whose tokens are
brokered, and use this object as a security context. They provide a few
useful methods: `lock`, `lockShared`, `freeze`, and `get`. The TokenBroker
expects to be a trusted adapter.
lock
----
The lock method creates and registers an exclusive lock. Without arguments,
it tries to create it for the user in the current interaction.
This won't work without an interaction, of course. Notice that we start the
example by registering the utility. We would normally be required to put the
utility in a site package, so that it would be persistent, but for this
demonstration we are simplifying the registration.
>>> component.provideUtility(util, provides=interfaces.ITokenUtility)
>>> import zope.interface.interfaces
>>> @interface.implementer(zope.interface.interfaces.IComponentLookup)
... @component.adapter(interface.Interface)
... def siteManager(obj):
... return component.getGlobalSiteManager()
...
>>> component.provideAdapter(siteManager)
>>> from zope.locking import adapters
>>> component.provideAdapter(adapters.TokenBroker)
>>> broker = interfaces.ITokenBroker(demo)
>>> broker.lock()
Traceback (most recent call last):
...
ValueError
>>> broker.lock('joe')
Traceback (most recent call last):
...
zope.locking.interfaces.ParticipationError
If we set up an interaction with one participation, the lock will have a
better chance.
>>> import zope.security.interfaces
>>> @interface.implementer(zope.security.interfaces.IPrincipal)
... class DemoPrincipal(object):
... def __init__(self, id, title=None, description=None):
... self.id = id
... self.title = title
... self.description = description
...
>>> joe = DemoPrincipal('joe')
>>> import zope.security.management
>>> @interface.implementer(zope.security.interfaces.IParticipation)
... class DemoParticipation(object):
... def __init__(self, principal):
... self.principal = principal
... self.interaction = None
...
>>> zope.security.management.endInteraction()
>>> zope.security.management.newInteraction(DemoParticipation(joe))
>>> token = broker.lock()
>>> interfaces.IExclusiveLock.providedBy(token)
True
>>> token.context is demo
True
>>> token.__parent__ is demo
True
>>> sorted(token.principal_ids)
['joe']
>>> token.started is not None
True
>>> util.get(demo) is token
True
>>> token.end()
You can only specify principals that are in the current interaction.
>>> token = broker.lock('joe')
>>> sorted(token.principal_ids)
['joe']
>>> token.end()
>>> broker.lock('mary')
Traceback (most recent call last):
...
zope.locking.interfaces.ParticipationError
The method can take a duration.
>>> token = broker.lock(duration=two)
>>> token.duration == two
True
>>> token.end()
If the interaction has more than one principal, a principal (in the
interaction) must be specified.
>>> mary = DemoPrincipal('mary')
>>> participation = DemoParticipation(mary)
>>> zope.security.management.getInteraction().add(participation)
>>> broker.lock()
Traceback (most recent call last):
...
ValueError
>>> broker.lock('susan')
Traceback (most recent call last):
...
zope.locking.interfaces.ParticipationError
>>> token = broker.lock('joe')
>>> sorted(token.principal_ids)
['joe']
>>> token.end()
>>> token = broker.lock('mary')
>>> sorted(token.principal_ids)
['mary']
>>> token.end()
>>> zope.security.management.endInteraction()
lockShared
----------
The `lockShared` method has similar characteristics, except that it can handle
multiple principals.
Without an interaction, principals are either not found, or not part of the
interaction:
>>> broker.lockShared()
Traceback (most recent call last):
...
ValueError
>>> broker.lockShared(('joe',))
Traceback (most recent call last):
...
zope.locking.interfaces.ParticipationError
With an interaction, the principals get the lock by default.
>>> zope.security.management.newInteraction(DemoParticipation(joe))
>>> token = broker.lockShared()
>>> interfaces.ISharedLock.providedBy(token)
True
>>> token.context is demo
True
>>> token.__parent__ is demo
True
>>> sorted(token.principal_ids)
['joe']
>>> token.started is not None
True
>>> util.get(demo) is token
True
>>> token.end()
You can only specify principals that are in the current interaction.
>>> token = broker.lockShared(('joe',))
>>> sorted(token.principal_ids)
['joe']
>>> token.end()
>>> broker.lockShared(('mary',))
Traceback (most recent call last):
...
zope.locking.interfaces.ParticipationError
The method can take a duration.
>>> token = broker.lockShared(duration=two)
>>> token.duration == two
True
>>> token.end()
If the interaction has more than one principal, all are included, unless some
are singled out.
>>> participation = DemoParticipation(mary)
>>> zope.security.management.getInteraction().add(participation)
>>> token = broker.lockShared()
>>> sorted(token.principal_ids)
['joe', 'mary']
>>> token.end()
>>> token = broker.lockShared(('joe',))
>>> sorted(token.principal_ids)
['joe']
>>> token.end()
>>> token = broker.lockShared(('mary',))
>>> sorted(token.principal_ids)
['mary']
>>> token.end()
>>> zope.security.management.endInteraction()
freeze
------
The `freeze` method allows users to create an endable freeze. It has no
requirements on the interaction. It should be protected carefully, from a
security perspective.
>>> token = broker.freeze()
>>> interfaces.IEndableFreeze.providedBy(token)
True
>>> token.context is demo
True
>>> token.__parent__ is demo
True
>>> sorted(token.principal_ids)
[]
>>> token.started is not None
True
>>> util.get(demo) is token
True
>>> token.end()
The method can take a duration.
>>> token = broker.freeze(duration=two)
>>> token.duration == two
True
>>> token.end()
get
---
The `get` method is exactly equivalent to the token utility's get method:
it returns the current active token for the object, or None. It is useful
for protected code, since utilities typically do not get security assertions,
and this method can get its security assertions from the object, which is
often the right place.
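For instance (a short sketch; this assumes, as the equivalence above implies,
that the broker's `get` takes no required arguments):

>>> broker.get() is None
True
>>> token = broker.freeze()
>>> broker.get() is token
True
>>> token.end()
>>> broker.get() is None
True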
Again, the TokenBroker does embody some policy; if it is not good policy for
your application, build your own interfaces and adapters that embody the
policy you need.
TokenHandlers
=============
TokenHandlers are useful for endable tokens with one or more principals--that
is, locks, but not freezes. They are intended to be protected with a lower
external security permission than the usual token methods and attributes, and
then impose their own checks on the basis of the current interaction. They are
very much policy, and other approaches may be useful. They are intended to be
registered as trusted adapters.
For exclusive locks and shared locks, then, we have token handlers.
Generally, token handlers give access to all of the same capabilities as their
corresponding tokens, with the following additional constraints and
capabilities:
- `expiration`, `duration`, and `remaining_duration` all may be set only if
all the principals in the current interaction are owners of the wrapped
token; and
- `release` removes some or all of the principals in the interaction if all
the principals in the current interaction are owners of the wrapped token.
Note that `end` is unaffected: this is effectively "break lock", while
`release` is effectively "unlock". Permissions should be set accordingly.
Shared lock handlers have two additional methods that are discussed in their
section.
ExclusiveLockHandlers
---------------------
Given the general constraints described above, exclusive lock handlers will
generally only allow access to their special capabilities if the operation
is in an interaction with only the lock owner.
>>> zope.security.management.newInteraction(DemoParticipation(joe))
>>> component.provideAdapter(adapters.ExclusiveLockHandler)
>>> lock = broker.lock()
>>> handler = interfaces.IExclusiveLockHandler(lock)
>>> verifyObject(interfaces.IExclusiveLockHandler, handler)
True
>>> handler.__parent__ is lock
True
>>> handler.expiration is None
True
>>> handler.duration = two
>>> lock.duration == two
True
>>> handler.expiration = handler.started + three
>>> lock.expiration == handler.started + three
True
>>> handler.remaining_duration = two
>>> lock.remaining_duration <= two
True
>>> handler.release()
>>> handler.ended >= handler.started
True
>>> lock.ended >= lock.started
True
>>> lock = util.register(tokens.ExclusiveLock(demo, 'mary'))
>>> handler = interfaces.ITokenHandler(lock) # for joe's interaction still
>>> handler.duration = two # doctest: +ELLIPSIS
Traceback (most recent call last):
...
zope.locking.interfaces.ParticipationError: ...
>>> handler.expiration = handler.started + three # doctest: +ELLIPSIS
Traceback (most recent call last):
...
zope.locking.interfaces.ParticipationError: ...
>>> handler.remaining_duration = two # doctest: +ELLIPSIS
Traceback (most recent call last):
...
zope.locking.interfaces.ParticipationError: ...
>>> handler.release() # doctest: +ELLIPSIS
Traceback (most recent call last):
...
zope.locking.interfaces.ParticipationError: ...
>>> lock.end()
SharedLockHandlers
------------------
Shared lock handlers let anyone who is an owner of a token set the expiration,
duration, and remaining_duration values. This is a 'get out of the way' policy
that relies on social interactions to make sure all the participants are
represented as they want. Other policies could be written in other adapters.
>>> component.provideAdapter(adapters.SharedLockHandler)
>>> lock = util.register(tokens.SharedLock(demo, ('joe', 'mary')))
>>> handler = interfaces.ITokenHandler(lock) # for joe's interaction still
>>> verifyObject(interfaces.ISharedLockHandler, handler)
True
>>> handler.__parent__ is lock
True
>>> handler.expiration is None
True
>>> handler.duration = two
>>> lock.duration == two
True
>>> handler.expiration = handler.started + three
>>> lock.expiration == handler.started + three
True
>>> handler.remaining_duration = two
>>> lock.remaining_duration <= two
True
>>> sorted(handler.principal_ids)
['joe', 'mary']
>>> handler.release()
>>> sorted(handler.principal_ids)
['mary']
>>> handler.duration = two # doctest: +ELLIPSIS
Traceback (most recent call last):
...
zope.locking.interfaces.ParticipationError: ...
>>> handler.expiration = handler.started + three # doctest: +ELLIPSIS
Traceback (most recent call last):
...
zope.locking.interfaces.ParticipationError: ...
>>> handler.remaining_duration = two # doctest: +ELLIPSIS
Traceback (most recent call last):
...
zope.locking.interfaces.ParticipationError: ...
>>> handler.release() # doctest: +ELLIPSIS
Traceback (most recent call last):
...
zope.locking.interfaces.ParticipationError: ...
The shared lock handler adds two additional methods to a standard handler:
`join` and `add`. They do similar jobs, but are separate to allow separate
security settings for each. The `join` method lets some or all of the
principals in the current interaction join.
>>> handler.join()
>>> sorted(handler.principal_ids)
['joe', 'mary']
>>> handler.join(('susan',))
Traceback (most recent call last):
...
zope.locking.interfaces.ParticipationError
The `add` method lets any principal ids be added to the lock, but all
principals in the current interaction must be a part of the lock.
>>> handler.add(('susan',))
>>> sorted(handler.principal_ids)
['joe', 'mary', 'susan']
>>> handler.release()
>>> handler.add('jake') # doctest: +ELLIPSIS
Traceback (most recent call last):
...
zope.locking.interfaces.ParticipationError: ...
>>> lock.end()
>>> zope.security.management.endInteraction()
Warnings
========
* The token utility will register a token for an object if it can. It does not
check to see if it is actually the local token utility for the given object.
This should be arranged by clients of the token utility, and verified
externally if desired.
* Tokens are stored as keys in BTrees, and therefore must be orderable
  (i.e., they must support ordering comparisons such as __lt__ and __eq__).
Intended Security Configuration
===============================
Utilities are typically unprotected in Zope 3--or more accurately, have
no security assertions and are used with no security proxy--and the token
utility expects to be so. As such, the broker and handler objects are
expected to be the objects used by view code, and so associated with security
proxies. All should have appropriate __parent__ attribute values. The
ability to mutate the tokens--`end`, `add` and `remove` methods, for
instance--should be protected with an administrator-type permission such as
'zope.Security'. Setting the timeout properties on the token should be
protected in the same way. Setting the handlers attributes can have a less
restrictive setting, since they calculate security themselves on the basis of
lock membership.
On the adapter, the `end` method should be protected with the same or
similar permission. Calling methods such as lock and lockShared should be
protected with something like 'zope.ManageContent'. Getting attributes should
be 'zope.View' or 'zope.Public', and unlocking and setting the timeouts, since
they are already protected to make sure the principal is a member of the lock,
can probably be 'zope.Public'.
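For illustration only, here is one way the broker protections sketched above
could be declared from Python with zope.security checkers; the permission
choices merely restate the suggestions in this section and are not a
configuration shipped with this package (ZCML class/require declarations are
the more common spelling)::

    from zope.security.checker import Checker, CheckerPublic, defineChecker

    from zope.locking import adapters

    # Attribute and method names mapped to the permissions suggested above.
    broker_checker = Checker({
        'lock': 'zope.ManageContent',
        'lockShared': 'zope.ManageContent',
        'freeze': 'zope.Security',
        'get': CheckerPublic,
    })
    defineChecker(adapters.TokenBroker, broker_checker)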
These settings can be abused relatively easily to create an insecure
system--for instance, if a user can get an adapter to IPrincipalLockable for
another principal--but are a reasonable start.
>>> broker.__parent__ is demo
True
>>> handler.__parent__ is lock
True
Random Thoughts
===============
As a side effect of the design, it is conceivable that multiple lock utilities
could be in use at once, governing different aspects of an object; however,
this may never itself be of use.
| zope.locking | /zope.locking-2.1.0.tar.gz/zope.locking-2.1.0/src/zope/locking/README.rst | README.rst |
from zope import interface, schema
from zope.interface.interfaces import IObjectEvent, ObjectEvent
##############################################################################
# Token utility
##############################################################################
class ITokenUtility(interface.Interface):
"""Responsible for initializing, registering, and finding all active tokens
"""
def get(obj, default=None):
"""For obj, return active IToken or default.
Token must be active (not ended), or else return default.
"""
def iterForPrincipalId(principal_id):
"""Return an iterable of all active tokens held by the principal id.
"""
def __iter__():
"""Return iterable of active tokens managed by utility.
"""
def register(token):
"""register an IToken, or a change to a previously-registered token.
If the token has not yet been assigned a `utility` value, sets the
`utility` attribute of the token to self, to mark registration.
Raises ValueError if token has been registered to another utility.
If lock has never been registered before, fires TokenStartedEvent.
"""
##############################################################################
# General (abstract) token interfaces
##############################################################################
class IAbstractToken(interface.Interface):
"""A token. Must be registered with token utility to start.
This is the core token interface. This core interface is mostly readonly.
It is used as a base by both tokens and token handlers.
"""
__parent__ = interface.Attribute(
"""the security context for the token.""")
context = interface.Attribute(
"""the actual locked object. readonly.""")
utility = interface.Attribute(
"""The lock utility in charge of this lock.
Should *only* ever be set once by ILockUtility.register method.
When the utility sets this attribute, the `start` attribute should
be set and the token should be considered active (potentially; see
IEndable).""")
principal_ids = interface.Attribute(
"""An immutable iterable of the principal ids that own the lock;
or None if the object is not locked. If object is frozen, returns
an iterable with no members. Readonly.""")
started = schema.Datetime(
description=(u"""the date and time, with utc timezone, that the token
was registered with the token utility and became effective. Required
after the token has been registered."""),
required=False, readonly=True)
class IEndable(interface.Interface):
"""A mixin for tokens that may be ended explicitly or timed out.
Some tokens are endable; locks, for instance, are endable. Freezes may be
permanent, so some are not IEndable.
"""
ended = schema.Datetime(
description=(u"""the date and time, with utc timezone, that the token
ended, explicitly or from expiration."""),
required=False, readonly=True)
expiration = schema.Datetime(
description=(
u"""the expiration time, with utc timezone.
None indicates no expiration.
Readonly (but see extending interfaces).
"""),
required=False)
duration = schema.Timedelta(
description=(
u"""the duration of the token timeout from its start.
None indicates no expiration.
Readonly (but see extending interfaces).
"""),
required=False)
remaining_duration = schema.Timedelta(
description=(
u"""the remaining effective duration for the token from "now".
None indicates no expiration. If the token has ended, return
a datetime.timedelta of no time.
Readonly (but see extending interfaces).
"""),
required=False)
def end():
"""explicitly expire the token.
fires TokenEndedEvent if successful, or raises EndedError
if the token has already ended."""
##############################################################################
# Token interfaces: registered by token utility
##############################################################################
# Abstract token interfaces
class IToken(IAbstractToken):
"""a token that actually stores data.
This is the sort of token that should be used in the token utility."""
__parent__ = interface.Attribute(
"""the locked object. readonly. Important for security.""")
annotations = interface.Attribute(
"""Stores arbitrary application data under package-unique keys.
By "package-unique keys", we mean keys that are are unique by
virtue of including the dotted name of a package as a prefix. A
package name is used to limit the authority for picking names for
a package to the people using that package.
""")
class IEndableToken(IToken, IEndable):
"""A standard endable token."""
expiration = schema.Datetime(
description=(
u"""the expiration time, with utc timezone.
None indicates no expiration.
When setting, if token has ended then raise EndedError.
Otherwise call utility.register, fire ExpirationChangedEvent.
"""),
required=False)
duration = schema.Timedelta(
description=(
u"""the duration of the token timeout from its start.
None indicates no expiration.
When setting, if token has ended then raise EndedError.
Otherwise call utility.register, fire ExpirationChangedEvent.
"""),
required=False)
remaining_duration = schema.Timedelta(
description=(
u"""the remaining effective duration for the token from "now".
None indicates no expiration. If the token has ended, return
a datetime.timedelta of no time.
When setting, if token has ended then raise EndedError.
Otherwise call utility.register, fire ExpirationChangedEvent.
"""),
required=False)
# Concrete token interfaces
class IExclusiveLock(IEndableToken):
"""a lock held to one and only one principal.
principal_ids must always have one and only one member."""
class ISharedLock(IEndableToken):
"a lock held by one or more principals"
def add(principal_ids):
"""Share this lock with principal_ids.
Adding principals that already are part of the lock can be ignored.
If ended, raise EndedError.
"""
def remove(principal_ids):
"""Remove principal_ids from lock.
        Removing all principals ends the lock: there may not be an effective
        shared lock held by no one.
Removing principals that are not part of the lock can be ignored.
If ended, raise EndedError."""
class IFreeze(IToken):
"""principal_ids must always be empty.
May not be ended."""
class IEndableFreeze(IFreeze, IEndableToken):
"""May be ended."""
##############################################################################
# Token broker interface
##############################################################################
class ITokenBroker(interface.Interface):
"""for one object, create standard endable tokens and get active ITokens.
Convenient adapter model for security: broker is in context of affected
object, so security settings for the object can be obtained automatically.
"""
context = interface.Attribute(
'The object whose tokens are brokered. readonly.')
__parent__ = interface.Attribute(
"""the context. readonly. Important for security.""")
def lock(principal_id=None, duration=None):
"""lock context, and return token.
if principal_id is None, use interaction's principal; if interaction
does not have one and only one principal, raise ValueError.
if principal_id is not None, principal_id must be in interaction,
or else raise ParticipationError.
Same constraints as token utility's register method.
"""
def lockShared(principal_ids=None, duration=None):
"""lock context with a shared lock, and return token.
if principal_ids is None, use interaction's principals; if interaction
does not have any principals, raise ValueError.
if principal_ids is not None, principal_ids must be in interaction,
or else raise ParticipationError. Must be at least one id.
Same constraints as token utility's register method.
"""
def freeze(duration=None):
"""freeze context with an endable freeze, and return token.
"""
def get():
"""Get context's active IToken, or None.
"""
##############################################################################
# Token handler interfaces
##############################################################################
# Abstract token handler interfaces.
class ITokenHandler(IAbstractToken, IEndable):
"""give appropriate increased access in a security system.
Appropriate for endable tokens with one or more principals (for instance,
    neither freezes nor endable freezes)."""
__parent__ = interface.Attribute(
"""the actual token. readonly. Important for security.""")
token = interface.Attribute(
"""the registered IToken that this adapter uses for actual
data storage""")
expiration = schema.Datetime(
description=(
u"""the expiration time, with utc timezone.
None indicates no expiration.
When setting, if token has ended then raise EndedError.
If all of the principals in the current interaction are not owners
of the current token (in principal_ids), raise ParticipationError.
Otherwise call utility.register, fire ExpirationChangedEvent.
"""),
required=False)
duration = schema.Timedelta(
description=(
u"""the duration of the token timeout from its start.
None indicates no expiration.
When setting, if token has ended then raise EndedError.
If all of the principals in the current interaction are not owners
of the current token (in principal_ids), raise ParticipationError.
Otherwise call utility.register, fire ExpirationChangedEvent.
"""),
required=False)
remaining_duration = schema.Timedelta(
description=(
u"""the remaining effective duration for the token from "now".
None indicates no expiration. If the token has ended, return
a datetime.timedelta of no time.
When setting, if token has ended then raise EndedError.
If all of the principals in the current interaction are not owners
of the current token (in principal_ids), raise ParticipationError.
Otherwise call utility.register, fire ExpirationChangedEvent.
"""),
required=False)
def release(principal_ids=None): # may only remove ids in interaction.
"""remove the given principal_ids from the token, or all in interaction.
All explicitly given principal_ids must be in interaction. Silently
ignores requests to remove principals who are not currently part of
token.
Ends the lock if the removed principals were the only principals.
Raises EndedError if lock has already ended.
"""
# Concrete principal token interfaces.
class IExclusiveLockHandler(ITokenHandler):
"""an exclusive lock"""
class ISharedLockHandler(ITokenHandler):
"""a shared lock"""
def join(principal_ids=None):
"""add the given principal_ids to the token, or all in interaction.
All explicitly given principal_ids must be in interaction. Silently
ignores requests to add principal_ids that are already part of the
token.
Raises EndedError if lock has already ended.
"""
def add(principal_ids):
"""Share current shared lock with principal_ids.
If all of the principals in the current interaction are not owners
of the current token (in principal_ids), raise ParticipationError."""
##############################################################################
# Events
##############################################################################
# event interfaces
class ITokenEvent(IObjectEvent):
"""a token event"""
class ITokenStartedEvent(ITokenEvent):
"""An token has started"""
class ITokenEndedEvent(ITokenEvent):
"""A token has been explicitly ended.
Note that this is not fired when a lock expires."""
class IPrincipalsChangedEvent(ITokenEvent):
"""Principals have changed for a token"""
old = interface.Attribute('a frozenset of the old principals')
class IExpirationChangedEvent(ITokenEvent):
"""Expiration value changed for a token"""
old = interface.Attribute('the old expiration value')
# events
@interface.implementer(ITokenStartedEvent)
class TokenStartedEvent(ObjectEvent):
pass
@interface.implementer(ITokenEndedEvent)
class TokenEndedEvent(ObjectEvent):
pass
@interface.implementer(IPrincipalsChangedEvent)
class PrincipalsChangedEvent(ObjectEvent):
def __init__(self, object, old):
super(PrincipalsChangedEvent, self).__init__(object)
self.old = frozenset(old)
@interface.implementer(IExpirationChangedEvent)
class ExpirationChangedEvent(ObjectEvent):
def __init__(self, object, old):
super(ExpirationChangedEvent, self).__init__(object)
self.old = old
##############################################################################
# Exceptions
##############################################################################
class TokenRuntimeError(RuntimeError):
"""A general runtime error in the token code."""
class EndedError(TokenRuntimeError):
"""The token has ended"""
class UnregisteredError(TokenRuntimeError):
"""The token has not yet been registered"""
class ParticipationError(TokenRuntimeError):
"""Some or all of the principals in the current interaction do not
participate in the token"""
class RegistrationError(TokenRuntimeError):
"""The token may not be registered""" | zope.locking | /zope.locking-2.1.0.tar.gz/zope.locking-2.1.0/src/zope/locking/interfaces.py | interfaces.py |
import zope.security.management
from zope import interface, component
from zope.locking import interfaces, tokens
@component.adapter(interface.Interface)
@interface.implementer(interfaces.ITokenBroker)
class TokenBroker(object):
def __init__(self, context):
self.context = self.__parent__ = context
self.utility = component.getUtility(
interfaces.ITokenUtility, context=context)
# for subclasses to call, to avoid duplicating code
def _getLockPrincipalId(self, principal_id):
interaction_principals = getInteractionPrincipals()
if principal_id is None:
if (interaction_principals is None
or len(interaction_principals) != 1):
raise ValueError
principal_id = next(iter(interaction_principals))
elif (interaction_principals is None or
principal_id not in interaction_principals):
raise interfaces.ParticipationError
return principal_id
def lock(self, principal_id=None, duration=None):
principal_id = self._getLockPrincipalId(principal_id)
return self.utility.register(
tokens.ExclusiveLock(self.context, principal_id, duration))
# for subclasses to call, to avoid duplicating code
def _getSharedLockPrincipalIds(self, principal_ids):
interaction_principals = getInteractionPrincipals()
if principal_ids is None:
if (interaction_principals is None
or len(interaction_principals) < 1):
raise ValueError
principal_ids = interaction_principals
elif (interaction_principals is None or
set(principal_ids).difference(interaction_principals)):
raise interfaces.ParticipationError
return principal_ids
def lockShared(self, principal_ids=None, duration=None):
principal_ids = self._getSharedLockPrincipalIds(principal_ids)
return self.utility.register(
tokens.SharedLock(self.context, principal_ids, duration))
def freeze(self, duration=None):
return self.utility.register(
tokens.EndableFreeze(self.context, duration))
def get(self):
return self.utility.get(self.context)
def getInteractionPrincipals():
interaction = zope.security.management.queryInteraction()
if interaction is not None:
return set(p.principal.id for p in interaction.participations)
class TokenHandler(object):
def __init__(self, token):
self.__parent__ = self.token = token
def __getattr__(self, name):
return getattr(self.token, name)
def _checkInteraction(self):
if self.token.ended is not None:
            raise interfaces.EndedError
interaction_principals = getInteractionPrincipals()
token_principals = frozenset(self.token.principal_ids)
if interaction_principals is not None:
omitted = interaction_principals.difference(token_principals)
if omitted:
raise interfaces.ParticipationError(omitted)
return interaction_principals, token_principals
def _getPrincipalIds(self, principal_ids):
interaction_principals, token_principals = self._checkInteraction()
if principal_ids is None:
principal_ids = interaction_principals or ()
else:
for p in principal_ids:
if p not in interaction_principals:
raise ValueError(p)
return principal_ids, interaction_principals, token_principals
@property
def expiration(self):
return self.token.expiration
@expiration.setter
def expiration(self, value):
self._checkInteraction()
self.token.expiration = value
@property
def duration(self):
return self.token.duration
@duration.setter
def duration(self, value):
self._checkInteraction()
self.token.duration = value
@property
def remaining_duration(self):
return self.token.remaining_duration
@remaining_duration.setter
def remaining_duration(self, value):
self._checkInteraction()
self.token.remaining_duration = value
def release(self, principal_ids=None):
raise NotImplementedError
@component.adapter(interfaces.IExclusiveLock)
@interface.implementer(interfaces.IExclusiveLockHandler)
class ExclusiveLockHandler(TokenHandler):
def release(self, principal_ids=None):
pids, interaction_pids, token_pids = self._getPrincipalIds(
principal_ids)
remaining = token_pids.difference(pids)
if not remaining:
self.token.end()
@component.adapter(interfaces.ISharedLock)
@interface.implementer(interfaces.ISharedLockHandler)
class SharedLockHandler(TokenHandler):
def release(self, principal_ids=None):
pids, interaction_pids, token_pids = self._getPrincipalIds(
principal_ids)
self.token.remove(pids)
def join(self, principal_ids=None):
interaction_principals = getInteractionPrincipals()
if principal_ids is None:
if interaction_principals is None:
raise ValueError
principal_ids = interaction_principals
elif set(principal_ids).difference(interaction_principals):
raise interfaces.ParticipationError
self.token.add(principal_ids)
def add(self, principal_ids):
self._checkInteraction()
self.token.add(principal_ids) | zope.locking | /zope.locking-2.1.0.tar.gz/zope.locking-2.1.0/src/zope/locking/adapters.py | adapters.py |
This file is for annoying tests that were not appropriate for the README but
should be included for completeness.
This is some setup that the tests need.
>>> from zope.locking import utility, interfaces, tokens
>>> util = utility.TokenUtility()
>>> from zope.interface.verify import verifyObject
>>> verifyObject(interfaces.ITokenUtility, util)
True
>>> from zope import interface, component
>>> conn = get_connection()
>>> conn.add(util)
>>> import datetime
>>> import pytz
>>> before_creation = datetime.datetime.now(pytz.utc)
>>> from zope.locking.testing import Demo
>>> demo = Demo()
----------------------------------
Timed Expirations for Shared Locks
----------------------------------
Timed expirations work the same as with exclusive locks.
>>> one = datetime.timedelta(hours=1)
>>> two = datetime.timedelta(hours=2)
>>> three = datetime.timedelta(hours=3)
>>> four = datetime.timedelta(hours=4)
>>> lock = util.register(
... tokens.SharedLock(demo, ('john', 'mary'), duration=three))
>>> lock.duration
datetime.timedelta(seconds=10800)
>>> three >= lock.remaining_duration >= two
True
>>> lock.expiration == lock.started + lock.duration
True
>>> ((before_creation + three) <=
... (lock.expiration) <=
... (before_creation + four))
True
>>> lock.ended is None
True
>>> util.get(demo) is lock
True
>>> list(util.iterForPrincipalId('john')) == [lock]
True
>>> list(util.iterForPrincipalId('mary')) == [lock]
True
>>> list(util) == [lock]
True
Again, expirations can be changed while a lock is still active, using any of
the `expiration`, `remaining_duration` or `duration` attributes. All changes
fire events. First we'll change the expiration attribute.
>>> lock.expiration = lock.started + one
>>> lock.expiration == lock.started + one
True
>>> lock.duration == one
True
>>> from zope.component.eventtesting import events
>>> ev = events[-1]
>>> verifyObject(interfaces.IExpirationChangedEvent, ev)
True
>>> ev.object is lock
True
>>> ev.old == lock.started + three
True
Next we'll change the duration attribute.
>>> lock.duration = four
>>> lock.duration
datetime.timedelta(seconds=14400)
>>> four >= lock.remaining_duration >= three
True
>>> ev = events[-1]
>>> verifyObject(interfaces.IExpirationChangedEvent, ev)
True
>>> ev.object is lock
True
>>> ev.old == lock.started + one
True
Now we'll hack our code to make it think that it is two hours later, and then
check and modify the remaining_duration attribute.
>>> def hackNow():
... return (datetime.datetime.now(pytz.utc) +
... datetime.timedelta(hours=2))
...
>>> import zope.locking.utils
>>> oldNow = zope.locking.utils.now
>>> zope.locking.utils.now = hackNow # make code think it's 2 hours later
>>> lock.duration
datetime.timedelta(seconds=14400)
>>> two >= lock.remaining_duration >= one
True
>>> lock.remaining_duration -= datetime.timedelta(hours=1)
>>> one >= lock.remaining_duration >= datetime.timedelta()
True
>>> three + datetime.timedelta(minutes=1) >= lock.duration >= three
True
>>> ev = events[-1]
>>> verifyObject(interfaces.IExpirationChangedEvent, ev)
True
>>> ev.object is lock
True
>>> ev.old == lock.started + four
True
Now, we'll hack our code to make it think that it's a day later. It is very
important to remember that a lock ending with a timeout ends silently--that
is, no event is fired.
>>> def hackNow():
... return (
... datetime.datetime.now(pytz.utc) + datetime.timedelta(days=1))
...
>>> zope.locking.utils.now = hackNow # make code think it is a day later
>>> lock.ended >= lock.started
True
>>> util.get(demo) is None
True
>>> lock.remaining_duration == datetime.timedelta()
True
>>> list(util.iterForPrincipalId('john')) == []
True
>>> list(util.iterForPrincipalId('mary')) == []
True
>>> list(util) == []
True
>>> lock.end()
Traceback (most recent call last):
...
zope.locking.interfaces.EndedError
Once a lock has ended, the timeout can no longer be changed.
>>> lock.duration = datetime.timedelta(days=2)
Traceback (most recent call last):
...
zope.locking.interfaces.EndedError
We'll undo the hacks, and also end the lock (that is no longer ended once
the hack is finished).
>>> zope.locking.utils.now = oldNow # undo the hack
>>> lock.end()
--------------
EndableFreezes
--------------
An endable freeze token is similar to a lock token except that it grants the
'lock' to no one.
>>> token = util.register(tokens.EndableFreeze(demo))
>>> ev = events[-1]
>>> verifyObject(interfaces.ITokenStartedEvent, ev)
True
>>> ev.object is token
True
>>> sorted(token.principal_ids)
[]
Freezes are otherwise identical to exclusive locks.
The returned token implements IEndableFreeze and provides the same
capabilities as IExclusiveLock.
>>> verifyObject(interfaces.IEndableFreeze, token)
True
>>> token.context is demo
True
>>> token.__parent__ is demo # important for security
True
>>> token.utility is util
True
>>> token.ended is None
True
>>> before_creation <= token.started <= datetime.datetime.now(pytz.utc)
True
>>> token.expiration is None
True
>>> token.duration is None
True
>>> token.remaining_duration is None
True
>>> token.end()
>>> token.ended >= token.started
True
>>> util.get(demo) is None
True
Once a token is created, the token utility knows about it. Notice that an
EndableFreeze will never be a part of an iterable of tokens by principal: by
definition, a freeze is associated with no principals.
>>> token = util.register(tokens.EndableFreeze(demo))
>>> util.get(demo) is token
True
>>> list(util) == [token]
True
As part of that knowledge, it disallows another lock or freeze on the same
object.
>>> util.register(tokens.ExclusiveLock(demo, 'mary'))
... # doctest: +ELLIPSIS
Traceback (most recent call last):
...
zope.locking.interfaces.RegistrationError: ...
>>> util.register(tokens.SharedLock(demo, ('mary', 'jane')))
... # doctest: +ELLIPSIS
Traceback (most recent call last):
...
zope.locking.interfaces.RegistrationError: ...
>>> util.register(tokens.EndableFreeze(demo))
... # doctest: +ELLIPSIS
Traceback (most recent call last):
...
zope.locking.interfaces.RegistrationError: ...
>>> token.end()
>>> util.get(demo) is None
True
The other way of ending a token is with an expiration datetime. As we'll see,
one of the most important caveats about working with timeouts is that a token
that expires because of a timeout does not fire any expiration event. It
simply starts reporting the expiration time as the value of its `ended`
attribute.
>>> one = datetime.timedelta(hours=1)
>>> two = datetime.timedelta(hours=2)
>>> three = datetime.timedelta(hours=3)
>>> four = datetime.timedelta(hours=4)
>>> token = util.register(tokens.EndableFreeze(demo, three))
>>> token.duration
datetime.timedelta(seconds=10800)
>>> three >= token.remaining_duration >= two
True
>>> token.ended is None
True
>>> util.get(demo) is token
True
>>> list(util) == [token]
True
The expiration time of a token is always the creation date plus the timeout.
>>> token.expiration == token.started + token.duration
True
>>> ((before_creation + three) <=
... (token.expiration) <= # this value is the expiration date
... (before_creation + four))
True
Expirations can be changed while a token is still active, using any of
the `expiration`, `remaining_duration` or `duration` attributes. All changes
fire events. First we'll change the expiration attribute.
>>> token.expiration = token.started + one
>>> token.expiration == token.started + one
True
>>> token.duration == one
True
>>> ev = events[-1]
>>> verifyObject(interfaces.IExpirationChangedEvent, ev)
True
>>> ev.object is token
True
>>> ev.old == token.started + three
True
Next we'll change the duration attribute.
>>> token.duration = four
>>> token.duration
datetime.timedelta(seconds=14400)
>>> four >= token.remaining_duration >= three
True
>>> ev = events[-1]
>>> verifyObject(interfaces.IExpirationChangedEvent, ev)
True
>>> ev.object is token
True
>>> ev.old == token.started + one
True
Now we'll hack our code to make it think that it is two hours later, and then
check and modify the remaining_duration attribute.
>>> def hackNow():
... return (datetime.datetime.now(pytz.utc) +
... datetime.timedelta(hours=2))
...
>>> import zope.locking.utils
>>> oldNow = zope.locking.utils.now
>>> zope.locking.utils.now = hackNow # make code think it's 2 hours later
>>> token.duration
datetime.timedelta(seconds=14400)
>>> two >= token.remaining_duration >= one
True
>>> token.remaining_duration -= one
>>> one >= token.remaining_duration >= datetime.timedelta()
True
>>> three + datetime.timedelta(minutes=1) >= token.duration >= three
True
>>> ev = events[-1]
>>> verifyObject(interfaces.IExpirationChangedEvent, ev)
True
>>> ev.object is token
True
>>> ev.old == token.started + four
True
Now, we'll hack our code to make it think that it's a day later. It is very
important to remember that a token ending with a timeout ends silently--that
is, no event is fired.
>>> def hackNow():
... return (
... datetime.datetime.now(pytz.utc) + datetime.timedelta(days=1))
...
>>> zope.locking.utils.now = hackNow # make code think it is a day later
>>> token.ended >= token.started
True
>>> util.get(demo) is None
True
>>> token.remaining_duration == datetime.timedelta()
True
>>> token.end()
Traceback (most recent call last):
...
zope.locking.interfaces.EndedError
Once a token has ended, the timeout can no longer be changed.
>>> token.duration = datetime.timedelta(days=2)
Traceback (most recent call last):
...
zope.locking.interfaces.EndedError
We'll undo the hacks, and also end the token (that is no longer ended once
the hack is finished).
>>> zope.locking.utils.now = oldNow # undo the hack
>>> token.end()
| zope.locking | /zope.locking-2.1.0.tar.gz/zope.locking-2.1.0/src/zope/locking/annoying.rst | annoying.rst |
import BTrees.OOBTree
import zope.generations.interfaces
import zope.interface
import zope.locking.interfaces
import zope.locking.utils
@zope.interface.implementer(
zope.generations.interfaces.IInstallableSchemaManager)
class SchemaManager(object):
minimum_generation = 2
generation = 2
def install(self, context):
# Clean up cruft in any existing token utilities.
# This is done here because zope.locking didn't have a
# schema manager prior to 1.2.
clean_locks(context)
def evolve(self, context, generation):
if generation == 2:
# Going from generation 1 -> 2, we need to run the token
# utility fixer again because of a deficiency it had in 1.2.
clean_locks(context)
schemaManager = SchemaManager()
def get_site_managers(app_root):
def _get_site_managers(sm):
yield sm
for sm in sm.subs:
for _sm in _get_site_managers(sm):
yield _sm
return _get_site_managers(app_root.getSiteManager())
def clean_locks(context):
"""Clean out old locks from token utilities."""
app = context.connection.root().get('Application')
if app is not None:
for util in find_token_utilities(app):
fix_token_utility(util)
def find_token_utilities(app_root):
for sm in get_site_managers(app_root):
for registration in sm.registeredUtilities():
if registration.provided is zope.locking.interfaces.ITokenUtility:
yield registration.component
def fix_token_utility(util):
""" A bug in versions of zope.locking prior to 1.2 could cause
token utilities to keep references to expired/ended locks.
This function cleans up any old locks lingering in a token
utility due to this issue.
"""
for pid in list(util._principal_ids):
# iterForPrincipalId only returns non-ended locks, so we know
# they're still good.
new_tree = BTrees.OOBTree.OOTreeSet(util.iterForPrincipalId(pid))
if new_tree:
util._principal_ids[pid] = new_tree
else:
del util._principal_ids[pid]
now = zope.locking.utils.now()
for dt, tree in list(util._expirations.items()):
if dt > now:
util._expirations[dt] = BTrees.OOBTree.OOTreeSet(tree)
else:
del util._expirations[dt]
for token in tree:
# Okay, we could just adapt token.context to IKeyReference
# here...but we don't want to touch token.context,
# because some wonky objects need a site set before
# they can be unpickled.
for key_ref, (_token, _, _) in list(util._locks.items()):
if token is _token:
del util._locks[key_ref]
break | zope.locking | /zope.locking-2.1.0.tar.gz/zope.locking-2.1.0/src/zope/locking/generations.py | generations.py |
import persistent
import persistent.interfaces
from BTrees.OOBTree import OOBTree, OOTreeSet
from zope import interface, event
from zope.keyreference.interfaces import IKeyReference
from zope.location import Location
from zope.locking import interfaces, utils
@interface.implementer(interfaces.ITokenUtility)
class TokenUtility(persistent.Persistent, Location):
def __init__(self):
self._locks = OOBTree()
self._expirations = OOBTree()
self._principal_ids = OOBTree()
def _del(self, tree, token, value):
"""remove a token for a value within either of the two index trees"""
reg = tree[value]
reg.remove(token)
if not reg:
del tree[value]
def _add(self, tree, token, value):
"""add a token for a value within either of the two index trees"""
reg = tree.get(value)
if reg is None:
reg = tree[value] = OOTreeSet()
reg.insert(token)
def _cleanup(self):
"clean out expired keys"
expiredkeys = []
for k in self._expirations.keys(max=utils.now()):
for token in self._expirations[k]:
assert token.ended
for p in token.principal_ids:
self._del(self._principal_ids, token, p)
key_ref = IKeyReference(token.context)
del self._locks[key_ref]
expiredkeys.append(k)
for k in expiredkeys:
del self._expirations[k]
def register(self, token):
assert interfaces.IToken.providedBy(token)
if token.utility is None:
token.utility = self
elif token.utility is not self:
raise ValueError('Lock is already registered with another utility')
if persistent.interfaces.IPersistent.providedBy(token):
self._p_jar.add(token)
key_ref = IKeyReference(token.context)
current = self._locks.get(key_ref)
if current is not None:
current, principal_ids, expiration = current
current_endable = interfaces.IEndable.providedBy(current)
if current is not token:
if current_endable and not current.ended:
raise interfaces.RegistrationError(token)
# expired token: clean up indexes and fall through
if current_endable and expiration is not None:
self._del(self._expirations, current, expiration)
for p in principal_ids:
self._del(self._principal_ids, current, p)
else:
# current is token; reindex and return
if current_endable and token.ended:
if expiration is not None:
self._del(self._expirations, token, expiration)
for p in principal_ids:
self._del(self._principal_ids, token, p)
del self._locks[key_ref]
else:
if current_endable and token.expiration != expiration:
# reindex timeout
if expiration is not None:
self._del(self._expirations, token, expiration)
if token.expiration is not None:
self._add(
self._expirations, token, token.expiration)
orig = frozenset(principal_ids)
new = frozenset(token.principal_ids)
removed = orig.difference(new)
added = new.difference(orig)
for p in removed:
self._del(self._principal_ids, token, p)
for p in added:
self._add(self._principal_ids, token, p)
self._locks[key_ref] = (
token,
frozenset(token.principal_ids),
current_endable and token.expiration or None)
self._cleanup()
return token
# expired current token or no current token; this is new
endable = interfaces.IEndable.providedBy(token)
self._locks[key_ref] = (
token,
frozenset(token.principal_ids),
endable and token.expiration or None)
if (endable and
token.expiration is not None):
self._add(self._expirations, token, token.expiration)
for p in token.principal_ids:
self._add(self._principal_ids, token, p)
self._cleanup()
event.notify(interfaces.TokenStartedEvent(token))
return token
def get(self, obj, default=None):
res = self._locks.get(IKeyReference(obj))
if res is not None and (
not interfaces.IEndable.providedBy(res[0])
or not res[0].ended):
return res[0]
return default
def iterForPrincipalId(self, principal_id):
locks = self._principal_ids.get(principal_id, ())
for l in locks:
assert principal_id in frozenset(l.principal_ids)
if not l.ended:
yield l
def __iter__(self):
for l in self._locks.values():
if not interfaces.IEndable.providedBy(l[0]) or not l[0].ended:
yield l[0] | zope.locking | /zope.locking-2.1.0.tar.gz/zope.locking-2.1.0/src/zope/locking/utility.py | utility.py |
import datetime
import functools
import persistent
from BTrees.OOBTree import OOBTree
from zope import interface, event
from zope.locking import interfaces, utils
NO_DURATION = datetime.timedelta()
class AnnotationsMapping(OOBTree):
"""a class on which security settings may be hung."""
@functools.total_ordering
class Token(persistent.Persistent):
def __init__(self, target):
self.context = self.__parent__ = target
self.annotations = AnnotationsMapping()
self.annotations.__parent__ = self # for security.
_principal_ids = frozenset()
@property
def principal_ids(self):
return self._principal_ids
_started = None
@property
def started(self):
if self._utility is None:
raise interfaces.UnregisteredError(self)
return self._started
_utility = None
@property
def utility(self):
return self._utility
@utility.setter
def utility(self, value):
if self._utility is not None:
if value is not self._utility:
raise ValueError('cannot reset utility')
else:
assert interfaces.ITokenUtility.providedBy(value)
self._utility = value
assert self._started is None
self._started = utils.now()
def __eq__(self, other):
return (
(self._p_jar.db().database_name, self._p_oid) ==
(other._p_jar.db().database_name, other._p_oid))
def __lt__(self, other):
return (
(self._p_jar.db().database_name, self._p_oid) <
(other._p_jar.db().database_name, other._p_oid))
class EndableToken(Token):
def __init__(self, target, duration=None):
super(EndableToken, self).__init__(target)
self._duration = duration
@property
def utility(self):
return self._utility
@utility.setter
def utility(self, value):
if self._utility is not None:
if value is not self._utility:
raise ValueError('cannot reset utility')
else:
assert interfaces.ITokenUtility.providedBy(value)
self._utility = value
assert self._started is None
self._started = utils.now()
if self._duration is not None:
self._expiration = self._started + self._duration
del self._duration # to catch bugs.
_expiration = _duration = None
@property
def expiration(self):
if self._started is None:
raise interfaces.UnregisteredError(self)
return self._expiration
@expiration.setter
def expiration(self, value):
if self._started is None:
raise interfaces.UnregisteredError(self)
if self.ended:
raise interfaces.EndedError
if value is not None:
if not isinstance(value, datetime.datetime):
raise ValueError('expiration must be datetime.datetime')
elif value.tzinfo is None:
raise ValueError('expiration must be timezone-aware')
old = self._expiration
self._expiration = value
if old != self._expiration:
self.utility.register(self)
event.notify(interfaces.ExpirationChangedEvent(self, old))
@property
def duration(self):
if self._started is None:
return self._duration
if self._expiration is None:
return None
return self._expiration - self._started
@duration.setter
def duration(self, value):
if self._started is None:
self._duration = value
else:
if self.ended:
raise interfaces.EndedError
old = self._expiration
if value is None:
self._expiration = value
elif not isinstance(value, datetime.timedelta):
raise ValueError('duration must be datetime.timedelta')
else:
if value < NO_DURATION:
raise ValueError('duration may not be negative')
self._expiration = self._started + value
if old != self._expiration:
self.utility.register(self)
event.notify(interfaces.ExpirationChangedEvent(self, old))
@property
def remaining_duration(self):
if self._started is None:
raise interfaces.UnregisteredError(self)
if self.ended is not None:
return NO_DURATION
if self._expiration is None:
return None
return self._expiration - utils.now()
@remaining_duration.setter
def remaining_duration(self, value):
if self._started is None:
raise interfaces.UnregisteredError(self)
if self.ended:
raise interfaces.EndedError
old = self._expiration
if value is None:
self._expiration = value
elif not isinstance(value, datetime.timedelta):
raise ValueError('duration must be datetime.timedelta')
else:
if value < NO_DURATION:
raise ValueError('duration may not be negative')
self._expiration = utils.now() + value
if old != self._expiration:
self.utility.register(self)
event.notify(interfaces.ExpirationChangedEvent(self, old))
_ended = None
@property
def ended(self):
if self._utility is None:
raise interfaces.UnregisteredError(self)
if self._ended is not None:
return self._ended
if (self._expiration is not None and
self._expiration <= utils.now()):
return self._expiration
def end(self):
if self.ended:
raise interfaces.EndedError
self._ended = utils.now()
self.utility.register(self)
event.notify(interfaces.TokenEndedEvent(self))
@interface.implementer(interfaces.IExclusiveLock)
class ExclusiveLock(EndableToken):
def __init__(self, target, principal_id, duration=None):
self._principal_ids = frozenset((principal_id,))
super(ExclusiveLock, self).__init__(target, duration)
@interface.implementer(interfaces.ISharedLock)
class SharedLock(EndableToken):
def __init__(self, target, principal_ids, duration=None):
self._principal_ids = frozenset(principal_ids)
super(SharedLock, self).__init__(target, duration)
def add(self, principal_ids):
if self.ended:
raise interfaces.EndedError
old = self._principal_ids
self._principal_ids = self._principal_ids.union(principal_ids)
if old != self._principal_ids:
self.utility.register(self)
event.notify(interfaces.PrincipalsChangedEvent(self, old))
def remove(self, principal_ids):
if self.ended:
raise interfaces.EndedError
old = self._principal_ids
self._principal_ids = self._principal_ids.difference(principal_ids)
if not self._principal_ids:
self.end()
elif old != self._principal_ids:
self.utility.register(self)
else:
return
# principals changed if you got here
event.notify(interfaces.PrincipalsChangedEvent(self, old))
@interface.implementer(interfaces.IEndableFreeze)
class EndableFreeze(EndableToken):
pass
@interface.implementer(interfaces.IFreeze)
class Freeze(Token):
pass | zope.locking | /zope.locking-2.1.0.tar.gz/zope.locking-2.1.0/src/zope/locking/tokens.py | tokens.py |
This file explores the cleanup mechanisms of the token utility. It looks
at implementation-specific details, rather than interface usage. It will
probably only be of interest to package maintainers, rather than package
users.
The token utility keeps three indexes of the tokens. The primary index,
`_locks`, is a mapping of
<key reference to content object>: (
<token>,
<frozenset of token principal ids>,
<token's expiration (datetime or None)>)
The utility's `get` method uses this data structure, for instance.
Another index, `_principal_ids`, maps <principal id> to <set of <tokens>>.
Its use is the `iterForPrincipalId` method.
The last index, `_expirations`, maps <token expiration datetimes> to <set of
<tokens>>. Its use is cleaning up expired tokens: every time a new
token is registered, the utility gets rid of expired tokens from all data
structures.
There are three cases in which these data structures need to be updated:
- a new token must be added to the indexes;
- expired tokens should be found and deleted (done at the same time as new
tokens are added currently); and
- a token changes and needs to be reindexed.
Let's run through some examples and check the data structures as we go. We'll
need to start with some setup.
>>> from zope.locking import utility, interfaces, tokens
>>> from zope.keyreference.interfaces import IKeyReference
>>> util = utility.TokenUtility()
>>> conn = get_connection()
>>> conn.add(util)
>>> from zope.interface.verify import verifyObject
>>> verifyObject(interfaces.ITokenUtility, util)
True
>>> import datetime
>>> import pytz
>>> before_creation = datetime.datetime.now(pytz.utc)
>>> from zope.locking.testing import Demo
>>> demo = Demo()
>>> NO_TIME = datetime.timedelta()
>>> ONE_HOUR = datetime.timedelta(hours=1)
>>> TWO_HOURS = datetime.timedelta(hours=2)
>>> THREE_HOURS = datetime.timedelta(hours=3)
>>> FOUR_HOURS = datetime.timedelta(hours=4)
As with other files, we will hack the utils module to make the package think
that time has passed.
>>> offset = NO_TIME
>>> def hackNow():
... return (datetime.datetime.now(pytz.utc) + offset)
...
>>> import zope.locking.utils
>>> oldNow = zope.locking.utils.now
>>> zope.locking.utils.now = hackNow # install the offset-based time hack
Now we simply need to set the `offset` variable to different timedelta values
to make the package think that time has passed.
Initial Token Indexing
----------------------
Let's create a lock.
>>> lock = util.register(
... tokens.SharedLock(demo, ('john', 'mary'), duration=ONE_HOUR))
Now `_locks` has a single entry: keyreference to (token, principals,
expiration).
>>> len(util._locks)
1
>>> key_ref = next(iter(util._locks))
>>> key_ref() is demo
True
>>> token, principal_ids, expiration = util._locks[key_ref]
>>> token is lock
True
>>> sorted(principal_ids)
['john', 'mary']
>>> expiration == lock.expiration
True
Similarly, `_principal_ids` has two entries now: one for each principal, which
hold a set of the current locks.
>>> sorted(util._principal_ids)
['john', 'mary']
>>> list(util._principal_ids['john']) == [lock]
True
>>> list(util._principal_ids['mary']) == [lock]
True
And `_expirations` has a single entry: the one hour duration, mapped to a set
of the one lock.
>>> len(util._expirations)
1
>>> next(iter(util._expirations)) == lock.expiration
True
>>> list(util._expirations[lock.expiration]) == [lock]
True
Token Modification
------------------
If we modify some of the token values, the indexes should be updated
accordingly.
>>> lock.duration=TWO_HOURS
>>> lock.add(('susan',))
>>> lock.remove(('mary', 'john'))
The `_locks` index still has a single entry.
>>> len(util._locks)
1
>>> key_ref = next(iter(util._locks))
>>> key_ref() is demo
True
>>> token, principal_ids, expiration = util._locks[key_ref]
>>> token is lock
True
>>> sorted(principal_ids)
['susan']
>>> expiration == token.started + TWO_HOURS == token.expiration
True
The `_principal_ids` index also has only one entry now, since susan is the
only lock owner.
>>> sorted(util._principal_ids)
['susan']
>>> list(util._principal_ids['susan']) == [lock]
True
And `_expirations` has a single entry: the two hour duration, mapped to a set
of the one lock.
>>> len(util._expirations)
1
>>> next(iter(util._expirations)) == lock.expiration
True
>>> list(util._expirations[lock.expiration]) == [lock]
True
Adding a Freeze
---------------
Let's add a freeze to look at the opposite extreme of indexing: no principals,
and no duration.
>>> frozen = Demo()
>>> freeze = util.register(tokens.EndableFreeze(frozen))
Now `_locks` has two indexed objects.
>>> len(util._locks)
2
>>> token, principals, expiration = util._locks[IKeyReference(frozen)]
>>> token is freeze
True
>>> len(principals)
0
>>> expiration is None
True
The other indexes should not have changed, though.
>>> sorted(util._principal_ids)
['susan']
>>> len(util._expirations)
1
>>> list(util._expirations[lock.expiration]) == [lock]
True
Expiration
----------
Now we'll make the lock expire by pushing the package's effective time two
hours in the future.
>>> offset = TWO_HOURS
The lock should have ended now.
>>> lock.ended == lock.expiration
True
>>> util.get(demo) is None
True
>>> list(iter(util)) == [freeze]
True
>>> list(util.iterForPrincipalId('susan'))
[]
However, if we look at the indexes, no changes have been made yet.
>>> len(util._locks)
2
>>> token, principals, expiration = util._locks[IKeyReference(demo)]
>>> token is lock
True
>>> sorted(principals)
['susan']
>>> expiration == token.expiration == token.started + TWO_HOURS
True
>>> sorted(util._principal_ids)
['susan']
>>> len(util._expirations)
1
>>> list(util._expirations[lock.expiration]) == [lock]
True
The changes won't be made for the expired lock until we register a new lock.
We'll make this one expire an hour later.
>>> another_demo = Demo()
>>> lock = util.register(
... tokens.ExclusiveLock(another_demo, 'john', ONE_HOUR))
Now all the indexes should have removed the references to the old lock.
>>> sorted(util._locks) == sorted((IKeyReference(frozen),
... IKeyReference(another_demo)))
True
>>> sorted(util._principal_ids)
['john']
>>> len(util._expirations)
1
>>> list(util._expirations[lock.expiration]) == [lock]
True
We just looked at adding a token for one object that removed the index of
an expired token of another object. Let's make sure that the story holds true
if the new token is the same as an old, expired token--the code paths are a
bit different.
We'll extend the offset by another hour to expire the new lock. As before, no
changes will have been made.
>>> offset = THREE_HOURS
>>> lock.ended == lock.expiration
True
>>> len(util._locks)
2
>>> token, principals, expiration = util._locks[
... IKeyReference(another_demo)]
>>> token is lock
True
>>> sorted(principals)
['john']
>>> expiration == token.expiration == token.started + ONE_HOUR
True
>>> sorted(util._principal_ids)
['john']
>>> len(util._expirations)
1
>>> list(util._expirations[lock.expiration]) == [lock]
True
Now, when we create a new token for the same object, the indexes are again
cleared appropriately.
>>> new_lock = util.register(
... tokens.ExclusiveLock(another_demo, 'mary', THREE_HOURS))
>>> len(util._locks)
2
>>> token, principals, expiration = util._locks[
... IKeyReference(another_demo)]
>>> token is new_lock
True
>>> sorted(principals)
['mary']
>>> expiration == token.expiration == token.started + THREE_HOURS
True
>>> sorted(util._principal_ids)
['mary']
>>> len(util._expirations)
1
>>> list(util._expirations[new_lock.expiration]) == [new_lock]
True
An issue arose when two or more expired locks are stored in the utility. When
we tried to add a third lock token the cleanup method incorrectly tried to
clean up the lock token we were trying to add.
>>> second_demo = Demo()
>>> second_lock = util.register(
... tokens.ExclusiveLock(second_demo, 'john', THREE_HOURS))
>>> len(util._expirations)
2
Now expire the two registered tokens. The offset is currently 3 hours and both
tokens were registered with 3-hour durations, so set the offset to 7 hours to
move past their expirations.
>>> offset = THREE_HOURS + FOUR_HOURS
Register the third lock token.
>>> third_demo = Demo()
>>> third_lock = util.register(
... tokens.ExclusiveLock(third_demo, 'michael', ONE_HOUR))
>>> len(util._expirations)
1
>>> list(util._expirations[third_lock.expiration]) == [third_lock]
True
Explicit Ending
---------------
If I end all the tokens, it should remove all records from the indexes.
>>> freeze.end()
>>> third_lock.end()
>>> len(util._locks)
0
>>> len(util._principal_ids)
0
>>> len(util._expirations)
0
Demo
----
The following is a regression test for a bug which prevented the token
utility from cleaning up expired tokens correctly; perhaps it is also a
somewhat more realistic demonstration of some interactions with the utility
in that it uses multiple connections to the database.
>>> offset = NO_TIME
>>> import persistent
>>> import transaction
>>> def populate(principal, conn, duration=None, n=100):
... """Add n tokens for principal to the db using conn as the connection
... to the db.
... """
... t = conn.transaction_manager.begin()
... util = token_util(conn)
... for i in range(n):
... obj = persistent.Persistent()
... conn.add(obj)
... lock = tokens.ExclusiveLock(obj, principal, duration=duration)
... ignored = util.register(lock)
... t.commit()
>>> def end(principal, conn, n=None):
... """End n tokens for the given principal using conn as the connection
... to the db.
... """
... t = conn.transaction_manager.begin()
... locks = list(token_util(conn).iterForPrincipalId(principal))
... res = len([l.end() for l in locks[:n]])
... t.commit()
... return res
>>> def get_locks(principal, conn):
... """Retrieves a list of locks for the principal using conn as the
... connection to the db.
... """
... t = conn.transaction_manager.begin()
... try:
... return list(token_util(conn)._principal_ids[principal])
... except KeyError:
... return []
>>> tm1 = transaction.TransactionManager()
>>> tm2 = transaction.TransactionManager()
>>> conn1 = get_db().open(transaction_manager=tm1)
>>> conn2 = get_db().open(transaction_manager=tm2)
We "install" the token utility.
>>> conn1.root()['token_util'] = zope.locking.utility.TokenUtility()
>>> token_util = lambda conn: conn.root()['token_util']
>>> tm1.commit()
First, we fill the token utility with 100 locks through connection 1
under the principal id of 'Dwight Holly'.
>>> populate('Dwight Holly', conn1)
Via connection 2, we end 50 of Dwight's locks.
>>> n = end('Dwight Holly', conn2, 50)
In connection 1, we verify that 50 locks have been removed.
>>> len(get_locks('Dwight Holly', conn1)) == 100 - n
True
Now we end the rest of the locks through connection 2.
>>> ignored = end('Dwight Holly', conn2)
And verify through connection 1 that Dwight now has no locks in the utility.
>>> get_locks('Dwight Holly', conn1) == []
True
>>> 'Dwight Holly' in token_util(conn1)._principal_ids
False
Dwight gets 100 more locks through connection 1, however this time they are
all set to expire in 10 minutes.
>>> populate('Dwight Holly', conn1, duration=datetime.timedelta(minutes=10))
We sync connection 2 so we can see that the locks are indeed there.
>>> conn2.sync()
>>> util = token_util(conn2)
>>> 'Dwight Holly' in util._principal_ids
True
>>> len(util._expirations) > 0
True
Now we time-travel one hour into the future, where Dwight's locks have long
since expired.
>>> offset = ONE_HOUR
Adding a new lock through connection 2 will trigger a cleanup...
>>> populate('Pete Bondurant', conn2)
...at which point we can see via connection 1 that all of Dwight's locks
are gone.
>>> conn1.sync()
>>> util = token_util(conn1)
>>> len(util._expirations)
0
>>> 'Dwight Holly' in util._principal_ids
False
>>> conn1.close()
>>> conn2.close()
Clean Up
--------
>>> zope.locking.utils.now = oldNow # undo the time hack
| zope.locking | /zope.locking-2.1.0.tar.gz/zope.locking-2.1.0/src/zope/locking/cleanup.rst | cleanup.rst |
=========
Changes
=========
3.0 (2023-08-23)
================
- Drop support for Python 2.7, 3.5, 3.6.
- Add support for Python 3.11.
2.2 (2022-04-29)
================
- Add support for Python 3.7, 3.8, 3.9, 3.10.
2.1.0 (2017-09-01)
==================
- Add support for Python 3.5 and 3.6.
- Drop support for Python 2.6 and 3.3.
- Host documentation at https://zopelogin.readthedocs.io/
2.0.0 (2014-12-24)
==================
- Add support for PyPy and PyPy3.
- Add support for Python 3.4.
- Add support for testing on Travis.
- Add support for Python 3.3.
- Replace deprecated ``zope.interface.implements`` usage with equivalent
``zope.interface.implementer`` decorator.
- Drop support for Python 2.4 and 2.5.
1.0.0 (2009-12-31)
==================
- Extracted BasicAuthAdapter and FTPAuth adapters from zope.publisher. They
should have never gone into that package in the first place.
| zope.login | /zope.login-3.0.tar.gz/zope.login-3.0/CHANGES.rst | CHANGES.rst |
================
``zope.login``
================
.. image:: https://img.shields.io/pypi/v/zope.login.svg
:target: https://pypi.python.org/pypi/zope.login/
:alt: Latest release
.. image:: https://img.shields.io/pypi/pyversions/zope.login.svg
:target: https://pypi.org/project/zope.login/
:alt: Supported Python versions
.. image:: https://github.com/zopefoundation/zope.login/actions/workflows/tests.yml/badge.svg
:target: https://github.com/zopefoundation/zope.login/actions/workflows/tests.yml
.. image:: https://coveralls.io/repos/github/zopefoundation/zope.login/badge.svg?branch=master
:target: https://coveralls.io/github/zopefoundation/zope.login?branch=master
.. image:: https://readthedocs.org/projects/zopelogin/badge/?version=latest
:target: https://zopelogin.readthedocs.io/en/latest/
:alt: Documentation Status
This package provides login helpers for `zope.publisher
<https://zopepublisher.readthedocs.io/>`_ based on the concepts of
`zope.authentication <https://zopeauthentication.readthedocs.io>`_.
This includes support for HTTP password logins and FTP logins.
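For example, once this package's ``configure.zcml`` has been loaded, a request
carrying HTTP basic-auth credentials can be adapted to the ``ILoginPassword``
interface from ``zope.authentication``. The helper below is only an
illustrative sketch (the function name and fallback handling are not part of
this package)::
    from zope.authentication.interfaces import ILoginPassword
    def credentials_from(request):
        # The registered adapter decodes the "Authorization: Basic ..." header.
        auth = ILoginPassword(request, None)
        if auth is None or auth.getLogin() is None:
            return None  # no credentials supplied
        return auth.getLogin(), auth.getPassword()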
Documentation is hosted at https://zopelogin.readthedocs.io
| zope.login | /zope.login-3.0.tar.gz/zope.login-3.0/README.rst | README.rst |
=========
Changes
=========
3.0 (2023-02-27)
================
- Add support for Python 3.9, 3.10, 3.11.
- Drop support for Python 2.7, 3.5, 3.6.
- Drop support for deprecated ``python setup.py test``.
2.5.1 (2021-04-15)
==================
- Fix test compatibility with zope.interface 5.4.
2.5.0 (2020-03-30)
==================
- Add support for Python 3.8.
- Drop support for Python 3.4.
- Ensure all objects have consistent interface resolution orders. See
`issue 17 <https://github.com/zopefoundation/zope.mimetype/issues/17>`_.
2.4.0 (2018-10-16)
==================
- Add support for Python 3.7.
- Fix DeprecationWarnings for ``IObjectEvent`` and ``ObjectEvent`` by
importing them from ``zope.interface.interfaces``. See `issue 14
<https://github.com/zopefoundation/zope.mimetype/issues/14>`_.
2.3.2 (2018-07-30)
==================
- Documentation was moved to https://zopemimetype.readthedocs.io
- Fix an AttributeError accessing the ``preferredCharset`` of an
``ICodecTerm`` when no ``ICodecPreferredCharset`` was registered.
- Reach and automatically require 100% test coverage.
2.3.1 (2018-01-09)
==================
- Only try to register the browser stuff in the ZCA when `zope.formlib` is
available as it breaks otherwise.
2.3.0 (2017-09-28)
==================
- Drop support for Python 3.3.
- Move the dependencies on ``zope.browser``, ``zope.publisher`` and
``zope.formlib`` (only needed to use the ``source`` and ``widget``
modules) into a new ``browser`` extra.
See `PR 8 <https://github.com/zopefoundation/zope.mimetype/pull/8>`_.
2.2.0 (2017-04-24)
==================
- Fix `issue 6 <https://github.com/zopefoundation/zope.mimetype/issues/6>`_:
``typegetter.smartMimeTypeGuesser`` would raise ``TypeError`` on Python 3
when the data was ``bytes`` and the ``content_type`` was ``text/html``.
- Add support for Python 3.6.
2.1.0 (2016-08-09)
==================
- Add support for Python 3.5.
- Drop support for Python 2.6.
- Fix configuring the package via its included ZCML on Python 3.
2.0.0 (2014-12-24)
==================
- Add support for PyPy and PyPy3.
- Add support for Python 3.4.
- Restore the ability to write ``from zope.mimetype import types``.
- Make ``configure.zcml`` respect the renaming of the ``types`` module
so that it can be loaded.
2.0.0a1 (2013-02-27)
====================
- Add support for Python 3.3.
- Replace deprecated ``zope.component.adapts`` usage with equivalent
``zope.component.adapter`` decorator.
- Replace deprecated ``zope.interface.implements`` usage with equivalent
``zope.interface.implementer`` decorator.
- Rename ``zope.mimetype.types`` to ``zope.mimetype.mtypes``.
- Drop support for Python 2.4 and 2.5.
1.3.1 (2010-11-10)
==================
- No longer depend on ``zope.app.form`` in ``configure.zcml`` by using
``zope.formlib`` instead, where the needed interfaces are living now.
1.3.0 (2010-06-26)
==================
- Add testing dependency on ``zope.component[test]``.
- Use zope.formlib instead of zope.app.form.browser for select widget.
- Conform to repository policy.
1.2.0 (2009-12-26)
==================
- Convert functional tests to unit tests and get rid of all extra test
dependencies as a result.
- Use the ITerms interface from zope.browser.
- Declare missing dependencies, resolved direct dependency on
zope.app.publisher.
- Import content-type parser from ``zope.contenttype``, adding a dependency on
that package.
1.1.2 (2009-05-22)
==================
- No longer depend on ``zope.app.component``.
1.1.1 (2009-04-03)
==================
- Fix wrong package version (version ``1.1.0`` was released as ``0.4.0`` at
`pypi` but as ``1.1dev`` at `download.zope.org/distribution`)
- Fix author email and home page address.
1.1.0 (2007-11-01)
==================
- Package data update.
- First public release.
1.0.0 (2007-??-??)
==================
- Initial release.
| zope.mimetype | /zope.mimetype-3.0.tar.gz/zope.mimetype-3.0/CHANGES.rst | CHANGES.rst |
zope.mimetype
=============
.. image:: https://img.shields.io/pypi/v/zope.mimetype.svg
:target: https://pypi.python.org/pypi/zope.mimetype/
:alt: Latest release
.. image:: https://img.shields.io/pypi/pyversions/zope.mimetype.svg
:target: https://pypi.org/project/zope.mimetype/
:alt: Supported Python versions
.. image:: https://travis-ci.com/zopefoundation/zope.mimetype.svg?branch=master
:target: https://travis-ci.com/zopefoundation/zope.mimetype
.. image:: https://coveralls.io/repos/github/zopefoundation/zope.mimetype/badge.svg?branch=master
:target: https://coveralls.io/github/zopefoundation/zope.mimetype?branch=master
.. image:: https://readthedocs.org/projects/zopemimetype/badge/?version=latest
:target: https://zopemimetype.readthedocs.io/en/latest/
:alt: Documentation Status
This package provides a way to work with MIME content types. There
are several interfaces defined here, many of which are used primarily
to look things up based on different bits of information.
See complete documentation at https://zopemimetype.readthedocs.io/en/latest/
| zope.mimetype | /zope.mimetype-3.0.tar.gz/zope.mimetype-3.0/README.rst | README.rst |
==============================
The Zope MIME Infrastructure
==============================
.. currentmodule:: zope.mimetype.interfaces
This package provides a way to work with MIME content types. There
are several interfaces defined here, many of which are used primarily
to look things up based on different bits of information.
The basic idea behind this is that content objects should provide an
interface based on the actual content type they implement. For
example, objects that represent text/xml or application/xml documents
should be marked with the `IContentTypeXml` interface. This can
allow additional views to be registered based on the content type, or
subscribers may be registered to perform other actions based on the
content type.
One aspect of the content type that's important for all documents is
that the content type interface determines whether the object data is
interpreted as an encoded text document. Encoded text documents, in
particular, can be decoded to obtain a single Unicode string. The
content type intefaces for encoded text must derive from
`IContentTypeEncoded`. (All content type interfaces derive from
`IContentType` and directly provide `IContentTypeInterface`.)
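For example, code that needs to know whether it can decode an object's data
can simply check for that interface (a sketch; ``doc`` again stands in for a
content object)::
    from zope.mimetype.interfaces import IContentTypeEncoded
    def is_encoded_text(doc):
        # True when doc carries a content type interface derived from
        # IContentTypeEncoded (text/xml, text/plain, and so on)
        return IContentTypeEncoded.providedBy(doc)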
The default configuration provides direct support for a variety of
common document types found in office environments.
Supported lookups
=================
Several different queries are supported by this package:
- Given a MIME type expressed as a string, the associated interface,
if any, can be retrieved using::
# `mimeType` is the MIME type as a string
interface = queryUtility(IContentTypeInterface, mimeType)
- Given a charset name, the associated `ICodec` instance can be
retrieved using::
# `charsetName` is the charset name as a string
codec = queryUtility(ICharsetCodec, charsetName)
- Given a codec, the preferred charset name can be retrieved using::
# `codec` is an `ICodec` instance:
charsetName = getUtility(ICodecPreferredCharset, codec.name).name
- Given any combination of a suggested file name, file data, and
content type header, a guess at a reasonable MIME type can be made
using::
# `filename` is a suggested file name, or None
# `data` is uploaded data, or None
# `content_type` is a Content-Type header value, or None
#
mimeType = getUtility(IMimeTypeGetter)(
name=filename, data=data, content_type=content_type)
- Given any combination of a suggested file name, file data, and
content type header, a guess at a reasonable charset name can be
made using::
# `filename` is a suggested file name, or None
# `data` is uploaded data, or None
# `content_type` is a Content-Type header value, or None
#
charsetName = getUtility(ICharsetGetter)(
name=filename, data=data, content_type=content_type)
| zope.mimetype | /zope.mimetype-3.0.tar.gz/zope.mimetype-3.0/src/zope/mimetype/README.rst | README.rst |