code
stringlengths 501
5.19M
| package
stringlengths 2
81
| path
stringlengths 9
304
| filename
stringlengths 4
145
|
---|---|---|---|
from . import utils
from .user import BaseUser
from .asset import Asset
from .enums import TeamMembershipState, try_enum
# Public API of this module.
__all__ = (
    'Team',
    'TeamMember',
)
class Team:
    """Represents an application team for a bot provided by Discord.

    Attributes
    -------------
    id: :class:`int`
        The team ID.
    name: :class:`str`
        The team name
    icon: Optional[:class:`str`]
        The icon hash, if it exists.
    owner_id: :class:`int`
        The team's owner ID.
    members: List[:class:`TeamMember`]
        A list of the members in the team

        .. versionadded:: 1.3
    """

    __slots__ = ('_state', 'id', 'name', 'icon', 'owner_id', 'members')

    def __init__(self, state, data):
        self._state = state
        self.id = utils._get_as_snowflake(data, 'id')
        self.name = data['name']
        self.icon = data['icon']
        self.owner_id = utils._get_as_snowflake(data, 'owner_user_id')
        self.members = [TeamMember(self, self._state, raw) for raw in data['members']]

    def __repr__(self):
        return '<%s id=%s name=%s>' % (self.__class__.__name__, self.id, self.name)

    @property
    def icon_url(self):
        """:class:`.Asset`: Retrieves the team's icon asset.

        This is equivalent to calling :meth:`icon_url_as` with
        the default parameters ('webp' format and a size of 1024).
        """
        return self.icon_url_as()

    def icon_url_as(self, *, format='webp', size=1024):
        """Returns an :class:`Asset` for the icon the team has.

        The format must be one of 'webp', 'jpeg', 'jpg' or 'png'.
        The size must be a power of 2 between 16 and 4096.

        .. versionadded:: 2.0

        Parameters
        -----------
        format: :class:`str`
            The format to attempt to convert the icon to. Defaults to 'webp'.
        size: :class:`int`
            The size of the image to display.

        Raises
        ------
        InvalidArgument
            Bad image format passed to ``format`` or invalid ``size``.

        Returns
        --------
        :class:`Asset`
            The resulting CDN asset.
        """
        return Asset._from_icon(self._state, self, 'team', format=format, size=size)

    @property
    def owner(self):
        """Optional[:class:`TeamMember`]: The team's owner."""
        # Linear scan; team member lists are tiny.
        for member in self.members:
            if member.id == self.owner_id:
                return member
        return None
class TeamMember(BaseUser):
    """Represents a team member in a team.

    .. container:: operations

        .. describe:: x == y

            Checks if two team members are equal.

        .. describe:: x != y

            Checks if two team members are not equal.

        .. describe:: hash(x)

            Return the team member's hash.

        .. describe:: str(x)

            Returns the team member's name with discriminator.

    .. versionadded:: 1.3

    Attributes
    -------------
    name: :class:`str`
        The team member's username.
    id: :class:`int`
        The team member's unique ID.
    discriminator: :class:`str`
        The team member's discriminator. This is given when the username has conflicts.
    avatar: Optional[:class:`str`]
        The avatar hash the team member has. Could be None.
    bot: :class:`bool`
        Specifies if the user is a bot account.
    team: :class:`Team`
        The team that the member is from.
    membership_state: :class:`TeamMembershipState`
        The membership state of the member (e.g. invited or accepted)
    """

    __slots__ = BaseUser.__slots__ + ('team', 'membership_state', 'permissions')

    def __init__(self, team, state, data):
        self.team = team
        self.membership_state = try_enum(TeamMembershipState, data['membership_state'])
        self.permissions = data['permissions']
        # The nested 'user' object carries the BaseUser fields.
        super().__init__(state=state, data=data['user'])

    def __repr__(self):
        return '<{0.__class__.__name__} id={0.id} name={0.name!r} ' \
               'discriminator={0.discriminator!r} membership_state={0.membership_state!r}>'.format(self)
import datetime
import inspect
import itertools
import sys
from operator import attrgetter
import discord.abc
from . import utils
from .errors import ClientException
from .user import BaseUser, User
from .activity import create_activity
from .permissions import Permissions
from .enums import Status, try_enum
from .colour import Colour
from .object import Object
class VoiceState:
    """Represents a Discord user's voice state.

    Attributes
    ------------
    deaf: :class:`bool`
        Indicates if the user is currently deafened by the guild.
    mute: :class:`bool`
        Indicates if the user is currently muted by the guild.
    self_mute: :class:`bool`
        Indicates if the user is currently muted by their own accord.
    self_deaf: :class:`bool`
        Indicates if the user is currently deafened by their own accord.
    self_stream: :class:`bool`
        Indicates if the user is currently streaming via 'Go Live' feature.

        .. versionadded:: 1.3
    self_video: :class:`bool`
        Indicates if the user is currently broadcasting video.
    suppress: :class:`bool`
        Indicates if the user is suppressed from speaking.
        Only applies to stage channels.

        .. versionadded:: 1.7
    requested_to_speak_at: Optional[:class:`datetime.datetime`]
        A datetime object that specifies the date and time in UTC that the member
        requested to speak. It will be ``None`` if they are not requesting to speak
        anymore or have been accepted to speak.
        Only applicable to stage channels.

        .. versionadded:: 1.7
    afk: :class:`bool`
        Indicates if the user is currently in the AFK channel in the guild.
    channel: Optional[Union[:class:`VoiceChannel`, :class:`StageChannel`]]
        The voice channel that the user is currently connected to. ``None`` if the user
        is not currently in a voice channel.
    """

    __slots__ = ('session_id', 'deaf', 'mute', 'self_mute',
                 'self_stream', 'self_video', 'self_deaf', 'afk', 'channel',
                 'requested_to_speak_at', 'suppress')

    def __init__(self, *, data, channel=None):
        self.session_id = data.get('session_id')
        self._update(data, channel)

    def _update(self, data, channel):
        get = data.get
        self.self_mute = get('self_mute', False)
        self.self_deaf = get('self_deaf', False)
        self.self_stream = get('self_stream', False)
        self.self_video = get('self_video', False)
        # NOTE: 'afk' is sourced from the 'suppress' key as well — preserved
        # from the original payload mapping.
        self.afk = get('suppress', False)
        self.mute = get('mute', False)
        self.deaf = get('deaf', False)
        self.suppress = get('suppress', False)
        self.requested_to_speak_at = utils.parse_time(get('request_to_speak_timestamp'))
        self.channel = channel

    def __repr__(self):
        shown = ('self_mute', 'self_deaf', 'self_stream', 'suppress',
                 'requested_to_speak_at', 'channel')
        inner = ' '.join('%s=%r' % (name, getattr(self, name)) for name in shown)
        return '<%s %s>' % (self.__class__.__name__, inner)
def flatten_user(cls):
    """Class decorator that mirrors :class:`User`'s public API onto *cls*.

    For each public attribute of ``BaseUser``/``User`` not already defined on
    *cls*, installs either a redirecting property (for data attributes) or a
    forwarding function (for methods) that delegates to ``self._user``.
    """
    for attr, value in itertools.chain(BaseUser.__dict__.items(), User.__dict__.items()):
        # ignore private/special methods
        if attr.startswith('_'):
            continue
        # don't override what we already have
        if attr in cls.__dict__:
            continue
        # if it's a slotted attribute or a property, redirect it
        # slotted members are implemented as member_descriptors in Type.__dict__
        # NOTE: plain functions have __annotations__; descriptors/properties do not.
        if not hasattr(value, '__annotations__'):
            getter = attrgetter('_user.' + attr)
            setattr(cls, attr, property(getter, doc='Equivalent to :attr:`User.%s`' % attr))
        else:
            # Technically, this can also use attrgetter
            # However I'm not sure how I feel about "functions" returning properties
            # It probably breaks something in Sphinx.
            # probably a member function by now
            def generate_function(x):
                # We want sphinx to properly show coroutine functions as coroutines
                if inspect.iscoroutinefunction(value):
                    async def general(self, *args, **kwargs):
                        return await getattr(self._user, x)(*args, **kwargs)
                else:
                    def general(self, *args, **kwargs):
                        return getattr(self._user, x)(*args, **kwargs)
                general.__name__ = x
                return general
            func = generate_function(attr)
            # Carry the original docstring over so Sphinx documents the proxy.
            func = utils.copy_doc(value)(func)
            setattr(cls, attr, func)
    return cls
# Alias used both as a base class of Member and in Member.__eq__ so that
# Members compare equal to User instances with the same ID.
_BaseUser = discord.abc.User
@flatten_user
class Member(discord.abc.Messageable, _BaseUser):
    """Represents a Discord member to a :class:`Guild`.

    This implements a lot of the functionality of :class:`User`.

    .. container:: operations

        .. describe:: x == y

            Checks if two members are equal.
            Note that this works with :class:`User` instances too.

        .. describe:: x != y

            Checks if two members are not equal.
            Note that this works with :class:`User` instances too.

        .. describe:: hash(x)

            Returns the member's hash.

        .. describe:: str(x)

            Returns the member's name with the discriminator.

    Attributes
    ----------
    joined_at: Optional[:class:`datetime.datetime`]
        A datetime object that specifies the date and time in UTC that the member joined the guild.
        If the member left and rejoined the guild, this will be the latest date. In certain cases, this can be ``None``.
    activities: Tuple[Union[:class:`BaseActivity`, :class:`Spotify`]]
        The activities that the user is currently doing.

        .. note::

            Due to a Discord API limitation, a user's Spotify activity may not appear
            if they are listening to a song with a title longer
            than 128 characters. See :issue:`1738` for more information.
    guild: :class:`Guild`
        The guild that the member belongs to.
    nick: Optional[:class:`str`]
        The guild specific nickname of the user.
    pending: :class:`bool`
        Whether the member is pending member verification.

        .. versionadded:: 1.6
    premium_since: Optional[:class:`datetime.datetime`]
        A datetime object that specifies the date and time in UTC when the member used their
        Nitro boost on the guild, if available. This could be ``None``.
    """

    # _user is the wrapped User object; flatten_user mirrors its public API
    # onto this class. _client_status maps platform name -> status string,
    # with the None key holding the overall status.
    __slots__ = ('_roles', 'joined_at', 'premium_since', '_client_status',
                 'activities', 'guild', 'pending', 'nick', '_user', '_state')
    def __init__(self, *, data, guild, state):
        """Build a member from a member payload; ``data`` must contain a ``user`` key."""
        self._state = state
        # store_user returns the (possibly cached) User for this payload.
        self._user = state.store_user(data['user'])
        self.guild = guild
        self.joined_at = utils.parse_time(data.get('joined_at'))
        self.premium_since = utils.parse_time(data.get('premium_since'))
        self._update_roles(data)
        # Platform -> status map; the None key is the overall status.
        self._client_status = {
            None: 'offline'
        }
        self.activities = tuple(map(create_activity, data.get('activities', [])))
        self.nick = data.get('nick', None)
        self.pending = data.get('pending', False)
def __str__(self):
return str(self._user)
def __repr__(self):
return '<Member id={1.id} name={1.name!r} discriminator={1.discriminator!r}' \
' bot={1.bot} nick={0.nick!r} guild={0.guild!r}>'.format(self, self._user)
def __eq__(self, other):
return isinstance(other, _BaseUser) and other.id == self.id
def __ne__(self, other):
return not self.__eq__(other)
def __hash__(self):
return hash(self._user)
    @classmethod
    def _from_message(cls, *, message, data):
        """Construct a Member from a message's partial member payload."""
        author = message.author
        # The payload lacks a 'user' object; synthesize it from the author.
        data['user'] = author._to_minimal_user_json()
        return cls(data=data, guild=message.guild, state=message._state)
    def _update_from_message(self, data):
        """Refresh guild-level fields from a message's member payload in place."""
        self.joined_at = utils.parse_time(data.get('joined_at'))
        self.premium_since = utils.parse_time(data.get('premium_since'))
        self._update_roles(data)
        self.nick = data.get('nick', None)
        self.pending = data.get('pending', False)
    @classmethod
    def _try_upgrade(cls, *, data, guild, state):
        """Upgrade a user payload to a Member if it carries a 'member' key.

        Returns a cached :class:`User` when no member data is present.
        """
        # A User object with a 'member' key
        try:
            member_data = data.pop('member')
        except KeyError:
            return state.store_user(data)
        else:
            # Nest the user payload the way Member.__init__ expects it.
            member_data['user'] = data
            return cls(data=member_data, guild=guild, state=state)
    @classmethod
    def _from_presence_update(cls, *, data, guild, state):
        """Return ``(updated, pre_update_clone)`` members from a presence payload."""
        clone = cls(data=data, guild=guild, state=state)
        to_return = cls(data=data, guild=guild, state=state)
        # Status strings repeat heavily across members; intern to share memory
        # and get identity-fast dict lookups.
        to_return._client_status = {
            sys.intern(key): sys.intern(value)
            for key, value in data.get('client_status', {}).items()
        }
        to_return._client_status[None] = sys.intern(data['status'])
        return to_return, clone
    @classmethod
    def _copy(cls, member):
        """Shallow-copy *member* without running __init__ (no payload needed)."""
        self = cls.__new__(cls) # to bypass __init__
        self._roles = utils.SnowflakeList(member._roles, is_sorted=True)
        self.joined_at = member.joined_at
        self.premium_since = member.premium_since
        self._client_status = member._client_status.copy()
        self.guild = member.guild
        self.nick = member.nick
        self.pending = member.pending
        self.activities = member.activities
        self._state = member._state
        # Reference will not be copied unless necessary by PRESENCE_UPDATE
        # See below
        self._user = member._user
        return self
async def _get_channel(self):
ch = await self.create_dm()
return ch
def _update_roles(self, data):
self._roles = utils.SnowflakeList(map(int, data['roles']))
    def _update(self, data):
        """Apply a partial GUILD_MEMBER_UPDATE payload in place."""
        # the nickname change is optional,
        # if it isn't in the payload then it didn't change
        try:
            self.nick = data['nick']
        except KeyError:
            pass
        # 'pending' is likewise only present when it changed.
        try:
            self.pending = data['pending']
        except KeyError:
            pass
        self.premium_since = utils.parse_time(data.get('premium_since'))
        self._update_roles(data)
    def _presence_update(self, data, user):
        """Apply a presence payload; returns (old_user, new_user) if the inner
        user changed, otherwise ``False``/``None``."""
        self.activities = tuple(map(create_activity, data.get('activities', [])))
        # Intern repeated status strings to share memory across members.
        self._client_status = {
            sys.intern(key): sys.intern(value)
            for key, value in data.get('client_status', {}).items()
        }
        self._client_status[None] = sys.intern(data['status'])
        # A user dict with more than just 'id' may carry profile changes.
        if len(user) > 1:
            return self._update_inner_user(user)
        return False
    def _update_inner_user(self, user):
        """Sync the wrapped User from a payload.

        Returns ``(old_copy, updated_user)`` when something changed so the
        caller can dispatch ``on_user_update``; implicitly returns ``None``
        when nothing changed.
        """
        u = self._user
        original = (u.name, u.avatar, u.discriminator, u._public_flags)
        # These keys seem to always be available
        modified = (user['username'], user['avatar'], user['discriminator'], user.get('public_flags', 0))
        if original != modified:
            to_return = User._copy(self._user)
            u.name, u.avatar, u.discriminator, u._public_flags = modified
            # Signal to dispatch on_user_update
            return to_return, u
@property
def status(self):
""":class:`Status`: The member's overall status. If the value is unknown, then it will be a :class:`str` instead."""
return try_enum(Status, self._client_status[None])
@property
def raw_status(self):
""":class:`str`: The member's overall status as a string value.
.. versionadded:: 1.5
"""
return self._client_status[None]
    @status.setter
    def status(self, value):
        # internal use only: store the overall status under the None key.
        self._client_status[None] = str(value)
@property
def mobile_status(self):
""":class:`Status`: The member's status on a mobile device, if applicable."""
return try_enum(Status, self._client_status.get('mobile', 'offline'))
@property
def desktop_status(self):
""":class:`Status`: The member's status on the desktop client, if applicable."""
return try_enum(Status, self._client_status.get('desktop', 'offline'))
@property
def web_status(self):
""":class:`Status`: The member's status on the web client, if applicable."""
return try_enum(Status, self._client_status.get('web', 'offline'))
def is_on_mobile(self):
""":class:`bool`: A helper function that determines if a member is active on a mobile device."""
return 'mobile' in self._client_status
@property
def colour(self):
""":class:`Colour`: A property that returns a colour denoting the rendered colour
for the member. If the default colour is the one rendered then an instance
of :meth:`Colour.default` is returned.
There is an alias for this named :attr:`color`.
"""
roles = self.roles[1:] # remove @everyone
# highest order of the colour is the one that gets rendered.
# if the highest is the default colour then the next one with a colour
# is chosen instead
for role in reversed(roles):
if role.colour.value:
return role.colour
return Colour.default()
    @property
    def color(self):
        """:class:`Colour`: A property that returns a color denoting the rendered color for
        the member. If the default color is the one rendered then an instance of :meth:`Colour.default`
        is returned.

        There is an alias for this named :attr:`colour`.
        """
        # US-spelling alias for :attr:`colour`.
        return self.colour
@property
def roles(self):
"""List[:class:`Role`]: A :class:`list` of :class:`Role` that the member belongs to. Note
that the first element of this list is always the default '@everyone'
role.
These roles are sorted by their position in the role hierarchy.
"""
result = []
g = self.guild
for role_id in self._roles:
role = g.get_role(role_id)
if role:
result.append(role)
result.append(g.default_role)
result.sort()
return result
@property
def mention(self):
""":class:`str`: Returns a string that allows you to mention the member."""
if self.nick:
return '<@!%s>' % self.id
return '<@%s>' % self.id
@property
def display_name(self):
""":class:`str`: Returns the user's display name.
For regular users this is just their username, but
if they have a guild specific nickname then that
is returned instead.
"""
return self.nick or self.name
@property
def activity(self):
"""Union[:class:`BaseActivity`, :class:`Spotify`]: Returns the primary
activity the user is currently doing. Could be ``None`` if no activity is being done.
.. note::
Due to a Discord API limitation, this may be ``None`` if
the user is listening to a song on Spotify with a title longer
than 128 characters. See :issue:`1738` for more information.
.. note::
A user may have multiple activities, these can be accessed under :attr:`activities`.
"""
if self.activities:
return self.activities[0]
def mentioned_in(self, message):
"""Checks if the member is mentioned in the specified message.
Parameters
-----------
message: :class:`Message`
The message to check if you're mentioned in.
Returns
-------
:class:`bool`
Indicates if the member is mentioned in the message.
"""
if message.guild is None or message.guild.id != self.guild.id:
return False
if self._user.mentioned_in(message):
return True
return any(self._roles.has(role.id) for role in message.role_mentions)
    def permissions_in(self, channel):
        """An alias for :meth:`abc.GuildChannel.permissions_for`.

        Basically equivalent to:

        .. code-block:: python3

            channel.permissions_for(self)

        Parameters
        -----------
        channel: :class:`abc.GuildChannel`
            The channel to check your permissions for.

        Returns
        -------
        :class:`Permissions`
            The resolved permissions for the member.
        """
        return channel.permissions_for(self)
@property
def top_role(self):
""":class:`Role`: Returns the member's highest role.
This is useful for figuring where a member stands in the role
hierarchy chain.
"""
guild = self.guild
if len(self._roles) == 0:
return guild.default_role
return max(guild.get_role(rid) or guild.default_role for rid in self._roles)
@property
def guild_permissions(self):
""":class:`Permissions`: Returns the member's guild permissions.
This only takes into consideration the guild permissions
and not most of the implied permissions or any of the
channel permission overwrites. For 100% accurate permission
calculation, please use either :meth:`permissions_in` or
:meth:`abc.GuildChannel.permissions_for`.
This does take into consideration guild ownership and the
administrator implication.
"""
if self.guild.owner_id == self.id:
return Permissions.all()
base = Permissions.none()
for r in self.roles:
base.value |= r.permissions.value
if base.administrator:
return Permissions.all()
return base
    @property
    def voice(self):
        """Optional[:class:`VoiceState`]: Returns the member's current voice state."""
        # The guild keeps the authoritative voice-state cache.
        return self.guild._voice_state_for(self._user.id)
    async def ban(self, **kwargs):
        """|coro|

        Bans this member. Equivalent to :meth:`Guild.ban`.
        """
        await self.guild.ban(self, **kwargs)
    async def unban(self, *, reason=None):
        """|coro|

        Unbans this member. Equivalent to :meth:`Guild.unban`.
        """
        await self.guild.unban(self, reason=reason)
    async def kick(self, *, reason=None):
        """|coro|

        Kicks this member. Equivalent to :meth:`Guild.kick`.
        """
        await self.guild.kick(self, reason=reason)
    async def edit(self, *, reason=None, **fields):
        """|coro|

        Edits the member's data.

        Depending on the parameter passed, this requires different permissions listed below:

        +---------------+--------------------------------------+
        |   Parameter   |              Permission              |
        +---------------+--------------------------------------+
        | nick          | :attr:`Permissions.manage_nicknames` |
        +---------------+--------------------------------------+
        | mute          | :attr:`Permissions.mute_members`     |
        +---------------+--------------------------------------+
        | deafen        | :attr:`Permissions.deafen_members`   |
        +---------------+--------------------------------------+
        | roles         | :attr:`Permissions.manage_roles`     |
        +---------------+--------------------------------------+
        | voice_channel | :attr:`Permissions.move_members`     |
        +---------------+--------------------------------------+

        All parameters are optional.

        .. versionchanged:: 1.1
            Can now pass ``None`` to ``voice_channel`` to kick a member from voice.

        Parameters
        -----------
        nick: Optional[:class:`str`]
            The member's new nickname. Use ``None`` to remove the nickname.
        mute: :class:`bool`
            Indicates if the member should be guild muted or un-muted.
        deafen: :class:`bool`
            Indicates if the member should be guild deafened or un-deafened.
        suppress: :class:`bool`
            Indicates if the member should be suppressed in stage channels.
            The member must currently be connected to a voice channel.

            .. versionadded:: 1.7
        roles: Optional[List[:class:`Role`]]
            The member's new list of roles. This *replaces* the roles.
        voice_channel: Optional[:class:`VoiceChannel`]
            The voice channel to move the member to.
            Pass ``None`` to kick them from voice.
        reason: Optional[:class:`str`]
            The reason for editing this member. Shows up on the audit log.

        Raises
        -------
        Forbidden
            You do not have the proper permissions to the action requested.
        HTTPException
            The operation failed.
        """
        http = self._state.http
        guild_id = self.guild.id
        # Editing ourselves takes different endpoints for nick and voice state.
        me = self._state.self_id == self.id
        payload = {}
        try:
            nick = fields['nick']
        except KeyError:
            # nick not present so...
            pass
        else:
            # None is normalised to '' (the API's "remove nickname" value).
            nick = nick or ''
            if me:
                await http.change_my_nickname(guild_id, nick, reason=reason)
            else:
                payload['nick'] = nick
        deafen = fields.get('deafen')
        if deafen is not None:
            payload['deaf'] = deafen
        mute = fields.get('mute')
        if mute is not None:
            payload['mute'] = mute
        suppress = fields.get('suppress')
        if suppress is not None:
            # NOTE(review): self.voice.channel raises AttributeError when the
            # member is not in a voice channel — caller must ensure they are.
            voice_state_payload = {
                'channel_id': self.voice.channel.id,
                'suppress': suppress,
            }
            if suppress or self.bot:
                voice_state_payload['request_to_speak_timestamp'] = None
            if me:
                await http.edit_my_voice_state(guild_id, voice_state_payload)
            else:
                # Un-suppressing another member marks them as requesting to speak.
                if not suppress:
                    voice_state_payload['request_to_speak_timestamp'] = datetime.datetime.utcnow().isoformat()
                await http.edit_voice_state(guild_id, self.id, voice_state_payload)
        try:
            vc = fields['voice_channel']
        except KeyError:
            pass
        else:
            # None disconnects the member from voice.
            payload['channel_id'] = vc and vc.id
        try:
            roles = fields['roles']
        except KeyError:
            pass
        else:
            payload['roles'] = tuple(r.id for r in roles)
        if payload:
            await http.edit_member(guild_id, self.id, reason=reason, **payload)
        # TODO: wait for WS event for modify-in-place behaviour
    async def request_to_speak(self):
        """|coro|

        Request to speak in the connected channel.

        Only applies to stage channels. The member must currently be
        connected to a voice channel (``self.voice`` must not be ``None``).

        .. note::

            Requesting members that are not the client is equivalent
            to :attr:`.edit` providing ``suppress`` as ``False``.

        .. versionadded:: 1.7

        Raises
        -------
        Forbidden
            You do not have the proper permissions to the action requested.
        HTTPException
            The operation failed.
        """
        payload = {
            'channel_id': self.voice.channel.id,
            'request_to_speak_timestamp': datetime.datetime.utcnow().isoformat(),
        }
        # Other members go through the generic voice-state endpoint and must
        # also be un-suppressed; ourselves use the dedicated endpoint.
        if self._state.self_id != self.id:
            payload['suppress'] = False
            await self._state.http.edit_voice_state(self.guild.id, self.id, payload)
        else:
            await self._state.http.edit_my_voice_state(self.guild.id, payload)
    async def move_to(self, channel, *, reason=None):
        """|coro|

        Moves a member to a new voice channel (they must be connected first).

        You must have the :attr:`~Permissions.move_members` permission to
        use this.

        This raises the same exceptions as :meth:`edit`.

        .. versionchanged:: 1.1
            Can now pass ``None`` to kick a member from voice.

        Parameters
        -----------
        channel: Optional[:class:`VoiceChannel`]
            The new voice channel to move the member to.
            Pass ``None`` to kick them from voice.
        reason: Optional[:class:`str`]
            The reason for doing this action. Shows up on the audit log.
        """
        # Thin wrapper over edit's voice_channel parameter.
        await self.edit(voice_channel=channel, reason=reason)
async def add_roles(self, *roles, reason=None, atomic=True):
r"""|coro|
Gives the member a number of :class:`Role`\s.
You must have the :attr:`~Permissions.manage_roles` permission to
use this, and the added :class:`Role`\s must appear lower in the list
of roles than the highest role of the member.
Parameters
-----------
\*roles: :class:`abc.Snowflake`
An argument list of :class:`abc.Snowflake` representing a :class:`Role`
to give to the member.
reason: Optional[:class:`str`]
The reason for adding these roles. Shows up on the audit log.
atomic: :class:`bool`
Whether to atomically add roles. This will ensure that multiple
operations will always be applied regardless of the current
state of the cache.
Raises
-------
Forbidden
You do not have permissions to add these roles.
HTTPException
Adding roles failed.
"""
if not atomic:
new_roles = utils._unique(Object(id=r.id) for s in (self.roles[1:], roles) for r in s)
await self.edit(roles=new_roles, reason=reason)
else:
req = self._state.http.add_role
guild_id = self.guild.id
user_id = self.id
for role in roles:
await req(guild_id, user_id, role.id, reason=reason)
async def remove_roles(self, *roles, reason=None, atomic=True):
r"""|coro|
Removes :class:`Role`\s from this member.
You must have the :attr:`~Permissions.manage_roles` permission to
use this, and the removed :class:`Role`\s must appear lower in the list
of roles than the highest role of the member.
Parameters
-----------
\*roles: :class:`abc.Snowflake`
An argument list of :class:`abc.Snowflake` representing a :class:`Role`
to remove from the member.
reason: Optional[:class:`str`]
The reason for removing these roles. Shows up on the audit log.
atomic: :class:`bool`
Whether to atomically remove roles. This will ensure that multiple
operations will always be applied regardless of the current
state of the cache.
Raises
-------
Forbidden
You do not have permissions to remove these roles.
HTTPException
Removing the roles failed.
"""
if not atomic:
new_roles = [Object(id=r.id) for r in self.roles[1:]] # remove @everyone
for role in roles:
try:
new_roles.remove(Object(id=role.id))
except ValueError:
pass
await self.edit(roles=new_roles, reason=reason)
else:
req = self._state.http.remove_role
guild_id = self.guild.id
user_id = self.id
for role in roles:
await req(guild_id, user_id, role.id, reason=reason) | zidiscord.py | /zidiscord.py-1.7.3.3.tar.gz/zidiscord.py-1.7.3.3/discord/member.py | member.py |
from .flags import BaseFlags, flag_value, fill_with_flags, alias_flag_value
# Public API of this module.
__all__ = (
    'Permissions',
    'PermissionOverwrite',
)
# A permission alias works like a regular flag but is marked
# So the PermissionOverwrite knows to work with it
class permission_alias(alias_flag_value):
    """Marker subclass distinguishing permission aliases from real flag bits."""
    pass
def make_permission_alias(alias):
    """Decorator factory: wrap a flag function as a :class:`permission_alias`
    whose ``alias`` attribute names the canonical permission it mirrors."""
    def decorator(func):
        wrapped = permission_alias(func)
        wrapped.alias = alias
        return wrapped
    return decorator
@fill_with_flags()
class Permissions(BaseFlags):
    """Wraps up the Discord permission value.

    The properties provided are two way. You can set and retrieve individual
    bits using the properties as if they were regular bools. This allows
    you to edit permissions.

    .. versionchanged:: 1.3
        You can now use keyword arguments to initialize :class:`Permissions`
        similar to :meth:`update`.

    .. container:: operations

        .. describe:: x == y

            Checks if two permissions are equal.

        .. describe:: x != y

            Checks if two permissions are not equal.

        .. describe:: x <= y

            Checks if a permission is a subset of another permission.

        .. describe:: x >= y

            Checks if a permission is a superset of another permission.

        .. describe:: x < y

            Checks if a permission is a strict subset of another permission.

        .. describe:: x > y

            Checks if a permission is a strict superset of another permission.

        .. describe:: hash(x)

            Return the permission's hash.

        .. describe:: iter(x)

            Returns an iterator of ``(perm, value)`` pairs. This allows it
            to be, for example, constructed as a dict or a list of pairs.
            Note that aliases are not shown.

    Attributes
    -----------
    value: :class:`int`
        The raw value. This value is a bit array field of a 53-bit integer
        representing the currently available permissions. You should query
        permissions via the properties rather than using this raw value.
    """

    # All storage lives on BaseFlags.value; no per-instance __dict__.
    __slots__ = ()
def __init__(self, permissions=0, **kwargs):
if not isinstance(permissions, int):
raise TypeError('Expected int parameter, received %s instead.' % permissions.__class__.__name__)
self.value = permissions
for key, value in kwargs.items():
if key not in self.VALID_FLAGS:
raise TypeError('%r is not a valid permission name.' % key)
setattr(self, key, value)
def is_subset(self, other):
"""Returns ``True`` if self has the same or fewer permissions as other."""
if isinstance(other, Permissions):
return (self.value & other.value) == self.value
else:
raise TypeError("cannot compare {} with {}".format(self.__class__.__name__, other.__class__.__name__))
def is_superset(self, other):
"""Returns ``True`` if self has the same or more permissions as other."""
if isinstance(other, Permissions):
return (self.value | other.value) == self.value
else:
raise TypeError("cannot compare {} with {}".format(self.__class__.__name__, other.__class__.__name__))
def is_strict_subset(self, other):
"""Returns ``True`` if the permissions on other are a strict subset of those on self."""
return self.is_subset(other) and self != other
def is_strict_superset(self, other):
"""Returns ``True`` if the permissions on other are a strict superset of those on self."""
return self.is_superset(other) and self != other
__le__ = is_subset
__ge__ = is_superset
__lt__ = is_strict_subset
__gt__ = is_strict_superset
@classmethod
def none(cls):
"""A factory method that creates a :class:`Permissions` with all
permissions set to ``False``."""
return cls(0)
    @classmethod
    def all(cls):
        """A factory method that creates a :class:`Permissions` with all
        permissions set to ``True``.
        """
        # All 33 currently-defined permission bits set.
        return cls(0b111111111111111111111111111111111)
    @classmethod
    def all_channel(cls):
        """A :class:`Permissions` with all channel-specific permissions set to
        ``True`` and the guild-specific ones set to ``False``. The guild-specific
        permissions are currently:

        - :attr:`manage_emojis`
        - :attr:`view_audit_log`
        - :attr:`view_guild_insights`
        - :attr:`manage_guild`
        - :attr:`change_nickname`
        - :attr:`manage_nicknames`
        - :attr:`kick_members`
        - :attr:`ban_members`
        - :attr:`administrator`

        .. versionchanged:: 1.7
           Added :attr:`stream`, :attr:`priority_speaker` and :attr:`use_slash_commands` permissions.
        """
        # Hand-maintained bitmask; update alongside new permission bits.
        return cls(0b10110011111101111111111101010001)
    @classmethod
    def general(cls):
        """A factory method that creates a :class:`Permissions` with all
        "General" permissions from the official Discord UI set to ``True``.

        .. versionchanged:: 1.7
           Permission :attr:`read_messages` is now included in the general permissions, but
           permissions :attr:`administrator`, :attr:`create_instant_invite`, :attr:`kick_members`,
           :attr:`ban_members`, :attr:`change_nickname` and :attr:`manage_nicknames` are
           no longer part of the general permissions.
        """
        # Mirrors the Discord client's "General" permission category.
        return cls(0b01110000000010000000010010110000)
    @classmethod
    def membership(cls):
        """A factory method that creates a :class:`Permissions` with all
        "Membership" permissions from the official Discord UI set to ``True``.

        .. versionadded:: 1.7
        """
        # Mirrors the Discord client's "Membership" permission category.
        return cls(0b00001100000000000000000000000111)
    @classmethod
    def text(cls):
        """A factory method that creates a :class:`Permissions` with all
        "Text" permissions from the official Discord UI set to ``True``.

        .. versionchanged:: 1.7
           Permission :attr:`read_messages` is no longer part of the text permissions.
           Added :attr:`use_slash_commands` permission.
        """
        # Mirrors the Discord client's "Text" permission category.
        return cls(0b10000000000001111111100001000000)
    @classmethod
    def voice(cls):
        """A factory method that creates a :class:`Permissions` with all
        "Voice" permissions from the official Discord UI set to ``True``."""
        # Mirrors the Discord client's "Voice" permission category.
        return cls(0b00000011111100000000001100000000)
    @classmethod
    def stage(cls):
        """A factory method that creates a :class:`Permissions` with all
        "Stage Channel" permissions from the official Discord UI set to ``True``.

        .. versionadded:: 1.7
        """
        # Bit 32 is the sole stage-channel permission.
        return cls(1 << 32)
    @classmethod
    def stage_moderator(cls):
        """A factory method that creates a :class:`Permissions` with all
        "Stage Moderator" permissions from the official Discord UI set to ``True``.

        .. versionadded:: 1.7
        """
        # Mirrors the Discord client's "Stage Moderator" permission set.
        return cls(0b100000001010000000000000000000000)
    @classmethod
    def advanced(cls):
        """A factory method that creates a :class:`Permissions` with all
        "Advanced" permissions from the official Discord UI set to ``True``.

        .. versionadded:: 1.7
        """
        # Bit 3 is administrator — the only "Advanced" permission.
        return cls(1 << 3)
def update(self, **kwargs):
r"""Bulk updates this permission object.
Allows you to set multiple attributes by using keyword
arguments. The names must be equivalent to the properties
listed. Extraneous key/value pairs will be silently ignored.
Parameters
------------
\*\*kwargs
A list of key/value pairs to bulk update permissions with.
"""
for key, value in kwargs.items():
if key in self.VALID_FLAGS:
setattr(self, key, value)
def handle_overwrite(self, allow, deny):
# Basically this is what's happening here.
# We have an original bit array, e.g. 1010
# Then we have another bit array that is 'denied', e.g. 1111
# And then we have the last one which is 'allowed', e.g. 0101
# We want original OP denied to end up resulting in
# whatever is in denied to be set to 0.
# So 1010 OP 1111 -> 0000
# Then we take this value and look at the allowed values.
# And whatever is allowed is set to 1.
# So 0000 OP2 0101 -> 0101
# The OP is base & ~denied.
# The OP2 is base | allowed.
self.value = (self.value & ~deny) | allow
    # -- Individual permission flags. Each descriptor returns its raw bit
    # -- value (1 << bit index); the bit positions are Discord API contract.
    @flag_value
    def create_instant_invite(self):
        """:class:`bool`: Returns ``True`` if the user can create instant invites."""
        return 1 << 0
    @flag_value
    def kick_members(self):
        """:class:`bool`: Returns ``True`` if the user can kick users from the guild."""
        return 1 << 1
    @flag_value
    def ban_members(self):
        """:class:`bool`: Returns ``True`` if a user can ban users from the guild."""
        return 1 << 2
    @flag_value
    def administrator(self):
        """:class:`bool`: Returns ``True`` if a user is an administrator. This role overrides all other permissions.
        This also bypasses all channel-specific overrides.
        """
        return 1 << 3
    @flag_value
    def manage_channels(self):
        """:class:`bool`: Returns ``True`` if a user can edit, delete, or create channels in the guild.
        This also corresponds to the "Manage Channel" channel-specific override."""
        return 1 << 4
    @flag_value
    def manage_guild(self):
        """:class:`bool`: Returns ``True`` if a user can edit guild properties."""
        return 1 << 5
    @flag_value
    def add_reactions(self):
        """:class:`bool`: Returns ``True`` if a user can add reactions to messages."""
        return 1 << 6
    @flag_value
    def view_audit_log(self):
        """:class:`bool`: Returns ``True`` if a user can view the guild's audit log."""
        return 1 << 7
    @flag_value
    def priority_speaker(self):
        """:class:`bool`: Returns ``True`` if a user can be more easily heard while talking."""
        return 1 << 8
    @flag_value
    def stream(self):
        """:class:`bool`: Returns ``True`` if a user can stream in a voice channel."""
        return 1 << 9
    @flag_value
    def read_messages(self):
        """:class:`bool`: Returns ``True`` if a user can read messages from all or specific text channels."""
        return 1 << 10
    @make_permission_alias('read_messages')
    def view_channel(self):
        """:class:`bool`: An alias for :attr:`read_messages`.
        .. versionadded:: 1.3
        """
        return 1 << 10
    # -- Text-channel related permission bits (11-19). --
    @flag_value
    def send_messages(self):
        """:class:`bool`: Returns ``True`` if a user can send messages from all or specific text channels."""
        return 1 << 11
    @flag_value
    def send_tts_messages(self):
        """:class:`bool`: Returns ``True`` if a user can send TTS messages from all or specific text channels."""
        return 1 << 12
    @flag_value
    def manage_messages(self):
        """:class:`bool`: Returns ``True`` if a user can delete or pin messages in a text channel.
        .. note::
            Note that there are currently no ways to edit other people's messages.
        """
        return 1 << 13
    @flag_value
    def embed_links(self):
        """:class:`bool`: Returns ``True`` if a user's messages will automatically be embedded by Discord."""
        return 1 << 14
    @flag_value
    def attach_files(self):
        """:class:`bool`: Returns ``True`` if a user can send files in their messages."""
        return 1 << 15
    @flag_value
    def read_message_history(self):
        """:class:`bool`: Returns ``True`` if a user can read a text channel's previous messages."""
        return 1 << 16
    @flag_value
    def mention_everyone(self):
        """:class:`bool`: Returns ``True`` if a user's @everyone or @here will mention everyone in the text channel."""
        return 1 << 17
    @flag_value
    def external_emojis(self):
        """:class:`bool`: Returns ``True`` if a user can use emojis from other guilds."""
        return 1 << 18
    @make_permission_alias('external_emojis')
    def use_external_emojis(self):
        """:class:`bool`: An alias for :attr:`external_emojis`.
        .. versionadded:: 1.3
        """
        return 1 << 18
    @flag_value
    def view_guild_insights(self):
        """:class:`bool`: Returns ``True`` if a user can view the guild's insights.
        .. versionadded:: 1.3
        """
        return 1 << 19
    # -- Voice (20-25), member management (26-28) and misc (29-32) bits. --
    @flag_value
    def connect(self):
        """:class:`bool`: Returns ``True`` if a user can connect to a voice channel."""
        return 1 << 20
    @flag_value
    def speak(self):
        """:class:`bool`: Returns ``True`` if a user can speak in a voice channel."""
        return 1 << 21
    @flag_value
    def mute_members(self):
        """:class:`bool`: Returns ``True`` if a user can mute other users."""
        return 1 << 22
    @flag_value
    def deafen_members(self):
        """:class:`bool`: Returns ``True`` if a user can deafen other users."""
        return 1 << 23
    @flag_value
    def move_members(self):
        """:class:`bool`: Returns ``True`` if a user can move users between other voice channels."""
        return 1 << 24
    @flag_value
    def use_voice_activation(self):
        """:class:`bool`: Returns ``True`` if a user can use voice activation in voice channels."""
        return 1 << 25
    @flag_value
    def change_nickname(self):
        """:class:`bool`: Returns ``True`` if a user can change their nickname in the guild."""
        return 1 << 26
    @flag_value
    def manage_nicknames(self):
        """:class:`bool`: Returns ``True`` if a user can change other user's nickname in the guild."""
        return 1 << 27
    @flag_value
    def manage_roles(self):
        """:class:`bool`: Returns ``True`` if a user can create or edit roles less than their role's position.
        This also corresponds to the "Manage Permissions" channel-specific override.
        """
        return 1 << 28
    @make_permission_alias('manage_roles')
    def manage_permissions(self):
        """:class:`bool`: An alias for :attr:`manage_roles`.
        .. versionadded:: 1.3
        """
        return 1 << 28
    @flag_value
    def manage_webhooks(self):
        """:class:`bool`: Returns ``True`` if a user can create, edit, or delete webhooks."""
        return 1 << 29
    @flag_value
    def manage_emojis(self):
        """:class:`bool`: Returns ``True`` if a user can create, edit, or delete emojis."""
        return 1 << 30
    @flag_value
    def use_slash_commands(self):
        """:class:`bool`: Returns ``True`` if a user can use slash commands.
        .. versionadded:: 1.7
        """
        return 1 << 31
    @flag_value
    def request_to_speak(self):
        """:class:`bool`: Returns ``True`` if a user can request to speak in a stage channel.
        .. versionadded:: 1.7
        """
        return 1 << 32
def augment_from_permissions(cls):
    """Class decorator that mirrors every permission flag (and alias) defined
    on :class:`Permissions` onto *cls* as a get/set property backed by the
    instance's ``_values`` dict (read via ``_values.get``, written via
    ``cls._set``).
    Also populates ``cls.VALID_NAMES`` (all flag names including aliases)
    and ``cls.PURE_FLAGS`` (flag names excluding aliases).
    """
    cls.VALID_NAMES = set(Permissions.VALID_FLAGS)
    aliases = set()
    # make descriptors for all the valid names and aliases
    for name, value in Permissions.__dict__.items():
        if isinstance(value, permission_alias):
            # aliases read and write under their canonical flag's key
            key = value.alias
            aliases.add(name)
        elif isinstance(value, flag_value):
            key = name
        else:
            continue
        # god bless Python
        # ``x=key`` captures the current key at definition time; without the
        # default argument every closure would see the final loop value
        # (the classic late-binding closure pitfall).
        def getter(self, x=key):
            return self._values.get(x)
        def setter(self, value, x=key):
            self._set(x, value)
        prop = property(getter, setter)
        setattr(cls, name, prop)
    cls.PURE_FLAGS = cls.VALID_NAMES - aliases
    return cls
@augment_from_permissions
class PermissionOverwrite:
    r"""Represents a channel-specific permission overwrite.
    Unlike a regular :class:`Permissions`\, each permission here is
    tri-state: ``None`` (inherit, the default), ``False`` (explicitly
    denied) and ``True`` (explicitly allowed).
    The supported permission names are exactly those of
    :class:`Permissions`, with ``None`` added as a possible value.
    .. container:: operations
        .. describe:: x == y
            Checks if two overwrites are equal.
        .. describe:: x != y
            Checks if two overwrites are not equal.
        .. describe:: iter(x)
            Returns an iterator of ``(perm, value)`` pairs. This allows it
            to be, for example, constructed as a dict or a list of pairs.
            Note that aliases are not shown.
    Parameters
    -----------
    \*\*kwargs
        Set the value of permissions by their name.
    """
    __slots__ = ('_values',)
    def __init__(self, **kwargs):
        # Only explicitly-set permissions live in _values; everything
        # else implicitly reads back as None.
        self._values = {}
        for name, value in kwargs.items():
            if name not in self.VALID_NAMES:
                raise ValueError('no permission called {0}.'.format(name))
            setattr(self, name, value)
    def __eq__(self, other):
        if not isinstance(other, PermissionOverwrite):
            return False
        return self._values == other._values
    def _set(self, key, value):
        # Central setter used by the generated properties; None removes
        # the entry so the permission falls back to "inherit".
        if value not in (True, None, False):
            raise TypeError('Expected bool or NoneType, received {0.__class__.__name__}'.format(value))
        if value is not None:
            self._values[key] = value
        else:
            self._values.pop(key, None)
    def pair(self):
        """Tuple[:class:`Permissions`, :class:`Permissions`]: Returns the (allow, deny) pair from this overwrite."""
        allow = Permissions.none()
        deny = Permissions.none()
        for name, value in self._values.items():
            # Identity checks keep non-bool truthy values out of both sides.
            if value is True:
                setattr(allow, name, True)
            elif value is False:
                setattr(deny, name, True)
        return allow, deny
    @classmethod
    def from_pair(cls, allow, deny):
        """Creates an overwrite from an allow/deny pair of :class:`Permissions`."""
        overwrite = cls()
        for perm, enabled in allow:
            if enabled is True:
                setattr(overwrite, perm, True)
        for perm, enabled in deny:
            if enabled is True:
                setattr(overwrite, perm, False)
        return overwrite
    def is_empty(self):
        """Checks if the permission overwrite is currently empty.
        An empty overwrite has no permission explicitly set to ``True``
        or ``False``.
        Returns
        -------
        :class:`bool`
            Indicates if the overwrite is empty.
        """
        return not self._values
    def update(self, **kwargs):
        r"""Bulk updates this permission overwrite object.
        Sets multiple attributes from keyword arguments; names that do not
        correspond to a known permission are silently ignored.
        Parameters
        ------------
        \*\*kwargs
            A list of key/value pairs to bulk update with.
        """
        for name, value in kwargs.items():
            if name in self.VALID_NAMES:
                setattr(self, name, value)
    def __iter__(self):
        # Aliases are excluded; unset flags yield None.
        for name in self.PURE_FLAGS:
            yield name, self._values.get(name)
from . import utils
from .user import User
from .asset import Asset
from .team import Team
class AppInfo:
    """Represents the application info for the bot provided by Discord.
    Attributes
    -------------
    id: :class:`int`
        The application ID.
    name: :class:`str`
        The application name.
    owner: :class:`User`
        The application owner.
    team: Optional[:class:`Team`]
        The application's team.
        .. versionadded:: 1.3
    icon: Optional[:class:`str`]
        The icon hash, if it exists.
    description: Optional[:class:`str`]
        The application description.
    bot_public: :class:`bool`
        Whether the bot can be invited by anyone or if it is locked
        to the application owner.
    bot_require_code_grant: :class:`bool`
        Whether the bot requires the completion of the full oauth2 code
        grant flow to join.
    rpc_origins: Optional[List[:class:`str`]]
        A list of RPC origin URLs, if RPC is enabled.
    summary: :class:`str`
        If this application is a game sold on Discord,
        this field will be the summary field for the store page of its primary SKU.
        .. versionadded:: 1.3
    verify_key: :class:`str`
        The hex encoded key for verification in interactions and the
        GameSDK's `GetTicket <https://discord.com/developers/docs/game-sdk/applications#getticket>`_.
        .. versionadded:: 1.3
    guild_id: Optional[:class:`int`]
        If this application is a game sold on Discord,
        this field will be the guild to which it has been linked to.
        .. versionadded:: 1.3
    primary_sku_id: Optional[:class:`int`]
        If this application is a game sold on Discord,
        this field will be the id of the "Game SKU" that is created,
        if it exists.
        .. versionadded:: 1.3
    slug: Optional[:class:`str`]
        If this application is a game sold on Discord,
        this field will be the URL slug that links to the store page.
        .. versionadded:: 1.3
    cover_image: Optional[:class:`str`]
        If this application is a game sold on Discord,
        this field will be the hash of the image on store embeds
        .. versionadded:: 1.3
    """
    __slots__ = ('_state', 'description', 'id', 'name', 'rpc_origins',
                 'bot_public', 'bot_require_code_grant', 'owner', 'icon',
                 'summary', 'verify_key', 'team', 'guild_id', 'primary_sku_id',
                 'slug', 'cover_image')
    def __init__(self, state, data):
        self._state = state
        self.id = int(data['id'])
        self.name = data['name']
        self.description = data['description']
        self.icon = data['icon']
        self.rpc_origins = data['rpc_origins']
        self.bot_public = data['bot_public']
        self.bot_require_code_grant = data['bot_require_code_grant']
        self.owner = User(state=self._state, data=data['owner'])
        # 'team' is only present for team-owned applications.
        team = data.get('team')
        self.team = Team(state, team) if team else None
        self.summary = data['summary']
        self.verify_key = data['verify_key']
        # These are only present for games sold on Discord; the helper
        # returns None when the key is absent.
        self.guild_id = utils._get_as_snowflake(data, 'guild_id')
        self.primary_sku_id = utils._get_as_snowflake(data, 'primary_sku_id')
        self.slug = data.get('slug')
        self.cover_image = data.get('cover_image')
    def __repr__(self):
        return '<{0.__class__.__name__} id={0.id} name={0.name!r} description={0.description!r} public={0.bot_public} ' \
               'owner={0.owner!r}>'.format(self)
    @property
    def icon_url(self):
        """:class:`.Asset`: Retrieves the application's icon asset.
        This is equivalent to calling :meth:`icon_url_as` with
        the default parameters ('webp' format and a size of 1024).
        .. versionadded:: 1.3
        """
        return self.icon_url_as()
    def icon_url_as(self, *, format='webp', size=1024):
        """Returns an :class:`Asset` for the icon the application has.
        The format must be one of 'webp', 'jpeg', 'jpg' or 'png'.
        The size must be a power of 2 between 16 and 4096.
        .. versionadded:: 1.6
        Parameters
        -----------
        format: :class:`str`
            The format to attempt to convert the icon to. Defaults to 'webp'.
        size: :class:`int`
            The size of the image to display.
        Raises
        ------
        InvalidArgument
            Bad image format passed to ``format`` or invalid ``size``.
        Returns
        --------
        :class:`Asset`
            The resulting CDN asset.
        """
        return Asset._from_icon(self._state, self, 'app', format=format, size=size)
    @property
    def cover_image_url(self):
        """:class:`.Asset`: Retrieves the cover image on a store embed.
        This is equivalent to calling :meth:`cover_image_url_as` with
        the default parameters ('webp' format and a size of 1024).
        .. versionadded:: 1.3
        """
        return self.cover_image_url_as()
    def cover_image_url_as(self, *, format='webp', size=1024):
        """Returns an :class:`Asset` for the image on store embeds
        if this application is a game sold on Discord.
        The format must be one of 'webp', 'jpeg', 'jpg' or 'png'.
        The size must be a power of 2 between 16 and 4096.
        .. versionadded:: 1.6
        Parameters
        -----------
        format: :class:`str`
            The format to attempt to convert the image to. Defaults to 'webp'.
        size: :class:`int`
            The size of the image to display.
        Raises
        ------
        InvalidArgument
            Bad image format passed to ``format`` or invalid ``size``.
        Returns
        --------
        :class:`Asset`
            The resulting CDN asset.
        """
        return Asset._from_cover_image(self._state, self, format=format, size=size)
    @property
    def guild(self):
        """Optional[:class:`Guild`]: If this application is a game sold on Discord,
        this field will be the guild to which it has been linked
        .. versionadded:: 1.3
        """
        # BUG FIX: guild_id is None for applications that are not games sold
        # on Discord (see __init__), so int(self.guild_id) raised TypeError.
        # Return None explicitly, matching the documented Optional return.
        if self.guild_id is None:
            return None
        return self._state._get_guild(int(self.guild_id))
from .mixins import Hashable
from .asset import Asset
from .utils import snowflake_time
from .enums import StickerType, try_enum
class Sticker(Hashable):
    """Represents a Discord sticker.
    .. versionadded:: 1.6
    .. container:: operations
        .. describe:: str(x)
            Returns the name of the sticker.
        .. describe:: x == y
            Checks if the sticker is equal to another sticker.
        .. describe:: x != y
            Checks if the sticker is not equal to another sticker.
    Attributes
    ----------
    name: :class:`str`
        The sticker's name.
    id: :class:`int`
        The id of the sticker.
    description: :class:`str`
        The description of the sticker.
    pack_id: :class:`int`
        The id of the sticker's pack.
    format: :class:`StickerType`
        The format for the sticker's image.
    image: :class:`str`
        The sticker's image.
    tags: List[:class:`str`]
        A list of tags for the sticker.
    preview_image: Optional[:class:`str`]
        The sticker's preview asset hash.
    """
    __slots__ = ('_state', 'id', 'name', 'description', 'pack_id', 'format', 'image', 'tags', 'preview_image')
    def __init__(self, *, state, data):
        self._state = state
        self.id = int(data['id'])
        self.name = data['name']
        self.description = data['description']
        self.pack_id = int(data.get('pack_id', 0))
        self.format = try_enum(StickerType, data['format_type'])
        self.image = data['asset']
        # Tags arrive as one comma-separated string; normalise to a list.
        if 'tags' in data:
            self.tags = [tag.strip() for tag in data['tags'].split(',')]
        else:
            self.tags = []
        self.preview_image = data.get('preview_asset')
    def __repr__(self):
        return '<{0.__class__.__name__} id={0.id} name={0.name!r}>'.format(self)
    def __str__(self):
        return self.name
    @property
    def created_at(self):
        """:class:`datetime.datetime`: Returns the sticker's creation time in UTC as a naive datetime."""
        return snowflake_time(self.id)
    @property
    def image_url(self):
        """Returns an :class:`Asset` for the sticker's image.
        .. note::
            This will return ``None`` if the format is ``StickerType.lottie``.
        Returns
        -------
        Optional[:class:`Asset`]
            The resulting CDN asset.
        """
        return self.image_url_as()
    def image_url_as(self, *, size=1024):
        """Optionally returns an :class:`Asset` for the sticker's image.
        The size must be a power of 2 between 16 and 4096.
        .. note::
            This will return ``None`` if the format is ``StickerType.lottie``.
        Parameters
        -----------
        size: :class:`int`
            The size of the image to display.
        Raises
        ------
        InvalidArgument
            Invalid ``size``.
        Returns
        -------
        Optional[:class:`Asset`]
            The resulting CDN asset or ``None``.
        """
        # Lottie stickers are vector animations with no CDN raster image.
        if self.format is StickerType.lottie:
            return None
        return Asset._from_sticker_url(self._state, self, size=size)
import asyncio
from collections import namedtuple, deque
import concurrent.futures
import json
import logging
import struct
import sys
import time
import threading
import traceback
import zlib
import aiohttp
from . import utils
from .activity import BaseActivity
from .enums import SpeakingState
from .errors import ConnectionClosed, InvalidArgument
# Module-level logger used throughout the gateway implementation.
log = logging.getLogger(__name__)
# Public API of this module.
__all__ = (
    'DiscordWebSocket',
    'KeepAliveHandler',
    'VoiceKeepAliveHandler',
    'DiscordVoiceWebSocket',
    'ReconnectWebSocket',
)
class ReconnectWebSocket(Exception):
    """Internal signal telling the client to safely reconnect the websocket.
    ``resume`` selects whether the new connection should RESUME the existing
    session or start over with a fresh IDENTIFY.
    """
    def __init__(self, shard_id, *, resume=True):
        self.shard_id = shard_id
        self.resume = resume
        if resume:
            self.op = 'RESUME'
        else:
            self.op = 'IDENTIFY'
class WebSocketClosure(Exception):
    """Raised internally because aiohttp reports closure as a message type
    rather than signalling it with an exception of its own."""
EventListener = namedtuple('EventListener', 'predicate event result future')
class GatewayRatelimiter:
    """Fixed-window rate limiter for outbound gateway payloads."""
    def __init__(self, count=110, per=60.0):
        # The default is 110 to give room for at least 10 heartbeats per minute
        self.max = count
        self.remaining = count
        self.per = per
        self.window = 0.0
        self.lock = asyncio.Lock()
        self.shard_id = None
    def is_ratelimited(self):
        # Limited only while the current window is still open and exhausted.
        now = time.time()
        return now <= self.window + self.per and self.remaining == 0
    def get_delay(self):
        """Consumes one slot and returns how long to sleep (0.0 if none)."""
        now = time.time()
        # The previous window has elapsed; refill the budget.
        if now > self.window + self.per:
            self.remaining = self.max
        # A full budget marks the start of a new window.
        if self.remaining == self.max:
            self.window = now
        # Exhausted: report the time left until the window closes.
        if self.remaining == 0:
            return self.per - (now - self.window)
        self.remaining -= 1
        if self.remaining == 0:
            self.window = now
        return 0.0
    async def block(self):
        async with self.lock:
            delta = self.get_delay()
            if delta:
                log.warning('WebSocket in shard ID %s is ratelimited, waiting %.2f seconds', self.shard_id, delta)
                await asyncio.sleep(delta)
class KeepAliveHandler(threading.Thread):
    """Daemon thread that heartbeats the gateway websocket every ``interval``
    seconds and force-closes the connection when Discord has not been heard
    from within ``heartbeat_timeout`` seconds.
    Runs off the event-loop thread; all websocket I/O is marshalled back to
    the loop with :func:`asyncio.run_coroutine_threadsafe`.
    """
    def __init__(self, *args, **kwargs):
        ws = kwargs.pop('ws', None)
        interval = kwargs.pop('interval', None)
        shard_id = kwargs.pop('shard_id', None)
        threading.Thread.__init__(self, *args, **kwargs)
        self.ws = ws
        # Identity of the event-loop thread; used to dump its stack when a
        # heartbeat send blocks for too long.
        self._main_thread_id = ws.thread_id
        self.interval = interval
        self.daemon = True
        self.shard_id = shard_id
        self.msg = 'Keeping shard ID %s websocket alive with sequence %s.'
        self.block_msg = 'Shard ID %s heartbeat blocked for more than %s seconds.'
        self.behind_msg = 'Can\'t keep up, shard ID %s websocket is %.1fs behind.'
        self._stop_ev = threading.Event()
        self._last_ack = time.perf_counter()
        self._last_send = time.perf_counter()
        self._last_recv = time.perf_counter()
        self.latency = float('inf')
        self.heartbeat_timeout = ws._max_heartbeat_timeout
    def run(self):
        # wait() doubles as the interval sleep and the stop signal.
        while not self._stop_ev.wait(self.interval):
            # No traffic from Discord within the timeout: close with a
            # resumable code (4000) so the client reconnects.
            if self._last_recv + self.heartbeat_timeout < time.perf_counter():
                log.warning("Shard ID %s has stopped responding to the gateway. Closing and restarting.", self.shard_id)
                coro = self.ws.close(4000)
                f = asyncio.run_coroutine_threadsafe(coro, loop=self.ws.loop)
                try:
                    f.result()
                except Exception:
                    log.exception('An error occurred while stopping the gateway. Ignoring.')
                finally:
                    self.stop()
                    return
            data = self.get_payload()
            log.debug(self.msg, self.shard_id, data['d'])
            coro = self.ws.send_heartbeat(data)
            f = asyncio.run_coroutine_threadsafe(coro, loop=self.ws.loop)
            try:
                # block until sending is complete
                total = 0
                while True:
                    try:
                        f.result(10)
                        break
                    except concurrent.futures.TimeoutError:
                        # The send is stuck; log every 10s, including the
                        # event-loop thread's traceback when available.
                        total += 10
                        try:
                            frame = sys._current_frames()[self._main_thread_id]
                        except KeyError:
                            msg = self.block_msg
                        else:
                            stack = traceback.format_stack(frame)
                            msg = '%s\nLoop thread traceback (most recent call last):\n%s' % (self.block_msg, ''.join(stack))
                        log.warning(msg, self.shard_id, total)
            except Exception:
                self.stop()
            else:
                self._last_send = time.perf_counter()
    def get_payload(self):
        # Gateway HEARTBEAT carries the last seen sequence number.
        return {
            'op': self.ws.HEARTBEAT,
            'd': self.ws.sequence
        }
    def stop(self):
        self._stop_ev.set()
    def tick(self):
        # Called for every received gateway message; feeds the liveness check.
        self._last_recv = time.perf_counter()
    def ack(self):
        # Called on HEARTBEAT_ACK; updates the measured latency.
        ack_time = time.perf_counter()
        self._last_ack = ack_time
        self.latency = ack_time - self._last_send
        if self.latency > 10:
            log.warning(self.behind_msg, self.shard_id, self.latency)
class VoiceKeepAliveHandler(KeepAliveHandler):
    """Keep-alive thread for voice websockets.
    Voice heartbeats carry a millisecond timestamp instead of a sequence
    number, and an ack also counts as received traffic for the liveness
    check. A rolling window of recent ack latencies is kept for averaging.
    """
    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self.recent_ack_latencies = deque(maxlen=20)
        self.msg = 'Keeping shard ID %s voice websocket alive with timestamp %s.'
        self.block_msg = 'Shard ID %s voice heartbeat blocked for more than %s seconds'
        self.behind_msg = 'High socket latency, shard ID %s heartbeat is %.1fs behind'
    def get_payload(self):
        return {'op': self.ws.HEARTBEAT, 'd': int(time.time() * 1000)}
    def ack(self):
        now = time.perf_counter()
        self._last_ack = now
        # Unlike the gateway handler, an ack also refreshes the recv clock.
        self._last_recv = now
        self.latency = now - self._last_send
        self.recent_ack_latencies.append(self.latency)
class DiscordClientWebSocketResponse(aiohttp.ClientWebSocketResponse):
    # Overridden only to change the default close code from aiohttp's 1000
    # to 4000, so an unexpected close remains resumable by the gateway.
    async def close(self, *, code: int = 4000, message: bytes = b'') -> bool:
        return await super().close(code=code, message=message)
class DiscordWebSocket:
    """Implements a WebSocket for Discord's gateway v6.
    Attributes
    -----------
    DISPATCH
        Receive only. Denotes an event to be sent to Discord, such as READY.
    HEARTBEAT
        When received tells Discord to keep the connection alive.
        When sent asks if your connection is currently alive.
    IDENTIFY
        Send only. Starts a new session.
    PRESENCE
        Send only. Updates your presence.
    VOICE_STATE
        Send only. Starts a new connection to a voice guild.
    VOICE_PING
        Send only. Checks ping time to a voice guild, do not use.
    RESUME
        Send only. Resumes an existing connection.
    RECONNECT
        Receive only. Tells the client to reconnect to a new gateway.
    REQUEST_MEMBERS
        Send only. Asks for the full member list of a guild.
    INVALIDATE_SESSION
        Receive only. Tells the client to optionally invalidate the session
        and IDENTIFY again.
    HELLO
        Receive only. Tells the client the heartbeat interval.
    HEARTBEAT_ACK
        Receive only. Confirms receiving of a heartbeat. Not having it implies
        a connection issue.
    GUILD_SYNC
        Send only. Requests a guild sync.
    gateway
        The gateway we are currently connected to.
    token
        The authentication token for discord.
    """
    # Gateway opcodes; direction and meaning are documented in the class
    # docstring above.
    DISPATCH = 0
    HEARTBEAT = 1
    IDENTIFY = 2
    PRESENCE = 3
    VOICE_STATE = 4
    VOICE_PING = 5
    RESUME = 6
    RECONNECT = 7
    REQUEST_MEMBERS = 8
    INVALIDATE_SESSION = 9
    HELLO = 10
    HEARTBEAT_ACK = 11
    GUILD_SYNC = 12
    def __init__(self, socket, *, loop):
        self.socket = socket
        self.loop = loop
        # an empty dispatcher to prevent crashes
        self._dispatch = lambda *args: None
        # generic event listeners
        self._dispatch_listeners = []
        # the keep alive
        self._keep_alive = None
        # identity of the event-loop thread; KeepAliveHandler uses it to dump
        # the loop's stack when a heartbeat send blocks
        self.thread_id = threading.get_ident()
        # ws related stuff
        self.session_id = None
        self.sequence = None
        # streaming decompressor plus accumulation buffer for compressed
        # transport frames (see received_message)
        self._zlib = zlib.decompressobj()
        self._buffer = bytearray()
        self._close_code = None
        self._rate_limiter = GatewayRatelimiter()
    @property
    def open(self):
        # True while the underlying aiohttp websocket has not been closed.
        return not self.socket.closed
    def is_ratelimited(self):
        # Delegates to the outbound gateway send rate limiter.
        return self._rate_limiter.is_ratelimited()
    @classmethod
    async def from_client(cls, client, *, initial=False, gateway=None, shard_id=None, session=None, sequence=None, resume=False):
        """Creates a main websocket for Discord from a :class:`Client`.
        This is for internal use only.
        """
        gateway = gateway or await client.http.get_gateway()
        socket = await client.http.ws_connect(gateway)
        ws = cls(socket, loop=client.loop)
        # dynamically add attributes needed
        ws.token = client.http.token
        ws._connection = client._connection
        ws._discord_parsers = client._connection.parsers
        ws._dispatch = client.dispatch
        ws.gateway = gateway
        ws.call_hooks = client._connection.call_hooks
        ws._initial_identify = initial
        ws.shard_id = shard_id
        ws._rate_limiter.shard_id = shard_id
        ws.shard_count = client._connection.shard_count
        # session/sequence carry over state when resuming a prior connection
        ws.session_id = session
        ws.sequence = sequence
        ws._max_heartbeat_timeout = client._connection.heartbeat_timeout
        client._connection._update_references(ws)
        log.debug('Created websocket connected to %s', gateway)
        # poll event for OP Hello
        await ws.poll_event()
        # After HELLO, either start a fresh session or resume the old one.
        if not resume:
            await ws.identify()
            return ws
        await ws.resume()
        return ws
def wait_for(self, event, predicate, result=None):
"""Waits for a DISPATCH'd event that meets the predicate.
Parameters
-----------
event: :class:`str`
The event name in all upper case to wait for.
predicate
A function that takes a data parameter to check for event
properties. The data parameter is the 'd' key in the JSON message.
result
A function that takes the same data parameter and executes to send
the result to the future. If ``None``, returns the data.
Returns
--------
asyncio.Future
A future to wait for.
"""
future = self.loop.create_future()
entry = EventListener(event=event, predicate=predicate, result=result, future=future)
self._dispatch_listeners.append(entry)
return future
    async def identify(self):
        """Sends the IDENTIFY packet."""
        payload = {
            'op': self.IDENTIFY,
            'd': {
                'token': self.token,
                'properties': {
                    '$os': sys.platform,
                    '$browser': 'discord.py',
                    '$device': 'discord.py',
                    '$referrer': '',
                    '$referring_domain': ''
                },
                'compress': True,
                'large_threshold': 250,
                'guild_subscriptions': self._connection.guild_subscriptions,
                # NOTE(review): the class docstring says gateway v6 but this
                # sends 'v': 3 — confirm the intended gateway version.
                'v': 3
            }
        }
        # user accounts additionally send an (empty) synced-guilds list
        if not self._connection.is_bot:
            payload['d']['synced_guilds'] = []
        if self.shard_id is not None and self.shard_count is not None:
            payload['d']['shard'] = [self.shard_id, self.shard_count]
        state = self._connection
        # initial presence, if the user configured one before connecting
        if state._activity is not None or state._status is not None:
            payload['d']['presence'] = {
                'status': state._status,
                'game': state._activity,
                'since': 0,
                'afk': False
            }
        if state._intents is not None:
            payload['d']['intents'] = state._intents.value
        await self.call_hooks('before_identify', self.shard_id, initial=self._initial_identify)
        await self.send_as_json(payload)
        log.info('Shard ID %s has sent the IDENTIFY payload.', self.shard_id)
async def resume(self):
"""Sends the RESUME packet."""
payload = {
'op': self.RESUME,
'd': {
'seq': self.sequence,
'session_id': self.session_id,
'token': self.token
}
}
await self.send_as_json(payload)
log.info('Shard ID %s has sent the RESUME payload.', self.shard_id)
    async def received_message(self, msg):
        # Central handler for every frame from the gateway: decompresses,
        # decodes, processes control opcodes, then dispatches DISPATCH events
        # to the parsers and any registered wait_for listeners.
        self._dispatch('socket_raw_receive', msg)
        if type(msg) is bytes:
            # zlib-stream transport compression: accumulate until the frame
            # ends with the Z_SYNC_FLUSH suffix, then decompress the buffer.
            self._buffer.extend(msg)
            if len(msg) < 4 or msg[-4:] != b'\x00\x00\xff\xff':
                return
            msg = self._zlib.decompress(self._buffer)
            msg = msg.decode('utf-8')
            self._buffer = bytearray()
        msg = json.loads(msg)
        log.debug('For Shard ID %s: WebSocket Event: %s', self.shard_id, msg)
        self._dispatch('socket_response', msg)
        op = msg.get('op')
        data = msg.get('d')
        seq = msg.get('s')
        if seq is not None:
            self.sequence = seq
        # any received message counts as liveness for the keep-alive thread
        if self._keep_alive:
            self._keep_alive.tick()
        if op != self.DISPATCH:
            if op == self.RECONNECT:
                # "reconnect" can only be handled by the Client
                # so we terminate our connection and raise an
                # internal exception signalling to reconnect.
                log.debug('Received RECONNECT opcode.')
                await self.close()
                raise ReconnectWebSocket(self.shard_id)
            if op == self.HEARTBEAT_ACK:
                if self._keep_alive:
                    self._keep_alive.ack()
                return
            if op == self.HEARTBEAT:
                # Discord explicitly asked for a heartbeat; answer at once.
                if self._keep_alive:
                    beat = self._keep_alive.get_payload()
                    await self.send_as_json(beat)
                return
            if op == self.HELLO:
                interval = data['heartbeat_interval'] / 1000.0
                self._keep_alive = KeepAliveHandler(ws=self, interval=interval, shard_id=self.shard_id)
                # send a heartbeat immediately
                await self.send_as_json(self._keep_alive.get_payload())
                self._keep_alive.start()
                return
            if op == self.INVALIDATE_SESSION:
                # data is True when the session may still be resumable.
                if data is True:
                    await self.close()
                    raise ReconnectWebSocket(self.shard_id)
                self.sequence = None
                self.session_id = None
                log.info('Shard ID %s session has been invalidated.', self.shard_id)
                await self.close(code=1000)
                raise ReconnectWebSocket(self.shard_id, resume=False)
            log.warning('Unknown OP code %s.', op)
            return
        event = msg.get('t')
        if event == 'READY':
            self._trace = trace = data.get('_trace', [])
            self.sequence = msg['s']
            self.session_id = data['session_id']
            # pass back shard ID to ready handler
            data['__shard_id__'] = self.shard_id
            log.info('Shard ID %s has connected to Gateway: %s (Session ID: %s).',
                     self.shard_id, ', '.join(trace), self.session_id)
        elif event == 'RESUMED':
            self._trace = trace = data.get('_trace', [])
            # pass back the shard ID to the resumed handler
            data['__shard_id__'] = self.shard_id
            log.info('Shard ID %s has successfully RESUMED session %s under trace %s.',
                     self.shard_id, self.session_id, ', '.join(trace))
        try:
            func = self._discord_parsers[event]
        except KeyError:
            log.debug('Unknown event %s.', event)
        else:
            func(data)
        # remove the dispatched listeners
        removed = []
        for index, entry in enumerate(self._dispatch_listeners):
            if entry.event != event:
                continue
            future = entry.future
            if future.cancelled():
                removed.append(index)
                continue
            try:
                valid = entry.predicate(data)
            except Exception as exc:
                future.set_exception(exc)
                removed.append(index)
            else:
                if valid:
                    ret = data if entry.result is None else entry.result(data)
                    future.set_result(ret)
                    removed.append(index)
        # delete back-to-front so earlier indices stay valid
        for index in reversed(removed):
            del self._dispatch_listeners[index]
@property
def latency(self):
""":class:`float`: Measures latency between a HEARTBEAT and a HEARTBEAT_ACK in seconds."""
heartbeat = self._keep_alive
return float('inf') if heartbeat is None else heartbeat.latency
def _can_handle_close(self):
code = self._close_code or self.socket.close_code
return code not in (1000, 4004, 4010, 4011, 4012, 4013, 4014)
    async def poll_event(self):
        """Polls for a DISPATCH event and handles the general gateway loop.
        Raises
        ------
        ConnectionClosed
            The websocket connection was terminated for unhandled reasons.
        """
        try:
            msg = await self.socket.receive(timeout=self._max_heartbeat_timeout)
            if msg.type is aiohttp.WSMsgType.TEXT:
                await self.received_message(msg.data)
            elif msg.type is aiohttp.WSMsgType.BINARY:
                await self.received_message(msg.data)
            elif msg.type is aiohttp.WSMsgType.ERROR:
                log.debug('Received %s', msg)
                # aiohttp stores the original exception in msg.data
                raise msg.data
            elif msg.type in (aiohttp.WSMsgType.CLOSED, aiohttp.WSMsgType.CLOSING, aiohttp.WSMsgType.CLOSE):
                log.debug('Received %s', msg)
                raise WebSocketClosure
        except (asyncio.TimeoutError, WebSocketClosure) as e:
            # Ensure the keep alive handler is closed
            if self._keep_alive:
                self._keep_alive.stop()
                self._keep_alive = None
            if isinstance(e, asyncio.TimeoutError):
                log.info('Timed out receiving packet. Attempting a reconnect.')
                raise ReconnectWebSocket(self.shard_id) from None
            code = self._close_code or self.socket.close_code
            # Recoverable close codes become a reconnect signal; fatal ones
            # propagate as ConnectionClosed.
            if self._can_handle_close():
                log.info('Websocket closed with %s, attempting a reconnect.', code)
                raise ReconnectWebSocket(self.shard_id) from None
            else:
                log.info('Websocket closed with %s, cannot reconnect.', code)
                raise ConnectionClosed(self.socket, shard_id=self.shard_id, code=code) from None
async def send(self, data):
    """Sends a raw string payload over the websocket, honouring the rate limiter."""
    # Block until the gateway rate limiter allows another send.
    await self._rate_limiter.block()
    self._dispatch('socket_raw_send', data)
    await self.socket.send_str(data)
async def send_as_json(self, data):
    """Serialises ``data`` to JSON and sends it through :meth:`send`."""
    try:
        await self.send(utils.to_json(data))
    except RuntimeError as exc:
        # NOTE(review): RuntimeError presumably comes from writing to an
        # already-closing transport — re-raise as ConnectionClosed only when
        # the close code is unrecoverable; otherwise swallow and let the
        # reconnect logic handle it.
        if not self._can_handle_close():
            raise ConnectionClosed(self.socket, shard_id=self.shard_id) from exc
async def send_heartbeat(self, data):
    """Sends a heartbeat payload directly on the socket."""
    # This bypasses the rate limit handling code since it has a higher priority
    try:
        await self.socket.send_str(utils.to_json(data))
    except RuntimeError as exc:
        # Same handling as send_as_json: only escalate unrecoverable closes.
        if not self._can_handle_close():
            raise ConnectionClosed(self.socket, shard_id=self.shard_id) from exc
async def change_presence(self, *, activity=None, status=None, afk=False, since=0.0):
    """Sends a PRESENCE update for the current user."""
    if activity is not None:
        if not isinstance(activity, BaseActivity):
            raise InvalidArgument('activity must derive from BaseActivity.')
        activity = activity.to_dict()

    if status == 'idle':
        # Idle status carries a millisecond timestamp of when we went idle.
        since = int(time.time() * 1000)

    presence = {
        'game': activity,
        'afk': afk,
        'since': since,
        'status': status,
    }
    sent = utils.to_json({'op': self.PRESENCE, 'd': presence})
    log.debug('Sending "%s" to change status', sent)
    await self.send(sent)
async def request_sync(self, guild_ids):
    """Sends a GUILD_SYNC opcode for the given guild IDs."""
    await self.send_as_json({
        'op': self.GUILD_SYNC,
        'd': list(guild_ids),
    })
async def request_chunks(self, guild_id, query=None, *, limit, user_ids=None, presences=False, nonce=None):
    """Sends a REQUEST_MEMBERS opcode asking the gateway for member chunks."""
    inner = {
        'guild_id': guild_id,
        'presences': presences,
        'limit': limit,
    }
    # Optional fields are only included when supplied; note that ``query``
    # distinguishes None (omitted) from '' (request everyone).
    if nonce:
        inner['nonce'] = nonce
    if user_ids:
        inner['user_ids'] = user_ids
    if query is not None:
        inner['query'] = query

    await self.send_as_json({'op': self.REQUEST_MEMBERS, 'd': inner})
async def voice_state(self, guild_id, channel_id, self_mute=False, self_deaf=False):
    """Sends a VOICE_STATE update for the given guild and channel."""
    state = {
        'guild_id': guild_id,
        'channel_id': channel_id,
        'self_mute': self_mute,
        'self_deaf': self_deaf,
    }
    payload = {'op': self.VOICE_STATE, 'd': state}
    log.debug('Updating our voice state to %s.', payload)
    await self.send_as_json(payload)
async def close(self, code=4000):
    """Stops the heartbeat handler and closes the websocket with ``code``."""
    if self._keep_alive:
        self._keep_alive.stop()
        self._keep_alive = None

    # Remember the code so _can_handle_close/poll_event can inspect it later.
    self._close_code = code
    await self.socket.close(code=code)
class DiscordVoiceWebSocket:
    """Implements the websocket protocol for handling voice connections.

    Attributes
    -----------
    IDENTIFY
        Send only. Starts a new voice session.
    SELECT_PROTOCOL
        Send only. Tells discord what encryption mode and how to connect for voice.
    READY
        Receive only. Tells the websocket that the initial connection has completed.
    HEARTBEAT
        Send only. Keeps your websocket connection alive.
    SESSION_DESCRIPTION
        Receive only. Gives you the secret key required for voice.
    SPEAKING
        Send only. Notifies the client if you are currently speaking.
    HEARTBEAT_ACK
        Receive only. Tells you your heartbeat has been acknowledged.
    RESUME
        Sent only. Tells the client to resume its session.
    HELLO
        Receive only. Tells you that your websocket connection was acknowledged.
    RESUMED
        Sent only. Tells you that your RESUME request has succeeded.
    CLIENT_CONNECT
        Indicates a user has connected to voice.
    CLIENT_DISCONNECT
        Receive only. Indicates a user has disconnected from voice.
    """

    # Voice gateway opcodes.
    IDENTIFY = 0
    SELECT_PROTOCOL = 1
    READY = 2
    HEARTBEAT = 3
    SESSION_DESCRIPTION = 4
    SPEAKING = 5
    HEARTBEAT_ACK = 6
    RESUME = 7
    HELLO = 8
    RESUMED = 9
    CLIENT_CONNECT = 12
    CLIENT_DISCONNECT = 13

    def __init__(self, socket, loop):
        # Underlying aiohttp websocket and the event loop driving it.
        self.ws = socket
        self.loop = loop
        self._keep_alive = None
        self._close_code = None
        # Populated once SESSION_DESCRIPTION arrives (see load_secret_key).
        self.secret_key = None

    async def send_as_json(self, data):
        """Serialises ``data`` to JSON and sends it as a single text frame."""
        log.debug('Sending voice websocket frame: %s.', data)
        await self.ws.send_str(utils.to_json(data))

    # Voice heartbeats are plain JSON frames; no extra rate limiting applies.
    send_heartbeat = send_as_json

    async def resume(self):
        """Sends a RESUME payload to continue an existing voice session."""
        state = self._connection
        payload = {
            'op': self.RESUME,
            'd': {
                'token': state.token,
                'server_id': str(state.server_id),
                'session_id': state.session_id
            }
        }
        await self.send_as_json(payload)

    async def identify(self):
        """Sends an IDENTIFY payload to start a brand new voice session."""
        state = self._connection
        payload = {
            'op': self.IDENTIFY,
            'd': {
                'server_id': str(state.server_id),
                'user_id': str(state.user.id),
                'session_id': state.session_id,
                'token': state.token
            }
        }
        await self.send_as_json(payload)

    @classmethod
    async def from_client(cls, client, *, resume=False):
        """Creates a voice websocket for the :class:`VoiceClient`."""
        gateway = 'wss://' + client.endpoint + '/?v=4'
        http = client._state.http
        socket = await http.ws_connect(gateway, compress=15)
        ws = cls(socket, loop=client.loop)
        ws.gateway = gateway
        ws._connection = client
        ws._max_heartbeat_timeout = 60.0
        ws.thread_id = threading.get_ident()

        # Resume an interrupted session if asked; otherwise start fresh.
        if resume:
            await ws.resume()
        else:
            await ws.identify()

        return ws

    async def select_protocol(self, ip, port, mode):
        """Tells Discord our external UDP address and chosen encryption mode."""
        payload = {
            'op': self.SELECT_PROTOCOL,
            'd': {
                'protocol': 'udp',
                'data': {
                    'address': ip,
                    'port': port,
                    'mode': mode
                }
            }
        }
        await self.send_as_json(payload)

    async def client_connect(self):
        """Announces our audio SSRC to the voice server."""
        payload = {
            'op': self.CLIENT_CONNECT,
            'd': {
                'audio_ssrc': self._connection.ssrc
            }
        }
        await self.send_as_json(payload)

    async def speak(self, state=SpeakingState.voice):
        """Sends a SPEAKING update with the given state."""
        payload = {
            'op': self.SPEAKING,
            'd': {
                'speaking': int(state),
                'delay': 0
            }
        }
        await self.send_as_json(payload)

    async def received_message(self, msg):
        """Dispatches a decoded voice gateway message by opcode."""
        log.debug('Voice websocket frame received: %s', msg)
        op = msg['op']
        data = msg.get('d')

        if op == self.READY:
            await self.initial_connection(data)
        elif op == self.HEARTBEAT_ACK:
            self._keep_alive.ack()
        elif op == self.RESUMED:
            log.info('Voice RESUME succeeded.')
        elif op == self.SESSION_DESCRIPTION:
            self._connection.mode = data['mode']
            await self.load_secret_key(data)
        elif op == self.HELLO:
            interval = data['heartbeat_interval'] / 1000.0
            # Heartbeat at most every 5 seconds regardless of the server interval.
            self._keep_alive = VoiceKeepAliveHandler(ws=self, interval=min(interval, 5.0))
            self._keep_alive.start()

    async def initial_connection(self, data):
        """Performs UDP IP discovery and then selects the voice protocol."""
        state = self._connection
        state.ssrc = data['ssrc']
        state.voice_port = data['port']
        state.endpoint_ip = data['ip']

        # IP discovery: send a 70-byte packet containing our SSRC and read
        # back our external IP/port from the server's response.
        packet = bytearray(70)
        struct.pack_into('>H', packet, 0, 1) # 1 = Send
        struct.pack_into('>H', packet, 2, 70) # 70 = Length
        struct.pack_into('>I', packet, 4, state.ssrc)
        state.socket.sendto(packet, (state.endpoint_ip, state.voice_port))
        recv = await self.loop.sock_recv(state.socket, 70)
        log.debug('received packet in initial_connection: %s', recv)

        # the ip is ascii starting at the 4th byte and ending at the first null
        ip_start = 4
        ip_end = recv.index(0, ip_start)
        state.ip = recv[ip_start:ip_end].decode('ascii')

        # The port is a big-endian unsigned short in the last two bytes.
        state.port = struct.unpack_from('>H', recv, len(recv) - 2)[0]
        log.debug('detected ip: %s port: %s', state.ip, state.port)

        # there *should* always be at least one supported mode (xsalsa20_poly1305)
        modes = [mode for mode in data['modes'] if mode in self._connection.supported_modes]
        log.debug('received supported encryption modes: %s', ", ".join(modes))

        mode = modes[0]
        await self.select_protocol(state.ip, state.port, mode)
        log.info('selected the voice protocol for use (%s)', mode)

    @property
    def latency(self):
        """:class:`float`: Latency between a HEARTBEAT and its HEARTBEAT_ACK in seconds."""
        heartbeat = self._keep_alive
        return float('inf') if heartbeat is None else heartbeat.latency

    @property
    def average_latency(self):
        """:class:`list`: Average of last 20 HEARTBEAT latencies."""
        heartbeat = self._keep_alive
        if heartbeat is None or not heartbeat.recent_ack_latencies:
            return float('inf')
        return sum(heartbeat.recent_ack_latencies) / len(heartbeat.recent_ack_latencies)

    async def load_secret_key(self, data):
        """Stores the encryption secret key received via SESSION_DESCRIPTION."""
        log.info('received secret key for voice connection')
        self.secret_key = self._connection.secret_key = data.get('secret_key')
        # NOTE(review): toggling speaking on then off here presumably primes the
        # server-side SSRC/speaking mapping — confirm before changing.
        await self.speak()
        await self.speak(False)

    async def poll_event(self):
        """Receives and processes a single voice gateway frame."""
        # This exception is handled up the chain
        msg = await asyncio.wait_for(self.ws.receive(), timeout=30.0)
        if msg.type is aiohttp.WSMsgType.TEXT:
            await self.received_message(json.loads(msg.data))
        elif msg.type is aiohttp.WSMsgType.ERROR:
            log.debug('Received %s', msg)
            raise ConnectionClosed(self.ws, shard_id=None) from msg.data
        elif msg.type in (aiohttp.WSMsgType.CLOSED, aiohttp.WSMsgType.CLOSE, aiohttp.WSMsgType.CLOSING):
            log.debug('Received %s', msg)
            raise ConnectionClosed(self.ws, shard_id=None, code=self._close_code)

    async def close(self, code=1000):
        """Stops the keep-alive handler and closes the voice websocket."""
        if self._keep_alive is not None:
            self._keep_alive.stop()

        self._close_code = code
        await self.ws.close(code=code)
import colorsys
import random
class Colour:
    """Represents a Discord role colour. This class is similar
    to a (red, green, blue) :class:`tuple`.

    There is an alias for this called Color.

    .. container:: operations

        .. describe:: x == y

            Checks if two colours are equal.

        .. describe:: x != y

            Checks if two colours are not equal.

        .. describe:: hash(x)

            Return the colour's hash.

        .. describe:: str(x)

            Returns the hex format for the colour.

    Attributes
    ------------
    value: :class:`int`
        The raw integer colour value.
    """

    __slots__ = ('value',)

    def __init__(self, value):
        if not isinstance(value, int):
            name = value.__class__.__name__
            raise TypeError('Expected int parameter, received %s instead.' % name)
        self.value = value

    def _get_byte(self, byte):
        # Extract the ``byte``-th octet (0 = least significant) of the raw value.
        shift = 8 * byte
        return (self.value >> shift) & 0xff

    def __eq__(self, other):
        if not isinstance(other, Colour):
            return False
        return self.value == other.value

    def __ne__(self, other):
        return not (self == other)

    def __str__(self):
        return f'#{self.value:0>6x}'

    def __repr__(self):
        return f'<Colour value={self.value}>'

    def __hash__(self):
        return hash(self.value)

    @property
    def r(self):
        """:class:`int`: The red component of the colour."""
        return self._get_byte(2)

    @property
    def g(self):
        """:class:`int`: The green component of the colour."""
        return self._get_byte(1)

    @property
    def b(self):
        """:class:`int`: The blue component of the colour."""
        return self._get_byte(0)

    def to_rgb(self):
        """Tuple[:class:`int`, :class:`int`, :class:`int`]: An (r, g, b) tuple representing the colour."""
        return self.r, self.g, self.b

    @classmethod
    def from_rgb(cls, r, g, b):
        """Constructs a :class:`Colour` from an RGB tuple."""
        value = (r << 16) + (g << 8) + b
        return cls(value)

    @classmethod
    def from_hsv(cls, h, s, v):
        """Constructs a :class:`Colour` from an HSV tuple."""
        r, g, b = (int(component * 255) for component in colorsys.hsv_to_rgb(h, s, v))
        return cls.from_rgb(r, g, b)

    @classmethod
    def default(cls):
        """A factory method returning a :class:`Colour` with a value of ``0``."""
        return cls(0)

    @classmethod
    def random(cls, *, seed=None):
        """A factory method returning a :class:`Colour` with a random hue.

        .. note::

            The random algorithm works by choosing a colour with a random hue but
            with maxed out saturation and value.

        .. versionadded:: 1.6

        Parameters
        ------------
        seed: Optional[Union[:class:`int`, :class:`str`, :class:`float`, :class:`bytes`, :class:`bytearray`]]
            The seed to initialize the RNG with. If ``None`` is passed the default RNG is used.

            .. versionadded:: 1.7
        """
        rng = random.Random(seed) if seed is not None else random
        return cls.from_hsv(rng.random(), 1, 1)

    @classmethod
    def teal(cls):
        """A factory method returning a :class:`Colour` with a value of ``0x1abc9c``."""
        return cls(0x1abc9c)

    @classmethod
    def dark_teal(cls):
        """A factory method returning a :class:`Colour` with a value of ``0x11806a``."""
        return cls(0x11806a)

    @classmethod
    def green(cls):
        """A factory method returning a :class:`Colour` with a value of ``0x2ecc71``."""
        return cls(0x2ecc71)

    @classmethod
    def dark_green(cls):
        """A factory method returning a :class:`Colour` with a value of ``0x1f8b4c``."""
        return cls(0x1f8b4c)

    @classmethod
    def blue(cls):
        """A factory method returning a :class:`Colour` with a value of ``0x3498db``."""
        return cls(0x3498db)

    @classmethod
    def dark_blue(cls):
        """A factory method returning a :class:`Colour` with a value of ``0x206694``."""
        return cls(0x206694)

    @classmethod
    def purple(cls):
        """A factory method returning a :class:`Colour` with a value of ``0x9b59b6``."""
        return cls(0x9b59b6)

    @classmethod
    def dark_purple(cls):
        """A factory method returning a :class:`Colour` with a value of ``0x71368a``."""
        return cls(0x71368a)

    @classmethod
    def magenta(cls):
        """A factory method returning a :class:`Colour` with a value of ``0xe91e63``."""
        return cls(0xe91e63)

    @classmethod
    def dark_magenta(cls):
        """A factory method returning a :class:`Colour` with a value of ``0xad1457``."""
        return cls(0xad1457)

    @classmethod
    def gold(cls):
        """A factory method returning a :class:`Colour` with a value of ``0xf1c40f``."""
        return cls(0xf1c40f)

    @classmethod
    def dark_gold(cls):
        """A factory method returning a :class:`Colour` with a value of ``0xc27c0e``."""
        return cls(0xc27c0e)

    @classmethod
    def orange(cls):
        """A factory method returning a :class:`Colour` with a value of ``0xe67e22``."""
        return cls(0xe67e22)

    @classmethod
    def dark_orange(cls):
        """A factory method returning a :class:`Colour` with a value of ``0xa84300``."""
        return cls(0xa84300)

    @classmethod
    def red(cls):
        """A factory method returning a :class:`Colour` with a value of ``0xe74c3c``."""
        return cls(0xe74c3c)

    @classmethod
    def dark_red(cls):
        """A factory method returning a :class:`Colour` with a value of ``0x992d22``."""
        return cls(0x992d22)

    @classmethod
    def lighter_grey(cls):
        """A factory method returning a :class:`Colour` with a value of ``0x95a5a6``."""
        return cls(0x95a5a6)

    lighter_gray = lighter_grey

    @classmethod
    def dark_grey(cls):
        """A factory method returning a :class:`Colour` with a value of ``0x607d8b``."""
        return cls(0x607d8b)

    dark_gray = dark_grey

    @classmethod
    def light_grey(cls):
        """A factory method returning a :class:`Colour` with a value of ``0x979c9f``."""
        return cls(0x979c9f)

    light_gray = light_grey

    @classmethod
    def darker_grey(cls):
        """A factory method returning a :class:`Colour` with a value of ``0x546e7a``."""
        return cls(0x546e7a)

    darker_gray = darker_grey

    @classmethod
    def old_blurple(cls):
        """A factory method returning a :class:`Colour` with a value of ``0x7289da``."""
        return cls(0x7289da)

    @classmethod
    def blurple(cls):
        """A factory method returning a :class:`Colour` with a value of ``0x5e62f2``.

        .. versionadded:: 1.7.2.1
        """
        return cls(0x5e62f2)

    @classmethod
    def greyple(cls):
        """A factory method returning a :class:`Colour` with a value of ``0x99aab5``."""
        return cls(0x99aab5)

    @classmethod
    def dark_theme(cls):
        """A factory method returning a :class:`Colour` with a value of ``0x36393F``.

        This will appear transparent on Discord's dark theme.

        .. versionadded:: 1.5
        """
        return cls(0x36393F)

    @classmethod
    def nitro_booster(cls):
        """A factory method returning a :class:`Colour` with a value of ``0xf47fff``.

        .. versionadded:: 1.5.1.1"""
        return cls(0xf47fff)

    @classmethod
    def rounded(cls):
        """A factory method returning a :class:`Colour` with a value of ``0x2F3136``.

        This will appear transparent on Discord's dark theme.

        .. versionadded:: 1.6
        """
        return cls(0x2F3136)

Color = Colour
import asyncio
import datetime
import aiohttp
import discord
import inspect
import logging
import sys
import traceback
from discord.backoff import ExponentialBackoff
# Module-level logger shared by the tasks extension.
log = logging.getLogger(__name__)
class Loop:
    """A background task helper that abstracts the loop and reconnection logic for you.

    The main interface to create this is through :func:`loop`.
    """
    def __init__(self, coro, seconds, hours, minutes, count, reconnect, loop):
        self.coro = coro
        self.reconnect = reconnect
        self.loop = loop
        self.count = count
        self._current_loop = 0
        self._task = None
        # When used as a descriptor, holds the instance the loop is bound to.
        self._injected = None
        # Exceptions that the reconnect logic is allowed to swallow and retry on.
        self._valid_exception = (
            OSError,
            discord.GatewayNotFound,
            discord.ConnectionClosed,
            aiohttp.ClientError,
            asyncio.TimeoutError,
        )
        self._before_loop = None
        self._after_loop = None
        self._is_being_cancelled = False
        self._has_failed = False
        self._stop_next_iteration = False

        if self.count is not None and self.count <= 0:
            raise ValueError('count must be greater than 0 or None.')

        self.change_interval(seconds=seconds, minutes=minutes, hours=hours)
        self._last_iteration_failed = False
        self._last_iteration = None
        self._next_iteration = None

        if not inspect.iscoroutinefunction(self.coro):
            raise TypeError('Expected coroutine function, not {0.__name__!r}.'.format(type(self.coro)))

    async def _call_loop_function(self, name, *args, **kwargs):
        # Invokes the registered before_loop/after_loop/error hook, passing the
        # bound instance first when the loop lives on a class.
        coro = getattr(self, '_' + name)
        if coro is None:
            return

        if self._injected is not None:
            await coro(self._injected, *args, **kwargs)
        else:
            await coro(*args, **kwargs)

    async def _loop(self, *args, **kwargs):
        # The actual scheduled task body: runs the callback on the configured
        # interval, retrying on _valid_exception with exponential backoff.
        backoff = ExponentialBackoff()
        await self._call_loop_function('before_loop')
        sleep_until = discord.utils.sleep_until
        self._last_iteration_failed = False
        self._next_iteration = datetime.datetime.now(datetime.timezone.utc)
        try:
            await asyncio.sleep(0) # allows canceling in before_loop
            while True:
                if not self._last_iteration_failed:
                    self._last_iteration = self._next_iteration
                    self._next_iteration = self._get_next_sleep_time()
                try:
                    await self.coro(*args, **kwargs)
                    self._last_iteration_failed = False
                    now = datetime.datetime.now(datetime.timezone.utc)
                    # If the iteration overran its slot, don't sleep into the past.
                    if now > self._next_iteration:
                        self._next_iteration = now
                except self._valid_exception:
                    self._last_iteration_failed = True
                    if not self.reconnect:
                        raise
                    await asyncio.sleep(backoff.delay())
                else:
                    await sleep_until(self._next_iteration)

                    if self._stop_next_iteration:
                        return
                    self._current_loop += 1
                    if self._current_loop == self.count:
                        break

        except asyncio.CancelledError:
            self._is_being_cancelled = True
            raise
        except Exception as exc:
            self._has_failed = True
            await self._call_loop_function('error', exc)
            raise exc
        finally:
            await self._call_loop_function('after_loop')
            # Reset state so the loop object can be started again.
            self._is_being_cancelled = False
            self._current_loop = 0
            self._stop_next_iteration = False
            self._has_failed = False

    def __get__(self, obj, objtype):
        # Descriptor protocol: give each instance its own bound copy of the
        # loop and cache it on the instance so hooks see the right ``self``.
        if obj is None:
            return self

        copy = Loop(self.coro, seconds=self.seconds, hours=self.hours, minutes=self.minutes,
                    count=self.count, reconnect=self.reconnect, loop=self.loop)
        copy._injected = obj
        copy._before_loop = self._before_loop
        copy._after_loop = self._after_loop
        copy._error = self._error
        setattr(obj, self.coro.__name__, copy)
        return copy

    @property
    def current_loop(self):
        """:class:`int`: The current iteration of the loop."""
        return self._current_loop

    @property
    def next_iteration(self):
        """Optional[:class:`datetime.datetime`]: When the next iteration of the loop will occur.

        .. versionadded:: 1.3
        """
        if self._task is None:
            return None
        elif self._task and self._task.done() or self._stop_next_iteration:
            return None
        return self._next_iteration

    async def __call__(self, *args, **kwargs):
        r"""|coro|

        Calls the internal callback that the task holds.

        .. versionadded:: 1.6

        Parameters
        ------------
        \*args
            The arguments to use.
        \*\*kwargs
            The keyword arguments to use.
        """
        if self._injected is not None:
            args = (self._injected, *args)

        return await self.coro(*args, **kwargs)

    def start(self, *args, **kwargs):
        r"""Starts the internal task in the event loop.

        Parameters
        ------------
        \*args
            The arguments to use.
        \*\*kwargs
            The keyword arguments to use.

        Raises
        --------
        RuntimeError
            A task has already been launched and is running.

        Returns
        ---------
        :class:`asyncio.Task`
            The task that has been created.
        """
        if self._task is not None and not self._task.done():
            raise RuntimeError('Task is already launched and is not completed.')

        if self._injected is not None:
            args = (self._injected, *args)

        if self.loop is None:
            self.loop = asyncio.get_event_loop()

        self._task = self.loop.create_task(self._loop(*args, **kwargs))
        return self._task

    def stop(self):
        r"""Gracefully stops the task from running.

        Unlike :meth:`cancel`\, this allows the task to finish its
        current iteration before gracefully exiting.

        .. note::

            If the internal function raises an error that can be
            handled before finishing then it will retry until
            it succeeds.

            If this is undesirable, either remove the error handling
            before stopping via :meth:`clear_exception_types` or
            use :meth:`cancel` instead.

        .. versionadded:: 1.2
        """
        if self._task and not self._task.done():
            self._stop_next_iteration = True

    def _can_be_cancelled(self):
        # Only a live, not-already-cancelling task can be cancelled.
        return not self._is_being_cancelled and self._task and not self._task.done()

    def cancel(self):
        """Cancels the internal task, if it is running."""
        if self._can_be_cancelled():
            self._task.cancel()

    def restart(self, *args, **kwargs):
        r"""A convenience method to restart the internal task.

        .. note::

            Due to the way this function works, the task is not
            returned like :meth:`start`.

        Parameters
        ------------
        \*args
            The arguments to use.
        \*\*kwargs
            The keyword arguments to use.
        """
        # Restart only once the cancelled task has actually finished.
        def restart_when_over(fut, *, args=args, kwargs=kwargs):
            self._task.remove_done_callback(restart_when_over)
            self.start(*args, **kwargs)

        if self._can_be_cancelled():
            self._task.add_done_callback(restart_when_over)
            self._task.cancel()

    def add_exception_type(self, *exceptions):
        r"""Adds exception types to be handled during the reconnect logic.

        By default the exception types handled are those handled by
        :meth:`discord.Client.connect`\, which includes a lot of internet disconnection
        errors.

        This function is useful if you're interacting with a 3rd party library that
        raises its own set of exceptions.

        Parameters
        ------------
        \*exceptions: Type[:class:`BaseException`]
            An argument list of exception classes to handle.

        Raises
        --------
        TypeError
            An exception passed is either not a class or not inherited from :class:`BaseException`.
        """
        for exc in exceptions:
            if not inspect.isclass(exc):
                raise TypeError('{0!r} must be a class.'.format(exc))
            if not issubclass(exc, BaseException):
                raise TypeError('{0!r} must inherit from BaseException.'.format(exc))

        self._valid_exception = (*self._valid_exception, *exceptions)

    def clear_exception_types(self):
        """Removes all exception types that are handled.

        .. note::

            This operation obviously cannot be undone!
        """
        self._valid_exception = tuple()

    def remove_exception_type(self, *exceptions):
        r"""Removes exception types from being handled during the reconnect logic.

        Parameters
        ------------
        \*exceptions: Type[:class:`BaseException`]
            An argument list of exception classes to handle.

        Returns
        ---------
        :class:`bool`
            Whether all exceptions were successfully removed.
        """
        old_length = len(self._valid_exception)
        self._valid_exception = tuple(x for x in self._valid_exception if x not in exceptions)
        return len(self._valid_exception) == old_length - len(exceptions)

    def get_task(self):
        """Optional[:class:`asyncio.Task`]: Fetches the internal task or ``None`` if there isn't one running."""
        return self._task

    def is_being_cancelled(self):
        """Whether the task is being cancelled."""
        return self._is_being_cancelled

    def failed(self):
        """:class:`bool`: Whether the internal task has failed.

        .. versionadded:: 1.2
        """
        return self._has_failed

    def is_running(self):
        """:class:`bool`: Check if the task is currently running.

        .. versionadded:: 1.4
        """
        return not bool(self._task.done()) if self._task else False

    async def _error(self, *args):
        # Default error hook: print the traceback to stderr. Overridable via
        # the :meth:`error` decorator.
        exception = args[-1]
        print('Unhandled exception in internal background task {0.__name__!r}.'.format(self.coro), file=sys.stderr)
        traceback.print_exception(type(exception), exception, exception.__traceback__, file=sys.stderr)

    def before_loop(self, coro):
        """A decorator that registers a coroutine to be called before the loop starts running.

        This is useful if you want to wait for some bot state before the loop starts,
        such as :meth:`discord.Client.wait_until_ready`.

        The coroutine must take no arguments (except ``self`` in a class context).

        Parameters
        ------------
        coro: :ref:`coroutine <coroutine>`
            The coroutine to register before the loop runs.

        Raises
        -------
        TypeError
            The function was not a coroutine.
        """
        if not inspect.iscoroutinefunction(coro):
            raise TypeError('Expected coroutine function, received {0.__name__!r}.'.format(type(coro)))

        self._before_loop = coro
        return coro

    def after_loop(self, coro):
        """A decorator that register a coroutine to be called after the loop finished running.

        The coroutine must take no arguments (except ``self`` in a class context).

        .. note::

            This coroutine is called even during cancellation. If it is desirable
            to tell apart whether something was cancelled or not, check to see
            whether :meth:`is_being_cancelled` is ``True`` or not.

        Parameters
        ------------
        coro: :ref:`coroutine <coroutine>`
            The coroutine to register after the loop finishes.

        Raises
        -------
        TypeError
            The function was not a coroutine.
        """
        if not inspect.iscoroutinefunction(coro):
            raise TypeError('Expected coroutine function, received {0.__name__!r}.'.format(type(coro)))

        self._after_loop = coro
        return coro

    def error(self, coro):
        """A decorator that registers a coroutine to be called if the task encounters an unhandled exception.

        The coroutine must take only one argument the exception raised (except ``self`` in a class context).

        By default this prints to :data:`sys.stderr` however it could be
        overridden to have a different implementation.

        .. versionadded:: 1.4

        Parameters
        ------------
        coro: :ref:`coroutine <coroutine>`
            The coroutine to register in the event of an unhandled exception.

        Raises
        -------
        TypeError
            The function was not a coroutine.
        """
        if not inspect.iscoroutinefunction(coro):
            raise TypeError('Expected coroutine function, received {0.__name__!r}.'.format(type(coro)))

        self._error = coro
        return coro

    def _get_next_sleep_time(self):
        # Next wakeup is relative to the last iteration, keeping a fixed cadence.
        return self._last_iteration + datetime.timedelta(seconds=self._sleep)

    def change_interval(self, *, seconds=0, minutes=0, hours=0):
        """Changes the interval for the sleep time.

        .. note::

            This only applies on the next loop iteration. If it is desirable for the change of interval
            to be applied right away, cancel the task with :meth:`cancel`.

        .. versionadded:: 1.2

        Parameters
        ------------
        seconds: :class:`float`
            The number of seconds between every iteration.
        minutes: :class:`float`
            The number of minutes between every iteration.
        hours: :class:`float`
            The number of hours between every iteration.

        Raises
        -------
        ValueError
            An invalid value was given.
        """
        sleep = seconds + (minutes * 60.0) + (hours * 3600.0)
        if sleep < 0:
            raise ValueError('Total number of seconds cannot be less than zero.')

        self._sleep = sleep
        self.seconds = seconds
        self.hours = hours
        self.minutes = minutes
def loop(*, seconds=0, minutes=0, hours=0, count=None, reconnect=True, loop=None):
    """A decorator that schedules a task in the background for you with
    optional reconnect logic. The decorator returns a :class:`Loop`.

    Parameters
    ------------
    seconds: :class:`float`
        The number of seconds between every iteration.
    minutes: :class:`float`
        The number of minutes between every iteration.
    hours: :class:`float`
        The number of hours between every iteration.
    count: Optional[:class:`int`]
        The number of loops to do, ``None`` if it should be an
        infinite loop.
    reconnect: :class:`bool`
        Whether to handle errors and restart the task
        using an exponential back-off algorithm similar to the
        one used in :meth:`discord.Client.connect`.
    loop: :class:`asyncio.AbstractEventLoop`
        The loop to use to register the task, if not given
        defaults to :func:`asyncio.get_event_loop`.

    Raises
    --------
    ValueError
        An invalid value was given.
    TypeError
        The function was not a coroutine.
    """
    def decorator(func):
        return Loop(
            func,
            seconds=seconds,
            minutes=minutes,
            hours=hours,
            count=count,
            reconnect=reconnect,
            loop=loop,
        )
    return decorator
from .errors import UnexpectedQuoteError, InvalidEndOfQuotedStringError, ExpectedClosingQuoteError
# map from opening quotes to closing quotes
#
# NOTE: two entries here were mojibake in a previous revision: the fullwidth
# quotation mark pair (U+FF02) had degraded into a bare `"""` (a syntax
# error), and the halfwidth corner brackets (U+FF62/U+FF63) had collapsed
# into a duplicate of the regular corner brackets. Both are restored below.
_quotes = {
    '"': '"',
    "‘": "’",
    "‚": "‛",
    "“": "”",
    "„": "‟",
    "⹂": "⹂",
    "「": "」",
    "『": "』",
    "〝": "〞",
    "﹁": "﹂",
    "﹃": "﹄",
    "＂": "＂",
    "｢": "｣",
    "«": "»",
    "‹": "›",
    "《": "》",
    "〈": "〉",
}
# Every character that may act as a quote, opening or closing.
_all_quotes = set(_quotes) | set(_quotes.values())
class StringView:
    # A forward-only reader over ``buffer`` with a single-step undo.
    # ``index`` is the current read position, ``previous`` is the position
    # before the last mutating operation (restored by :meth:`undo`).
    def __init__(self, buffer):
        self.index = 0
        self.buffer = buffer
        self.end = len(buffer)
        self.previous = 0

    @property
    def current(self):
        # The character at the read position, or None at end-of-input.
        return None if self.eof else self.buffer[self.index]

    @property
    def eof(self):
        # True once the read position has passed the last character.
        return self.index >= self.end

    def undo(self):
        # Rewinds to the position before the last operation.
        self.index = self.previous

    def skip_ws(self):
        # Skips consecutive whitespace; returns True if anything was skipped.
        pos = 0
        while not self.eof:
            try:
                current = self.buffer[self.index + pos]
                if not current.isspace():
                    break
                pos += 1
            except IndexError:
                break

        self.previous = self.index
        self.index += pos
        return self.previous != self.index

    def skip_string(self, string):
        # Consumes ``string`` if the buffer starts with it here; returns success.
        strlen = len(string)
        if self.buffer[self.index:self.index + strlen] == string:
            self.previous = self.index
            self.index += strlen
            return True
        return False

    def read_rest(self):
        # Consumes and returns everything from the read position to the end.
        result = self.buffer[self.index:]
        self.previous = self.index
        self.index = self.end
        return result

    def read(self, n):
        # Consumes and returns up to ``n`` characters.
        result = self.buffer[self.index:self.index + n]
        self.previous = self.index
        self.index += n
        return result

    def get(self):
        # Advances one character and returns the character now at the read
        # position (i.e. the one *after* the pre-call position), or None
        # when reading past the end.
        try:
            result = self.buffer[self.index + 1]
        except IndexError:
            result = None

        self.previous = self.index
        self.index += 1
        return result

    def get_word(self):
        # Consumes and returns characters up to (not including) the next
        # whitespace or end-of-input.
        pos = 0
        while not self.eof:
            try:
                current = self.buffer[self.index + pos]
                if current.isspace():
                    break
                pos += 1
            except IndexError:
                break
        self.previous = self.index
        result = self.buffer[self.index:self.index + pos]
        self.index += pos
        return result

    def get_quoted_word(self):
        # Parses the next word, honouring the ``_quotes`` table and backslash
        # escapes. Raises UnexpectedQuoteError, InvalidEndOfQuotedStringError
        # or ExpectedClosingQuoteError on malformed quoting.
        current = self.current
        if current is None:
            return None

        close_quote = _quotes.get(current)
        is_quoted = bool(close_quote)
        if is_quoted:
            result = []
            # Inside quotes, only the active quote pair may be escaped.
            _escaped_quotes = (current, close_quote)
        else:
            result = [current]
            _escaped_quotes = _all_quotes

        while not self.eof:
            current = self.get()
            if not current:
                if is_quoted:
                    # unexpected EOF
                    raise ExpectedClosingQuoteError(close_quote)
                return ''.join(result)

            # currently we accept strings in the format of "hello world"
            # to embed a quote inside the string you must escape it: "a \"world\""
            if current == '\\':
                next_char = self.get()
                if not next_char:
                    # string ends with \ and no character after it
                    if is_quoted:
                        # if we're quoted then we're expecting a closing quote
                        raise ExpectedClosingQuoteError(close_quote)
                    # if we aren't then we just let it through
                    return ''.join(result)

                if next_char in _escaped_quotes:
                    # escaped quote
                    result.append(next_char)
                else:
                    # different escape character, ignore it
                    self.undo()
                    result.append(current)
                continue

            if not is_quoted and current in _all_quotes:
                # we aren't quoted
                raise UnexpectedQuoteError(current)

            # closing quote
            if is_quoted and current == close_quote:
                next_char = self.get()
                valid_eof = not next_char or next_char.isspace()
                if not valid_eof:
                    raise InvalidEndOfQuotedStringError(next_char)

                # we're quoted so it's okay
                return ''.join(result)

            if current.isspace() and not is_quoted:
                # end of word found
                return ''.join(result)

            result.append(current)

    def __repr__(self):
        return '<StringView pos: {0.index} prev: {0.previous} end: {0.end} eof: {0.eof}>'.format(self)
import discord.abc
import discord.utils
class Context(discord.abc.Messageable):
    r"""Represents the context in which a command is being invoked under.
    This class contains a lot of meta data to help you understand more about
    the invocation context. This class is not created manually and is instead
    passed around to commands as the first parameter.
    This class implements the :class:`~discord.abc.Messageable` ABC.
    Attributes
    -----------
    message: :class:`.Message`
        The message that triggered the command being executed.
    bot: :class:`.Bot`
        The bot that contains the command being executed.
    args: :class:`list`
        The list of transformed arguments that were passed into the command.
        If this is accessed during the :func:`on_command_error` event
        then this list could be incomplete.
    kwargs: :class:`dict`
        A dictionary of transformed arguments that were passed into the command.
        Similar to :attr:`args`\, if this is accessed in the
        :func:`on_command_error` event then this dict could be incomplete.
    prefix: :class:`str`
        The prefix that was used to invoke the command.
    command: :class:`Command`
        The command that is being invoked currently.
    invoked_with: :class:`str`
        The command name that triggered this invocation. Useful for finding out
        which alias called the command.
    invoked_parents: List[:class:`str`]
        The command names of the parents that triggered this invocation. Useful for
        finding out which aliases called the command.
        For example in commands ``?a b c test``, the invoked parents are ``['a', 'b', 'c']``.
        .. versionadded:: 1.7
    invoked_subcommand: :class:`Command`
        The subcommand that was invoked.
        If no valid subcommand was invoked then this is equal to ``None``.
    subcommand_passed: Optional[:class:`str`]
        The string that was attempted to call a subcommand. This does not have
        to point to a valid registered subcommand and could just point to a
        nonsense string. If nothing was passed to attempt a call to a
        subcommand then this is set to ``None``.
    command_failed: :class:`bool`
        A boolean that indicates if the command failed to be parsed, checked,
        or invoked.
    """
    def __init__(self, **attrs):
        self.message = attrs.pop('message', None)
        self.bot = attrs.pop('bot', None)
        self.args = attrs.pop('args', [])
        self.kwargs = attrs.pop('kwargs', {})
        # No default here on purpose: constructing a Context without a prefix
        # is a programming error and deliberately raises KeyError.
        self.prefix = attrs.pop('prefix')
        self.command = attrs.pop('command', None)
        self.view = attrs.pop('view', None)
        self.invoked_with = attrs.pop('invoked_with', None)
        self.invoked_parents = attrs.pop('invoked_parents', [])
        self.invoked_subcommand = attrs.pop('invoked_subcommand', None)
        self.subcommand_passed = attrs.pop('subcommand_passed', None)
        self.command_failed = attrs.pop('command_failed', False)
        # Required by the Messageable ABC machinery.
        # NOTE(review): assumes 'message' was supplied; a None message raises
        # AttributeError here.
        self._state = self.message._state
    async def invoke(self, *args, **kwargs):
        r"""|coro|
        Calls a command with the arguments given.
        This is useful if you want to just call the callback that a
        :class:`.Command` holds internally.
        .. note::
            This does not handle converters, checks, cooldowns, pre-invoke,
            or after-invoke hooks in any matter. It calls the internal callback
            directly as-if it was a regular function.
            You must take care in passing the proper arguments when
            using this function.
        .. warning::
            The first parameter passed **must** be the command being invoked.
        Parameters
        -----------
        command: :class:`.Command`
            The command that is going to be called.
        \*args
            The arguments to to use.
        \*\*kwargs
            The keyword arguments to use.
        Raises
        -------
        TypeError
            The command argument to invoke is missing.
        """
        try:
            command = args[0]
        except IndexError:
            raise TypeError('Missing command to invoke.') from None
        # Rebuild the positional arguments the callback expects: the cog
        # instance (if the command belongs to one), then the context, then
        # whatever the caller supplied after the command itself.
        arguments = []
        if command.cog is not None:
            arguments.append(command.cog)
        arguments.append(self)
        arguments.extend(args[1:])
        ret = await command.callback(*arguments, **kwargs)
        return ret
    async def reinvoke(self, *, call_hooks=False, restart=True):
        """|coro|
        Calls the command again.
        This is similar to :meth:`~.Context.invoke` except that it bypasses
        checks, cooldowns, and error handlers.
        .. note::
            If you want to bypass :exc:`.UserInputError` derived exceptions,
            it is recommended to use the regular :meth:`~.Context.invoke`
            as it will work more naturally. After all, this will end up
            using the old arguments the user has used and will thus just
            fail again.
        Parameters
        ------------
        call_hooks: :class:`bool`
            Whether to call the before and after invoke hooks.
        restart: :class:`bool`
            Whether to start the call chain from the very beginning
            or where we left off (i.e. the command that caused the error).
            The default is to start where we left off.
        Raises
        -------
        ValueError
            The context to reinvoke is not valid.
        """
        cmd = self.command
        view = self.view
        if cmd is None:
            raise ValueError('This context is not valid.')
        # some state to revert to when we're done
        index, previous = view.index, view.previous
        invoked_with = self.invoked_with
        invoked_subcommand = self.invoked_subcommand
        invoked_parents = self.invoked_parents
        subcommand_passed = self.subcommand_passed
        if restart:
            # Rewind the string view to just past the prefix and restart the
            # call chain from the root parent command.
            to_call = cmd.root_parent or cmd
            view.index = len(self.prefix)
            view.previous = 0
            self.invoked_parents = []
            self.invoked_with = view.get_word() # advance to get the root command
        else:
            to_call = cmd
        try:
            await to_call.reinvoke(self, call_hooks=call_hooks)
        finally:
            # Restore the saved parsing/invocation state regardless of outcome
            # so the context remains usable after the reinvoke.
            self.command = cmd
            view.index = index
            view.previous = previous
            self.invoked_with = invoked_with
            self.invoked_subcommand = invoked_subcommand
            self.invoked_parents = invoked_parents
            self.subcommand_passed = subcommand_passed
    @property
    def valid(self):
        """:class:`bool`: Checks if the invocation context is valid to be invoked with."""
        return self.prefix is not None and self.command is not None
    async def _get_channel(self):
        # Messageable ABC hook: messages sent via this context go to the
        # channel the triggering message came from.
        return self.channel
    @property
    def cog(self):
        """Optional[:class:`.Cog`]: Returns the cog associated with this context's command. None if it does not exist."""
        if self.command is None:
            return None
        return self.command.cog
    @discord.utils.cached_property
    def guild(self):
        """Optional[:class:`.Guild`]: Returns the guild associated with this context's command. None if not available."""
        return self.message.guild
    @discord.utils.cached_property
    def channel(self):
        """Union[:class:`.abc.Messageable`]: Returns the channel associated with this context's command.
        Shorthand for :attr:`.Message.channel`.
        """
        return self.message.channel
    @discord.utils.cached_property
    def author(self):
        """Union[:class:`~discord.User`, :class:`.Member`]:
        Returns the author associated with this context's command. Shorthand for :attr:`.Message.author`
        """
        return self.message.author
    @discord.utils.cached_property
    def me(self):
        """Union[:class:`.Member`, :class:`.ClientUser`]:
        Similar to :attr:`.Guild.me` except it may return the :class:`.ClientUser` in private message contexts.
        """
        return self.guild.me if self.guild is not None else self.bot.user
    @property
    def voice_client(self):
        r"""Optional[:class:`.VoiceProtocol`]: A shortcut to :attr:`.Guild.voice_client`\, if applicable."""
        g = self.guild
        return g.voice_client if g else None
    async def send_help(self, *args):
        """send_help(entity=<bot>)
        |coro|
        Shows the help command for the specified entity if given.
        The entity can be a command or a cog.
        If no entity is given, then it'll show help for the
        entire bot.
        If the entity is a string, then it looks up whether it's a
        :class:`Cog` or a :class:`Command`.
        .. note::
            Due to the way this function works, instead of returning
            something similar to :meth:`~.commands.HelpCommand.command_not_found`
            this returns :class:`None` on bad input or no help command.
        Parameters
        ------------
        entity: Optional[Union[:class:`Command`, :class:`Cog`, :class:`str`]]
            The entity to show help for.
        Returns
        --------
        Any
            The result of the help command, if any.
        """
        # Imported locally to avoid a circular import at module load time.
        from .core import Group, Command, wrap_callback
        from .errors import CommandError
        bot = self.bot
        cmd = bot.help_command
        if cmd is None:
            return None
        # Work on a copy so concurrent invocations don't share mutable state.
        cmd = cmd.copy()
        cmd.context = self
        if len(args) == 0:
            # No entity given: show help for the whole bot.
            await cmd.prepare_help_command(self, None)
            mapping = cmd.get_bot_mapping()
            injected = wrap_callback(cmd.send_bot_help)
            try:
                return await injected(mapping)
            except CommandError as e:
                await cmd.on_help_command_error(self, e)
                return None
        entity = args[0]
        if entity is None:
            return None
        if isinstance(entity, str):
            # Cogs take precedence over commands when names collide.
            entity = bot.get_cog(entity) or bot.get_command(entity)
        try:
            entity.qualified_name
        except AttributeError:
            # if we're here then it's not a cog, group, or command.
            return None
        await cmd.prepare_help_command(self, entity.qualified_name)
        try:
            # Duck-typed cog check: any object exposing __cog_commands__ is
            # treated as a cog; otherwise dispatch on concrete command types.
            if hasattr(entity, '__cog_commands__'):
                injected = wrap_callback(cmd.send_cog_help)
                return await injected(entity)
            elif isinstance(entity, Group):
                injected = wrap_callback(cmd.send_group_help)
                return await injected(entity)
            elif isinstance(entity, Command):
                injected = wrap_callback(cmd.send_command_help)
                return await injected(entity)
            else:
                return None
        except CommandError as e:
            await cmd.on_help_command_error(self, e)
    @discord.utils.copy_doc(discord.Message.reply)
    async def reply(self, content=None, **kwargs):
        return await self.message.reply(content, **kwargs)
import inspect
import copy
from ._types import _BaseCommand
__all__ = (
'CogMeta',
'Cog',
)
class CogMeta(type):
    """A metaclass for defining a cog.
    Note that you should probably not use this directly. It is exposed
    purely for documentation purposes along with making custom metaclasses to intermix
    with other metaclasses such as the :class:`abc.ABCMeta` metaclass.
    For example, to create an abstract cog mixin class, the following would be done.
    .. code-block:: python3
        import abc
        class CogABCMeta(commands.CogMeta, abc.ABCMeta):
            pass
        class SomeMixin(metaclass=abc.ABCMeta):
            pass
        class SomeCogMixin(SomeMixin, commands.Cog, metaclass=CogABCMeta):
            pass
    .. note::
        When passing an attribute of a metaclass that is documented below, note
        that you must pass it as a keyword-only argument to the class creation
        like the following example:
    .. code-block:: python3
        class MyCog(commands.Cog, name='My Cog'):
            pass
    Attributes
    -----------
    name: :class:`str`
        The cog name. By default, it is the name of the class with no modification.
    description: :class:`str`
        The cog description. By default, it is the cleaned docstring of the class.
        .. versionadded:: 1.6
    command_attrs: :class:`dict`
        A list of attributes to apply to every command inside this cog. The dictionary
        is passed into the :class:`Command` options at ``__init__``.
        If you specify attributes inside the command attribute in the class, it will
        override the one specified inside this attribute. For example:
    .. code-block:: python3
        class MyCog(commands.Cog, command_attrs=dict(hidden=True)):
            @commands.command()
            async def foo(self, ctx):
                pass # hidden -> True
            @commands.command(hidden=False)
            async def bar(self, ctx):
                pass # hidden -> False
    """
    def __new__(cls, *args, **kwargs):
        name, bases, attrs = args
        # Cog-level settings are stashed on the class under dunder names so
        # that Cog.__new__ can pick them up at instantiation time.
        attrs['__cog_name__'] = kwargs.pop('name', name)
        attrs['__cog_settings__'] = kwargs.pop('command_attrs', {})
        description = kwargs.pop('description', None)
        if description is None:
            description = inspect.cleandoc(attrs.get('__doc__', ''))
        attrs['__cog_description__'] = description
        commands = {}
        listeners = {}
        no_bot_cog = 'Commands or listeners must not start with cog_ or bot_ (in method {0.__name__}.{1})'
        new_cls = super().__new__(cls, name, bases, attrs, **kwargs)
        # Walk the MRO from the most-base class down so definitions in derived
        # classes override (and re-position) those inherited from bases.
        for base in reversed(new_cls.__mro__):
            for elem, value in base.__dict__.items():
                if elem in commands:
                    # The name was redefined further down the MRO; drop the
                    # older entry and let the new value decide its category.
                    del commands[elem]
                if elem in listeners:
                    del listeners[elem]
                is_static_method = isinstance(value, staticmethod)
                if is_static_method:
                    # Unwrap so we can inspect the underlying function.
                    value = value.__func__
                if isinstance(value, _BaseCommand):
                    if is_static_method:
                        raise TypeError('Command in method {0}.{1!r} must not be staticmethod.'.format(base, elem))
                    if elem.startswith(('cog_', 'bot_')):
                        raise TypeError(no_bot_cog.format(base, elem))
                    commands[elem] = value
                elif inspect.iscoroutinefunction(value):
                    try:
                        # Only coroutines explicitly tagged by Cog.listener()
                        # are registered as listeners.
                        getattr(value, '__cog_listener__')
                    except AttributeError:
                        continue
                    else:
                        if elem.startswith(('cog_', 'bot_')):
                            raise TypeError(no_bot_cog.format(base, elem))
                        listeners[elem] = value
        new_cls.__cog_commands__ = list(commands.values()) # this will be copied in Cog.__new__
        listeners_as_list = []
        for listener in listeners.values():
            for listener_name in listener.__cog_listener_names__:
                # I use __name__ instead of just storing the value so I can inject
                # the self attribute when the time comes to add them to the bot
                listeners_as_list.append((listener_name, listener.__name__))
        new_cls.__cog_listeners__ = listeners_as_list
        return new_cls
    def __init__(self, *args, **kwargs):
        # Deliberately drop the cog-specific keyword arguments (name,
        # command_attrs, ...); type.__init__ would reject them.
        super().__init__(*args)
    @classmethod
    def qualified_name(cls):
        # Metaclass-level accessor; the Cog instance property of the same
        # name shadows this for regular cog instances.
        return cls.__cog_name__
def _cog_special_method(func):
func.__cog_special_method__ = None
return func
class Cog(metaclass=CogMeta):
    """The base class that all cogs must inherit from.
    A cog is a collection of commands, listeners, and optional state to
    help group commands together. More information on them can be found on
    the :ref:`ext_commands_cogs` page.
    When inheriting from this class, the options shown in :class:`CogMeta`
    are equally valid here.
    """
    def __new__(cls, *args, **kwargs):
        # For issue 426, we need to store a copy of the command objects
        # since we modify them to inject `self` to them.
        # To do this, we need to interfere with the Cog creation process.
        self = super().__new__(cls)
        cmd_attrs = cls.__cog_settings__
        # Either update the command with the cog provided defaults or copy it.
        self.__cog_commands__ = tuple(c._update_copy(cmd_attrs) for c in cls.__cog_commands__)
        lookup = {
            cmd.qualified_name: cmd
            for cmd in self.__cog_commands__
        }
        # Update the Command instances dynamically as well
        for command in self.__cog_commands__:
            # Bind the copied command onto the instance under the callback's
            # name so attribute access resolves to the per-instance copy.
            setattr(self, command.callback.__name__, command)
            parent = command.parent
            if parent is not None:
                # Get the latest parent reference
                parent = lookup[parent.qualified_name]
                # Update our parent's reference to our self
                parent.remove_command(command.name)
                parent.add_command(command)
        return self
    def get_commands(self):
        r"""
        Returns
        --------
        List[:class:`.Command`]
            A :class:`list` of :class:`.Command`\s that are
            defined inside this cog.
            .. note::
                This does not include subcommands.
        """
        return [c for c in self.__cog_commands__ if c.parent is None]
    @property
    def qualified_name(self):
        """:class:`str`: Returns the cog's specified name, not the class name."""
        return self.__cog_name__
    @property
    def description(self):
        """:class:`str`: Returns the cog's description, typically the cleaned docstring."""
        return self.__cog_description__
    @description.setter
    def description(self, description):
        self.__cog_description__ = description
    def walk_commands(self):
        """An iterator that recursively walks through this cog's commands and subcommands.
        Yields
        ------
        Union[:class:`.Command`, :class:`.Group`]
            A command or group from the cog.
        """
        from .core import GroupMixin
        for command in self.__cog_commands__:
            if command.parent is None:
                yield command
                if isinstance(command, GroupMixin):
                    yield from command.walk_commands()
    def get_listeners(self):
        """Returns a :class:`list` of (name, function) listener pairs that are defined in this cog.
        Returns
        --------
        List[Tuple[:class:`str`, :ref:`coroutine <coroutine>`]]
            The listeners defined in this cog.
        """
        # method_name is resolved through getattr so the bound method
        # (with self injected) is returned, not the plain function.
        return [(name, getattr(self, method_name)) for name, method_name in self.__cog_listeners__]
    @classmethod
    def _get_overridden_method(cls, method):
        """Return None if the method is not overridden. Otherwise returns the overridden method."""
        # Non-overridden defaults carry __cog_special_method__ = None (set by
        # the _cog_special_method decorator); overrides lack the attribute,
        # so getattr falls back to returning the method itself.
        return getattr(method.__func__, '__cog_special_method__', method)
    @classmethod
    def listener(cls, name=None):
        """A decorator that marks a function as a listener.
        This is the cog equivalent of :meth:`.Bot.listen`.
        Parameters
        ------------
        name: :class:`str`
            The name of the event being listened to. If not provided, it
            defaults to the function's name.
        Raises
        --------
        TypeError
            The function is not a coroutine function or a string was not passed as
            the name.
        """
        if name is not None and not isinstance(name, str):
            raise TypeError('Cog.listener expected str but received {0.__class__.__name__!r} instead.'.format(name))
        def decorator(func):
            actual = func
            if isinstance(actual, staticmethod):
                actual = actual.__func__
            if not inspect.iscoroutinefunction(actual):
                raise TypeError('Listener function must be a coroutine function.')
            actual.__cog_listener__ = True
            to_assign = name or actual.__name__
            try:
                # A function may be decorated multiple times to listen to
                # several events; accumulate the names.
                actual.__cog_listener_names__.append(to_assign)
            except AttributeError:
                actual.__cog_listener_names__ = [to_assign]
            # we have to return `func` instead of `actual` because
            # we need the type to be `staticmethod` for the metaclass
            # to pick it up but the metaclass unfurls the function and
            # thus the assignments need to be on the actual function
            return func
        return decorator
    def has_error_handler(self):
        """:class:`bool`: Checks whether the cog has an error handler.
        .. versionadded:: 1.7
        """
        # The default cog_command_error is tagged by _cog_special_method;
        # an override loses the tag, which means a real handler exists.
        return not hasattr(self.cog_command_error.__func__, '__cog_special_method__')
    @_cog_special_method
    def cog_unload(self):
        """A special method that is called when the cog gets removed.
        This function **cannot** be a coroutine. It must be a regular
        function.
        Subclasses must replace this if they want special unloading behaviour.
        """
        pass
    @_cog_special_method
    def bot_check_once(self, ctx):
        """A special method that registers as a :meth:`.Bot.check_once`
        check.
        This function **can** be a coroutine and must take a sole parameter,
        ``ctx``, to represent the :class:`.Context`.
        """
        return True
    @_cog_special_method
    def bot_check(self, ctx):
        """A special method that registers as a :meth:`.Bot.check`
        check.
        This function **can** be a coroutine and must take a sole parameter,
        ``ctx``, to represent the :class:`.Context`.
        """
        return True
    @_cog_special_method
    def cog_check(self, ctx):
        """A special method that registers as a :func:`commands.check`
        for every command and subcommand in this cog.
        This function **can** be a coroutine and must take a sole parameter,
        ``ctx``, to represent the :class:`.Context`.
        """
        return True
    @_cog_special_method
    async def cog_command_error(self, ctx, error):
        """A special method that is called whenever an error
        is dispatched inside this cog.
        This is similar to :func:`.on_command_error` except only applying
        to the commands inside this cog.
        This **must** be a coroutine.
        Parameters
        -----------
        ctx: :class:`.Context`
            The invocation context where the error happened.
        error: :class:`CommandError`
            The error that happened.
        """
        pass
    @_cog_special_method
    async def cog_before_invoke(self, ctx):
        """A special method that acts as a cog local pre-invoke hook.
        This is similar to :meth:`.Command.before_invoke`.
        This **must** be a coroutine.
        Parameters
        -----------
        ctx: :class:`.Context`
            The invocation context.
        """
        pass
    @_cog_special_method
    async def cog_after_invoke(self, ctx):
        """A special method that acts as a cog local post-invoke hook.
        This is similar to :meth:`.Command.after_invoke`.
        This **must** be a coroutine.
        Parameters
        -----------
        ctx: :class:`.Context`
            The invocation context.
        """
        pass
    def _inject(self, bot):
        cls = self.__class__
        # realistically, the only thing that can cause loading errors
        # is essentially just the command loading, which raises if there are
        # duplicates. When this condition is met, we want to undo all what
        # we've added so far for some form of atomic loading.
        for index, command in enumerate(self.__cog_commands__):
            command.cog = self
            if command.parent is None:
                try:
                    bot.add_command(command)
                except Exception as e:
                    # undo our additions
                    for to_undo in self.__cog_commands__[:index]:
                        if to_undo.parent is None:
                            bot.remove_command(to_undo.name)
                    raise e
        # check if we're overriding the default
        if cls.bot_check is not Cog.bot_check:
            bot.add_check(self.bot_check)
        if cls.bot_check_once is not Cog.bot_check_once:
            bot.add_check(self.bot_check_once, call_once=True)
        # while Bot.add_listener can raise if it's not a coroutine,
        # this precondition is already met by the listener decorator
        # already, thus this should never raise.
        # Outside of, memory errors and the like...
        for name, method_name in self.__cog_listeners__:
            bot.add_listener(getattr(self, method_name), name)
        return self
    def _eject(self, bot):
        cls = self.__class__
        try:
            for command in self.__cog_commands__:
                if command.parent is None:
                    bot.remove_command(command.name)
            for _, method_name in self.__cog_listeners__:
                bot.remove_listener(getattr(self, method_name))
            if cls.bot_check is not Cog.bot_check:
                bot.remove_check(self.bot_check)
            if cls.bot_check_once is not Cog.bot_check_once:
                bot.remove_check(self.bot_check_once, call_once=True)
        finally:
            try:
                # Best-effort: user unload hooks must not prevent ejection.
                self.cog_unload()
            except Exception:
                pass
import re
import inspect
import typing
import discord
from .errors import *
__all__ = (
'Converter',
'MemberConverter',
'UserConverter',
'MessageConverter',
'PartialMessageConverter',
'TextChannelConverter',
'InviteConverter',
'GuildConverter',
'RoleConverter',
'GameConverter',
'ColourConverter',
'ColorConverter',
'VoiceChannelConverter',
'StageChannelConverter',
'EmojiConverter',
'PartialEmojiConverter',
'CategoryChannelConverter',
'IDConverter',
'StoreChannelConverter',
'clean_content',
'Greedy',
)
def _get_from_guilds(bot, getter, argument):
result = None
for guild in bot.guilds:
result = getattr(guild, getter)(argument)
if result:
return result
return result
# Module-level alias to avoid repeated attribute lookups of discord.utils.get
# in the converter lookup paths below.
_utils_get = discord.utils.get
class Converter:
    """Base class for custom converters that need the invocation
    :class:`.Context` to do their work.

    Deriving from this class lets user-defined converters behave like the
    special cased ``discord`` classes. Subclasses provide their conversion
    logic by overriding the :meth:`~.Converter.convert` method, which must
    be a :ref:`coroutine <coroutine>`.
    """
    async def convert(self, ctx, argument):
        """|coro|
        Override this coroutine with the actual conversion logic.

        On failure, raise a :exc:`.CommandError` derived exception (such as
        :exc:`.BadArgument`) so that it propagates cleanly to the command
        error handlers.

        Parameters
        -----------
        ctx: :class:`.Context`
            The invocation context that the argument is being used in.
        argument: :class:`str`
            The argument that is being converted.

        Raises
        -------
        :exc:`.CommandError`
            A generic exception occurred when converting the argument.
        :exc:`.BadArgument`
            The converter failed to convert the argument.
        """
        raise NotImplementedError('Derived classes need to implement this.')
class IDConverter(Converter):
    """Converter base class that can recognise raw Discord snowflake IDs."""

    # Matches a 15-20 digit snowflake anchored at the end of the argument.
    # Compiled once at class definition time instead of once per instance.
    _ID_REGEX = re.compile(r'([0-9]{15,20})$')

    def __init__(self):
        # Kept as an instance attribute for backward compatibility with any
        # code that reads ``self._id_regex`` directly.
        self._id_regex = self._ID_REGEX
        super().__init__()

    def _get_id_match(self, argument):
        """Return a regex match object if *argument* is a raw snowflake ID, else None."""
        return self._id_regex.match(argument)
class MemberConverter(IDConverter):
    """Converts to a :class:`~discord.Member`.
    All lookups are via the local guild. If in a DM context, then the lookup
    is done by the global cache.
    The lookup strategy is as follows (in order):
    1. Lookup by ID.
    2. Lookup by mention.
    3. Lookup by name#discrim
    4. Lookup by name
    5. Lookup by nickname
    .. versionchanged:: 1.5
        Raise :exc:`.MemberNotFound` instead of generic :exc:`.BadArgument`
    .. versionchanged:: 1.5.1
        This converter now lazily fetches members from the gateway and HTTP APIs,
        optionally caching the result if :attr:`.MemberCacheFlags.joined` is enabled.
    """
    async def query_member_named(self, guild, argument):
        # Gateway query by name/nickname; only cache results when the member
        # cache is configured to retain joined members.
        cache = guild._state.member_cache_flags.joined
        if len(argument) > 5 and argument[-5] == '#':
            # Looks like name#discriminator: query on the name portion and
            # match the discriminator against the results.
            username, _, discriminator = argument.rpartition('#')
            members = await guild.query_members(username, limit=100, cache=cache)
            return discord.utils.get(members, name=username, discriminator=discriminator)
        else:
            members = await guild.query_members(argument, limit=100, cache=cache)
            return discord.utils.find(lambda m: m.name == argument or m.nick == argument, members)
    async def query_member_by_id(self, bot, guild, user_id):
        # Fetch a single member by ID, preferring the gateway but falling
        # back to HTTP when the websocket is rate limited.
        ws = bot._get_websocket(shard_id=guild.shard_id)
        cache = guild._state.member_cache_flags.joined
        if ws.is_ratelimited():
            # If we're being rate limited on the WS, then fall back to using the HTTP API
            # So we don't have to wait ~60 seconds for the query to finish
            try:
                member = await guild.fetch_member(user_id)
            except discord.HTTPException:
                return None
            if cache:
                guild._add_member(member)
            return member
        # If we're not being rate limited then we can use the websocket to actually query
        members = await guild.query_members(limit=1, user_ids=[user_id], cache=cache)
        if not members:
            return None
        return members[0]
    async def convert(self, ctx, argument):
        """|coro| Convert *argument* to a :class:`~discord.Member` using the
        strategy documented on the class; raises :exc:`.MemberNotFound` when
        every lookup fails."""
        bot = ctx.bot
        # Raw snowflake or <@id>/<@!id> mention.
        match = self._get_id_match(argument) or re.match(r'<@!?([0-9]+)>$', argument)
        guild = ctx.guild
        result = None
        user_id = None
        if match is None:
            # not a mention...
            if guild:
                result = guild.get_member_named(argument)
            else:
                result = _get_from_guilds(bot, 'get_member_named', argument)
        else:
            user_id = int(match.group(1))
            if guild:
                result = guild.get_member(user_id) or _utils_get(ctx.message.mentions, id=user_id)
            else:
                result = _get_from_guilds(bot, 'get_member', user_id)
        if result is None:
            if guild is None:
                # DM context with no cached result: nothing more to try.
                raise MemberNotFound(argument)
            # Cache miss: lazily query the gateway/HTTP API.
            if user_id is not None:
                result = await self.query_member_by_id(bot, guild, user_id)
            else:
                result = await self.query_member_named(guild, argument)
            if not result:
                raise MemberNotFound(argument)
        return result
class UserConverter(IDConverter):
    """Converts to a :class:`~discord.User`.
    All lookups are via the global user cache.
    The lookup strategy is as follows (in order):
    1. Lookup by ID.
    2. Lookup by mention.
    3. Lookup by name#discrim
    4. Lookup by name
    .. versionchanged:: 1.5
        Raise :exc:`.UserNotFound` instead of generic :exc:`.BadArgument`
    .. versionchanged:: 1.6
        This converter now lazily fetches users from the HTTP APIs if an ID is passed
        and it's not available in cache.
    """
    async def convert(self, ctx, argument):
        match = self._get_id_match(argument) or re.match(r'<@!?([0-9]+)>$', argument)
        result = None
        state = ctx._state
        if match is not None:
            # ID or mention: check the user cache and the message's mentions
            # first, then fall back to an HTTP fetch.
            user_id = int(match.group(1))
            result = ctx.bot.get_user(user_id) or _utils_get(ctx.message.mentions, id=user_id)
            if result is None:
                try:
                    result = await ctx.bot.fetch_user(user_id)
                except discord.HTTPException:
                    raise UserNotFound(argument) from None
            return result
        arg = argument
        # Remove the '@' character if this is the first character from the argument
        # NOTE(review): an empty argument would raise IndexError here — the
        # command parser presumably never passes an empty string; confirm
        # before relying on this path with arbitrary input.
        if arg[0] == '@':
            # Remove first character
            arg = arg[1:]
        # check for discriminator if it exists,
        if len(arg) > 5 and arg[-5] == '#':
            discrim = arg[-4:]
            name = arg[:-5]
            predicate = lambda u: u.name == name and u.discriminator == discrim
            result = discord.utils.find(predicate, state._users.values())
            if result is not None:
                return result
        # Last resort: plain username match against the global user cache.
        predicate = lambda u: u.name == arg
        result = discord.utils.find(predicate, state._users.values())
        if result is None:
            raise UserNotFound(argument)
        return result
class PartialMessageConverter(Converter):
    """Converts to a :class:`discord.PartialMessage`.
    .. versionadded:: 1.7
    The creation strategy is as follows (in order):
    1. By "{channel ID}-{message ID}" (retrieved by shift-clicking on "Copy ID")
    2. By message ID (The message is assumed to be in the context channel.)
    3. By message URL
    """

    # Both patterns are compiled once at class definition time instead of on
    # every conversion attempt.
    _ID_REGEX = re.compile(r'(?:(?P<channel_id>[0-9]{15,20})-)?(?P<message_id>[0-9]{15,20})$')
    _LINK_REGEX = re.compile(
        r'https?://(?:(ptb|canary|www)\.)?discord(?:app)?\.com/channels/'
        r'(?:[0-9]{15,20}|@me)'
        r'/(?P<channel_id>[0-9]{15,20})/(?P<message_id>[0-9]{15,20})/?$'
    )

    def _get_id_matches(self, argument):
        """Parse *argument* into a ``(message_id, channel_id)`` pair.

        ``channel_id`` is ``None`` when the argument was a bare message ID.

        Raises
        -------
        :exc:`.MessageNotFound`
            The argument matched neither the ID nor the URL format.
        """
        match = self._ID_REGEX.match(argument) or self._LINK_REGEX.match(argument)
        if not match:
            raise MessageNotFound(argument)
        channel_id = match.group("channel_id")
        return int(match.group("message_id")), int(channel_id) if channel_id else None

    async def convert(self, ctx, argument):
        """|coro| Create a :class:`discord.PartialMessage` from *argument*."""
        message_id, channel_id = self._get_id_matches(argument)
        # Fall back to the invocation channel when only a message ID was given.
        channel = ctx.bot.get_channel(channel_id) if channel_id else ctx.channel
        if not channel:
            raise ChannelNotFound(channel_id)
        return discord.PartialMessage(channel=channel, id=message_id)
class MessageConverter(PartialMessageConverter):
    """Converts to a :class:`discord.Message`.
    .. versionadded:: 1.1
    The lookup strategy is as follows (in order):
    1. Lookup by "{channel ID}-{message ID}" (retrieved by shift-clicking on "Copy ID")
    2. Lookup by message ID (the message **must** be in the context channel)
    3. Lookup by message URL
    .. versionchanged:: 1.5
        Raise :exc:`.ChannelNotFound`, :exc:`.MessageNotFound` or :exc:`.ChannelNotReadable` instead of generic :exc:`.BadArgument`
    """
    async def convert(self, ctx, argument):
        message_id, channel_id = self._get_id_matches(argument)
        # Check the client's message cache before hitting the API.
        message = ctx.bot._connection._get_message(message_id)
        if message:
            return message
        # Fall back to the invocation channel when only a message ID was given.
        channel = ctx.bot.get_channel(channel_id) if channel_id else ctx.channel
        if not channel:
            raise ChannelNotFound(channel_id)
        try:
            return await channel.fetch_message(message_id)
        except discord.NotFound:
            raise MessageNotFound(argument)
        except discord.Forbidden:
            raise ChannelNotReadable(channel)
class TextChannelConverter(IDConverter):
    """Converts to a :class:`~discord.TextChannel`.
    All lookups are via the local guild. If in a DM context, then the lookup
    is done by the global cache.
    The lookup strategy is as follows (in order):
    1. Lookup by ID.
    2. Lookup by mention.
    3. Lookup by name
    .. versionchanged:: 1.5
        Raise :exc:`.ChannelNotFound` instead of generic :exc:`.BadArgument`
    """
    async def convert(self, ctx, argument):
        bot = ctx.bot
        # Raw snowflake or <#id> channel mention.
        match = self._get_id_match(argument) or re.match(r'<#([0-9]+)>$', argument)
        result = None
        guild = ctx.guild
        if match is None:
            # not a mention
            if guild:
                result = discord.utils.get(guild.text_channels, name=argument)
            else:
                def check(c):
                    return isinstance(c, discord.TextChannel) and c.name == argument
                result = discord.utils.find(check, bot.get_all_channels())
        else:
            channel_id = int(match.group(1))
            if guild:
                result = guild.get_channel(channel_id)
            else:
                result = _get_from_guilds(bot, 'get_channel', channel_id)
        # The ID path can resolve to any channel type; reject non-text results.
        if not isinstance(result, discord.TextChannel):
            raise ChannelNotFound(argument)
        return result
class VoiceChannelConverter(IDConverter):
    """Converts to a :class:`~discord.VoiceChannel`.
    All lookups are via the local guild. If in a DM context, then the lookup
    is done by the global cache.
    The lookup strategy is as follows (in order):
    1. Lookup by ID.
    2. Lookup by mention.
    3. Lookup by name
    .. versionchanged:: 1.5
        Raise :exc:`.ChannelNotFound` instead of generic :exc:`.BadArgument`
    """
    async def convert(self, ctx, argument):
        bot = ctx.bot
        # Raw snowflake or <#id> channel mention.
        match = self._get_id_match(argument) or re.match(r'<#([0-9]+)>$', argument)
        result = None
        guild = ctx.guild
        if match is None:
            # not a mention
            if guild:
                result = discord.utils.get(guild.voice_channels, name=argument)
            else:
                def check(c):
                    return isinstance(c, discord.VoiceChannel) and c.name == argument
                result = discord.utils.find(check, bot.get_all_channels())
        else:
            channel_id = int(match.group(1))
            if guild:
                result = guild.get_channel(channel_id)
            else:
                result = _get_from_guilds(bot, 'get_channel', channel_id)
        # The ID path can resolve to any channel type; reject non-voice results.
        if not isinstance(result, discord.VoiceChannel):
            raise ChannelNotFound(argument)
        return result
class StageChannelConverter(IDConverter):
    """Converts to a :class:`~discord.StageChannel`.
    .. versionadded:: 1.7
    All lookups are via the local guild. If in a DM context, then the lookup
    is done by the global cache.
    The lookup strategy is as follows (in order):
    1. Lookup by ID.
    2. Lookup by mention.
    3. Lookup by name
    """
    async def convert(self, ctx, argument):
        bot = ctx.bot
        # Raw snowflake or <#id> channel mention.
        match = self._get_id_match(argument) or re.match(r'<#([0-9]+)>$', argument)
        result = None
        guild = ctx.guild
        if match is None:
            # not a mention
            if guild:
                result = discord.utils.get(guild.stage_channels, name=argument)
            else:
                def check(c):
                    return isinstance(c, discord.StageChannel) and c.name == argument
                result = discord.utils.find(check, bot.get_all_channels())
        else:
            channel_id = int(match.group(1))
            if guild:
                result = guild.get_channel(channel_id)
            else:
                result = _get_from_guilds(bot, 'get_channel', channel_id)
        # The ID path can resolve to any channel type; reject non-stage results.
        if not isinstance(result, discord.StageChannel):
            raise ChannelNotFound(argument)
        return result
class CategoryChannelConverter(IDConverter):
    """Converts to a :class:`~discord.CategoryChannel`.
    All lookups are via the local guild. If in a DM context, then the lookup
    is done by the global cache.
    The lookup strategy is as follows (in order):
    1. Lookup by ID.
    2. Lookup by mention.
    3. Lookup by name
    .. versionchanged:: 1.5
        Raise :exc:`.ChannelNotFound` instead of generic :exc:`.BadArgument`
    """
    async def convert(self, ctx, argument):
        bot = ctx.bot
        # Raw snowflake or <#id> channel mention.
        match = self._get_id_match(argument) or re.match(r'<#([0-9]+)>$', argument)
        result = None
        guild = ctx.guild
        if match is None:
            # not a mention
            if guild:
                result = discord.utils.get(guild.categories, name=argument)
            else:
                def check(c):
                    return isinstance(c, discord.CategoryChannel) and c.name == argument
                result = discord.utils.find(check, bot.get_all_channels())
        else:
            channel_id = int(match.group(1))
            if guild:
                result = guild.get_channel(channel_id)
            else:
                result = _get_from_guilds(bot, 'get_channel', channel_id)
        # The ID path can resolve to any channel type; reject non-category results.
        if not isinstance(result, discord.CategoryChannel):
            raise ChannelNotFound(argument)
        return result
class StoreChannelConverter(IDConverter):
    """Converts to a :class:`~discord.StoreChannel`.
    All lookups are via the local guild. If in a DM context, then the lookup
    is done by the global cache.
    The lookup strategy is as follows (in order):
    1. Lookup by ID.
    2. Lookup by mention.
    3. Lookup by name.
    .. versionadded:: 1.7
    """
    async def convert(self, ctx, argument):
        bot = ctx.bot
        # Raw snowflake or <#id> channel mention.
        match = self._get_id_match(argument) or re.match(r'<#([0-9]+)>$', argument)
        result = None
        guild = ctx.guild

        def check(c):
            return isinstance(c, discord.StoreChannel) and c.name == argument

        if match is None:
            # Not an ID or mention; fall back to a name lookup. The type check
            # is part of the predicate so a same-named channel of a different
            # type earlier in the list cannot shadow an actual store channel
            # (matching the behaviour of the other typed channel converters).
            if guild:
                result = discord.utils.find(check, guild.channels)
            else:
                result = discord.utils.find(check, bot.get_all_channels())
        else:
            channel_id = int(match.group(1))
            if guild:
                result = guild.get_channel(channel_id)
            else:
                result = _get_from_guilds(bot, 'get_channel', channel_id)
        # The ID path can resolve to any channel type; reject non-store results.
        if not isinstance(result, discord.StoreChannel):
            raise ChannelNotFound(argument)
        return result
class ColourConverter(Converter):
    """Converts to a :class:`~discord.Colour`.
    .. versionchanged:: 1.5
        Add an alias named ColorConverter
    The following formats are accepted:
    - ``0x<hex>``
    - ``#<hex>``
    - ``0x#<hex>``
    - ``rgb(<number>, <number>, <number>)``
    - Any of the ``classmethod`` in :class:`Colour`
    - The ``_`` in the name can be optionally replaced with spaces.
    Like CSS, ``<number>`` can be either 0-255 or 0-100% and ``<hex>`` can be
    either a 6 digit hex number or a 3 digit hex shortcut (e.g. #fff).
    .. versionchanged:: 1.5
        Raise :exc:`.BadColourArgument` instead of generic :exc:`.BadArgument`
    .. versionchanged:: 1.7
        Added support for ``rgb`` function and 3-digit hex shortcuts
    """
    RGB_REGEX = re.compile(r'rgb\s*\((?P<r>[0-9]{1,3}%?)\s*,\s*(?P<g>[0-9]{1,3}%?)\s*,\s*(?P<b>[0-9]{1,3}%?)\s*\)')
    def parse_hex_number(self, argument):
        # Expand a 3-digit shortcut (e.g. 'fff') into its 6-digit form.
        arg = ''.join(i * 2 for i in argument) if len(argument) == 3 else argument
        try:
            value = int(arg, base=16)
            if not (0 <= value <= 0xFFFFFF):
                raise BadColourArgument(argument)
        except ValueError:
            raise BadColourArgument(argument)
        else:
            return discord.Color(value=value)
    def parse_rgb_number(self, argument, number):
        # Each component may be 0-255 or a CSS-style percentage 0%-100%.
        if number[-1] == '%':
            value = int(number[:-1])
            if not (0 <= value <= 100):
                raise BadColourArgument(argument)
            return round(255 * (value / 100))
        value = int(number)
        if not (0 <= value <= 255):
            raise BadColourArgument(argument)
        return value
    def parse_rgb(self, argument, *, regex=RGB_REGEX):
        match = regex.match(argument)
        if match is None:
            raise BadColourArgument(argument)
        red = self.parse_rgb_number(argument, match.group('r'))
        green = self.parse_rgb_number(argument, match.group('g'))
        blue = self.parse_rgb_number(argument, match.group('b'))
        return discord.Color.from_rgb(red, green, blue)
    async def convert(self, ctx, argument):
        # Fix: guard against an empty argument (e.g. a quoted "" token) so we
        # raise BadColourArgument instead of an IndexError from argument[0].
        if not argument:
            raise BadColourArgument(argument)
        if argument[0] == '#':
            return self.parse_hex_number(argument[1:])
        if argument[0:2] == '0x':
            rest = argument[2:]
            # Legacy backwards compatible syntax
            if rest.startswith('#'):
                return self.parse_hex_number(rest[1:])
            return self.parse_hex_number(rest)
        arg = argument.lower()
        if arg[0:3] == 'rgb':
            return self.parse_rgb(arg)
        arg = arg.replace(' ', '_')
        method = getattr(discord.Colour, arg, None)
        # Reject non-factory classmethods (from_rgb etc.) and anything that is
        # not a classmethod on Colour at all.
        if arg.startswith('from_') or method is None or not inspect.ismethod(method):
            raise BadColourArgument(arg)
        return method()
ColorConverter = ColourConverter
class RoleConverter(IDConverter):
    """Converts to a :class:`~discord.Role`.
    All lookups are via the local guild. If in a DM context, the converter raises
    :exc:`.NoPrivateMessage` exception.
    The lookup strategy is as follows (in order):
    1. Lookup by ID.
    2. Lookup by mention.
    3. Lookup by name
    .. versionchanged:: 1.5
        Raise :exc:`.RoleNotFound` instead of generic :exc:`.BadArgument`
    """
    async def convert(self, ctx, argument):
        guild = ctx.guild
        if not guild:
            # Roles only exist inside guilds; DMs cannot resolve one.
            raise NoPrivateMessage()
        # Accept a bare snowflake or a <@&id> role mention.
        id_match = self._get_id_match(argument) or re.match(r'<@&([0-9]+)>$', argument)
        if id_match is not None:
            role = guild.get_role(int(id_match.group(1)))
        else:
            role = discord.utils.get(guild._roles.values(), name=argument)
        if role is None:
            raise RoleNotFound(argument)
        return role
class GameConverter(Converter):
    """Converts the argument into a :class:`~discord.Game` with that name."""
    async def convert(self, ctx, argument):
        # No lookup or validation is needed; any string is a valid game name.
        return discord.Game(name=argument)
class InviteConverter(Converter):
    """Converts to a :class:`~discord.Invite`.
    This is done via an HTTP request using :meth:`.Bot.fetch_invite`.
    .. versionchanged:: 1.5
        Raise :exc:`.BadInviteArgument` instead of generic :exc:`.BadArgument`
    """
    async def convert(self, ctx, argument):
        try:
            return await ctx.bot.fetch_invite(argument)
        except Exception as exc:
            # Any failure (HTTP error, malformed code, ...) maps to one error type.
            raise BadInviteArgument() from exc
class GuildConverter(IDConverter):
    """Converts to a :class:`~discord.Guild`.
    The lookup strategy is as follows (in order):
    1. Lookup by ID.
    2. Lookup by name. (There is no disambiguation for Guilds with multiple matching names).
    .. versionadded:: 1.7
    """
    async def convert(self, ctx, argument):
        guild = None
        id_match = self._get_id_match(argument)
        if id_match is not None:
            guild = ctx.bot.get_guild(int(id_match.group(1)))
        if guild is None:
            # Fall back to a name search over every cached guild (this also
            # covers a numeric argument whose ID lookup missed).
            guild = discord.utils.get(ctx.bot.guilds, name=argument)
        if guild is None:
            raise GuildNotFound(argument)
        return guild
class EmojiConverter(IDConverter):
    """Converts to a :class:`~discord.Emoji`.
    All lookups are done for the local guild first, if available. If that lookup
    fails, then it checks the client's global cache.
    The lookup strategy is as follows (in order):
    1. Lookup by ID.
    2. Lookup by extracting ID from the emoji.
    3. Lookup by name
    .. versionchanged:: 1.5
        Raise :exc:`.EmojiNotFound` instead of generic :exc:`.BadArgument`
    """
    async def convert(self, ctx, argument):
        # Accept a bare snowflake or a full custom-emoji token like <a:name:id>.
        match = self._get_id_match(argument) or re.match(r'<a?:[a-zA-Z0-9\_]+:([0-9]+)>$', argument)
        result = None
        bot = ctx.bot
        guild = ctx.guild
        if match is None:
            # Try to get the emoji by name. Try local guild first.
            if guild:
                result = discord.utils.get(guild.emojis, name=argument)
            if result is None:
                # Fall back to every emoji the client can see.
                result = discord.utils.get(bot.emojis, name=argument)
        else:
            emoji_id = int(match.group(1))
            # Try to look up emoji by id.
            if guild:
                result = discord.utils.get(guild.emojis, id=emoji_id)
            if result is None:
                result = discord.utils.get(bot.emojis, id=emoji_id)
        if result is None:
            raise EmojiNotFound(argument)
        return result
class PartialEmojiConverter(Converter):
    """Converts to a :class:`~discord.PartialEmoji`.
    This is done by extracting the animated flag, name and ID from the emoji.
    .. versionchanged:: 1.5
        Raise :exc:`.PartialEmojiConversionFailure` instead of generic :exc:`.BadArgument`
    """
    async def convert(self, ctx, argument):
        match = re.match(r'<(a?):([a-zA-Z0-9\_]+):([0-9]+)>$', argument)
        if match is None:
            raise PartialEmojiConversionFailure(argument)
        animated_flag, emoji_name, emoji_id = match.groups()
        return discord.PartialEmoji.with_state(
            ctx.bot._connection,
            animated=bool(animated_flag),  # group is '' for static emoji
            name=emoji_name,
            id=int(emoji_id),
        )
class clean_content(Converter):
    """Converts the argument to mention scrubbed version of
    said content.
    This behaves similarly to :attr:`~discord.Message.clean_content`.
    Attributes
    ------------
    fix_channel_mentions: :class:`bool`
        Whether to clean channel mentions.
    use_nicknames: :class:`bool`
        Whether to use nicknames when transforming mentions.
    escape_markdown: :class:`bool`
        Whether to also escape special markdown characters.
    remove_markdown: :class:`bool`
        Whether to also remove special markdown characters. This option is not supported with ``escape_markdown``
        .. versionadded:: 1.7
    """
    def __init__(self, *, fix_channel_mentions=False, use_nicknames=True, escape_markdown=False, remove_markdown=False):
        self.fix_channel_mentions = fix_channel_mentions
        self.use_nicknames = use_nicknames
        self.escape_markdown = escape_markdown
        self.remove_markdown = remove_markdown
    async def convert(self, ctx, argument):
        message = ctx.message
        # Maps raw mention tokens (e.g. '<@123>') to their display replacements.
        transformations = {}
        if self.fix_channel_mentions and ctx.guild:
            def resolve_channel(id, *, _get=ctx.guild.get_channel):
                ch = _get(id)
                # Returns a (token, replacement) pair consumed by dict.update below.
                return ('<#%s>' % id), ('#' + ch.name if ch else '#deleted-channel')
            transformations.update(resolve_channel(channel) for channel in message.raw_channel_mentions)
        if self.use_nicknames and ctx.guild:
            def resolve_member(id, *, _get=ctx.guild.get_member):
                m = _get(id)
                return '@' + m.display_name if m else '@deleted-user'
        else:
            def resolve_member(id, *, _get=ctx.bot.get_user):
                m = _get(id)
                return '@' + m.name if m else '@deleted-user'
        # Both the plain (<@id>) and nickname (<@!id>) mention forms are mapped.
        transformations.update(
            ('<@%s>' % member_id, resolve_member(member_id))
            for member_id in message.raw_mentions
        )
        transformations.update(
            ('<@!%s>' % member_id, resolve_member(member_id))
            for member_id in message.raw_mentions
        )
        if ctx.guild:
            def resolve_role(_id, *, _find=ctx.guild.get_role):
                r = _find(_id)
                return '@' + r.name if r else '@deleted-role'
            transformations.update(
                ('<@&%s>' % role_id, resolve_role(role_id))
                for role_id in message.raw_role_mentions
            )
        def repl(obj):
            return transformations.get(obj.group(0), '')
        # Replace every collected token in a single regex pass.
        pattern = re.compile('|'.join(transformations.keys()))
        result = pattern.sub(repl, argument)
        if self.escape_markdown:
            result = discord.utils.escape_markdown(result)
        elif self.remove_markdown:
            result = discord.utils.remove_markdown(result)
        # Completely ensure no mentions escape:
        return discord.utils.escape_mentions(result)
class _Greedy:
    """Implements the ``Greedy[X]`` annotation used by the argument parser.
    ``Greedy[X]`` makes a parameter consume arguments until conversion to
    ``X`` fails; ``__getitem__`` validates ``X`` and returns a parameterized
    instance carrying it in ``converter``.
    """
    __slots__ = ('converter',)
    def __init__(self, *, converter=None):
        self.converter = converter
    def __getitem__(self, params):
        if not isinstance(params, tuple):
            params = (params,)
        if len(params) != 1:
            raise TypeError('Greedy[...] only takes a single argument')
        converter = params[0]
        # The parameter must be something the parser can invoke later: a plain
        # type/callable, a Converter instance, or a typing generic.
        if not (callable(converter) or isinstance(converter, Converter) or hasattr(converter, '__origin__')):
            raise TypeError('Greedy[...] expects a type or a Converter instance.')
        # Greedy[str] would consume everything, Greedy[None]/Greedy[Greedy]
        # are meaningless, and Optional members defeat the failure-stop rule.
        if converter is str or converter is type(None) or converter is _Greedy:
            raise TypeError('Greedy[%s] is invalid.' % converter.__name__)
        if getattr(converter, '__origin__', None) is typing.Union and type(None) in converter.__args__:
            raise TypeError('Greedy[%r] is invalid.' % converter)
        return self.__class__(converter=converter)
# Fix: this assignment was corrupted by stray packaging-metadata text appended
# to the line, which made the module unparseable.
Greedy = _Greedy()
import asyncio
import functools
import inspect
import typing
import datetime
import discord
from .errors import *
from .cooldowns import Cooldown, BucketType, CooldownMapping, MaxConcurrency
from . import converter as converters
from ._types import _BaseCommand
from .cog import Cog
# Public API of this module: the command classes, their factory decorators,
# and the built-in check/hook decorators.
__all__ = (
    'Command',
    'Group',
    'GroupMixin',
    'command',
    'group',
    'has_role',
    'has_permissions',
    'has_any_role',
    'check',
    'check_any',
    'before_invoke',
    'after_invoke',
    'bot_has_role',
    'bot_has_permissions',
    'bot_has_any_role',
    'cooldown',
    'max_concurrency',
    'dm_only',
    'guild_only',
    'is_owner',
    'is_nsfw',
    'has_guild_permissions',
    'bot_has_guild_permissions'
)
def wrap_callback(coro):
    """Wrap *coro* so unexpected exceptions surface as :exc:`CommandInvokeError`.
    :exc:`CommandError` subclasses pass through untouched and cancellation is
    swallowed (the wrapper returns ``None``).
    """
    @functools.wraps(coro)
    async def wrapped(*args, **kwargs):
        try:
            return await coro(*args, **kwargs)
        except CommandError:
            raise
        except asyncio.CancelledError:
            return
        except Exception as exc:
            raise CommandInvokeError(exc) from exc
    return wrapped
def hooked_wrapped_callback(command, ctx, coro):
    # Wraps the command callback so that any failure marks ctx.command_failed,
    # unexpected exceptions become CommandInvokeError, and the max-concurrency
    # slot plus the after-invoke hooks are always released/called on exit.
    @functools.wraps(coro)
    async def wrapped(*args, **kwargs):
        try:
            ret = await coro(*args, **kwargs)
        except CommandError:
            ctx.command_failed = True
            raise
        except asyncio.CancelledError:
            ctx.command_failed = True
            return
        except Exception as exc:
            ctx.command_failed = True
            raise CommandInvokeError(exc) from exc
        finally:
            # Runs on success, error and cancellation alike.
            if command._max_concurrency is not None:
                await command._max_concurrency.release(ctx)
            await command.call_after_hooks(ctx)
        return ret
    return wrapped
def _convert_to_bool(argument):
    """Parse a human-friendly truthy/falsy word into a :class:`bool`.
    Raises :exc:`BadBoolArgument` (with the lowercased input) otherwise.
    """
    lowered = argument.lower()
    if lowered in {'yes', 'y', 'true', 't', '1', 'enable', 'on'}:
        return True
    if lowered in {'no', 'n', 'false', 'f', '0', 'disable', 'off'}:
        return False
    raise BadBoolArgument(lowered)
class _CaseInsensitiveDict(dict):
    """A dict that casefolds string keys on every store and lookup.
    Keys are kept casefolded, so iteration yields the folded form. Only the
    access methods the command framework uses are overridden.
    """
    def __contains__(self, key):
        return super().__contains__(key.casefold())
    def __getitem__(self, key):
        return super().__getitem__(key.casefold())
    def __setitem__(self, key, value):
        super().__setitem__(key.casefold(), value)
    def __delitem__(self, key):
        return super().__delitem__(key.casefold())
    def get(self, key, default=None):
        return super().get(key.casefold(), default)
    def pop(self, key, default=None):
        return super().pop(key.casefold(), default)
class Command(_BaseCommand):
r"""A class that implements the protocol for a bot text command.
These are not created manually, instead they are created via the
decorator or functional interface.
Attributes
-----------
name: :class:`str`
The name of the command.
callback: :ref:`coroutine <coroutine>`
The coroutine that is executed when the command is called.
help: :class:`str`
The long help text for the command.
brief: Optional[:class:`str`]
The short help text for the command.
usage: Optional[:class:`str`]
A replacement for arguments in the default help text.
aliases: Union[List[:class:`str`], Tuple[:class:`str`]]
The list of aliases the command can be invoked under.
enabled: :class:`bool`
A boolean that indicates if the command is currently enabled.
If the command is invoked while it is disabled, then
:exc:`.DisabledCommand` is raised to the :func:`.on_command_error`
event. Defaults to ``True``.
parent: Optional[:class:`Command`]
The parent command that this command belongs to. ``None`` if there
isn't one.
cog: Optional[:class:`Cog`]
The cog that this command belongs to. ``None`` if there isn't one.
checks: List[Callable[[:class:`.Context`], :class:`bool`]]
A list of predicates that verifies if the command could be executed
with the given :class:`.Context` as the sole parameter. If an exception
is necessary to be thrown to signal failure, then one inherited from
:exc:`.CommandError` should be used. Note that if the checks fail then
:exc:`.CheckFailure` exception is raised to the :func:`.on_command_error`
event.
description: :class:`str`
The message prefixed into the default help command.
hidden: :class:`bool`
If ``True``\, the default help command does not show this in the
help output.
rest_is_raw: :class:`bool`
If ``False`` and a keyword-only argument is provided then the keyword
only argument is stripped and handled as if it was a regular argument
that handles :exc:`.MissingRequiredArgument` and default values in a
regular matter rather than passing the rest completely raw. If ``True``
then the keyword-only argument will pass in the rest of the arguments
in a completely raw matter. Defaults to ``False``.
invoked_subcommand: Optional[:class:`Command`]
The subcommand that was invoked, if any.
require_var_positional: :class:`bool`
If ``True`` and a variadic positional argument is specified, requires
the user to specify at least one argument. Defaults to ``False``.
.. versionadded:: 1.5
ignore_extra: :class:`bool`
If ``True``\, ignores extraneous strings passed to a command if all its
requirements are met (e.g. ``?foo a b c`` when only expecting ``a``
and ``b``). Otherwise :func:`.on_command_error` and local error handlers
are called with :exc:`.TooManyArguments`. Defaults to ``True``.
cooldown_after_parsing: :class:`bool`
If ``True``\, cooldown processing is done after argument parsing,
which calls converters. If ``False`` then cooldown processing is done
first and then the converters are called second. Defaults to ``False``.
extras: :class:`dict`
A dict of user provided extras to attach to the Command.
.. note::
This object may be copied by the library.
.. versionadded:: 1.7.3.2
"""
    def __new__(cls, *args, **kwargs):
        """Create the instance and stash the constructor keyword arguments.
        The saved ``__original_kwargs__`` are replayed through ``__init__``
        by :meth:`copy` and :meth:`update`.
        """
        # if you're wondering why this is done, it's because we need to ensure
        # we have a complete original copy of **kwargs even for classes that
        # mess with it by popping before delegating to the subclass __init__.
        # In order to do this, we need to control the instance creation and
        # inject the original kwargs through __new__ rather than doing it
        # inside __init__.
        self = super().__new__(cls)
        # we do a shallow copy because it's probably the most common use case.
        # this could potentially break if someone modifies a list or something
        # while it's in movement, but for now this is the cheapest and
        # fastest way to do what we want.
        self.__original_kwargs__ = kwargs.copy()
        return self
    def __init__(self, func, **kwargs):
        """Initialise the command from its coroutine callback and options.
        Raises :exc:`TypeError` if *func* is not a coroutine function, the
        name is not a string, or aliases are not a list/tuple.
        """
        if not asyncio.iscoroutinefunction(func):
            raise TypeError('Callback must be a coroutine.')
        self.name = name = kwargs.get('name') or func.__name__
        if not isinstance(name, str):
            raise TypeError('Name of a command must be a string.')
        # Assigning the callback also derives self.module and self.params
        # (see the callback setter).
        self.callback = func
        self.enabled = kwargs.get('enabled', True)
        help_doc = kwargs.get('help')
        if help_doc is not None:
            help_doc = inspect.cleandoc(help_doc)
        else:
            # Fall back to the callback's docstring for the help text.
            help_doc = inspect.getdoc(func)
            if isinstance(help_doc, bytes):
                help_doc = help_doc.decode('utf-8')
        self.help = help_doc
        self.brief = kwargs.get('brief')
        self.usage = kwargs.get('usage')
        self.rest_is_raw = kwargs.get('rest_is_raw', False)
        self.aliases = kwargs.get('aliases', [])
        self.extras = kwargs.get('extras', {})
        if not isinstance(self.aliases, (list, tuple)):
            raise TypeError("Aliases of a command must be a list or a tuple of strings.")
        self.description = inspect.cleandoc(kwargs.get('description', ''))
        self.hidden = kwargs.get('hidden', False)
        # State attached by decorators on the callback wins over kwargs;
        # checks are reversed so they run in declaration order.
        try:
            checks = func.__commands_checks__
            checks.reverse()
        except AttributeError:
            checks = kwargs.get('checks', [])
        finally:
            self.checks = checks
        try:
            cooldown = func.__commands_cooldown__
        except AttributeError:
            cooldown = kwargs.get('cooldown')
        finally:
            self._buckets = CooldownMapping(cooldown)
        try:
            max_concurrency = func.__commands_max_concurrency__
        except AttributeError:
            max_concurrency = kwargs.get('max_concurrency')
        finally:
            self._max_concurrency = max_concurrency
        self.require_var_positional = kwargs.get('require_var_positional', False)
        self.ignore_extra = kwargs.get('ignore_extra', True)
        self.cooldown_after_parsing = kwargs.get('cooldown_after_parsing', False)
        self.cog = None
        # bandaid for the fact that sometimes parent can be the bot instance
        parent = kwargs.get('parent')
        self.parent = parent if isinstance(parent, _BaseCommand) else None
        try:
            before_invoke = func.__before_invoke__
        except AttributeError:
            self._before_invoke = None
        else:
            self.before_invoke(before_invoke)
        try:
            after_invoke = func.__after_invoke__
        except AttributeError:
            self._after_invoke = None
        else:
            self.after_invoke(after_invoke)
    @property
    def raw(self):
        # NOTE(review): ``self._raw`` is never assigned in this class's
        # ``__init__``; accessing this property raises AttributeError unless a
        # subclass or external code sets ``_raw`` first -- confirm intended.
        return self._raw
    @property
    def callback(self):
        """:ref:`coroutine <coroutine>`: The coroutine executed when the command is invoked."""
        return self._callback
    @callback.setter
    def callback(self, function):
        # Re-deriving module/params here keeps them in sync if the callback
        # is swapped after construction.
        self._callback = function
        self.module = function.__module__
        signature = inspect.signature(function)
        self.params = signature.parameters.copy()
        # PEP-563 allows postponing evaluation of annotations with a __future__
        # import. When postponed, Parameter.annotation will be a string and must
        # be replaced with the real value for the converters to work later on
        for key, value in self.params.items():
            if isinstance(value.annotation, str):
                # Evaluate in the function's own globals so references resolve.
                self.params[key] = value = value.replace(annotation=eval(value.annotation, function.__globals__))
            # fail early for when someone passes an unparameterized Greedy type
            if value.annotation is converters.Greedy:
                raise TypeError('Unparameterized Greedy[...] is disallowed in signature.')
    def add_check(self, func):
        """Adds a check to the command.
        This is the non-decorator interface to :func:`.check`.
        .. versionadded:: 1.3
        Parameters
        -----------
        func
            The function that will be used as a check.
        """
        # Appended to the end; list order is preserved for later evaluation.
        self.checks.append(func)
def remove_check(self, func):
"""Removes a check from the command.
This function is idempotent and will not raise an exception
if the function is not in the command's checks.
.. versionadded:: 1.3
Parameters
-----------
func
The function to remove from the checks.
"""
try:
self.checks.remove(func)
except ValueError:
pass
    def update(self, **kwargs):
        """Updates :class:`Command` instance with updated attribute.
        This works similarly to the :func:`.command` decorator in terms
        of parameters in that they are passed to the :class:`Command` or
        subclass constructors, sans the name and callback.
        """
        # Re-run __init__ with the original kwargs, overridden by the new ones.
        self.__init__(self.callback, **dict(self.__original_kwargs__, **kwargs))
    async def __call__(self, *args, **kwargs):
        """|coro|
        Calls the internal callback that the command holds.
        .. note::
            This bypasses all mechanisms -- including checks, converters,
            invoke hooks, cooldowns, etc. You must take care to pass
            the proper arguments and types to this function.
        .. versionadded:: 1.3
        """
        if self.cog is not None:
            # Callbacks defined inside a cog need the cog instance as `self`.
            return await self.callback(self.cog, *args, **kwargs)
        else:
            return await self.callback(*args, **kwargs)
def _ensure_assignment_on_copy(self, other):
other._before_invoke = self._before_invoke
other._after_invoke = self._after_invoke
if self.checks != other.checks:
other.checks = self.checks.copy()
if self._buckets.valid and not other._buckets.valid:
other._buckets = self._buckets.copy()
if self._max_concurrency != other._max_concurrency:
other._max_concurrency = self._max_concurrency.copy()
try:
other.on_error = self.on_error
except AttributeError:
pass
return other
    def copy(self):
        """Creates a copy of this command.
        Returns
        --------
        :class:`Command`
            A new instance of this command.
        """
        # Rebuild from the pristine constructor kwargs, then carry over
        # runtime state (hooks, checks, cooldowns, error handler).
        ret = self.__class__(self.callback, **self.__original_kwargs__)
        return self._ensure_assignment_on_copy(ret)
    def _update_copy(self, kwargs):
        # Copy with extra kwargs merged in; note that __original_kwargs__
        # takes precedence over the supplied overrides here (the reverse of
        # the precedence used by update()).
        if kwargs:
            kw = kwargs.copy()
            kw.update(self.__original_kwargs__)
            copy = self.__class__(self.callback, **kw)
            return self._ensure_assignment_on_copy(copy)
        else:
            return self.copy()
    async def dispatch_error(self, ctx, error):
        # Error propagation order: command-local handler, then the cog-level
        # handler, and finally (always) the global command_error event.
        ctx.command_failed = True
        cog = self.cog
        try:
            coro = self.on_error
        except AttributeError:
            pass
        else:
            injected = wrap_callback(coro)
            if cog is not None:
                # Handlers defined inside a cog need the cog passed as `self`.
                await injected(cog, ctx, error)
            else:
                await injected(ctx, error)
        try:
            if cog is not None:
                local = Cog._get_overridden_method(cog.cog_command_error)
                if local is not None:
                    wrapped = wrap_callback(local)
                    await wrapped(ctx, error)
        finally:
            # The global event fires even if a handler above raised.
            ctx.bot.dispatch('command_error', ctx, error)
    async def _actual_conversion(self, ctx, converter, argument, param):
        """Run a single converter against one argument string.
        Resolution order: the bool shim, the discord-model to ext-converter
        swap, Converter subclasses/instances, then a plain ``converter(argument)``
        call for simple types.
        """
        if converter is bool:
            return _convert_to_bool(argument)
        try:
            module = converter.__module__
        except AttributeError:
            pass
        else:
            # Swap e.g. discord.Member for the matching commands MemberConverter.
            if module is not None and (module.startswith('discord.') and not module.endswith('converter')):
                converter = getattr(converters, converter.__name__ + 'Converter', converter)
        try:
            if inspect.isclass(converter):
                if issubclass(converter, converters.Converter):
                    instance = converter()
                    ret = await instance.convert(ctx, argument)
                    return ret
                else:
                    # Duck-typed classmethod convert() is also accepted.
                    method = getattr(converter, 'convert', None)
                    if method is not None and inspect.ismethod(method):
                        ret = await method(ctx, argument)
                        return ret
            elif isinstance(converter, converters.Converter):
                ret = await converter.convert(ctx, argument)
                return ret
        except CommandError:
            raise
        except Exception as exc:
            raise ConversionError(converter, exc) from exc
        # Fallback: treat the converter as a plain callable such as int/float.
        try:
            return converter(argument)
        except CommandError:
            raise
        except Exception as exc:
            try:
                name = converter.__name__
            except AttributeError:
                name = converter.__class__.__name__
            raise BadArgument('Converting to "{}" failed for parameter "{}".'.format(name, param.name)) from exc
    async def do_conversion(self, ctx, converter, argument, param):
        """Convert *argument*, expanding ``typing.Union`` annotations.
        For ``Optional[X]`` the NoneType member short-circuits to the
        parameter default once every preceding union member has failed.
        """
        try:
            origin = converter.__origin__
        except AttributeError:
            pass
        else:
            if origin is typing.Union:
                errors = []
                _NoneType = type(None)
                for conv in converter.__args__:
                    # if we got to this part in the code, then the previous conversions have failed
                    # so we should just undo the view, return the default, and allow parsing to continue
                    # with the other parameters
                    if conv is _NoneType and param.kind != param.VAR_POSITIONAL:
                        ctx.view.undo()
                        return None if param.default is param.empty else param.default
                    try:
                        value = await self._actual_conversion(ctx, conv, argument, param)
                    except CommandError as exc:
                        errors.append(exc)
                    else:
                        return value
                # if we're here, then we failed all the converters
                raise BadUnionArgument(param, converter.__args__, errors)
        return await self._actual_conversion(ctx, converter, argument, param)
def _get_converter(self, param):
converter = param.annotation
if converter is param.empty:
if param.default is not param.empty:
converter = str if param.default is None else type(param.default)
else:
converter = str
return converter
    async def transform(self, ctx, param):
        """Parse one parameter's argument from the string view and convert it."""
        required = param.default is param.empty
        converter = self._get_converter(param)
        consume_rest_is_special = param.kind == param.KEYWORD_ONLY and not self.rest_is_raw
        view = ctx.view
        view.skip_ws()
        # The greedy converter is simple -- it keeps going until it fails in which case,
        # it undos the view ready for the next parameter to use instead
        if type(converter) is converters._Greedy:
            if param.kind == param.POSITIONAL_OR_KEYWORD or param.kind == param.POSITIONAL_ONLY:
                return await self._transform_greedy_pos(ctx, param, required, converter.converter)
            elif param.kind == param.VAR_POSITIONAL:
                return await self._transform_greedy_var_pos(ctx, param, converter.converter)
            else:
                # if we're here, then it's a KEYWORD_ONLY param type
                # since this is mostly useless, we'll helpfully transform Greedy[X]
                # into just X and do the parsing that way.
                converter = converter.converter
        if view.eof:
            if param.kind == param.VAR_POSITIONAL:
                raise RuntimeError() # break the loop
            if required:
                # Optional[X] parameters silently fall back to None.
                if self._is_typing_optional(param.annotation):
                    return None
                raise MissingRequiredArgument(param)
            return param.default
        previous = view.index
        if consume_rest_is_special:
            # A keyword-only parameter consumes the remainder of the input.
            argument = view.read_rest().strip()
        else:
            argument = view.get_quoted_word()
        view.previous = previous
        return await self.do_conversion(ctx, converter, argument, param)
    async def _transform_greedy_pos(self, ctx, param, required, converter):
        # Consume words until one fails to convert, then rewind the view to
        # just before the failing word so the next parameter can take it.
        view = ctx.view
        result = []
        while not view.eof:
            # for use with a manual undo
            previous = view.index
            view.skip_ws()
            try:
                argument = view.get_quoted_word()
                value = await self.do_conversion(ctx, converter, argument, param)
            except (CommandError, ArgumentParsingError):
                view.index = previous
                break
            else:
                result.append(value)
        if not result and not required:
            # Nothing matched and the parameter is optional -> use its default.
            return param.default
        return result
    async def _transform_greedy_var_pos(self, ctx, param, converter):
        # One conversion per *args element; a failure rewinds the view and
        # stops the caller's consume loop via RuntimeError.
        view = ctx.view
        previous = view.index
        try:
            argument = view.get_quoted_word()
            value = await self.do_conversion(ctx, converter, argument, param)
        except (CommandError, ArgumentParsingError):
            view.index = previous
            raise RuntimeError() from None # break loop
        else:
            return value
    @property
    def clean_params(self):
        """OrderedDict[:class:`str`, :class:`inspect.Parameter`]:
        Retrieves the parameter OrderedDict without the context or self parameters.
        Useful for inspecting signature.
        """
        # Work on a copy so self.params is never mutated.
        result = self.params.copy()
        if self.cog is not None:
            # first parameter is self
            result.popitem(last=False)
        try:
            # first/second parameter is context
            result.popitem(last=False)
        except Exception:
            raise ValueError('Missing context parameter') from None
        return result
@property
def full_parent_name(self):
""":class:`str`: Retrieves the fully qualified parent command name.
This the base command name required to execute it. For example,
in ``?one two three`` the parent name would be ``one two``.
"""
entries = []
command = self
while command.parent is not None:
command = command.parent
entries.append(command.name)
return ' '.join(reversed(entries))
@property
def parents(self):
"""List[:class:`Command`]: Retrieves the parents of this command.
If the command has no parents then it returns an empty :class:`list`.
For example in commands ``?a b c test``, the parents are ``[c, b, a]``.
.. versionadded:: 1.1
"""
entries = []
command = self
while command.parent is not None:
command = command.parent
entries.append(command)
return entries
@property
def root_parent(self):
"""Optional[:class:`Command`]: Retrieves the root parent of this command.
If the command has no parents then it returns ``None``.
For example in commands ``?a b c test``, the root parent is ``a``.
"""
if not self.parent:
return None
return self.parents[-1]
@property
def qualified_name(self):
""":class:`str`: Retrieves the fully qualified command name.
This is the full parent name with the command name as well.
For example, in ``?one two three`` the qualified name would be
``one two three``.
"""
parent = self.full_parent_name
if parent:
return parent + ' ' + self.name
else:
return self.name
    def __str__(self):
        # Commands stringify to their fully qualified name, e.g. "tag create".
        return self.qualified_name
    async def _parse_arguments(self, ctx):
        """Populate ``ctx.args`` / ``ctx.kwargs`` from the message string view."""
        ctx.args = [ctx] if self.cog is None else [self.cog, ctx]
        ctx.kwargs = {}
        args = ctx.args
        kwargs = ctx.kwargs
        view = ctx.view
        iterator = iter(self.params.items())
        if self.cog is not None:
            # we have 'self' as the first parameter so just advance
            # the iterator and resume parsing
            try:
                next(iterator)
            except StopIteration:
                fmt = 'Callback for {0.name} command is missing "self" parameter.'
                raise discord.ClientException(fmt.format(self))
        # next we have the 'ctx' as the next parameter
        try:
            next(iterator)
        except StopIteration:
            fmt = 'Callback for {0.name} command is missing "ctx" parameter.'
            raise discord.ClientException(fmt.format(self))
        for name, param in iterator:
            if param.kind == param.POSITIONAL_OR_KEYWORD or param.kind == param.POSITIONAL_ONLY:
                transformed = await self.transform(ctx, param)
                args.append(transformed)
            elif param.kind == param.KEYWORD_ONLY:
                # kwarg only param denotes "consume rest" semantics
                if self.rest_is_raw:
                    converter = self._get_converter(param)
                    argument = view.read_rest()
                    kwargs[name] = await self.do_conversion(ctx, converter, argument, param)
                else:
                    kwargs[name] = await self.transform(ctx, param)
                # A keyword-only parameter is always the last one parsed.
                break
            elif param.kind == param.VAR_POSITIONAL:
                if view.eof and self.require_var_positional:
                    raise MissingRequiredArgument(param)
                while not view.eof:
                    try:
                        transformed = await self.transform(ctx, param)
                        args.append(transformed)
                    except RuntimeError:
                        # Raised by transform to signal the end of *args input.
                        break
        if not self.ignore_extra and not view.eof:
            raise TooManyArguments('Too many arguments passed to ' + self.qualified_name)
    async def call_before_hooks(self, ctx):
        """Run pre-invoke hooks in order: command-local, cog-level, bot-global."""
        # now that we're done preparing we can call the pre-command hooks
        # first, call the command local hook:
        cog = self.cog
        if self._before_invoke is not None:
            # should be cog if @commands.before_invoke is used
            instance = getattr(self._before_invoke, '__self__', cog)
            # __self__ only exists for methods, not functions
            # however, if @command.before_invoke is used, it will be a function
            if instance:
                await self._before_invoke(instance, ctx)
            else:
                await self._before_invoke(ctx)
        # call the cog local hook if applicable:
        if cog is not None:
            hook = Cog._get_overridden_method(cog.cog_before_invoke)
            if hook is not None:
                await hook(ctx)
        # call the bot global hook if necessary
        hook = ctx.bot._before_invoke
        if hook is not None:
            await hook(ctx)
    async def call_after_hooks(self, ctx):
        """Run post-invoke hooks in order: command-local, cog-level, bot-global."""
        cog = self.cog
        if self._after_invoke is not None:
            # __self__ only exists for bound methods; plain functions fall
            # back to the cog (or None) as the instance argument.
            instance = getattr(self._after_invoke, '__self__', cog)
            if instance:
                await self._after_invoke(instance, ctx)
            else:
                await self._after_invoke(ctx)
        # call the cog local hook if applicable:
        if cog is not None:
            hook = Cog._get_overridden_method(cog.cog_after_invoke)
            if hook is not None:
                await hook(ctx)
        hook = ctx.bot._after_invoke
        if hook is not None:
            await hook(ctx)
    def _prepare_cooldowns(self, ctx):
        # Cooldowns key off the message timestamp; the edited time wins so
        # re-invocations via message edit are rate-limited consistently.
        if self._buckets.valid:
            dt = ctx.message.edited_at or ctx.message.created_at
            current = dt.replace(tzinfo=datetime.timezone.utc).timestamp()
            bucket = self._buckets.get_bucket(ctx.message, current)
            retry_after = bucket.update_rate_limit(current)
            if retry_after:
                raise CommandOnCooldown(bucket, retry_after)
    async def prepare(self, ctx):
        """Run checks, acquire the concurrency slot, apply cooldowns and
        parse arguments before the callback may be invoked."""
        ctx.command = self
        if not await self.can_run(ctx):
            raise CheckFailure('The check functions for command {0.qualified_name} failed.'.format(self))
        if self._max_concurrency is not None:
            await self._max_concurrency.acquire(ctx)
        try:
            # cooldown_after_parsing flips whether converters run before the
            # cooldown bucket is charged.
            if self.cooldown_after_parsing:
                await self._parse_arguments(ctx)
                self._prepare_cooldowns(ctx)
            else:
                self._prepare_cooldowns(ctx)
                await self._parse_arguments(ctx)
            await self.call_before_hooks(ctx)
        except:
            # Give the concurrency slot back if anything above failed.
            if self._max_concurrency is not None:
                await self._max_concurrency.release(ctx)
            raise
    def is_on_cooldown(self, ctx):
        """Checks whether the command is currently on cooldown.
        Parameters
        -----------
        ctx: :class:`.Context`
            The invocation context to use when checking the commands cooldown status.
        Returns
        --------
        :class:`bool`
            A boolean indicating if the command is on cooldown.
        """
        if not self._buckets.valid:
            # No cooldown configured for this command at all.
            return False
        bucket = self._buckets.get_bucket(ctx.message)
        dt = ctx.message.edited_at or ctx.message.created_at
        current = dt.replace(tzinfo=datetime.timezone.utc).timestamp()
        return bucket.get_tokens(current) == 0
def reset_cooldown(self, ctx):
"""Resets the cooldown on this command.
Parameters
-----------
ctx: :class:`.Context`
The invocation context to reset the cooldown under.
"""
if self._buckets.valid:
bucket = self._buckets.get_bucket(ctx.message)
bucket.reset()
    def get_cooldown_retry_after(self, ctx):
        """Retrieves the amount of seconds before this command can be tried again.
        .. versionadded:: 1.4
        Parameters
        -----------
        ctx: :class:`.Context`
            The invocation context to retrieve the cooldown from.
        Returns
        --------
        :class:`float`
            The amount of time left on this command's cooldown in seconds.
            If this is ``0.0`` then the command isn't on cooldown.
        """
        if self._buckets.valid:
            bucket = self._buckets.get_bucket(ctx.message)
            # Same timestamp rule as _prepare_cooldowns: edited time wins.
            dt = ctx.message.edited_at or ctx.message.created_at
            current = dt.replace(tzinfo=datetime.timezone.utc).timestamp()
            return bucket.get_retry_after(current)
        return 0.0
    async def invoke(self, ctx):
        """|coro|

        Runs the command under *ctx*: prepares it (checks, concurrency,
        cooldowns, argument parsing) and then calls the wrapped callback.
        """
        await self.prepare(ctx)
        # terminate the invoked_subcommand chain.
        # since we're in a regular command (and not a group) then
        # the invoked subcommand is None.
        ctx.invoked_subcommand = None
        ctx.subcommand_passed = None
        # the wrapper takes care of after-invoke hooks and releasing the
        # max-concurrency slot once the callback finishes
        injected = hooked_wrapped_callback(self, ctx, self.callback)
        await injected(*ctx.args, **ctx.kwargs)
    async def reinvoke(self, ctx, *, call_hooks=False):
        """|coro|

        Re-runs the command without the usual checks, cooldowns or
        max-concurrency bookkeeping; before/after hooks run only when
        ``call_hooks`` is ``True``.
        """
        ctx.command = self
        await self._parse_arguments(ctx)
        if call_hooks:
            await self.call_before_hooks(ctx)
        ctx.invoked_subcommand = None
        try:
            # call the raw callback directly -- no hooked wrapper here, so
            # cooldowns and concurrency are deliberately bypassed
            await self.callback(*ctx.args, **ctx.kwargs)
        except:
            ctx.command_failed = True
            raise
        finally:
            if call_hooks:
                await self.call_after_hooks(ctx)
def error(self, coro):
"""A decorator that registers a coroutine as a local error handler.
A local error handler is an :func:`.on_command_error` event limited to
a single command. However, the :func:`.on_command_error` is still
invoked afterwards as the catch-all.
Parameters
-----------
coro: :ref:`coroutine <coroutine>`
The coroutine to register as the local error handler.
Raises
-------
TypeError
The coroutine passed is not actually a coroutine.
"""
if not asyncio.iscoroutinefunction(coro):
raise TypeError('The error handler must be a coroutine.')
self.on_error = coro
return coro
    def has_error_handler(self):
        """:class:`bool`: Checks whether the command has an error handler registered.

        .. versionadded:: 1.7
        """
        # ``on_error`` only exists when the ``error`` decorator was used
        return hasattr(self, 'on_error')
def before_invoke(self, coro):
"""A decorator that registers a coroutine as a pre-invoke hook.
A pre-invoke hook is called directly before the command is
called. This makes it a useful function to set up database
connections or any type of set up required.
This pre-invoke hook takes a sole parameter, a :class:`.Context`.
See :meth:`.Bot.before_invoke` for more info.
Parameters
-----------
coro: :ref:`coroutine <coroutine>`
The coroutine to register as the pre-invoke hook.
Raises
-------
TypeError
The coroutine passed is not actually a coroutine.
"""
if not asyncio.iscoroutinefunction(coro):
raise TypeError('The pre-invoke hook must be a coroutine.')
self._before_invoke = coro
return coro
def after_invoke(self, coro):
"""A decorator that registers a coroutine as a post-invoke hook.
A post-invoke hook is called directly after the command is
called. This makes it a useful function to clean-up database
connections or any type of clean up required.
This post-invoke hook takes a sole parameter, a :class:`.Context`.
See :meth:`.Bot.after_invoke` for more info.
Parameters
-----------
coro: :ref:`coroutine <coroutine>`
The coroutine to register as the post-invoke hook.
Raises
-------
TypeError
The coroutine passed is not actually a coroutine.
"""
if not asyncio.iscoroutinefunction(coro):
raise TypeError('The post-invoke hook must be a coroutine.')
self._after_invoke = coro
return coro
@property
def cog_name(self):
"""Optional[:class:`str`]: The name of the cog this command belongs to, if any."""
return type(self.cog).__cog_name__ if self.cog is not None else None
@property
def short_doc(self):
""":class:`str`: Gets the "short" documentation of a command.
By default, this is the :attr:`brief` attribute.
If that lookup leads to an empty string then the first line of the
:attr:`help` attribute is used instead.
"""
if self.brief is not None:
return self.brief
if self.help is not None:
return self.help.split('\n', 1)[0]
return ''
def _is_typing_optional(self, annotation):
try:
origin = annotation.__origin__
except AttributeError:
return False
if origin is not typing.Union:
return False
return annotation.__args__[-1] is type(None)
    @property
    def signature(self):
        """:class:`str`: Returns a POSIX-like signature useful for help command output."""
        if self.usage is not None:
            return self.usage
        params = self.clean_params
        if not params:
            return ''
        result = []
        for name, param in params.items():
            greedy = isinstance(param.annotation, converters._Greedy)
            # parameter with a default value -> optional, shown in brackets
            if param.default is not param.empty:
                # We don't want None or '' to trigger the [name=value] case and instead it should
                # do [name] since [name=None] or [name=] are not exactly useful for the user.
                should_print = param.default if isinstance(param.default, str) else param.default is not None
                if should_print:
                    result.append('[%s=%s]' % (name, param.default) if not greedy else
                                  '[%s=%s]...' % (name, param.default))
                    continue
                else:
                    result.append('[%s]' % name)
            # *args-style parameter
            elif param.kind == param.VAR_POSITIONAL:
                if self.require_var_positional:
                    result.append('<%s...>' % name)
                else:
                    result.append('[%s...]' % name)
            # Greedy consumes as many arguments as it can convert
            elif greedy:
                result.append('[%s]...' % name)
            # typing.Optional[X] annotation -> optional argument
            elif self._is_typing_optional(param.annotation):
                result.append('[%s]' % name)
            else:
                # required parameter, shown in angle brackets
                result.append('<%s>' % name)
        return ' '.join(result)
    async def can_run(self, ctx):
        """|coro|

        Checks if the command can be executed by checking all the predicates
        inside the :attr:`checks` attribute. This also checks whether the
        command is disabled.

        .. versionchanged:: 1.3
            Checks whether the command is disabled or not

        Parameters
        -----------
        ctx: :class:`.Context`
            The ctx of the command currently being invoked.

        Raises
        -------
        :class:`CommandError`
            Any command error that was raised during a check call will be propagated
            by this function.

        Returns
        --------
        :class:`bool`
            A boolean indicating if the command can be invoked.
        """
        if not self.enabled:
            raise DisabledCommand('{0.name} command is disabled'.format(self))
        original = ctx.command
        ctx.command = self
        try:
            # global bot-level checks run first and short-circuit everything
            if not await ctx.bot.can_run(ctx):
                raise CheckFailure('The global check functions for command {0.qualified_name} failed.'.format(self))
            cog = self.cog
            if cog is not None:
                # cog-local check, only if the cog actually overrides cog_check
                local_check = Cog._get_overridden_method(cog.cog_check)
                if local_check is not None:
                    ret = await discord.utils.maybe_coroutine(local_check, ctx)
                    if not ret:
                        return False
            predicates = self.checks
            if not predicates:
                # since we have no checks, then we just return True.
                return True
            return await discord.utils.async_all(predicate(ctx) for predicate in predicates)
        finally:
            # restore whatever command the context pointed at before
            ctx.command = original
class GroupMixin:
    """A mixin that implements common functionality for classes that behave
    similar to :class:`.Group` and are allowed to register commands.

    Attributes
    -----------
    all_commands: :class:`dict`
        A mapping of command name to :class:`.Command`
        objects.
    case_insensitive: :class:`bool`
        Whether the commands should be case insensitive. Defaults to ``False``.
    """
    def __init__(self, *args, **kwargs):
        case_insensitive = kwargs.get('case_insensitive', False)
        # names and aliases share one mapping; a case-insensitive dict is
        # swapped in when requested
        self.all_commands = _CaseInsensitiveDict() if case_insensitive else {}
        self.case_insensitive = case_insensitive
        super().__init__(*args, **kwargs)

    @property
    def commands(self):
        """Set[:class:`.Command`]: A unique set of commands without aliases that are registered."""
        return set(self.all_commands.values())

    def recursively_remove_all_commands(self):
        # depth-first: clear each subgroup before unregistering it here
        for command in self.all_commands.copy().values():
            if isinstance(command, GroupMixin):
                command.recursively_remove_all_commands()
            self.remove_command(command.name)

    def add_command(self, command):
        """Adds a :class:`.Command` into the internal list of commands.

        This is usually not called, instead the :meth:`~.GroupMixin.command` or
        :meth:`~.GroupMixin.group` shortcut decorators are used instead.

        .. versionchanged:: 1.4
            Raise :exc:`.CommandRegistrationError` instead of generic :exc:`.ClientException`

        Parameters
        -----------
        command: :class:`Command`
            The command to add.

        Raises
        -------
        :exc:`.CommandRegistrationError`
            If the command or its alias is already registered by different command.
        TypeError
            If the command passed is not a subclass of :class:`.Command`.
        """
        if not isinstance(command, Command):
            raise TypeError('The command passed must be a subclass of Command')
        if isinstance(self, Command):
            command.parent = self
        if command.name in self.all_commands:
            raise CommandRegistrationError(command.name)
        self.all_commands[command.name] = command
        for alias in command.aliases:
            if alias in self.all_commands:
                # roll back the primary-name registration before erroring
                self.remove_command(command.name)
                raise CommandRegistrationError(alias, alias_conflict=True)
            self.all_commands[alias] = command

    def remove_command(self, name):
        """Remove a :class:`.Command` from the internal list
        of commands.

        This could also be used as a way to remove aliases.

        Parameters
        -----------
        name: :class:`str`
            The name of the command to remove.

        Returns
        --------
        Optional[:class:`.Command`]
            The command that was removed. If the name is not valid then
            ``None`` is returned instead.
        """
        command = self.all_commands.pop(name, None)
        # does not exist
        if command is None:
            return None
        if name in command.aliases:
            # we're removing an alias so we don't want to remove the rest
            return command
        # we're not removing the alias so let's delete the rest of them.
        for alias in command.aliases:
            cmd = self.all_commands.pop(alias, None)
            # in the case of a CommandRegistrationError, an alias might conflict
            # with an already existing command. If this is the case, we want to
            # make sure the pre-existing command is not removed.
            if cmd not in (None, command):
                self.all_commands[alias] = cmd
        return command

    def walk_commands(self):
        """An iterator that recursively walks through all commands and subcommands.

        .. versionchanged:: 1.4
            Duplicates due to aliases are no longer returned

        Yields
        ------
        Union[:class:`.Command`, :class:`.Group`]
            A command or group from the internal list of commands.
        """
        for command in self.commands:
            yield command
            if isinstance(command, GroupMixin):
                yield from command.walk_commands()

    def get_command(self, name):
        """Get a :class:`.Command` from the internal list
        of commands.

        This could also be used as a way to get aliases.

        The name could be fully qualified (e.g. ``'foo bar'``) will get
        the subcommand ``bar`` of the group command ``foo``. If a
        subcommand is not found then ``None`` is returned just as usual.

        Parameters
        -----------
        name: :class:`str`
            The name of the command to get.

        Returns
        --------
        Optional[:class:`Command`]
            The command that was requested. If not found, returns ``None``.
        """
        # fast path, no space in name.
        if ' ' not in name:
            return self.all_commands.get(name)
        names = name.split()
        if not names:
            return None
        obj = self.all_commands.get(names[0])
        if not isinstance(obj, GroupMixin):
            # a plain command cannot have subcommands; return it as-is
            return obj
        # walk the remaining path segments down the group tree
        for name in names[1:]:
            try:
                obj = obj.all_commands[name]
            except (AttributeError, KeyError):
                return None
        return obj

    def command(self, *args, **kwargs):
        """A shortcut decorator that invokes :func:`.command` and adds it to
        the internal command list via :meth:`~.GroupMixin.add_command`.

        Returns
        --------
        Callable[..., :class:`Command`]
            A decorator that converts the provided method into a Command, adds it to the bot, then returns it.
        """
        def decorator(func):
            kwargs.setdefault('parent', self)
            result = command(*args, **kwargs)(func)
            self.add_command(result)
            return result
        return decorator

    def group(self, *args, **kwargs):
        """A shortcut decorator that invokes :func:`.group` and adds it to
        the internal command list via :meth:`~.GroupMixin.add_command`.

        Returns
        --------
        Callable[..., :class:`Group`]
            A decorator that converts the provided method into a Group, adds it to the bot, then returns it.
        """
        def decorator(func):
            kwargs.setdefault('parent', self)
            result = group(*args, **kwargs)(func)
            self.add_command(result)
            return result
        return decorator
class Group(GroupMixin, Command):
    """A class that implements a grouping protocol for commands to be
    executed as subcommands.

    This class is a subclass of :class:`.Command` and thus all options
    valid in :class:`.Command` are valid in here as well.

    Attributes
    -----------
    invoke_without_command: :class:`bool`
        Indicates if the group callback should begin parsing and
        invocation only if no subcommand was found. Useful for
        making it an error handling function to tell the user that
        no subcommand was found or to have different functionality
        in case no subcommand was found. If this is ``False``, then
        the group callback will always be invoked first. This means
        that the checks and the parsing dictated by its parameters
        will be executed. Defaults to ``False``.
    case_insensitive: :class:`bool`
        Indicates if the group's commands should be case insensitive.
        Defaults to ``False``.
    """
    def __init__(self, *args, **attrs):
        self.invoke_without_command = attrs.pop('invoke_without_command', False)
        super().__init__(*args, **attrs)

    def copy(self):
        """Creates a copy of this :class:`Group`.

        Returns
        --------
        :class:`Group`
            A new instance of this group.
        """
        ret = super().copy()
        # copy the registered subcommands as well
        for cmd in self.commands:
            ret.add_command(cmd.copy())
        return ret

    async def invoke(self, ctx):
        ctx.invoked_subcommand = None
        ctx.subcommand_passed = None
        # unless invoke_without_command is set, the group's own callback
        # (checks, parsing, hooks) runs before any subcommand dispatch
        early_invoke = not self.invoke_without_command
        if early_invoke:
            await self.prepare(ctx)
        # peek at the next word of the message to find a subcommand trigger
        view = ctx.view
        previous = view.index
        view.skip_ws()
        trigger = view.get_word()
        if trigger:
            ctx.subcommand_passed = trigger
            ctx.invoked_subcommand = self.all_commands.get(trigger, None)
        if early_invoke:
            injected = hooked_wrapped_callback(self, ctx, self.callback)
            await injected(*ctx.args, **ctx.kwargs)
        ctx.invoked_parents.append(ctx.invoked_with)
        if trigger and ctx.invoked_subcommand:
            # hand off to the subcommand
            ctx.invoked_with = trigger
            await ctx.invoked_subcommand.invoke(ctx)
        elif not early_invoke:
            # undo the trigger parsing
            view.index = previous
            view.previous = previous
            await super().invoke(ctx)

    async def reinvoke(self, ctx, *, call_hooks=False):
        # mirrors invoke() but bypasses checks/cooldowns; see Command.reinvoke
        ctx.invoked_subcommand = None
        early_invoke = not self.invoke_without_command
        if early_invoke:
            ctx.command = self
            await self._parse_arguments(ctx)
            if call_hooks:
                await self.call_before_hooks(ctx)
        view = ctx.view
        previous = view.index
        view.skip_ws()
        trigger = view.get_word()
        if trigger:
            ctx.subcommand_passed = trigger
            ctx.invoked_subcommand = self.all_commands.get(trigger, None)
        if early_invoke:
            try:
                await self.callback(*ctx.args, **ctx.kwargs)
            except:
                ctx.command_failed = True
                raise
            finally:
                if call_hooks:
                    await self.call_after_hooks(ctx)
        ctx.invoked_parents.append(ctx.invoked_with)
        if trigger and ctx.invoked_subcommand:
            ctx.invoked_with = trigger
            await ctx.invoked_subcommand.reinvoke(ctx, call_hooks=call_hooks)
        elif not early_invoke:
            # undo the trigger parsing
            view.index = previous
            view.previous = previous
            await super().reinvoke(ctx, call_hooks=call_hooks)
# Decorators
def command(name=None, cls=None, **attrs):
    """A decorator that transforms a function into a :class:`.Command`
    or if called with :func:`.group`, :class:`.Group`.

    The ``help`` attribute is received automatically from the docstring
    of the function and is cleaned up with ``inspect.cleandoc``.  If the
    docstring is ``bytes``, it is decoded into :class:`str` using utf-8.

    All checks added using the :func:`.check` & co. decorators are added
    into the function; there is no way to supply your own checks through
    this decorator.

    Parameters
    -----------
    name: :class:`str`
        The name to create the command with. By default this uses the
        function name unchanged.
    cls
        The class to construct with. By default this is :class:`.Command`.
        You usually do not change this.
    attrs
        Keyword arguments to pass into the construction of the class denoted
        by ``cls``.

    Raises
    -------
    TypeError
        If the function is not a coroutine or is already a command.
    """
    command_cls = Command if cls is None else cls

    def decorator(func):
        if isinstance(func, Command):
            raise TypeError('Callback is already a command.')
        return command_cls(func, name=name, **attrs)

    return decorator
def group(name=None, **attrs):
    """A decorator that transforms a function into a :class:`.Group`.

    This is identical to the :func:`.command` decorator except that the
    ``cls`` parameter defaults to :class:`Group`.

    .. versionchanged:: 1.1
        The ``cls`` parameter can now be passed.
    """
    if 'cls' not in attrs:
        attrs['cls'] = Group
    return command(name=name, **attrs)
def check(predicate):
    r"""A decorator that adds a check to the :class:`.Command` or its
    subclasses. These checks could be accessed via :attr:`.Command.checks`.

    A check is a predicate that takes a sole :class:`.Context` parameter.
    If it returns a ``False``\-like value then invocation raises
    :exc:`.CheckFailure`, which is dispatched to the
    :func:`.on_command_error` event.  Exceptions thrown inside a
    predicate should derive from :exc:`.CommandError`; other exceptions
    are propagated unchanged.

    The returned decorator exposes the predicate through a ``predicate``
    attribute, allowing introspection and chaining of checks.

    .. note::

        The function bound to ``predicate`` is **always** a coroutine,
        even if the original function was not a coroutine.

    .. versionchanged:: 1.3
        The ``predicate`` attribute was added.

    Parameters
    -----------
    predicate: Callable[[:class:`Context`], :class:`bool`]
        The predicate to check if the command should be invoked.
    """
    def decorator(func):
        if isinstance(func, Command):
            # already-constructed command: append to its check list
            func.checks.append(predicate)
            return func
        # bare callback: stash the check for the Command constructor
        try:
            func.__commands_checks__.append(predicate)
        except AttributeError:
            func.__commands_checks__ = [predicate]
        return func

    if inspect.iscoroutinefunction(predicate):
        decorator.predicate = predicate
    else:
        # normalise sync predicates so ``predicate`` is always awaitable
        @functools.wraps(predicate)
        async def async_predicate(ctx):
            return predicate(ctx)
        decorator.predicate = async_predicate
    return decorator
def check_any(*checks):
    r"""A :func:`check` that passes if **any** of the supplied checks
    pass, i.e. logical OR.

    If all checks fail then :exc:`.CheckAnyFailure` is raised to signal
    the failure.  It inherits from :exc:`.CheckFailure`.

    .. note::

        The ``predicate`` attribute for this function **is** a coroutine.

    .. versionadded:: 1.3

    Parameters
    ------------
    \*checks: Callable[[:class:`Context`], :class:`bool`]
        An argument list of checks that have been decorated with
        the :func:`check` decorator.

    Raises
    -------
    TypeError
        A check passed has not been decorated with the :func:`check`
        decorator.
    """
    unwrapped = []
    for wrapped in checks:
        try:
            unwrapped.append(wrapped.predicate)
        except AttributeError:
            raise TypeError('%r must be wrapped by commands.check decorator' % wrapped) from None

    async def predicate(ctx):
        errors = []
        for pred in unwrapped:
            try:
                value = await pred(ctx)
            except CheckFailure as exc:
                errors.append(exc)
            else:
                if value:
                    return True
        # if we're here, all checks failed
        raise CheckAnyFailure(unwrapped, errors)

    return check(predicate)
def has_role(item):
    """A :func:`.check` that checks whether the member invoking the
    command has the role specified via name or ID.

    A string must match the role name exactly, including caps and
    spelling; an integer must match the role's snowflake ID exactly.

    In a private message context the check raises
    :exc:`.NoPrivateMessage`; a missing role raises :exc:`.MissingRole`.
    Both inherit from :exc:`.CheckFailure`.

    .. versionchanged:: 1.1

        Raise :exc:`.MissingRole` or :exc:`.NoPrivateMessage`
        instead of generic :exc:`.CheckFailure`

    Parameters
    -----------
    item: Union[:class:`int`, :class:`str`]
        The name or ID of the role to check.
    """
    def predicate(ctx):
        if not isinstance(ctx.channel, discord.abc.GuildChannel):
            raise NoPrivateMessage()
        # integers are treated as role IDs, anything else as a role name
        lookup = {'id': item} if isinstance(item, int) else {'name': item}
        role = discord.utils.get(ctx.author.roles, **lookup)
        if role is None:
            raise MissingRole(item)
        return True
    return check(predicate)
def has_any_role(*items):
    r"""A :func:`.check` that checks whether the member invoking the
    command has **any** of the roles specified -- holding one out of
    all the listed roles is enough to pass.

    Similar to :func:`.has_role`\, the names or IDs passed in must be exact.

    If the member is missing all roles the check raises
    :exc:`.MissingAnyRole`; in a private message context it raises
    :exc:`.NoPrivateMessage`.  Both inherit from :exc:`.CheckFailure`.

    .. versionchanged:: 1.1

        Raise :exc:`.MissingAnyRole` or :exc:`.NoPrivateMessage`
        instead of generic :exc:`.CheckFailure`

    Parameters
    -----------
    items: List[Union[:class:`str`, :class:`int`]]
        An argument list of names or IDs to check that the member has roles wise.

    Example
    --------

    .. code-block:: python3

        @bot.command()
        @commands.has_any_role('Library Devs', 'Moderators', 492212595072434186)
        async def cool(ctx):
            await ctx.send('You are cool indeed')
    """
    def predicate(ctx):
        if not isinstance(ctx.channel, discord.abc.GuildChannel):
            raise NoPrivateMessage()
        getter = functools.partial(discord.utils.get, ctx.author.roles)
        for item in items:
            found = getter(id=item) if isinstance(item, int) else getter(name=item)
            if found is not None:
                return True
        raise MissingAnyRole(items)
    return check(predicate)
def bot_has_role(item):
    """Similar to :func:`.has_role` except checks if the bot itself has the
    role.

    This check raises one of two special exceptions, :exc:`.BotMissingRole` if the bot
    is missing the role, or :exc:`.NoPrivateMessage` if it is used in a private message.
    Both inherit from :exc:`.CheckFailure`.

    .. versionchanged:: 1.1

        Raise :exc:`.BotMissingRole` or :exc:`.NoPrivateMessage`
        instead of generic :exc:`.CheckFailure`
    """
    def predicate(ctx):
        channel = ctx.channel
        if not isinstance(channel, discord.abc.GuildChannel):
            raise NoPrivateMessage()
        bot_member = channel.guild.me
        # integers are treated as role IDs, anything else as a role name
        lookup = {'id': item} if isinstance(item, int) else {'name': item}
        if discord.utils.get(bot_member.roles, **lookup) is None:
            raise BotMissingRole(item)
        return True
    return check(predicate)
def bot_has_any_role(*items):
    """Similar to :func:`.has_any_role` except checks if the bot itself has
    any of the roles listed.

    This check raises one of two special exceptions, :exc:`.BotMissingAnyRole` if the bot
    is missing all roles, or :exc:`.NoPrivateMessage` if it is used in a private message.
    Both inherit from :exc:`.CheckFailure`.

    .. versionchanged:: 1.1

        Raise :exc:`.BotMissingAnyRole` or :exc:`.NoPrivateMessage`
        instead of generic :exc:`.CheckFailure`
    """
    def predicate(ctx):
        ch = ctx.channel
        if not isinstance(ch, discord.abc.GuildChannel):
            # role checks are meaningless outside of a guild
            raise NoPrivateMessage()
        me = ch.guild.me
        getter = functools.partial(discord.utils.get, me.roles)
        # integers are matched against role IDs, everything else against names
        if any(getter(id=item) is not None if isinstance(item, int) else getter(name=item) is not None for item in items):
            return True
        raise BotMissingAnyRole(items)
    return check(predicate)
def has_permissions(**perms):
    """A :func:`.check` that verifies the member has all of the
    permissions necessary.

    Note that this check operates on the current channel permissions, not the
    guild wide permissions.

    The permissions passed in must be exactly like the properties shown under
    :class:`.discord.Permissions`.

    This check raises a special exception, :exc:`.MissingPermissions`
    that is inherited from :exc:`.CheckFailure`.

    Parameters
    ------------
    perms
        An argument list of permissions to check for.

    Example
    ---------

    .. code-block:: python3

        @bot.command()
        @commands.has_permissions(manage_messages=True)
        async def test(ctx):
            await ctx.send('You can manage messages.')
    """
    # reject unknown permission names eagerly, at decoration time
    invalid = set(perms) - set(discord.Permissions.VALID_FLAGS)
    if invalid:
        raise TypeError('Invalid permission(s): %s' % (', '.join(invalid)))

    def predicate(ctx):
        permissions = ctx.channel.permissions_for(ctx.author)
        missing = [perm for perm, value in perms.items() if getattr(permissions, perm) != value]
        if missing:
            raise MissingPermissions(missing)
        return True

    return check(predicate)
def bot_has_permissions(**perms):
    """Similar to :func:`.has_permissions` except checks if the bot itself has
    the permissions listed.

    This check raises a special exception, :exc:`.BotMissingPermissions`
    that is inherited from :exc:`.CheckFailure`.
    """
    # reject unknown permission names eagerly, at decoration time
    invalid = set(perms) - set(discord.Permissions.VALID_FLAGS)
    if invalid:
        raise TypeError('Invalid permission(s): %s' % (', '.join(invalid)))

    def predicate(ctx):
        guild = ctx.guild
        # outside of a guild the bot user itself is the permission subject
        me = guild.me if guild is not None else ctx.bot.user
        permissions = ctx.channel.permissions_for(me)
        missing = [perm for perm, value in perms.items() if getattr(permissions, perm) != value]
        if missing:
            raise BotMissingPermissions(missing)
        return True

    return check(predicate)
def has_guild_permissions(**perms):
    """Similar to :func:`.has_permissions`, but operates on guild wide
    permissions instead of the current channel permissions.

    If this check is called in a DM context, it will raise an
    exception, :exc:`.NoPrivateMessage`.

    .. versionadded:: 1.3
    """
    # reject unknown permission names eagerly, at decoration time
    invalid = set(perms) - set(discord.Permissions.VALID_FLAGS)
    if invalid:
        raise TypeError('Invalid permission(s): %s' % (', '.join(invalid)))

    def predicate(ctx):
        if not ctx.guild:
            # raise an instance, consistent with the other checks
            raise NoPrivateMessage()
        permissions = ctx.author.guild_permissions
        missing = [perm for perm, value in perms.items() if getattr(permissions, perm) != value]
        if missing:
            raise MissingPermissions(missing)
        return True

    return check(predicate)
def bot_has_guild_permissions(**perms):
    """Similar to :func:`.has_guild_permissions`, but checks the bot
    members guild permissions.

    .. versionadded:: 1.3
    """
    # reject unknown permission names eagerly, at decoration time
    invalid = set(perms) - set(discord.Permissions.VALID_FLAGS)
    if invalid:
        raise TypeError('Invalid permission(s): %s' % (', '.join(invalid)))

    def predicate(ctx):
        if not ctx.guild:
            # raise an instance, consistent with the other checks
            raise NoPrivateMessage()
        permissions = ctx.me.guild_permissions
        missing = [perm for perm, value in perms.items() if getattr(permissions, perm) != value]
        if missing:
            raise BotMissingPermissions(missing)
        return True

    return check(predicate)
def dm_only():
    """A :func:`.check` that indicates this command must only be used in a
    DM context. Only private messages are allowed when
    using the command.

    This check raises a special exception, :exc:`.PrivateMessageOnly`
    that is inherited from :exc:`.CheckFailure`.

    .. versionadded:: 1.1
    """
    def predicate(ctx):
        if ctx.guild is None:
            return True
        raise PrivateMessageOnly()
    return check(predicate)
def guild_only():
    """A :func:`.check` that indicates this command must only be used in a
    guild context only. Basically, no private messages are allowed when
    using the command.

    This check raises a special exception, :exc:`.NoPrivateMessage`
    that is inherited from :exc:`.CheckFailure`.
    """
    def predicate(ctx):
        if ctx.guild is not None:
            return True
        raise NoPrivateMessage()
    return check(predicate)
def is_owner():
    """A :func:`.check` that checks if the person invoking this command is the
    owner of the bot.

    This is powered by :meth:`.Bot.is_owner`.

    This check raises a special exception, :exc:`.NotOwner` that is derived
    from :exc:`.CheckFailure`.
    """
    async def predicate(ctx):
        if await ctx.bot.is_owner(ctx.author):
            return True
        raise NotOwner('You do not own this bot.')
    return check(predicate)
def is_nsfw():
    """A :func:`.check` that checks if the channel is a NSFW channel.

    This check raises a special exception, :exc:`.NSFWChannelRequired`
    that is derived from :exc:`.CheckFailure`.

    .. versionchanged:: 1.1

        Raise :exc:`.NSFWChannelRequired` instead of generic :exc:`.CheckFailure`.
        DM channels will also now pass this check.
    """
    def pred(ctx):
        channel = ctx.channel
        # DMs always pass; guild text channels must be marked NSFW
        if ctx.guild is None:
            return True
        if isinstance(channel, discord.TextChannel) and channel.is_nsfw():
            return True
        raise NSFWChannelRequired(channel)
    return check(pred)
def cooldown(rate, per, type=BucketType.default):
    """A decorator that adds a cooldown to a :class:`.Command`

    A cooldown limits a command to ``rate`` uses every ``per`` seconds,
    bucketed per-guild, per-channel, per-user, per-role or globally as
    chosen by the ``type`` argument, which must be of enum type
    :class:`.BucketType` or a custom callable.

    If a cooldown is triggered, then :exc:`.CommandOnCooldown` is triggered in
    :func:`.on_command_error` and the local error handler.

    A command can only have a single cooldown.

    Parameters
    ------------
    rate: :class:`int`
        The number of times a command can be used before triggering a cooldown.
    per: :class:`float`
        The amount of seconds to wait for a cooldown when it's been triggered.
    type: Union[:class:`.BucketType`, Callable[[:class:`.Message`], Any]]
        The type of cooldown to have. If callable, should return a key for the mapping.

        .. versionchanged:: 1.7
            Callables are now supported for custom bucket types.
    """
    def decorator(func):
        bucket = Cooldown(rate, per, type)
        if isinstance(func, Command):
            # already a command: install the mapping directly
            func._buckets = CooldownMapping(bucket)
        else:
            # bare callback: stash for the Command constructor to pick up
            func.__commands_cooldown__ = bucket
        return func
    return decorator
def max_concurrency(number, per=BucketType.default, *, wait=False):
    """A decorator that adds a maximum concurrency to a :class:`.Command` or its subclasses.

    This enables you to only allow a certain number of command invocations at the same time,
    for example if a command takes too long or if only one user can use it at a time. This
    differs from a cooldown in that there is no set waiting period or token bucket -- only
    a set number of people can run the command.

    .. versionadded:: 1.3

    Parameters
    -------------
    number: :class:`int`
        The maximum number of invocations of this command that can be running at the same time.
    per: :class:`.BucketType`
        The bucket that this concurrency is based on, e.g. ``BucketType.guild`` would allow
        it to be used up to ``number`` times per guild.
    wait: :class:`bool`
        Whether the command should wait for the queue to be over. If this is set to ``False``
        then instead of waiting until the command can run again, the command raises
        :exc:`.MaxConcurrencyReached` to its error handler. If this is set to ``True``
        then the command waits until it can be executed.
    """
    def decorator(func):
        limiter = MaxConcurrency(number, per=per, wait=wait)
        if isinstance(func, Command):
            # already a command: install the limiter directly
            func._max_concurrency = limiter
        else:
            # bare callback: stash for the Command constructor to pick up
            func.__commands_max_concurrency__ = limiter
        return func
    return decorator
def before_invoke(coro):
    """A decorator that registers a coroutine as a pre-invoke hook.

    This allows you to refer to one before invoke hook for several commands that
    do not have to be within the same cog.

    .. versionadded:: 1.4

    Example
    ---------

    .. code-block:: python3

        async def record_usage(ctx):
            print(ctx.author, 'used', ctx.command, 'at', ctx.message.created_at)

        @bot.command()
        @commands.before_invoke(record_usage)
        async def who(ctx): # Output: <User> used who at <Time>
            await ctx.send('i am a bot')

        class What(commands.Cog):

            @commands.before_invoke(record_usage)
            @commands.command()
            async def when(self, ctx): # Output: <User> used when at <Time>
                await ctx.send('and i have existed since {}'.format(ctx.bot.user.created_at))

            @commands.command()
            async def where(self, ctx): # Output: <Nothing>
                await ctx.send('on Discord')

        bot.add_cog(What())
    """
    def decorator(func):
        if isinstance(func, Command):
            # already a command: register the hook directly
            func.before_invoke(coro)
            return func
        # bare callback: stash for the Command constructor to pick up
        func.__before_invoke__ = coro
        return func
    return decorator
def after_invoke(coro):
    """A decorator that registers a coroutine as a post-invoke hook.

    This lets a single after-invoke hook be shared by several commands,
    even across different cogs.

    .. versionadded:: 1.4
    """
    def decorator(func):
        # A plain callback has not been wrapped into a Command yet; stash the
        # hook on an attribute for the Command constructor to pick up later.
        if not isinstance(func, Command):
            func.__after_invoke__ = coro
        else:
            func.after_invoke(coro)
        return func
    return decorator
from discord.enums import Enum
import time
import asyncio
from collections import deque
from ...abc import PrivateChannel
from .errors import MaxConcurrencyReached
# Public API of this module; keep in sync with the definitions below.
__all__ = (
    'BucketType',
    'Cooldown',
    'CooldownMapping',
    'MaxConcurrency',
)
class BucketType(Enum):
    """Enumerates the scopes a cooldown/concurrency bucket can be keyed by."""
    default = 0
    user = 1
    guild = 2
    channel = 3
    member = 4
    category = 5
    role = 6

    def get_key(self, msg):
        """Return the hashable key identifying *msg*'s bucket for this type."""
        if self is BucketType.user:
            return msg.author.id
        if self is BucketType.guild:
            # DMs have no guild; fall back to the author so each DM is its own bucket.
            return (msg.guild or msg.author).id
        if self is BucketType.channel:
            return msg.channel.id
        if self is BucketType.member:
            return ((msg.guild and msg.guild.id), msg.author.id)
        if self is BucketType.category:
            return (msg.channel.category or msg.channel).id
        if self is BucketType.role:
            # In a DM we use the channel id as a stand-in: there are only roles
            # in guilds, and this matches a guild with nothing but @everyone.
            # NOTE: PrivateChannel itself has no ``id`` attribute, but the
            # DMChannel/GroupChannel subclasses we actually receive do.
            return (msg.channel if isinstance(msg.channel, PrivateChannel) else msg.author.top_role).id
        # BucketType.default: a single shared bucket, no per-message key.

    def __call__(self, msg):
        return self.get_key(msg)
class Cooldown:
    """A single token-bucket rate limit: ``rate`` uses per ``per`` seconds."""

    __slots__ = ('rate', 'per', 'type', '_window', '_tokens', '_last')

    def __init__(self, rate, per, type):
        self.rate = int(rate)
        self.per = float(per)
        self.type = type
        self._window = 0.0   # start time of the current window
        self._tokens = self.rate
        self._last = 0.0     # timestamp of the most recent use
        if not callable(self.type):
            raise TypeError('Cooldown type must be a BucketType or callable')

    def get_tokens(self, current=None):
        """Return how many tokens would be available at *current* (default: now)."""
        # A falsy ``current`` (None or 0) means "now".
        current = current or time.time()
        if current > self._window + self.per:
            # The window has fully elapsed, so the bucket refills.
            return self.rate
        return self._tokens

    def get_retry_after(self, current=None):
        """Return seconds until a token frees up, or ``0.0`` if one is available."""
        current = current or time.time()
        if self.get_tokens(current) == 0:
            return self.per - (current - self._window)
        return 0.0

    def update_rate_limit(self, current=None):
        """Consume a token; return the retry-after in seconds if rate limited."""
        current = current or time.time()
        self._last = current
        self._tokens = self.get_tokens(current)
        # A full bucket means a fresh rate-limit window starts here.
        if self._tokens == self.rate:
            self._window = current
        # Already exhausted: report how long until the window resets.
        if self._tokens == 0:
            return self.per - (current - self._window)
        # Not limited, so spend one token.
        self._tokens -= 1
        # If that was the last token, anchor the window at this moment.
        if self._tokens == 0:
            self._window = current

    def reset(self):
        """Restore the bucket to a full, unused state."""
        self._tokens = self.rate
        self._last = 0.0

    def copy(self):
        """Return a fresh Cooldown with the same configuration (no usage state)."""
        return Cooldown(self.rate, self.per, self.type)

    def __repr__(self):
        return '<Cooldown rate: %s per: %s window: %s tokens: %s>' % (
            self.rate, self.per, self._window, self._tokens)
class CooldownMapping:
    """Maps bucket keys (derived from messages) to per-key Cooldown instances."""

    def __init__(self, original):
        self._cache = {}
        self._cooldown = original  # the prototype Cooldown copied per bucket

    def copy(self):
        """Return a shallow copy sharing the prototype but with its own cache."""
        clone = CooldownMapping(self._cooldown)
        clone._cache = self._cache.copy()
        return clone

    @property
    def valid(self):
        """Whether this mapping actually has a cooldown configured."""
        return self._cooldown is not None

    @classmethod
    def from_cooldown(cls, rate, per, type):
        """Build a mapping from raw cooldown parameters."""
        return cls(Cooldown(rate, per, type))

    def _bucket_key(self, msg):
        return self._cooldown.type(msg)

    def _verify_cache_integrity(self, current=None):
        # Evict buckets idle for a full cooldown window, e.g. a 60s cooldown
        # unused for 60s no longer needs its cache entry.
        current = current or time.time()
        stale = [k for k, v in self._cache.items() if current > v._last + v.per]
        for k in stale:
            del self._cache[k]

    def get_bucket(self, message, current=None):
        """Return the Cooldown instance governing *message*."""
        # The default bucket type is global: one shared Cooldown for everyone.
        if self._cooldown.type is BucketType.default:
            return self._cooldown
        self._verify_cache_integrity(current)
        key = self._bucket_key(message)
        try:
            return self._cache[key]
        except KeyError:
            bucket = self._cooldown.copy()
            self._cache[key] = bucket
            return bucket

    def update_rate_limit(self, message, current=None):
        """Consume a token for *message*'s bucket; return retry-after if limited."""
        return self.get_bucket(message, current).update_rate_limit(current)
class _Semaphore:
    """A minimal semaphore that exposes its internal counter.

    asyncio.Semaphore hides its value, but MaxConcurrency needs to read it in
    order to support both ``wait=True`` and ``wait=False``. An asyncio.Queue
    could do the job too, but it uses two internal queues and is overkill for
    what is basically a counter.
    """

    __slots__ = ('value', 'loop', '_waiters')

    def __init__(self, number):
        self.value = number
        self.loop = asyncio.get_event_loop()
        self._waiters = deque()

    def __repr__(self):
        return '<_Semaphore value={0} waiters={1}>'.format(self.value, len(self._waiters))

    def locked(self):
        return self.value == 0

    def is_active(self):
        return len(self._waiters) > 0

    def wake_up(self):
        # Hand the slot to the first waiter that hasn't been cancelled.
        while self._waiters:
            fut = self._waiters.popleft()
            if not fut.done():
                fut.set_result(None)
                return

    async def acquire(self, *, wait=False):
        if self.value <= 0 and not wait:
            # Caller opted out of queueing; signal that we're not acquiring.
            return False
        while self.value <= 0:
            fut = self.loop.create_future()
            self._waiters.append(fut)
            try:
                await fut
            except BaseException:
                fut.cancel()
                if self.value > 0 and not fut.cancelled():
                    # We were woken but can't take the slot; pass it along.
                    self.wake_up()
                raise
        self.value -= 1
        return True

    def release(self):
        self.value += 1
        self.wake_up()
class MaxConcurrency:
    """Limits how many invocations of a command may run at once per bucket."""

    __slots__ = ('number', 'per', 'wait', '_mapping')

    def __init__(self, number, *, per, wait):
        self._mapping = {}  # bucket key -> _Semaphore
        self.per = per
        self.number = number
        self.wait = wait
        if number <= 0:
            raise ValueError("max_concurrency 'number' cannot be less than 1")
        if not isinstance(per, BucketType):
            raise TypeError("max_concurrency 'per' must be of type BucketType not %r" % type(per))

    def copy(self):
        """Return a fresh MaxConcurrency with the same settings (no state)."""
        return self.__class__(self.number, per=self.per, wait=self.wait)

    def __repr__(self):
        return '<MaxConcurrency per={0.per!r} number={0.number} wait={0.wait}>'.format(self)

    def get_key(self, message):
        return self.per.get_key(message)

    async def acquire(self, message):
        """Claim a slot for *message*'s bucket or raise MaxConcurrencyReached."""
        key = self.get_key(message)
        sem = self._mapping.get(key)
        if sem is None:
            sem = self._mapping[key] = _Semaphore(self.number)
        if not await sem.acquire(wait=self.wait):
            raise MaxConcurrencyReached(self.number, self.per)

    async def release(self, message):
        # There's no technical reason for this to be a coroutine; it's kept
        # async for potential future flexibility.
        key = self.get_key(message)
        sem = self._mapping.get(key)
        if sem is None:
            # Released without a matching acquire; peculiar, but nothing to do.
            return
        sem.release()
        # Drop the semaphore once it's back to full capacity with no waiters.
        if sem.value >= self.number and not sem.is_active():
            del self._mapping[key]
from discord.errors import ClientException, DiscordException
# Public API of this module; keep in sync with the exception classes below.
# Note: BadColorArgument is intentionally absent -- it is a plain alias of
# BadColourArgument.
__all__ = (
    'CommandError',
    'MissingRequiredArgument',
    'BadArgument',
    'PrivateMessageOnly',
    'NoPrivateMessage',
    'CheckFailure',
    'CheckAnyFailure',
    'CommandNotFound',
    'DisabledCommand',
    'CommandInvokeError',
    'TooManyArguments',
    'UserInputError',
    'CommandOnCooldown',
    'MaxConcurrencyReached',
    'NotOwner',
    'MessageNotFound',
    'MemberNotFound',
    'GuildNotFound',
    'UserNotFound',
    'ChannelNotFound',
    'ChannelNotReadable',
    'BadColourArgument',
    'RoleNotFound',
    'BadInviteArgument',
    'EmojiNotFound',
    'PartialEmojiConversionFailure',
    'BadBoolArgument',
    'MissingRole',
    'BotMissingRole',
    'MissingAnyRole',
    'BotMissingAnyRole',
    'MissingPermissions',
    'BotMissingPermissions',
    'NSFWChannelRequired',
    'ConversionError',
    'BadUnionArgument',
    'ArgumentParsingError',
    'UnexpectedQuoteError',
    'InvalidEndOfQuotedStringError',
    'ExpectedClosingQuoteError',
    'ExtensionError',
    'ExtensionAlreadyLoaded',
    'ExtensionNotLoaded',
    'NoEntryPointError',
    'ExtensionFailed',
    'ExtensionNotFound',
    'CommandRegistrationError',
)
class CommandError(DiscordException):
    r"""The base exception type for all command related errors.

    This inherits from :exc:`discord.DiscordException`.

    This exception and exceptions inherited from it are handled
    in a special way as they are caught and passed into a special event
    from :class:`.Bot`\, :func:`on_command_error`.
    """
    def __init__(self, message=None, *args):
        if message is None:
            super().__init__(*args)
        else:
            # Neutralise mass mentions before the text can be echoed to chat.
            sanitised = message.replace('@everyone', '@\u200beveryone').replace('@here', '@\u200bhere')
            super().__init__(sanitised, *args)
class ConversionError(CommandError):
    """Exception raised when a Converter class raises a non-CommandError.

    This inherits from :exc:`CommandError`.

    Attributes
    ----------
    converter: :class:`discord.ext.commands.Converter`
        The converter that failed.
    original: :exc:`Exception`
        The original exception that was raised. You can also get this via
        the ``__cause__`` attribute.
    """
    def __init__(self, converter, original):
        self.original = original
        self.converter = converter
class UserInputError(CommandError):
    """The base exception type for errors involving user input.

    This inherits from :exc:`CommandError`.
    """
class CommandNotFound(CommandError):
    """Exception raised when an attempt is made to invoke a command
    but no command under that name exists.

    This is only raised for the initial main command; invalid
    subcommands do not trigger it.

    This inherits from :exc:`CommandError`.
    """
class MissingRequiredArgument(UserInputError):
    """Exception raised when a required parameter is not encountered
    while parsing a command.

    This inherits from :exc:`UserInputError`

    Attributes
    -----------
    param: :class:`inspect.Parameter`
        The argument that is missing.
    """
    def __init__(self, param):
        self.param = param
        super().__init__('%s is a required argument that is missing.' % param.name)
class TooManyArguments(UserInputError):
    """Exception raised when a command receives too many arguments while
    its :attr:`.Command.ignore_extra` attribute is not ``True``.

    This inherits from :exc:`UserInputError`
    """
class BadArgument(UserInputError):
    """Exception raised when parsing or converting a command argument fails.

    This inherits from :exc:`UserInputError`
    """
class CheckFailure(CommandError):
    """Exception raised when any predicate in :attr:`.Command.checks` fails.

    This inherits from :exc:`CommandError`
    """
class CheckAnyFailure(CheckFailure):
    """Exception raised when every predicate in :func:`check_any` fails.

    This inherits from :exc:`CheckFailure`.

    .. versionadded:: 1.3

    Attributes
    ------------
    errors: List[:class:`CheckFailure`]
        A list of errors that were caught during execution.
    checks: List[Callable[[:class:`Context`], :class:`bool`]]
        A list of check predicates that failed.
    """
    def __init__(self, checks, errors):
        self.errors = errors
        self.checks = checks
        super().__init__('You do not have permission to run this command.')
class PrivateMessageOnly(CheckFailure):
    """Exception raised when an operation only works inside private
    message contexts.

    This inherits from :exc:`CheckFailure`
    """
    def __init__(self, message=None):
        default = 'This command can only be used in private messages.'
        super().__init__(message or default)
class NoPrivateMessage(CheckFailure):
    """Exception raised when an operation does not work inside private
    message contexts.

    This inherits from :exc:`CheckFailure`
    """
    def __init__(self, message=None):
        default = 'This command cannot be used in private messages.'
        super().__init__(message or default)
class NotOwner(CheckFailure):
    """Exception raised when the message author does not own the bot.

    This inherits from :exc:`CheckFailure`
    """
class MemberNotFound(BadArgument):
    """Exception raised when the supplied member is not present in the
    bot's cache.

    This inherits from :exc:`BadArgument`

    .. versionadded:: 1.5

    Attributes
    -----------
    argument: :class:`str`
        The member supplied by the caller that was not found
    """
    def __init__(self, argument):
        self.argument = argument
        super().__init__('Member "%s" not found.' % argument)
class GuildNotFound(BadArgument):
    """Exception raised when the supplied guild is not present in the
    bot's cache.

    This inherits from :exc:`BadArgument`

    .. versionadded:: 1.7

    Attributes
    -----------
    argument: :class:`str`
        The guild supplied by the caller that was not found
    """
    def __init__(self, argument):
        self.argument = argument
        super().__init__('Guild "%s" not found.' % argument)
class UserNotFound(BadArgument):
    """Exception raised when the supplied user is not present in the
    bot's cache.

    This inherits from :exc:`BadArgument`

    .. versionadded:: 1.5

    Attributes
    -----------
    argument: :class:`str`
        The user supplied by the caller that was not found
    """
    def __init__(self, argument):
        self.argument = argument
        super().__init__('User "%s" not found.' % argument)
class MessageNotFound(BadArgument):
    """Exception raised when the supplied message is not found in the channel.

    This inherits from :exc:`BadArgument`

    .. versionadded:: 1.5

    Attributes
    -----------
    argument: :class:`str`
        The message supplied by the caller that was not found
    """
    def __init__(self, argument):
        self.argument = argument
        super().__init__('Message "%s" not found.' % argument)
class ChannelNotReadable(BadArgument):
    """Exception raised when the bot lacks permission to read messages
    in a channel.

    This inherits from :exc:`BadArgument`

    .. versionadded:: 1.5

    Attributes
    -----------
    argument: :class:`.abc.GuildChannel`
        The channel supplied by the caller that was not readable
    """
    def __init__(self, argument):
        self.argument = argument
        super().__init__("Can't read messages in %s." % argument.mention)
class ChannelNotFound(BadArgument):
    """Exception raised when the bot cannot find the channel.

    This inherits from :exc:`BadArgument`

    .. versionadded:: 1.5

    Attributes
    -----------
    argument: :class:`str`
        The channel supplied by the caller that was not found
    """
    def __init__(self, argument):
        self.argument = argument
        super().__init__('Channel "%s" not found.' % argument)
class BadColourArgument(BadArgument):
    """Exception raised when the supplied colour is not valid.

    This inherits from :exc:`BadArgument`

    .. versionadded:: 1.5

    Attributes
    -----------
    argument: :class:`str`
        The colour supplied by the caller that was not valid
    """
    def __init__(self, argument):
        self.argument = argument
        super().__init__('Colour "%s" is invalid.' % argument)

# American-English spelling alias.
BadColorArgument = BadColourArgument
class RoleNotFound(BadArgument):
    """Exception raised when the bot cannot find the role.

    This inherits from :exc:`BadArgument`

    .. versionadded:: 1.5

    Attributes
    -----------
    argument: :class:`str`
        The role supplied by the caller that was not found
    """
    def __init__(self, argument):
        self.argument = argument
        super().__init__('Role "%s" not found.' % argument)
class BadInviteArgument(BadArgument):
    """Exception raised when the supplied invite is invalid or has expired.

    This inherits from :exc:`BadArgument`

    .. versionadded:: 1.5
    """
    def __init__(self):
        super().__init__('Invite is invalid or expired.')
class EmojiNotFound(BadArgument):
    """Exception raised when the bot cannot find the emoji.

    This inherits from :exc:`BadArgument`

    .. versionadded:: 1.5

    Attributes
    -----------
    argument: :class:`str`
        The emoji supplied by the caller that was not found
    """
    def __init__(self, argument):
        self.argument = argument
        super().__init__('Emoji "%s" not found.' % argument)
class PartialEmojiConversionFailure(BadArgument):
    """Exception raised when the supplied emoji does not match the
    expected format.

    This inherits from :exc:`BadArgument`

    .. versionadded:: 1.5

    Attributes
    -----------
    argument: :class:`str`
        The emoji supplied by the caller that did not match the regex
    """
    def __init__(self, argument):
        self.argument = argument
        super().__init__("Couldn't convert \"%s\" to PartialEmoji." % argument)
class BadBoolArgument(BadArgument):
    """Exception raised when a boolean argument could not be converted.

    This inherits from :exc:`BadArgument`

    .. versionadded:: 1.5

    Attributes
    -----------
    argument: :class:`str`
        The boolean argument supplied by the caller that is not in the predefined list
    """
    def __init__(self, argument):
        self.argument = argument
        super().__init__('%s is not a recognised boolean option' % argument)
class DisabledCommand(CommandError):
    """Exception raised when the invoked command is disabled.

    This inherits from :exc:`CommandError`
    """
class CommandInvokeError(CommandError):
    """Exception raised when the invoked command itself raised an exception.

    This inherits from :exc:`CommandError`

    Attributes
    -----------
    original: :exc:`Exception`
        The original exception that was raised. You can also get this via
        the ``__cause__`` attribute.
    """
    def __init__(self, e):
        self.original = e
        super().__init__('Command raised an exception: %s: %s' % (e.__class__.__name__, e))
class CommandOnCooldown(CommandError):
    """Exception raised when the invoked command is on cooldown.

    This inherits from :exc:`CommandError`

    Attributes
    -----------
    cooldown: Cooldown
        A class with attributes ``rate``, ``per``, and ``type`` similar to
        the :func:`.cooldown` decorator.
    retry_after: :class:`float`
        The amount of seconds to wait before you can retry again.
    """
    def __init__(self, cooldown, retry_after):
        self.retry_after = retry_after
        self.cooldown = cooldown
        super().__init__('You are on cooldown. Try again in %.2fs' % retry_after)
class MaxConcurrencyReached(CommandError):
    """Exception raised when the invoked command has hit its maximum concurrency.

    This inherits from :exc:`CommandError`.

    Attributes
    ------------
    number: :class:`int`
        The maximum number of concurrent invokers allowed.
    per: :class:`.BucketType`
        The bucket type passed to the :func:`.max_concurrency` decorator.
    """
    def __init__(self, number, per):
        self.number = number
        self.per = per
        if per.name != 'default':
            scope = 'per %s' % per.name
        else:
            scope = 'globally'
        times = 'times' if number > 1 else 'time'
        detail = '%s %s %s' % (number, times, scope)
        super().__init__('Too many people using this command. It can only be used %s concurrently.' % detail)
class MissingRole(CheckFailure):
    """Exception raised when the command invoker lacks a role needed
    to run a command.

    This inherits from :exc:`CheckFailure`

    .. versionadded:: 1.1

    Attributes
    -----------
    missing_role: Union[:class:`str`, :class:`int`]
        The required role that is missing.
        This is the parameter passed to :func:`~.commands.has_role`.
    """
    def __init__(self, missing_role):
        self.missing_role = missing_role
        super().__init__('Role %r is required to run this command.' % (missing_role,))
class BotMissingRole(CheckFailure):
    """Exception raised when the bot's member lacks a role needed
    to run a command.

    This inherits from :exc:`CheckFailure`

    .. versionadded:: 1.1

    Attributes
    -----------
    missing_role: Union[:class:`str`, :class:`int`]
        The required role that is missing.
        This is the parameter passed to :func:`~.commands.has_role`.
    """
    def __init__(self, missing_role):
        self.missing_role = missing_role
        super().__init__('Bot requires the role %r to run this command' % (missing_role,))
class MissingAnyRole(CheckFailure):
    """Exception raised when the command invoker has none of the roles
    specified to run a command.

    This inherits from :exc:`CheckFailure`

    .. versionadded:: 1.1

    Attributes
    -----------
    missing_roles: List[Union[:class:`str`, :class:`int`]]
        The roles that the invoker is missing.
        These are the parameters passed to :func:`~.commands.has_any_role`.
    """
    def __init__(self, missing_roles):
        self.missing_roles = missing_roles
        quoted = ["'%s'" % role for role in missing_roles]
        if len(quoted) > 2:
            fmt = '%s, or %s' % (', '.join(quoted[:-1]), quoted[-1])
        else:
            fmt = ' or '.join(quoted)
        super().__init__('You are missing at least one of the required roles: %s' % fmt)
class BotMissingAnyRole(CheckFailure):
    """Exception raised when the bot's member has none of the roles
    specified to run a command.

    This inherits from :exc:`CheckFailure`

    .. versionadded:: 1.1

    Attributes
    -----------
    missing_roles: List[Union[:class:`str`, :class:`int`]]
        The roles that the bot's member is missing.
        These are the parameters passed to :func:`~.commands.has_any_role`.
    """
    def __init__(self, missing_roles):
        self.missing_roles = missing_roles
        quoted = ["'%s'" % role for role in missing_roles]
        if len(quoted) > 2:
            fmt = '%s, or %s' % (', '.join(quoted[:-1]), quoted[-1])
        else:
            fmt = ' or '.join(quoted)
        super().__init__('Bot is missing at least one of the required roles: %s' % fmt)
class NSFWChannelRequired(CheckFailure):
    """Exception raised when a channel does not have the required NSFW setting.

    This inherits from :exc:`CheckFailure`.

    .. versionadded:: 1.1

    Parameters
    -----------
    channel: :class:`discord.abc.GuildChannel`
        The channel that does not have NSFW enabled.
    """
    def __init__(self, channel):
        self.channel = channel
        super().__init__("Channel '%s' needs to be NSFW for this command to work." % channel)
class MissingPermissions(CheckFailure):
    """Exception raised when the command invoker lacks the permissions
    needed to run a command.

    This inherits from :exc:`CheckFailure`

    Attributes
    -----------
    missing_perms: :class:`list`
        The required permissions that are missing.
    """
    def __init__(self, missing_perms, *args):
        self.missing_perms = missing_perms
        # Turn e.g. 'manage_guild' into the user-facing 'Manage Server'.
        pretty = [perm.replace('_', ' ').replace('guild', 'server').title() for perm in missing_perms]
        if len(pretty) > 2:
            fmt = '%s, and %s' % (', '.join(pretty[:-1]), pretty[-1])
        else:
            fmt = ' and '.join(pretty)
        super().__init__('You are missing %s permission(s) to run this command.' % fmt, *args)
class BotMissingPermissions(CheckFailure):
    """Exception raised when the bot's member lacks the permissions
    needed to run a command.

    This inherits from :exc:`CheckFailure`

    Attributes
    -----------
    missing_perms: :class:`list`
        The required permissions that are missing.
    """
    def __init__(self, missing_perms, *args):
        self.missing_perms = missing_perms
        # Turn e.g. 'manage_guild' into the user-facing 'Manage Server'.
        pretty = [perm.replace('_', ' ').replace('guild', 'server').title() for perm in missing_perms]
        if len(pretty) > 2:
            fmt = '%s, and %s' % (', '.join(pretty[:-1]), pretty[-1])
        else:
            fmt = ' and '.join(pretty)
        super().__init__('Bot requires %s permission(s) to run this command.' % fmt, *args)
class BadUnionArgument(UserInputError):
    """Exception raised when a :data:`typing.Union` converter fails for all
    of its associated types.

    This inherits from :exc:`UserInputError`

    Attributes
    -----------
    param: :class:`inspect.Parameter`
        The parameter that failed being converted.
    converters: Tuple[Type, ...]
        A tuple of converters attempted in conversion, in order of failure.
    errors: List[:class:`CommandError`]
        A list of errors that were caught from failing the conversion.
    """
    def __init__(self, param, converters, errors):
        self.param = param
        self.converters = converters
        self.errors = errors

        def _get_name(x):
            # Classes have __name__; converter instances fall back to their type.
            try:
                return x.__name__
            except AttributeError:
                return x.__class__.__name__

        names = [_get_name(c) for c in converters]
        if len(names) > 2:
            fmt = '%s, or %s' % (', '.join(names[:-1]), names[-1])
        else:
            fmt = ' or '.join(names)
        super().__init__('Could not convert "%s" into %s.' % (param.name, fmt))
class ArgumentParsingError(UserInputError):
    """An exception raised when the parser fails to parse a user's input.

    This inherits from :exc:`UserInputError`.

    Child classes implement more granular parsing errors for i18n purposes.
    """
class UnexpectedQuoteError(ArgumentParsingError):
    """An exception raised when the parser finds a quote mark inside a
    non-quoted string.

    This inherits from :exc:`ArgumentParsingError`.

    Attributes
    ------------
    quote: :class:`str`
        The quote mark that was found inside the non-quoted string.
    """
    def __init__(self, quote):
        self.quote = quote
        super().__init__('Unexpected quote mark, %r, in non-quoted string' % (quote,))
class InvalidEndOfQuotedStringError(ArgumentParsingError):
    """An exception raised when a character other than a space follows the
    closing quote of a string.

    This inherits from :exc:`ArgumentParsingError`.

    Attributes
    -----------
    char: :class:`str`
        The character found instead of the expected string.
    """
    def __init__(self, char):
        self.char = char
        super().__init__('Expected space after closing quotation but received %r' % (char,))
class ExpectedClosingQuoteError(ArgumentParsingError):
    """An exception raised when a closing quote character was expected
    but never found.

    This inherits from :exc:`ArgumentParsingError`.

    Attributes
    -----------
    close_quote: :class:`str`
        The quote character expected.
    """
    def __init__(self, close_quote):
        self.close_quote = close_quote
        super().__init__('Expected closing %s.' % close_quote)
class ExtensionError(DiscordException):
    """Base exception for extension related errors.

    This inherits from :exc:`~discord.DiscordException`.

    Attributes
    ------------
    name: :class:`str`
        The extension that had an error.
    """
    def __init__(self, message=None, *args, name):
        self.name = name
        message = message or 'Extension %r had an error.' % name
        # Neutralise mass mentions before the text can be echoed to chat.
        sanitised = message.replace('@everyone', '@\u200beveryone').replace('@here', '@\u200bhere')
        super().__init__(sanitised, *args)
class ExtensionAlreadyLoaded(ExtensionError):
    """An exception raised when loading an already-loaded extension.

    This inherits from :exc:`ExtensionError`
    """
    def __init__(self, name):
        super().__init__('Extension %r is already loaded.' % name, name=name)
class ExtensionNotLoaded(ExtensionError):
    """An exception raised when an extension has not been loaded.

    This inherits from :exc:`ExtensionError`
    """
    def __init__(self, name):
        super().__init__('Extension %r has not been loaded.' % name, name=name)
class NoEntryPointError(ExtensionError):
    """An exception raised when an extension lacks a ``setup`` entry point
    function.

    This inherits from :exc:`ExtensionError`
    """
    def __init__(self, name):
        super().__init__("Extension %r has no 'setup' function." % name, name=name)
class ExtensionFailed(ExtensionError):
    """An exception raised when an extension failed to load during execution
    of the module or its ``setup`` entry point.

    This inherits from :exc:`ExtensionError`

    Attributes
    -----------
    name: :class:`str`
        The extension that had the error.
    original: :exc:`Exception`
        The original exception that was raised. You can also get this via
        the ``__cause__`` attribute.
    """
    def __init__(self, name, original):
        self.original = original
        msg = 'Extension %r raised an error: %s: %s' % (name, original.__class__.__name__, original)
        super().__init__(msg, name=name)
class ExtensionNotFound(ExtensionError):
    """An exception raised when an extension is not found.

    This inherits from :exc:`ExtensionError`

    .. versionchanged:: 1.3
        Made the ``original`` attribute always None.

    Attributes
    -----------
    name: :class:`str`
        The extension that had the error.
    original: :class:`NoneType`
        Always ``None`` for backwards compatibility.
    """
    def __init__(self, name, original=None):
        # ``original`` parameter is accepted but discarded; the attribute is
        # always None for backwards compatibility.
        self.original = None
        super().__init__('Extension %r could not be loaded.' % name, name=name)
class CommandRegistrationError(ClientException):
    """An exception raised when a command cannot be added because its name
    is already taken by a different command.

    This inherits from :exc:`discord.ClientException`

    .. versionadded:: 1.4

    Attributes
    ----------
    name: :class:`str`
        The command name that had the error.
    alias_conflict: :class:`bool`
        Whether the name that conflicts is an alias of the command we try to add.
    """
    def __init__(self, name, *, alias_conflict=False):
        self.name = name
        self.alias_conflict = alias_conflict
        kind = 'alias' if alias_conflict else 'command'
        super().__init__('The %s %s is already an existing command or alias.' % (kind, name))
import itertools
import copy
import functools
import inspect
import re
import discord.utils
from .core import Group, Command
from .errors import CommandError
# Public API of this module; keep in sync with the definitions below.
__all__ = (
    'Paginator',
    'HelpCommand',
    'DefaultHelpCommand',
    'MinimalHelpCommand',
)
# help -> shows info of bot on top/bottom and lists subcommands
# help command -> shows detailed info of command
# help command <subcommand chain> -> same as above
# <description>
# <command signature with aliases>
# <long doc>
# Cog:
# <command> <shortdoc>
# <command> <shortdoc>
# Other Cog:
# <command> <shortdoc>
# No Category:
# <command> <shortdoc>
# Type <prefix>help command for more info on a command.
# You can also type <prefix>help category for more info on a category.
class Paginator:
    """A class that aids in paginating code blocks for Discord messages.

    .. container:: operations

        .. describe:: len(x)

            Returns the total number of characters in the paginator.

    Attributes
    -----------
    prefix: :class:`str`
        Text placed at the start of every page, e.g. three backticks.
    suffix: :class:`str`
        Text appended at the end of every page, e.g. three backticks.
    max_size: :class:`int`
        The maximum amount of codepoints allowed in a page.
    linesep: :class:`str`
        The character string inserted between lines, e.g. a newline character.

        .. versionadded:: 1.7
    """

    def __init__(self, prefix='```', suffix='```', max_size=2000, linesep='\n'):
        self.prefix = prefix
        self.suffix = suffix
        self.max_size = max_size
        self.linesep = linesep
        self.clear()

    def clear(self):
        """Clears the paginator to have no pages."""
        if self.prefix is None:
            self._current_page = []
            self._count = 0
        else:
            self._current_page = [self.prefix]
            # Account for the prefix plus the separator that will follow it.
            self._count = len(self.prefix) + self._linesep_len
        self._pages = []

    @property
    def _prefix_len(self):
        return len(self.prefix) if self.prefix else 0

    @property
    def _suffix_len(self):
        return len(self.suffix) if self.suffix else 0

    @property
    def _linesep_len(self):
        return len(self.linesep)

    def add_line(self, line='', *, empty=False):
        """Adds a line to the current page, closing it first if full.

        Parameters
        -----------
        line: :class:`str`
            The line to add.
        empty: :class:`bool`
            Indicates if another empty line should be added.

        Raises
        ------
        RuntimeError
            The line was too big for the current :attr:`max_size`.
        """
        # Largest line that can ever fit on a page by itself.
        budget = self.max_size - self._prefix_len - self._suffix_len - 2 * self._linesep_len
        if len(line) > budget:
            raise RuntimeError('Line exceeds maximum page size %s' % (budget))
        # Start a new page if this line would overflow the current one.
        if self._count + len(line) + self._linesep_len > self.max_size - self._suffix_len:
            self.close_page()
        self._count += len(line) + self._linesep_len
        self._current_page.append(line)
        if empty:
            self._current_page.append('')
            self._count += self._linesep_len

    def close_page(self):
        """Prematurely terminate a page."""
        if self.suffix is not None:
            self._current_page.append(self.suffix)
        self._pages.append(self.linesep.join(self._current_page))
        if self.prefix is None:
            self._current_page = []
            self._count = 0
        else:
            self._current_page = [self.prefix]
            self._count = len(self.prefix) + self._linesep_len  # prefix + linesep

    def __len__(self):
        # Finished pages plus whatever is pending on the current page.
        return sum(len(p) for p in self._pages) + self._count

    @property
    def pages(self):
        """List[:class:`str`]: Returns the rendered list of pages."""
        # Anything beyond the bare prefix means the current page has content.
        threshold = 0 if self.prefix is None else 1
        if len(self._current_page) > threshold:
            self.close_page()
        return self._pages

    def __repr__(self):
        fmt = '<Paginator prefix: {0.prefix!r} suffix: {0.suffix!r} linesep: {0.linesep!r} max_size: {0.max_size} count: {0._count}>'
        return fmt.format(self)
def _not_overriden(f):
    # Tag the default HelpCommand hooks so _HelpCommandImpl can tell whether
    # a subclass replaced them.
    f.__help_command_not_overriden__ = True
    return f
class _HelpCommandImpl(Command):
    # Internal adapter that turns a HelpCommand instance into an invokable
    # Command. A fresh copy of the HelpCommand is made per invocation so
    # concurrent help calls do not share mutable state such as the context.
    def __init__(self, inject, *args, **kwargs):
        super().__init__(inject.command_callback, *args, **kwargs)
        self._original = inject
        self._injected = inject
    async def prepare(self, ctx):
        # Bind a per-invocation copy; self.callback must be repointed so the
        # invocation runs against the copy, not the shared original.
        self._injected = injected = self._original.copy()
        injected.context = ctx
        self.callback = injected.command_callback
        on_error = injected.on_help_command_error
        # Only install the error handler if the subclass actually overrode it
        # (the default implementation is tagged by the _not_overriden decorator).
        if not hasattr(on_error, '__help_command_not_overriden__'):
            if self.cog is not None:
                # Route through a shim that discards the cog's implicit argument.
                self.on_error = self._on_error_cog_implementation
            else:
                self.on_error = on_error
        await super().prepare(ctx)
    async def _parse_arguments(self, ctx):
        # Make the parser think we don't have a cog so it doesn't
        # inject the parameter into `ctx.args`.
        original_cog = self.cog
        self.cog = None
        try:
            await super()._parse_arguments(ctx)
        finally:
            self.cog = original_cog
    async def _on_error_cog_implementation(self, dummy, ctx, error):
        # ``dummy`` receives the cog instance that cog error handlers get;
        # it is ignored and the error forwarded to the active HelpCommand copy.
        await self._injected.on_help_command_error(ctx, error)
    @property
    def clean_params(self):
        # Drop the leading context parameter so signature rendering only
        # shows user-facing arguments.
        result = self.params.copy()
        try:
            result.popitem(last=False)
        except Exception:
            raise ValueError('Missing context parameter') from None
        else:
            return result
    def _inject_into_cog(self, cog):
        # Warning: hacky
        # Make the cog think that get_commands returns this command
        # as well if we inject it without modifying __cog_commands__
        # since that's used for the injection and ejection of cogs.
        def wrapped_get_commands(*, _original=cog.get_commands):
            ret = _original()
            ret.append(self)
            return ret
        # Ditto here
        def wrapped_walk_commands(*, _original=cog.walk_commands):
            yield from _original()
            yield self
        functools.update_wrapper(wrapped_get_commands, cog.get_commands)
        functools.update_wrapper(wrapped_walk_commands, cog.walk_commands)
        cog.get_commands = wrapped_get_commands
        cog.walk_commands = wrapped_walk_commands
        self.cog = cog
    def _eject_cog(self):
        if self.cog is None:
            return
        # revert back into their original methods
        # (``__wrapped__`` was set by functools.update_wrapper above)
        cog = self.cog
        cog.get_commands = cog.get_commands.__wrapped__
        cog.walk_commands = cog.walk_commands.__wrapped__
        self.cog = None
class HelpCommand:
    r"""The base implementation for help command formatting.

    .. note::

        Internally instances of this class are deep copied every time
        the command itself is invoked to prevent a race condition
        mentioned in :issue:`2123`.

        This means that relying on the state of this class to be
        the same between command invocations would not work as expected.

    Attributes
    ------------
    context: Optional[:class:`Context`]
        The context that invoked this help formatter. This is generally set after
        the help command assigned, :func:`command_callback`\, has been called.
    show_hidden: :class:`bool`
        Specifies if hidden commands should be shown in the output.
        Defaults to ``False``.
    verify_checks: Optional[:class:`bool`]
        Specifies if commands should have their :attr:`.Command.checks` called
        and verified. If ``True``, always calls :attr:`.Commands.checks`.
        If ``None``, only calls :attr:`.Commands.checks` in a guild setting.
        If ``False``, never calls :attr:`.Commands.checks`. Defaults to ``True``.

        .. versionchanged:: 1.7
    command_attrs: :class:`dict`
        A dictionary of options to pass in for the construction of the help command.
        This allows you to change the command behaviour without actually changing
        the implementation of the command. The attributes will be the same as the
        ones passed in the :class:`.Command` constructor.
    """

    # Substitutions applied by remove_mentions(): the zero-width space breaks
    # @everyone/@here pings, and raw mention regexes map to safe placeholders.
    MENTION_TRANSFORMS = {
        '@everyone': '@\u200beveryone',
        '@here': '@\u200bhere',
        r'<@!?[0-9]{17,22}>': '@deleted-user',
        r'<@&[0-9]{17,22}>': '@deleted-role'
    }

    MENTION_PATTERN = re.compile('|'.join(MENTION_TRANSFORMS.keys()))

    def __new__(cls, *args, **kwargs):
        # To prevent race conditions of a single instance while also allowing
        # for settings to be passed the original arguments passed must be assigned
        # to allow for easier copies (which will be made when the help command is actually called)
        # see issue 2123
        self = super().__new__(cls)

        # Shallow copies cannot be used in this case since it is not unusual to pass
        # instances that need state, e.g. Paginator or what have you into the function
        # The keys can be safely copied as-is since they're 99.99% certain of being
        # string keys
        deepcopy = copy.deepcopy
        self.__original_kwargs__ = {
            k: deepcopy(v)
            for k, v in kwargs.items()
        }
        self.__original_args__ = deepcopy(args)
        return self

    def __init__(self, **options):
        self.show_hidden = options.pop('show_hidden', False)
        self.verify_checks = options.pop('verify_checks', True)
        self.command_attrs = attrs = options.pop('command_attrs', {})
        attrs.setdefault('name', 'help')
        attrs.setdefault('help', 'Shows this message')
        # Set for the duration of an invocation by _HelpCommandImpl.prepare().
        self.context = None
        self._command_impl = _HelpCommandImpl(self, **self.command_attrs)

    def copy(self):
        # Rebuild from the pristine constructor arguments captured in __new__;
        # the command implementation object is shared, not copied.
        obj = self.__class__(*self.__original_args__, **self.__original_kwargs__)
        obj._command_impl = self._command_impl
        return obj

    def _add_to_bot(self, bot):
        command = _HelpCommandImpl(self, **self.command_attrs)
        bot.add_command(command)
        self._command_impl = command

    def _remove_from_bot(self, bot):
        bot.remove_command(self._command_impl.name)
        self._command_impl._eject_cog()

    def add_check(self, func):
        """
        Adds a check to the help command.

        .. versionadded:: 1.4

        Parameters
        ----------
        func
            The function that will be used as a check.
        """
        self._command_impl.add_check(func)

    def remove_check(self, func):
        """
        Removes a check from the help command.

        This function is idempotent and will not raise an exception if
        the function is not in the command's checks.

        .. versionadded:: 1.4

        Parameters
        ----------
        func
            The function to remove from the checks.
        """
        self._command_impl.remove_check(func)

    def get_bot_mapping(self):
        """Retrieves the bot mapping passed to :meth:`send_bot_help`."""
        bot = self.context.bot
        mapping = {
            cog: cog.get_commands()
            for cog in bot.cogs.values()
        }
        # Commands that belong to no cog go under the None key.
        mapping[None] = [c for c in bot.commands if c.cog is None]
        return mapping

    @property
    def clean_prefix(self):
        """:class:`str`: The cleaned up invoke prefix. i.e. mentions are ``@name`` instead of ``<@id>``."""
        user = self.context.guild.me if self.context.guild else self.context.bot.user
        # this breaks if the prefix mention is not the bot itself but I
        # consider this to be an *incredibly* strange use case. I'd rather go
        # for this common use case rather than waste performance for the
        # odd one.
        pattern = re.compile(r"<@!?%s>" % user.id)
        return pattern.sub("@%s" % user.display_name.replace('\\', r'\\'), self.context.prefix)

    @property
    def invoked_with(self):
        """Similar to :attr:`Context.invoked_with` except properly handles
        the case where :meth:`Context.send_help` is used.

        If the help command was used regularly then this returns
        the :attr:`Context.invoked_with` attribute. Otherwise, if
        it the help command was called using :meth:`Context.send_help`
        then it returns the internal command name of the help command.

        Returns
        ---------
        :class:`str`
            The command name that triggered this invocation.
        """
        command_name = self._command_impl.name
        ctx = self.context
        if ctx is None or ctx.command is None or ctx.command.qualified_name != command_name:
            return command_name
        return ctx.invoked_with

    def get_command_signature(self, command):
        """Retrieves the signature portion of the help page.

        Parameters
        ------------
        command: :class:`Command`
            The command to get the signature of.

        Returns
        --------
        :class:`str`
            The signature for the command.
        """
        # Walk up the parent chain to build the fully qualified invocation.
        parent = command.parent
        entries = []
        while parent is not None:
            if not parent.signature or parent.invoke_without_command:
                entries.append(parent.name)
            else:
                entries.append(parent.name + ' ' + parent.signature)
            parent = parent.parent
        parent_sig = ' '.join(reversed(entries))

        if len(command.aliases) > 0:
            aliases = '|'.join(command.aliases)
            fmt = '[%s|%s]' % (command.name, aliases)
            if parent_sig:
                fmt = parent_sig + ' ' + fmt
            alias = fmt
        else:
            alias = command.name if not parent_sig else parent_sig + ' ' + command.name

        return '%s%s %s' % (self.clean_prefix, alias, command.signature)

    def remove_mentions(self, string):
        """Removes mentions from the string to prevent abuse.

        This includes ``@everyone``, ``@here``, member mentions and role mentions.

        Returns
        -------
        :class:`str`
            The string with mentions removed.
        """
        def replace(obj, *, transforms=self.MENTION_TRANSFORMS):
            # Exact-match keys ('@everyone'/'@here') hit the dict; regex-matched
            # mentions fall through to the '@invalid' placeholder.
            return transforms.get(obj.group(0), '@invalid')

        return self.MENTION_PATTERN.sub(replace, string)

    @property
    def cog(self):
        """A property for retrieving or setting the cog for the help command.

        When a cog is set for the help command, it is as-if the help command
        belongs to that cog. All cog special methods will apply to the help
        command and it will be automatically unset on unload.

        To unbind the cog from the help command, you can set it to ``None``.

        Returns
        --------
        Optional[:class:`Cog`]
            The cog that is currently set for the help command.
        """
        return self._command_impl.cog

    @cog.setter
    def cog(self, cog):
        # Remove whatever cog is currently valid, if any
        self._command_impl._eject_cog()

        # If a new cog is set then inject it.
        if cog is not None:
            self._command_impl._inject_into_cog(cog)

    def command_not_found(self, string):
        """|maybecoro|

        A method called when a command is not found in the help command.
        This is useful to override for i18n.

        Defaults to ``No command called {0} found.``

        Parameters
        ------------
        string: :class:`str`
            The string that contains the invalid command. Note that this has
            had mentions removed to prevent abuse.

        Returns
        ---------
        :class:`str`
            The string to use when a command has not been found.
        """
        return 'No command called "{}" found.'.format(string)

    def subcommand_not_found(self, command, string):
        """|maybecoro|

        A method called when a command did not have a subcommand requested in the help command.
        This is useful to override for i18n.

        Defaults to either:

        - ``'Command "{command.qualified_name}" has no subcommands.'``
            - If there is no subcommand in the ``command`` parameter.
        - ``'Command "{command.qualified_name}" has no subcommand named {string}'``
            - If the ``command`` parameter has subcommands but not one named ``string``.

        Parameters
        ------------
        command: :class:`Command`
            The command that did not have the subcommand requested.
        string: :class:`str`
            The string that contains the invalid subcommand. Note that this has
            had mentions removed to prevent abuse.

        Returns
        ---------
        :class:`str`
            The string to use when the command did not have the subcommand requested.
        """
        if isinstance(command, Group) and len(command.all_commands) > 0:
            return 'Command "{0.qualified_name}" has no subcommand named {1}'.format(command, string)
        return 'Command "{0.qualified_name}" has no subcommands.'.format(command)

    async def filter_commands(self, commands, *, sort=False, key=None):
        """|coro|

        Returns a filtered list of commands and optionally sorts them.

        This takes into account the :attr:`verify_checks` and :attr:`show_hidden`
        attributes.

        Parameters
        ------------
        commands: Iterable[:class:`Command`]
            An iterable of commands that are getting filtered.
        sort: :class:`bool`
            Whether to sort the result.
        key: Optional[Callable[:class:`Command`, Any]]
            An optional key function to pass to :func:`py:sorted` that
            takes a :class:`Command` as its sole parameter. If ``sort`` is
            passed as ``True`` then this will default as the command name.

        Returns
        ---------
        List[:class:`Command`]
            A list of commands that passed the filter.
        """
        if sort and key is None:
            key = lambda c: c.name

        iterator = commands if self.show_hidden else filter(lambda c: not c.hidden, commands)

        if self.verify_checks is False:
            # if we do not need to verify the checks then we can just
            # run it straight through normally without using await.
            return sorted(iterator, key=key) if sort else list(iterator)

        if self.verify_checks is None and not self.context.guild:
            # if verify_checks is None and we're in a DM, don't verify
            return sorted(iterator, key=key) if sort else list(iterator)

        # if we're here then we need to check every command if it can run
        async def predicate(cmd):
            try:
                return await cmd.can_run(self.context)
            except CommandError:
                # A failing check means the command is simply filtered out.
                return False

        ret = []
        for cmd in iterator:
            valid = await predicate(cmd)
            if valid:
                ret.append(cmd)

        if sort:
            ret.sort(key=key)
        return ret

    def get_max_size(self, commands):
        """Returns the largest name length of the specified command list.

        Parameters
        ------------
        commands: Sequence[:class:`Command`]
            A sequence of commands to check for the largest size.

        Returns
        --------
        :class:`int`
            The maximum width of the commands.
        """
        as_lengths = (
            discord.utils._string_width(c.name)
            for c in commands
        )
        return max(as_lengths, default=0)

    def get_destination(self):
        """Returns the :class:`~discord.abc.Messageable` where the help command will be output.

        You can override this method to customise the behaviour.

        By default this returns the context's channel.

        Returns
        -------
        :class:`.abc.Messageable`
            The destination where the help command will be output.
        """
        return self.context.channel

    async def send_error_message(self, error):
        """|coro|

        Handles the implementation when an error happens in the help command.
        For example, the result of :meth:`command_not_found` or
        :meth:`subcommand_not_found` will be passed here.

        You can override this method to customise the behaviour.

        By default, this sends the error message to the destination
        specified by :meth:`get_destination`.

        .. note::

            You can access the invocation context with :attr:`HelpCommand.context`.

        Parameters
        ------------
        error: :class:`str`
            The error message to display to the user. Note that this has
            had mentions removed to prevent abuse.
        """
        destination = self.get_destination()
        await destination.send(error)

    @_not_overriden
    async def on_help_command_error(self, ctx, error):
        """|coro|

        The help command's error handler, as specified by :ref:`ext_commands_error_handler`.

        Useful to override if you need some specific behaviour when the error handler
        is called.

        By default this method does nothing and just propagates to the default
        error handlers.

        Parameters
        ------------
        ctx: :class:`Context`
            The invocation context.
        error: :class:`CommandError`
            The error that was raised.
        """
        pass

    async def send_bot_help(self, mapping):
        """|coro|

        Handles the implementation of the bot command page in the help command.
        This function is called when the help command is called with no arguments.

        It should be noted that this method does not return anything -- rather the
        actual message sending should be done inside this method. Well behaved subclasses
        should use :meth:`get_destination` to know where to send, as this is a customisation
        point for other users.

        You can override this method to customise the behaviour.

        .. note::

            You can access the invocation context with :attr:`HelpCommand.context`.

            Also, the commands in the mapping are not filtered. To do the filtering
            you will have to call :meth:`filter_commands` yourself.

        Parameters
        ------------
        mapping: Mapping[Optional[:class:`Cog`], List[:class:`Command`]]
            A mapping of cogs to commands that have been requested by the user for help.
            The key of the mapping is the :class:`~.commands.Cog` that the command belongs to, or
            ``None`` if there isn't one, and the value is a list of commands that belongs to that cog.
        """
        return None

    async def send_cog_help(self, cog):
        """|coro|

        Handles the implementation of the cog page in the help command.
        This function is called when the help command is called with a cog as the argument.

        It should be noted that this method does not return anything -- rather the
        actual message sending should be done inside this method. Well behaved subclasses
        should use :meth:`get_destination` to know where to send, as this is a customisation
        point for other users.

        You can override this method to customise the behaviour.

        .. note::

            You can access the invocation context with :attr:`HelpCommand.context`.

            To get the commands that belong to this cog see :meth:`Cog.get_commands`.
            The commands returned not filtered. To do the filtering you will have to call
            :meth:`filter_commands` yourself.

        Parameters
        -----------
        cog: :class:`Cog`
            The cog that was requested for help.
        """
        return None

    async def send_group_help(self, group):
        """|coro|

        Handles the implementation of the group page in the help command.
        This function is called when the help command is called with a group as the argument.

        It should be noted that this method does not return anything -- rather the
        actual message sending should be done inside this method. Well behaved subclasses
        should use :meth:`get_destination` to know where to send, as this is a customisation
        point for other users.

        You can override this method to customise the behaviour.

        .. note::

            You can access the invocation context with :attr:`HelpCommand.context`.

            To get the commands that belong to this group without aliases see
            :attr:`Group.commands`. The commands returned not filtered. To do the
            filtering you will have to call :meth:`filter_commands` yourself.

        Parameters
        -----------
        group: :class:`Group`
            The group that was requested for help.
        """
        return None

    async def send_command_help(self, command):
        """|coro|

        Handles the implementation of the single command page in the help command.

        It should be noted that this method does not return anything -- rather the
        actual message sending should be done inside this method. Well behaved subclasses
        should use :meth:`get_destination` to know where to send, as this is a customisation
        point for other users.

        You can override this method to customise the behaviour.

        .. note::

            You can access the invocation context with :attr:`HelpCommand.context`.

        .. admonition:: Showing Help
            :class: helpful

            There are certain attributes and methods that are helpful for a help command
            to show such as the following:

            - :attr:`Command.help`
            - :attr:`Command.brief`
            - :attr:`Command.short_doc`
            - :attr:`Command.description`
            - :meth:`get_command_signature`

            There are more than just these attributes but feel free to play around with
            these to help you get started to get the output that you want.

        Parameters
        -----------
        command: :class:`Command`
            The command that was requested for help.
        """
        return None

    async def prepare_help_command(self, ctx, command=None):
        """|coro|

        A low level method that can be used to prepare the help command
        before it does anything. For example, if you need to prepare
        some state in your subclass before the command does its processing
        then this would be the place to do it.

        The default implementation does nothing.

        .. note::

            This is called *inside* the help command callback body. So all
            the usual rules that happen inside apply here as well.

        Parameters
        -----------
        ctx: :class:`Context`
            The invocation context.
        command: Optional[:class:`str`]
            The argument passed to the help command.
        """
        pass

    async def command_callback(self, ctx, *, command=None):
        """|coro|

        The actual implementation of the help command.

        It is not recommended to override this method and instead change
        the behaviour through the methods that actually get dispatched.

        - :meth:`send_bot_help`
        - :meth:`send_cog_help`
        - :meth:`send_group_help`
        - :meth:`send_command_help`
        - :meth:`get_destination`
        - :meth:`command_not_found`
        - :meth:`subcommand_not_found`
        - :meth:`send_error_message`
        - :meth:`on_help_command_error`
        - :meth:`prepare_help_command`
        """
        await self.prepare_help_command(ctx, command)
        bot = ctx.bot

        if command is None:
            mapping = self.get_bot_mapping()
            return await self.send_bot_help(mapping)

        # Check if it's a cog
        cog = bot.get_cog(command)
        if cog is not None:
            return await self.send_cog_help(cog)

        maybe_coro = discord.utils.maybe_coroutine

        # If it's not a cog then it's a command.
        # Since we want to have detailed errors when someone
        # passes an invalid subcommand, we need to walk through
        # the command group chain ourselves.
        keys = command.split(' ')
        cmd = bot.all_commands.get(keys[0])
        if cmd is None:
            string = await maybe_coro(self.command_not_found, self.remove_mentions(keys[0]))
            return await self.send_error_message(string)

        for key in keys[1:]:
            try:
                # Non-group commands have no all_commands attribute.
                found = cmd.all_commands.get(key)
            except AttributeError:
                string = await maybe_coro(self.subcommand_not_found, cmd, self.remove_mentions(key))
                return await self.send_error_message(string)
            else:
                if found is None:
                    string = await maybe_coro(self.subcommand_not_found, cmd, self.remove_mentions(key))
                    return await self.send_error_message(string)
                cmd = found

        if isinstance(cmd, Group):
            return await self.send_group_help(cmd)
        else:
            return await self.send_command_help(cmd)
class DefaultHelpCommand(HelpCommand):
    """The implementation of the default help command.

    This inherits from :class:`HelpCommand`.

    It extends it with the following attributes.

    Attributes
    ------------
    width: :class:`int`
        The maximum number of characters that fit in a line.
        Defaults to 80.
    sort_commands: :class:`bool`
        Whether to sort the commands in the output alphabetically. Defaults to ``True``.
    dm_help: Optional[:class:`bool`]
        A tribool that indicates if the help command should DM the user instead of
        sending it to the channel it received it from. If the boolean is set to
        ``True``, then all help output is DM'd. If ``False``, none of the help
        output is DM'd. If ``None``, then the bot will only DM when the help
        message becomes too long (dictated by more than :attr:`dm_help_threshold` characters).
        Defaults to ``False``.
    dm_help_threshold: Optional[:class:`int`]
        The number of characters the paginator must accumulate before getting DM'd to the
        user if :attr:`dm_help` is set to ``None``. Defaults to 1000.
    indent: :class:`int`
        How much to indent the commands from a heading. Defaults to ``2``.
    commands_heading: :class:`str`
        The command list's heading string used when the help command is invoked with a category name.
        Useful for i18n. Defaults to ``"Commands:"``
    no_category: :class:`str`
        The string used when there is a command which does not belong to any category(cog).
        Useful for i18n. Defaults to ``"No Category"``
    paginator: :class:`Paginator`
        The paginator used to paginate the help command output.
    """

    def __init__(self, **options):
        self.width = options.pop('width', 80)
        self.indent = options.pop('indent', 2)
        self.sort_commands = options.pop('sort_commands', True)
        self.dm_help = options.pop('dm_help', False)
        self.dm_help_threshold = options.pop('dm_help_threshold', 1000)
        self.commands_heading = options.pop('commands_heading', "Commands:")
        self.no_category = options.pop('no_category', 'No Category')
        self.paginator = options.pop('paginator', None)

        if self.paginator is None:
            self.paginator = Paginator()

        super().__init__(**options)

    def shorten_text(self, text):
        """:class:`str`: Shortens text to fit into the :attr:`width`."""
        if len(text) > self.width:
            return text[:self.width - 3] + '...'
        return text

    def get_ending_note(self):
        """:class:`str`: Returns help command's ending note. This is mainly useful to override for i18n purposes."""
        command_name = self.invoked_with
        return "Type {0}{1} command for more info on a command.\n" \
               "You can also type {0}{1} category for more info on a category.".format(self.clean_prefix, command_name)

    def add_indented_commands(self, commands, *, heading, max_size=None):
        """Indents a list of commands after the specified heading.

        The formatting is added to the :attr:`paginator`.

        The default implementation is the command name indented by
        :attr:`indent` spaces, padded to ``max_size`` followed by
        the command's :attr:`Command.short_doc` and then shortened
        to fit into the :attr:`width`.

        Parameters
        -----------
        commands: Sequence[:class:`Command`]
            A list of commands to indent for output.
        heading: :class:`str`
            The heading to add to the output. This is only added
            if the list of commands is greater than 0.
        max_size: Optional[:class:`int`]
            The max size to use for the gap between indents.
            If unspecified, calls :meth:`get_max_size` on the
            commands parameter.
        """
        if not commands:
            return

        self.paginator.add_line(heading)
        max_size = max_size or self.get_max_size(commands)

        get_width = discord.utils._string_width
        for command in commands:
            name = command.name
            # Compensate for wide (e.g. CJK) characters so columns line up.
            width = max_size - (get_width(name) - len(name))
            entry = '{0}{1:<{width}} {2}'.format(self.indent * ' ', name, command.short_doc, width=width)
            self.paginator.add_line(self.shorten_text(entry))

    async def send_pages(self):
        """A helper utility to send the page output from :attr:`paginator` to the destination."""
        destination = self.get_destination()
        for page in self.paginator.pages:
            await destination.send(page)

    def add_command_formatting(self, command):
        """A utility function to format the non-indented block of commands and groups.

        Parameters
        ------------
        command: :class:`Command`
            The command to format.
        """
        if command.description:
            self.paginator.add_line(command.description, empty=True)

        signature = self.get_command_signature(command)
        self.paginator.add_line(signature, empty=True)

        if command.help:
            try:
                self.paginator.add_line(command.help, empty=True)
            except RuntimeError:
                # Help text too long for one paginator line; add line by line.
                for line in command.help.splitlines():
                    self.paginator.add_line(line)
                self.paginator.add_line()

        # NOTE(review): ``Command.example`` is not a stock discord.py attribute —
        # presumably added elsewhere in this fork; confirm it is always defined.
        # Also note ``empty=True`` inside the loop plus the trailing add_line()
        # produces a blank line after *each* example plus one more — verify that
        # double spacing is intended.
        if command.example:
            self.paginator.add_line("Examples:")
            for example in command.example:
                self.paginator.add_line("  - %s%s" % (self.clean_prefix, example), empty=True)
            self.paginator.add_line()

    def get_destination(self):
        # DM the author when dm_help is True, or when it is None and the
        # accumulated output exceeds the threshold; otherwise reply in-channel.
        ctx = self.context
        if self.dm_help is True:
            return ctx.author
        elif self.dm_help is None and len(self.paginator) > self.dm_help_threshold:
            return ctx.author
        else:
            return ctx.channel

    async def prepare_help_command(self, ctx, command):
        # Fresh paginator state per invocation.
        self.paginator.clear()
        await super().prepare_help_command(ctx, command)

    async def send_bot_help(self, mapping):
        ctx = self.context
        bot = ctx.bot

        if bot.description:
            # <description> portion
            self.paginator.add_line(bot.description, empty=True)

        # Zero-width space prefix sorts the "No Category" bucket last.
        no_category = '\u200b{0.no_category}:'.format(self)
        def get_category(command, *, no_category=no_category):
            cog = command.cog
            return cog.qualified_name + ':' if cog is not None else no_category

        # Sorting by category is required for itertools.groupby to bucket correctly.
        filtered = await self.filter_commands(bot.commands, sort=True, key=get_category)
        max_size = self.get_max_size(filtered)
        to_iterate = itertools.groupby(filtered, key=get_category)

        # Now we can add the commands to the page.
        for category, commands in to_iterate:
            commands = sorted(commands, key=lambda c: c.name) if self.sort_commands else list(commands)
            self.add_indented_commands(commands, heading=category, max_size=max_size)

        note = self.get_ending_note()
        if note:
            self.paginator.add_line()
            self.paginator.add_line(note)

        await self.send_pages()

    async def send_command_help(self, command):
        self.add_command_formatting(command)
        self.paginator.close_page()
        await self.send_pages()

    async def send_group_help(self, group):
        self.add_command_formatting(group)

        filtered = await self.filter_commands(group.commands, sort=self.sort_commands)
        self.add_indented_commands(filtered, heading=self.commands_heading)

        if filtered:
            note = self.get_ending_note()
            if note:
                self.paginator.add_line()
                self.paginator.add_line(note)

        await self.send_pages()

    async def send_cog_help(self, cog):
        if cog.description:
            self.paginator.add_line(cog.description, empty=True)

        filtered = await self.filter_commands(cog.get_commands(), sort=self.sort_commands)
        self.add_indented_commands(filtered, heading=self.commands_heading)

        note = self.get_ending_note()
        if note:
            self.paginator.add_line()
            self.paginator.add_line(note)

        await self.send_pages()
class MinimalHelpCommand(HelpCommand):
"""An implementation of a help command with minimal output.
This inherits from :class:`HelpCommand`.
Attributes
------------
sort_commands: :class:`bool`
Whether to sort the commands in the output alphabetically. Defaults to ``True``.
commands_heading: :class:`str`
The command list's heading string used when the help command is invoked with a category name.
Useful for i18n. Defaults to ``"Commands"``
aliases_heading: :class:`str`
The alias list's heading string used to list the aliases of the command. Useful for i18n.
Defaults to ``"Aliases:"``.
dm_help: Optional[:class:`bool`]
A tribool that indicates if the help command should DM the user instead of
sending it to the channel it received it from. If the boolean is set to
``True``, then all help output is DM'd. If ``False``, none of the help
output is DM'd. If ``None``, then the bot will only DM when the help
message becomes too long (dictated by more than :attr:`dm_help_threshold` characters).
Defaults to ``False``.
dm_help_threshold: Optional[:class:`int`]
The number of characters the paginator must accumulate before getting DM'd to the
user if :attr:`dm_help` is set to ``None``. Defaults to 1000.
no_category: :class:`str`
The string used when there is a command which does not belong to any category(cog).
Useful for i18n. Defaults to ``"No Category"``
paginator: :class:`Paginator`
The paginator used to paginate the help command output.
"""
def __init__(self, **options):
self.sort_commands = options.pop('sort_commands', True)
self.commands_heading = options.pop('commands_heading', "Commands")
self.dm_help = options.pop('dm_help', False)
self.dm_help_threshold = options.pop('dm_help_threshold', 1000)
self.aliases_heading = options.pop('aliases_heading', "Aliases:")
self.no_category = options.pop('no_category', 'No Category')
self.paginator = options.pop('paginator', None)
if self.paginator is None:
self.paginator = Paginator(suffix=None, prefix=None)
super().__init__(**options)
async def send_pages(self):
"""A helper utility to send the page output from :attr:`paginator` to the destination."""
destination = self.get_destination()
for page in self.paginator.pages:
await destination.send(page)
def get_opening_note(self):
"""Returns help command's opening note. This is mainly useful to override for i18n purposes.
The default implementation returns ::
Use `{prefix}{command_name} [command]` for more info on a command.
You can also use `{prefix}{command_name} [category]` for more info on a category.
Returns
-------
:class:`str`
The help command opening note.
"""
command_name = self.invoked_with
return "Use `{0}{1} [command]` for more info on a command.\n" \
"You can also use `{0}{1} [category]` for more info on a category.".format(self.clean_prefix, command_name)
def get_command_signature(self, command):
return '%s%s %s' % (self.clean_prefix, command.qualified_name, command.signature)
def get_ending_note(self):
"""Return the help command's ending note. This is mainly useful to override for i18n purposes.
The default implementation does nothing.
Returns
-------
:class:`str`
The help command ending note.
"""
return None
def add_bot_commands_formatting(self, commands, heading):
"""Adds the minified bot heading with commands to the output.
The formatting should be added to the :attr:`paginator`.
The default implementation is a bold underline heading followed
by commands separated by an EN SPACE (U+2002) in the next line.
Parameters
-----------
commands: Sequence[:class:`Command`]
A list of commands that belong to the heading.
heading: :class:`str`
The heading to add to the line.
"""
if commands:
# U+2002 Middle Dot
joined = '\u2002'.join(c.name for c in commands)
self.paginator.add_line('__**%s**__' % heading)
self.paginator.add_line(joined)
def add_subcommand_formatting(self, command):
"""Adds formatting information on a subcommand.
The formatting should be added to the :attr:`paginator`.
The default implementation is the prefix and the :attr:`Command.qualified_name`
optionally followed by an En dash and the command's :attr:`Command.short_doc`.
Parameters
-----------
command: :class:`Command`
The command to show information of.
"""
fmt = '{0}{1} \N{EN DASH} {2}' if command.short_doc else '{0}{1}'
self.paginator.add_line(fmt.format(self.clean_prefix, command.qualified_name, command.short_doc))
def add_aliases_formatting(self, aliases):
"""Adds the formatting information on a command's aliases.
The formatting should be added to the :attr:`paginator`.
The default implementation is the :attr:`aliases_heading` bolded
followed by a comma separated list of aliases.
This is not called if there are no aliases to format.
Parameters
-----------
aliases: Sequence[:class:`str`]
A list of aliases to format.
"""
self.paginator.add_line('**%s** %s' % (self.aliases_heading, ', '.join(aliases)), empty=True)
    def add_command_formatting(self, command):
        """A utility function to format commands and groups.

        Parameters
        ------------
        command: :class:`Command`
            The command to format.
        """
        if command.description:
            self.paginator.add_line(command.description, empty=True)
        signature = self.get_command_signature(command)
        if command.aliases:
            self.paginator.add_line(signature)
            self.add_aliases_formatting(command.aliases)
        else:
            self.paginator.add_line(signature, empty=True)
        if command.help:
            try:
                self.paginator.add_line(command.help, empty=True)
            except RuntimeError:
                # The help text exceeded a single page; fall back to adding
                # it line by line so the paginator can split it across pages.
                for line in command.help.splitlines():
                    self.paginator.add_line(line)
                self.paginator.add_line()
def get_destination(self):
ctx = self.context
if self.dm_help is True:
return ctx.author
elif self.dm_help is None and len(self.paginator) > self.dm_help_threshold:
return ctx.author
else:
return ctx.channel
    async def prepare_help_command(self, ctx, command):
        # Start every invocation with a clean paginator so output from a
        # previous help call cannot leak into this one.
        self.paginator.clear()
        await super().prepare_help_command(ctx, command)

    async def send_bot_help(self, mapping):
        """|coro|

        Builds and sends the full bot help page (description, opening note,
        commands grouped per category, ending note).
        """
        ctx = self.context
        bot = ctx.bot

        if bot.description:
            self.paginator.add_line(bot.description, empty=True)

        note = self.get_opening_note()
        if note:
            self.paginator.add_line(note, empty=True)

        # \u200b (zero width space) prefix sorts the "no category" bucket last.
        no_category = '\u200b{0.no_category}'.format(self)
        def get_category(command, *, no_category=no_category):
            cog = command.cog
            return cog.qualified_name if cog is not None else no_category

        # sort=True with the same key guarantees each category is one
        # contiguous run, which itertools.groupby requires.
        filtered = await self.filter_commands(bot.commands, sort=True, key=get_category)
        to_iterate = itertools.groupby(filtered, key=get_category)

        for category, commands in to_iterate:
            commands = sorted(commands, key=lambda c: c.name) if self.sort_commands else list(commands)
            self.add_bot_commands_formatting(commands, category)

        note = self.get_ending_note()
        if note:
            self.paginator.add_line()
            self.paginator.add_line(note)

        await self.send_pages()

    async def send_cog_help(self, cog):
        """|coro|

        Builds and sends the help page for a single cog.
        """
        bot = self.context.bot
        if bot.description:
            self.paginator.add_line(bot.description, empty=True)

        note = self.get_opening_note()
        if note:
            self.paginator.add_line(note, empty=True)

        if cog.description:
            self.paginator.add_line(cog.description, empty=True)

        filtered = await self.filter_commands(cog.get_commands(), sort=self.sort_commands)
        if filtered:
            self.paginator.add_line('**%s %s**' % (cog.qualified_name, self.commands_heading))
            for command in filtered:
                self.add_subcommand_formatting(command)

        note = self.get_ending_note()
        if note:
            self.paginator.add_line()
            self.paginator.add_line(note)

        await self.send_pages()

    async def send_group_help(self, group):
        """|coro|

        Builds and sends the help page for a command group, including
        its runnable subcommands.
        """
        self.add_command_formatting(group)

        filtered = await self.filter_commands(group.commands, sort=self.sort_commands)
        if filtered:
            note = self.get_opening_note()
            if note:
                self.paginator.add_line(note, empty=True)

            self.paginator.add_line('**%s**' % self.commands_heading)
            for command in filtered:
                self.add_subcommand_formatting(command)

            note = self.get_ending_note()
            if note:
                self.paginator.add_line()
                self.paginator.add_line(note)

        await self.send_pages()

    async def send_command_help(self, command):
        """|coro|

        Builds and sends the help page for a single command.
        """
        self.add_command_formatting(command)
        # Flush the in-progress page so send_pages has something to send.
        self.paginator.close_page()
        await self.send_pages()
import asyncio
import collections
import inspect
import importlib.util
import sys
import traceback
import types
import discord
from .core import GroupMixin
from .view import StringView
from .context import Context
from . import errors
from .help import HelpCommand, DefaultHelpCommand
from .cog import Cog
def when_mentioned(bot, msg):
    """A callable that implements a command prefix equivalent to being mentioned.

    These are meant to be passed into the :attr:`.Bot.command_prefix` attribute.
    """
    user = bot.user
    # Both mention forms are valid: <@id> and the nickname form <@!id>.
    return ['{0} '.format(user.mention), '<@!{0}> '.format(user.id)]
def when_mentioned_or(*prefixes):
    """A callable that implements when mentioned or other prefixes provided.

    These are meant to be passed into the :attr:`.Bot.command_prefix` attribute.

    Example
    --------

    .. code-block:: python3

        bot = commands.Bot(command_prefix=commands.when_mentioned_or('!'))

    .. note::

        This callable returns another callable, so if this is done inside a custom
        callable, you must call the returned callable, for example:

        .. code-block:: python3

            async def get_prefix(bot, message):
                extras = await prefixes_for(message.guild) # returns a list
                return commands.when_mentioned_or(*extras)(bot, message)

    See Also
    ----------
    :func:`.when_mentioned`
    """
    def inner(bot, msg):
        # Mention prefixes come first so they take matching priority.
        return when_mentioned(bot, msg) + list(prefixes)

    return inner
def _is_submodule(parent, child):
return parent == child or child.startswith(parent + ".")
class _DefaultRepr:
def __repr__(self):
return '<default-help-command>'
_default = _DefaultRepr()
class BotBase(GroupMixin):
    def __init__(self, command_prefix, help_command=_default, description=None, **options):
        super().__init__(**options)
        self.command_prefix = command_prefix
        self.extra_events = {}     # event name -> list of extra listeners
        self.__cogs = {}           # cog name -> Cog instance
        self.__extensions = {}     # extension name -> loaded module
        self._checks = []          # global checks run for every command
        self._check_once = []      # global checks run once per invoke
        self._before_invoke = None
        self._after_invoke = None
        self._help_command = None
        self.description = inspect.cleandoc(description) if description else ''
        self.owner_id = options.get('owner_id')
        self.owner_ids = options.get('owner_ids', set())
        self.strip_after_prefix = options.get('strip_after_prefix', False)

        if self.owner_id and self.owner_ids:
            raise TypeError('Both owner_id and owner_ids are set.')

        if self.owner_ids and not isinstance(self.owner_ids, collections.abc.Collection):
            raise TypeError('owner_ids must be a collection not {0.__class__!r}'.format(self.owner_ids))

        if options.pop('self_bot', False):
            # Self-bot mode: only process messages authored by the bot itself.
            self._skip_check = lambda x, y: x != y
        else:
            # Normal mode: skip (ignore) the bot's own messages.
            self._skip_check = lambda x, y: x == y

        if help_command is _default:
            self.help_command = DefaultHelpCommand()
        else:
            self.help_command = help_command

    @property
    def owner(self):
        """:class:`discord.User`: The owner, retrieved from owner_id. In case of improper caching, this can return None.

        Only valid when a single ``owner_id`` is configured; raises
        :exc:`AttributeError` otherwise.

        .. versionadded:: 1.5.0.1
        """
        if not self.owner_id or self.owner_ids:
            raise AttributeError('No owner_id specified or you used owner_ids. If you used owner_ids, please refer to `Bot.owners`')
        return self.get_user(self.owner_id)
@property
def owners(self):
"""List[:class:`discord.User`]: The owners, retrieved from owner_ids. In case of improper caching, this list may not contain all owners.
.. versionadded:: 1.5.0.1"""
if not self.owner_ids or self.owner_id:
raise TypeError('No owner_ids specified or you used owner_id. If you used owner_id, please refer to `Bot.owner`')
owners = []
for user in self.owner_ids:
owner = self.get_user(user)
if owner:
owners.append(owner)
return owners
    # internal helpers

    def dispatch(self, event_name, *args, **kwargs):
        # Dispatch to the regular client listeners first, then to any extra
        # listeners registered through add_listener/listen.
        super().dispatch(event_name, *args, **kwargs)
        ev = 'on_' + event_name
        for event in self.extra_events.get(ev, []):
            self._schedule_event(event, ev, *args, **kwargs)

    async def close(self):
        # Tear down extensions and cogs before closing the connection;
        # individual failures are ignored so one bad unload cannot block
        # shutdown of the rest.
        for extension in tuple(self.__extensions):
            try:
                self.unload_extension(extension)
            except Exception:
                pass

        for cog in tuple(self.__cogs):
            try:
                self.remove_cog(cog)
            except Exception:
                pass

        await super().close()

    async def on_command_error(self, context, exception):
        """|coro|

        The default command error handler provided by the bot.

        By default this prints to :data:`sys.stderr` however it could be
        overridden to have a different implementation.

        This only fires if you do not specify any listeners for command error.
        """
        # Stay silent when a user-registered handler exists at any level:
        # bot listener, per-command on_error, or cog-level handler.
        if self.extra_events.get('on_command_error', None):
            return

        if hasattr(context.command, 'on_error'):
            return

        cog = context.cog
        if cog and Cog._get_overridden_method(cog.cog_command_error) is not None:
            return

        print('Ignoring exception in command {}:'.format(context.command), file=sys.stderr)
        traceback.print_exception(type(exception), exception, exception.__traceback__, file=sys.stderr)
    # global check registration

    def check(self, func):
        r"""A decorator that adds a global check to the bot.

        A global check is similar to a :func:`.check` that is applied
        on a per command basis except it is run before any command checks
        have been verified and applies to every command the bot has.

        .. note::

            This function can either be a regular function or a coroutine.

        Similar to a command :func:`.check`\, this takes a single parameter
        of type :class:`.Context` and can only raise exceptions inherited from
        :exc:`.CommandError`.

        Example
        ---------

        .. code-block:: python3

            @bot.check
            def check_commands(ctx):
                return ctx.command.qualified_name in allowed_commands

        """
        # Returns ``func`` unchanged so this works as a decorator.
        self.add_check(func)
        return func

    def add_check(self, func, *, call_once=False):
        """Adds a global check to the bot.

        This is the non-decorator interface to :meth:`.check`
        and :meth:`.check_once`.

        Parameters
        -----------
        func
            The function that was used as a global check.
        call_once: :class:`bool`
            If the function should only be called once per
            :meth:`.Command.invoke` call.
        """
        if call_once:
            self._check_once.append(func)
        else:
            self._checks.append(func)

    def remove_check(self, func, *, call_once=False):
        """Removes a global check from the bot.

        This function is idempotent and will not raise an exception
        if the function is not in the global checks.

        Parameters
        -----------
        func
            The function to remove from the global checks.
        call_once: :class:`bool`
            If the function was added with ``call_once=True`` in
            the :meth:`.Bot.add_check` call or using :meth:`.check_once`.
        """
        l = self._check_once if call_once else self._checks
        try:
            l.remove(func)
        except ValueError:
            # Not registered; removal is deliberately a no-op.
            pass

    def check_once(self, func):
        r"""A decorator that adds a "call once" global check to the bot.

        Unlike regular global checks, this one is called only once
        per :meth:`.Command.invoke` call.

        Regular global checks are called whenever a command is called
        or :meth:`.Command.can_run` is called. This type of check
        bypasses that and ensures that it's called only once, even inside
        the default help command.

        .. note::

            When using this function the :class:`.Context` sent to a group subcommand
            may only parse the parent command and not the subcommands due to it
            being invoked once per :meth:`.Bot.invoke` call.

        .. note::

            This function can either be a regular function or a coroutine.

        Similar to a command :func:`.check`\, this takes a single parameter
        of type :class:`.Context` and can only raise exceptions inherited from
        :exc:`.CommandError`.

        Example
        ---------

        .. code-block:: python3

            @bot.check_once
            def whitelist(ctx):
                return ctx.message.author.id in my_whitelist

        """
        self.add_check(func, call_once=True)
        return func

    async def can_run(self, ctx, *, call_once=False):
        # Evaluate the selected set of global checks; an empty set trivially
        # passes.  async_all awaits coroutine checks as needed.
        data = self._check_once if call_once else self._checks

        if len(data) == 0:
            return True

        return await discord.utils.async_all(f(ctx) for f in data)
    async def is_owner(self, user):
        """|coro|

        Checks if a :class:`~discord.User` or :class:`~discord.Member` is the owner of
        this bot.

        If an :attr:`owner_id` is not set, it is fetched automatically
        through the use of :meth:`~.Bot.application_info`.

        .. versionchanged:: 1.3
            The function also checks if the application is team-owned if
            :attr:`owner_ids` is not set.

        Parameters
        -----------
        user: :class:`.abc.User`
            The user to check for.

        Returns
        --------
        :class:`bool`
            Whether the user is the owner.
        """
        if self.owner_id:
            return user.id == self.owner_id
        elif self.owner_ids:
            return user.id in self.owner_ids
        else:
            # Neither is configured: look the owner(s) up once via the API
            # and cache the result on the bot for subsequent calls.
            app = await self.application_info()
            if app.team:
                self.owner_ids = ids = {m.id for m in app.team.members}
                return user.id in ids
            else:
                self.owner_id = owner_id = app.owner.id
                return user.id == owner_id

    def before_invoke(self, coro):
        """A decorator that registers a coroutine as a pre-invoke hook.

        A pre-invoke hook is called directly before the command is
        called. This makes it a useful function to set up database
        connections or any type of set up required.

        This pre-invoke hook takes a sole parameter, a :class:`.Context`.

        .. note::

            The :meth:`~.Bot.before_invoke` and :meth:`~.Bot.after_invoke` hooks are
            only called if all checks and argument parsing procedures pass
            without error. If any check or argument parsing procedures fail
            then the hooks are not called.

        Parameters
        -----------
        coro: :ref:`coroutine <coroutine>`
            The coroutine to register as the pre-invoke hook.

        Raises
        -------
        TypeError
            The coroutine passed is not actually a coroutine.
        """
        if not asyncio.iscoroutinefunction(coro):
            raise TypeError('The pre-invoke hook must be a coroutine.')
        self._before_invoke = coro
        return coro

    def after_invoke(self, coro):
        r"""A decorator that registers a coroutine as a post-invoke hook.

        A post-invoke hook is called directly after the command is
        called. This makes it a useful function to clean-up database
        connections or any type of clean up required.

        This post-invoke hook takes a sole parameter, a :class:`.Context`.

        .. note::

            Similar to :meth:`~.Bot.before_invoke`\, this is not called unless
            checks and argument parsing procedures succeed. This hook is,
            however, **always** called regardless of the internal command
            callback raising an error (i.e. :exc:`.CommandInvokeError`\).
            This makes it ideal for clean-up scenarios.

        Parameters
        -----------
        coro: :ref:`coroutine <coroutine>`
            The coroutine to register as the post-invoke hook.

        Raises
        -------
        TypeError
            The coroutine passed is not actually a coroutine.
        """
        if not asyncio.iscoroutinefunction(coro):
            raise TypeError('The post-invoke hook must be a coroutine.')
        self._after_invoke = coro
        return coro
    # listener registration

    def add_listener(self, func, name=None):
        """The non decorator alternative to :meth:`.listen`.

        Parameters
        -----------
        func: :ref:`coroutine <coroutine>`
            The function to call.
        name: Optional[:class:`str`]
            The name of the event to listen for. Defaults to ``func.__name__``.

        Example
        --------

        .. code-block:: python3

            async def on_ready(): pass
            async def my_message(message): pass

            bot.add_listener(on_ready)
            bot.add_listener(my_message, 'on_message')

        Raises
        -------
        TypeError
            The ``func`` parameter is not a coroutine function.
        """
        name = func.__name__ if name is None else name

        if not asyncio.iscoroutinefunction(func):
            raise TypeError('Listeners must be coroutines')

        if name in self.extra_events:
            self.extra_events[name].append(func)
        else:
            self.extra_events[name] = [func]

    def remove_listener(self, func, name=None):
        """Removes a listener from the pool of listeners.

        Parameters
        -----------
        func
            The function that was used as a listener to remove.
        name: :class:`str`
            The name of the event we want to remove. Defaults to
            ``func.__name__``.
        """
        name = func.__name__ if name is None else name

        if name in self.extra_events:
            try:
                self.extra_events[name].remove(func)
            except ValueError:
                # Listener was not registered; removal is a no-op.
                pass

    def listen(self, name=None):
        """A decorator that registers another function as an external
        event listener. Basically this allows you to listen to multiple
        events from different places e.g. such as :func:`.on_ready`

        The functions being listened to must be a :ref:`coroutine <coroutine>`.

        Example
        --------

        .. code-block:: python3

            @bot.listen()
            async def on_message(message):
                print('one')

            # in some other file...

            @bot.listen('on_message')
            async def my_message(message):
                print('two')

        Would print one and two in an unspecified order.

        Raises
        -------
        TypeError
            The function being listened to is not a coroutine.
        """
        def decorator(func):
            self.add_listener(func, name)
            return func

        return decorator
    # cogs

    def add_cog(self, cog):
        """Adds a "cog" to the bot.

        A cog is a class that has its own event listeners and commands.

        Parameters
        -----------
        cog: :class:`.Cog`
            The cog to register to the bot.

        Raises
        -------
        TypeError
            The cog does not inherit from :class:`.Cog`.
        CommandError
            An error happened during loading.
        """
        if not isinstance(cog, Cog):
            raise TypeError('cogs must derive from Cog')

        # _inject registers the cog's commands and listeners on the bot and
        # returns the cog instance to store.
        cog = cog._inject(self)
        self.__cogs[cog.__cog_name__] = cog

    def get_cog(self, name):
        """Gets the cog instance requested.

        If the cog is not found, ``None`` is returned instead.

        Parameters
        -----------
        name: :class:`str`
            The name of the cog you are requesting.
            This is equivalent to the name passed via keyword
            argument in class creation or the class name if unspecified.

        Returns
        --------
        Optional[:class:`Cog`]
            The cog that was requested. If not found, returns ``None``.
        """
        return self.__cogs.get(name)

    def remove_cog(self, name):
        """Removes a cog from the bot.

        All registered commands and event listeners that the
        cog has registered will be removed as well.

        If no cog is found then this method has no effect.

        Parameters
        -----------
        name: :class:`str`
            The name of the cog to remove.
        """
        cog = self.__cogs.pop(name, None)
        if cog is None:
            return

        # Detach the help command from the removed cog so it does not keep
        # a dangling reference.
        help_command = self._help_command
        if help_command and help_command.cog is cog:
            help_command.cog = None
        cog._eject(self)

    @property
    def cogs(self):
        """Mapping[:class:`str`, :class:`Cog`]: A read-only mapping of cog name to cog."""
        return types.MappingProxyType(self.__cogs)
    # extensions

    def _remove_module_references(self, name):
        # Purge everything that module ``name`` (or a submodule) registered
        # on the bot: cogs, commands, and extra listeners.
        # find all references to the module

        # remove the cogs registered from the module
        for cogname, cog in self.__cogs.copy().items():
            if _is_submodule(name, cog.__module__):
                self.remove_cog(cogname)

        # remove all the commands from the module
        for cmd in self.all_commands.copy().values():
            if cmd.module is not None and _is_submodule(name, cmd.module):
                if isinstance(cmd, GroupMixin):
                    cmd.recursively_remove_all_commands()
                self.remove_command(cmd.name)

        # remove all the listeners from the module
        for event_list in self.extra_events.copy().values():
            remove = []
            for index, event in enumerate(event_list):
                if event.__module__ is not None and _is_submodule(name, event.__module__):
                    remove.append(index)

            # delete from the back so earlier indices stay valid
            for index in reversed(remove):
                del event_list[index]

    def _call_module_finalizers(self, lib, key):
        # Run the extension's optional ``teardown`` hook, then always drop
        # the module from the extension table and from sys.modules.
        try:
            func = getattr(lib, 'teardown')
        except AttributeError:
            pass
        else:
            try:
                func(self)
            except Exception:
                # teardown errors are deliberately swallowed; unloading must
                # proceed regardless.
                pass
        finally:
            self.__extensions.pop(key, None)
            sys.modules.pop(key, None)
            name = lib.__name__
            for module in list(sys.modules.keys()):
                if _is_submodule(name, module):
                    del sys.modules[module]

    def _load_from_module_spec(self, spec, key):
        # precondition: key not in self.__extensions
        lib = importlib.util.module_from_spec(spec)
        sys.modules[key] = lib
        try:
            spec.loader.exec_module(lib)
        except Exception as e:
            del sys.modules[key]
            raise errors.ExtensionFailed(key, e) from e

        try:
            setup = getattr(lib, 'setup')
        except AttributeError:
            del sys.modules[key]
            raise errors.NoEntryPointError(key)

        try:
            setup(self)
        except Exception as e:
            # Roll back anything the partially-run setup registered.
            del sys.modules[key]
            self._remove_module_references(lib.__name__)
            self._call_module_finalizers(lib, key)
            raise errors.ExtensionFailed(key, e) from e
        else:
            self.__extensions[key] = lib

    def _resolve_name(self, name, package):
        # Resolve a possibly-relative extension name against ``package``.
        try:
            return importlib.util.resolve_name(name, package)
        except ImportError:
            raise errors.ExtensionNotFound(name)
    def load_extension(self, name, *, package=None):
        """Loads an extension.

        An extension is a python module that contains commands, cogs, or
        listeners.

        An extension must have a global function, ``setup`` defined as
        the entry point on what to do when the extension is loaded. This entry
        point must have a single argument, the ``bot``.

        Parameters
        ------------
        name: :class:`str`
            The extension name to load. It must be dot separated like
            regular Python imports if accessing a sub-module. e.g.
            ``foo.test`` if you want to import ``foo/test.py``.
        package: Optional[:class:`str`]
            The package name to resolve relative imports with.
            This is required when loading an extension using a relative path, e.g ``.foo.test``.
            Defaults to ``None``.

            .. versionadded:: 1.7

        Raises
        --------
        ExtensionNotFound
            The extension could not be imported.
            This is also raised if the name of the extension could not
            be resolved using the provided ``package`` parameter.
        ExtensionAlreadyLoaded
            The extension is already loaded.
        NoEntryPointError
            The extension does not have a setup function.
        ExtensionFailed
            The extension or its setup function had an execution error.
        """
        name = self._resolve_name(name, package)
        if name in self.__extensions:
            raise errors.ExtensionAlreadyLoaded(name)

        spec = importlib.util.find_spec(name)
        if spec is None:
            raise errors.ExtensionNotFound(name)

        self._load_from_module_spec(spec, name)

    def unload_extension(self, name, *, package=None):
        """Unloads an extension.

        When the extension is unloaded, all commands, listeners, and cogs are
        removed from the bot and the module is un-imported.

        The extension can provide an optional global function, ``teardown``,
        to do miscellaneous clean-up if necessary. This function takes a single
        parameter, the ``bot``, similar to ``setup`` from
        :meth:`~.Bot.load_extension`.

        Parameters
        ------------
        name: :class:`str`
            The extension name to unload. It must be dot separated like
            regular Python imports if accessing a sub-module. e.g.
            ``foo.test`` if you want to import ``foo/test.py``.
        package: Optional[:class:`str`]
            The package name to resolve relative imports with.
            This is required when unloading an extension using a relative path, e.g ``.foo.test``.
            Defaults to ``None``.

            .. versionadded:: 1.7

        Raises
        -------
        ExtensionNotFound
            The name of the extension could not
            be resolved using the provided ``package`` parameter.
        ExtensionNotLoaded
            The extension was not loaded.
        """
        name = self._resolve_name(name, package)
        lib = self.__extensions.get(name)
        if lib is None:
            raise errors.ExtensionNotLoaded(name)

        self._remove_module_references(lib.__name__)
        self._call_module_finalizers(lib, name)

    def reload_extension(self, name, *, package=None):
        """Atomically reloads an extension.

        This replaces the extension with the same extension, only refreshed. This is
        equivalent to a :meth:`unload_extension` followed by a :meth:`load_extension`
        except done in an atomic way. That is, if an operation fails mid-reload then
        the bot will roll-back to the prior working state.

        Parameters
        ------------
        name: :class:`str`
            The extension name to reload. It must be dot separated like
            regular Python imports if accessing a sub-module. e.g.
            ``foo.test`` if you want to import ``foo/test.py``.
        package: Optional[:class:`str`]
            The package name to resolve relative imports with.
            This is required when reloading an extension using a relative path, e.g ``.foo.test``.
            Defaults to ``None``.

            .. versionadded:: 1.7

        Raises
        -------
        ExtensionNotLoaded
            The extension was not loaded.
        ExtensionNotFound
            The extension could not be imported.
            This is also raised if the name of the extension could not
            be resolved using the provided ``package`` parameter.
        NoEntryPointError
            The extension does not have a setup function.
        ExtensionFailed
            The extension setup function had an execution error.
        """
        name = self._resolve_name(name, package)
        lib = self.__extensions.get(name)
        if lib is None:
            raise errors.ExtensionNotLoaded(name)

        # get the previous module states from sys modules
        modules = {
            name: module
            for name, module in sys.modules.items()
            if _is_submodule(lib.__name__, name)
        }

        try:
            # Unload and then load the module...
            self._remove_module_references(lib.__name__)
            self._call_module_finalizers(lib, name)
            self.load_extension(name)
        except Exception:
            # if the load failed, the remnants should have been
            # cleaned from the load_extension function call
            # so let's load it from our old compiled library.
            lib.setup(self)
            self.__extensions[name] = lib

            # revert sys.modules back to normal and raise back to caller
            sys.modules.update(modules)
            raise

    @property
    def extensions(self):
        """Mapping[:class:`str`, :class:`py:types.ModuleType`]: A read-only mapping of extension name to extension."""
        return types.MappingProxyType(self.__extensions)
    # help command stuff

    @property
    def help_command(self):
        """Optional[:class:`.HelpCommand`]: The currently configured help command, if any."""
        return self._help_command
@help_command.setter
def help_command(self, value):
if value is not None:
if not isinstance(value, HelpCommand):
raise TypeError('help_command must be a subclass of HelpCommand')
if self._help_command is not None:
self._help_command._remove_from_bot(self)
self._help_command = value
value._add_to_bot(self)
elif self._help_command is not None:
self._help_command._remove_from_bot(self)
self._help_command = None
else:
self._help_command = None
    # command processing

    async def get_prefix(self, message):
        """|coro|

        Retrieves the prefix the bot is listening to
        with the message as a context.

        Parameters
        -----------
        message: :class:`discord.Message`
            The message context to get the prefix of.

        Returns
        --------
        Union[List[:class:`str`], :class:`str`]
            A list of prefixes or a single prefix that the bot is
            listening for.
        """
        prefix = ret = self.command_prefix
        if callable(prefix):
            # command_prefix may be a plain function or a coroutine.
            ret = await discord.utils.maybe_coroutine(prefix, self, message)

        if not isinstance(ret, str):
            try:
                ret = list(ret)
            except TypeError:
                # It's possible that a generator raised this exception. Don't
                # replace it with our own error if that's the case.
                if isinstance(ret, collections.abc.Iterable):
                    raise
                raise TypeError("command_prefix must be plain string, iterable of strings, or callable "
                                "returning either of these, not {}".format(ret.__class__.__name__))

            if not ret:
                raise ValueError("Iterable command_prefix must contain at least one prefix")

        return ret

    async def get_context(self, message, *, cls=Context):
        r"""|coro|

        Returns the invocation context from the message.

        This is a more low-level counter-part for :meth:`.process_commands`
        to allow users more fine grained control over the processing.

        The returned context is not guaranteed to be a valid invocation
        context, :attr:`.Context.valid` must be checked to make sure it is.
        If the context is not valid then it is not a valid candidate to be
        invoked under :meth:`~.Bot.invoke`.

        Parameters
        -----------
        message: :class:`discord.Message`
            The message to get the invocation context from.
        cls
            The factory class that will be used to create the context.
            By default, this is :class:`.Context`. Should a custom
            class be provided, it must be similar enough to :class:`.Context`\'s
            interface.

        Returns
        --------
        :class:`.Context`
            The invocation context. The type of this can change via the
            ``cls`` parameter.
        """
        view = StringView(message.content)
        ctx = cls(prefix=None, view=view, bot=self, message=message)

        if self._skip_check(message.author.id, self.user.id):
            return ctx

        prefix = await self.get_prefix(message)
        invoked_prefix = prefix

        if isinstance(prefix, str):
            if not view.skip_string(prefix):
                return ctx
        else:
            try:
                # if the context class' __init__ consumes something from the view this
                # will be wrong. That seems unreasonable though.
                if message.content.startswith(tuple(prefix)):
                    invoked_prefix = discord.utils.find(view.skip_string, prefix)
                else:
                    return ctx

            except TypeError:
                if not isinstance(prefix, list):
                    raise TypeError("get_prefix must return either a string or a list of string, "
                                    "not {}".format(prefix.__class__.__name__))

                # It's possible a bad command_prefix got us here.
                for value in prefix:
                    if not isinstance(value, str):
                        raise TypeError("Iterable command_prefix or list returned from get_prefix must "
                                        "contain only strings, not {}".format(value.__class__.__name__))

                # Getting here shouldn't happen
                raise

        if self.strip_after_prefix:
            view.skip_ws()

        invoker = view.get_word()
        ctx.invoked_with = invoker
        ctx.prefix = invoked_prefix
        ctx.command = self.all_commands.get(invoker)
        return ctx
    async def invoke(self, ctx):
        """|coro|

        Invokes the command given under the invocation context and
        handles all the internal event dispatch mechanisms.

        Parameters
        -----------
        ctx: :class:`.Context`
            The invocation context to invoke.
        """
        if ctx.command is not None:
            self.dispatch('command', ctx)
            try:
                if await self.can_run(ctx, call_once=True):
                    await ctx.command.invoke(ctx)
                else:
                    raise errors.CheckFailure('The global check once functions failed.')
            except errors.CommandError as exc:
                await ctx.command.dispatch_error(ctx, exc)
            else:
                self.dispatch('command_completion', ctx)
        elif ctx.invoked_with:
            # A prefix matched but no command by that name exists.
            exc = errors.CommandNotFound('Command "{}" is not found'.format(ctx.invoked_with))
            self.dispatch('command_error', ctx, exc)

    async def process_commands(self, message):
        """|coro|

        This function processes the commands that have been registered
        to the bot and other groups. Without this coroutine, none of the
        commands will be triggered.

        By default, this coroutine is called inside the :func:`.on_message`
        event. If you choose to override the :func:`.on_message` event, then
        you should invoke this coroutine as well.

        This is built using other low level tools, and is equivalent to a
        call to :meth:`~.Bot.get_context` followed by a call to :meth:`~.Bot.invoke`.

        This also checks if the message's author is a bot and doesn't
        call :meth:`~.Bot.get_context` or :meth:`~.Bot.invoke` if so.

        Parameters
        -----------
        message: :class:`discord.Message`
            The message to process commands for.
        """
        if message.author.bot:
            return

        ctx = await self.get_context(message)
        await self.invoke(ctx)

    async def on_message(self, message):
        # Default on_message handler simply runs command processing.
        await self.process_commands(message)
class Bot(BotBase, discord.Client):
    """Represents a discord bot.

    This class is a subclass of :class:`discord.Client` and as a result
    anything that you can do with a :class:`discord.Client` you can do with
    this bot.

    This class also subclasses :class:`.GroupMixin` to provide the functionality
    to manage commands.

    Attributes
    -----------
    command_prefix
        The command prefix is what the message content must contain initially
        to have a command invoked. This prefix could either be a string to
        indicate what the prefix should be, or a callable that takes in the bot
        as its first parameter and :class:`discord.Message` as its second
        parameter and returns the prefix. This is to facilitate "dynamic"
        command prefixes. This callable can be either a regular function or
        a coroutine.

        An empty string as the prefix always matches, enabling prefix-less
        command invocation. While this may be useful in DMs it should be avoided
        in servers, as it's likely to cause performance issues and unintended
        command invocations.

        The command prefix could also be an iterable of strings indicating that
        multiple checks for the prefix should be used and the first one to
        match will be the invocation prefix. You can get this prefix via
        :attr:`.Context.prefix`. To avoid confusion empty iterables are not
        allowed.

        .. note::

            When passing multiple prefixes be careful to not pass a prefix
            that matches a longer prefix occurring later in the sequence. For
            example, if the command prefix is ``('!', '!?')`` the ``'!?'``
            prefix will never be matched to any message as the previous one
            matches messages starting with ``!?``. This is especially important
            when passing an empty string, it should always be last as no prefix
            after it will be matched.
    case_insensitive: :class:`bool`
        Whether the commands should be case insensitive. Defaults to ``False``. This
        attribute does not carry over to groups. You must set it to every group if
        you require group commands to be case insensitive as well.
    description: :class:`str`
        The content prefixed into the default help message.
    self_bot: :class:`bool`
        If ``True``, the bot will only listen to commands invoked by itself rather
        than ignoring itself. If ``False`` (the default) then the bot will ignore
        itself. This cannot be changed once initialised.
    help_command: Optional[:class:`.HelpCommand`]
        The help command implementation to use. This can be dynamically
        set at runtime. To remove the help command pass ``None``. For more
        information on implementing a help command, see :ref:`ext_commands_help_command`.
    owner_id: Optional[:class:`int`]
        The user ID that owns the bot. If this is not set and is then queried via
        :meth:`.is_owner` then it is fetched automatically using
        :meth:`~.Bot.application_info`.
    owner_ids: Optional[Collection[:class:`int`]]
        The user IDs that owns the bot. This is similar to :attr:`owner_id`.
        If this is not set and the application is team based, then it is
        fetched automatically using :meth:`~.Bot.application_info`.
        For performance reasons it is recommended to use a :class:`set`
        for the collection. You cannot set both ``owner_id`` and ``owner_ids``.

        .. versionadded:: 1.3
    strip_after_prefix: :class:`bool`
        Whether to strip whitespace characters after encountering the command
        prefix. This allows for ``! hello`` and ``!hello`` to both work if
        the ``command_prefix`` is set to ``!``. Defaults to ``False``.

        .. versionadded:: 1.7
    """
    # All behaviour is inherited from BotBase and discord.Client.
    pass
class AutoShardedBot(BotBase, discord.AutoShardedClient):
    """This is similar to :class:`.Bot` except that it is inherited from
    :class:`discord.AutoShardedClient` instead.
    """
    # All command/event behaviour comes from BotBase; this subclass only
    # swaps the client base class for the auto-sharding variant.
    pass
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
# Last Modified: 9 May 2006 Jim Washington
"""
useful queues for wsgi middleware
"""
import tempfile
class TemporaryFileQueue(object):
    """FIFO byte queue backed by an anonymous temporary file.

    Data is appended with write() and consumed with read(); separate
    read and write pointers let the two interleave on the single file
    object.  Useful for spooling large response bodies to disk instead
    of keeping them in memory.
    """

    def __init__(self):
        self.file = tempfile.TemporaryFile()
        self.readPointer = 0   # offset of the next byte read() will return
        self.writePointer = 0  # offset at which write() will append

    def read(self, bytes=None):
        """Return up to *bytes* unread bytes; all unread data when None.

        Fix: the original used a truth test on *bytes*, so read(0)
        behaved like read(None) and returned everything.  An explicit
        None check preserves the file-like contract that read(0)
        returns an empty result.
        """
        self.file.flush()
        self.file.seek(self.readPointer)
        if bytes is not None:
            s = self.file.read(bytes)
        else:
            s = self.file.read()
        self.readPointer = self.file.tell()
        return s

    def write(self, data):
        """Append *data* at the write pointer without disturbing reads."""
        self.file.seek(self.writePointer)
        self.file.write(data)
        self.writePointer = self.file.tell()

    def __len__(self):
        # Length of the *unread* portion of the queue, not the whole file.
        return self.writePointer - self.readPointer

    def close(self):
        """Release the backing temporary file; the queue is unusable after."""
        self.file.close()
        self.file = None
class StringQueue(object):
    # This is Python Licensed
    # http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/426060
    """FIFO queue of string data with amortized-cheap writes.

    Writes are appended to a list and only joined into the string
    buffer when a read actually needs them, avoiding quadratic ``+=``
    string building.  Python 2 only: relies on ``basestring`` and on
    ``None`` comparing less than any integer in :meth:`read`.
    """
    def __init__(self, data=""):
        self.l_buffer = []   # pending written chunks, not yet joined
        self.s_buffer = ""   # joined data awaiting read
        self.write(data)
    def write(self, data):
        """Append *data* (str/unicode) to the queue."""
        #check type here, as wrong data type will cause error on self.read,
        #which may be confusing.
        if not isinstance(data,basestring):
            raise TypeError, "argument 1 must be string, not %s" % \
                type(data).__name__
        #append data to list, no need to "".join just yet.
        self.l_buffer.append(data)
    def _build_str(self):
        """Fold all pending chunks from l_buffer into s_buffer."""
        #build a new string out of list
        new_string = "".join(self.l_buffer)
        #join string buffer and new string
        self.s_buffer = "".join((self.s_buffer, new_string))
        #clear list
        self.l_buffer = []
    def __len__(self):
        #calculate length without needing to _build_str
        return sum(len(i) for i in self.l_buffer) + len(self.s_buffer)
    def close(self):
        # Reset to an empty queue; the object stays usable afterwards.
        self.__init__()
    def read(self, count=None):
        """Return exactly *count* characters, or all data when None.

        Returns "" (never a partial result) when fewer than *count*
        characters are buffered.  The ``count > len(...)`` comparisons
        rely on Python 2 ordering where ``None > int`` is False, so
        ``count=None`` falls through to the read-everything slice.
        """
        #if string doesnt have enough chars to satisfy caller, or caller is
        #requesting all data
        if count > len(self.s_buffer) or count==None: self._build_str()
        #if i don't have enough bytes to satisfy caller, return nothing.
        if count > len(self.s_buffer): return ""
        #get data requested by caller
        result = self.s_buffer[:count]
        #remove requested data from string buffer
        self.s_buffer = self.s_buffer[len(result):]
        return result
===========
zif.gzipper
===========
This is a wsgi middleware application intended for use with paste.deploy,
zope.paste, and zope3.
It serves as a wsgi filter to gzip output from a zope3 application.
Dependencies
------------
for zope3, zif.gzipper requires Sidnei da Silva's zope.paste
zope.paste is available at http://svn.zope.org/zope.paste/trunk/
::
cd [path.to.zope3.src.directory]/zope
svn co http://svn.zope.org/zope.paste/trunk/ paste
zope.paste is also available at `the python cheese shop
<http://cheeseshop.python.org/pypi/zope.paste>`_.
Instructions for zope.paste are at
`http://awkly.org/2006/01/25/zopepaste-wsgi-applications-in-zope-3-using-pastedeploy/
<http://awkly.org/2006/01/25/zopepaste-wsgi-applications-in-zope-3-using-pastedeploy/>`_
zope.paste requires paste.deploy. paste.deploy may be obtained from `the cheese shop
<http://cheeseshop.python.org/pypi/PasteDeploy>`_. Presuming you have setuptools installed,
::
sudo easy_install.py PasteDeploy
This (zif.gzipper) package can be unzipped and installed anywhere on the Python path.
Setup
-----
Follow Sidnei's instructions for setting up zope.paste. It involves putting the
usual zope.paste-configure.zcml file in [zope3 instance]/etc/site-packages.
There is also a parameter to change in [zope3 instance]/etc/zope.conf.
The new twist is a paste.ini file in [zope3 instance]/etc
My paste.ini file looks like:
::
[pipeline:Paste.Main]
pipeline = gzipper jsmin main
[app:main]
paste.app_factory = zope.paste.application:zope_publisher_app_factory
[filter:gzipper]
paste.filter_factory=zif.gzipper.gzipper:filter_factory
compress_level=6
exclude=localimages
nocompress=jp gz zip png
tempfile=1048576
[filter:jsmin]
paste.filter_factory=zif.jsmin.jsmin:filter_factory
compress_level=safe
Configuration
-------------
gzipper should be the first filter in the pipeline. Other filters will
have a hard time reading compressed data output from this filter.
The paste.ini file above shows examples of the configuration options for gzipper.
- *compress_level* is the level of compression for the gzip function. 6 is the
default. 9 is max. 3 is often good enough. Higher numbers use more
processor, but compress smaller.
- *exclude* is a sequence of strings that appear in a **filename or path**
you wish to exclude from gzipping. If any of these strings appears in the
path or filename, gzipper will not gzip the file.
- *nocompress* is a sequence of strings that appear in **content-types** you wish to
exclude from gzipping. If the string appears
anywhere in the content-type, items with that content-type will
not be gzipped. "jp" will exclude "image/jpg" and "image/jpeg".
"application" will exclude any content-type with the word "application" in
it.
- *tempfile* is the file size above which gzipper will send the gzipped data to
a tempfile on disk. This may help memory usage. It may not. *tempfile=0*
means do not use temporary file. Default is 1 megabyte (1048576).
| zif.gzipper | /zif.gzipper-0.2.tar.gz/zif.gzipper-0.2/src/zif/gzipper/README.txt | README.txt |
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
# Last Modified: 9 May 2006 Jim Washington
"""
WSGI middleware
Gzip-encodes the response.
"""
import time, struct, zlib, tempfile
from queues import TemporaryFileQueue, StringQueue
class GZStreamIter(object):
    """Iterator yielding a gzip-compressed copy of a WSGI body iterable.

    Wraps an iterable (or a plain string/list/tuple) of body chunks;
    iteration yields gzip member data: header, deflate stream, then the
    CRC32/size trailer.  Compressed output is staged in a StringQueue
    and migrated to a TemporaryFileQueue once more than
    *tempFileTrigger* input bytes have been consumed.  Python 2 only
    (``basestring``, ``long``, ``0x...L`` literals, ``.next()``).
    """
    #the constant parameters here are guesses
    def __init__(self, data, compressLevel=6, write=65536, read=65536,\
            tempFileTrigger=1048576):
        self.writeBufferSize = write   # max chunk size yielded by next()
        self.readBufferSize = read     # low-water mark for refilling the queue
        self.tempFileTrigger = tempFileTrigger
        self.inputIsNotIterator = False
        #make sure data is an iterator
        if isinstance(data,tuple) or isinstance(data,list):
            data = iter(data)
            self.inputIsNotIterator = True
        elif isinstance(data,basestring):
            data = iter([data])
            self.inputIsNotIterator = True
        self.data = data
        self.queue = StringQueue()
        self.usingTempFile = False
        self.allReceived = False
        #set-up gzipping
        self.initFile()
        self.crc = zlib.crc32('')
        self.size = 0   # running count of bytes consumed from the input
        # negative wbits -> raw deflate stream (we write the gzip header
        # and trailer ourselves in initFile/endFile)
        self.compressor = zlib.compressobj(compressLevel,
            zlib.DEFLATED, -zlib.MAX_WBITS, zlib.DEF_MEM_LEVEL, 0)
        self.compress = self.compressor.compress
        self.crc32 = zlib.crc32
        #now, get data...
        self.getData()
    def __len__(self):
        #this is the length of the gzipped object
        # NOTE(review): self.size actually counts bytes consumed from the
        # input, i.e. the *uncompressed* size — confirm callers expect that.
        return self.size
    def getLength(self):
        # Accessor backing the ``length`` property below.
        return self.size
    def close(self):
        # Drop any buffered output by re-initializing the queue.
        self.queue.__init__()
    def __iter__(self):
        return self
    def initFile(self):
        """Write the 10-byte gzip member header into the queue."""
        # magic number, deflate method, no flags
        self.queue.write('\037\213\010\000')
        # modification time, little-endian 32-bit
        self.queue.write(struct.pack('<L', long(time.time())))
        # XFL (max compression) and OS byte (255 = unknown)
        self.queue.write('\002\377')
    def getData(self):
        """Refill the queue until readBufferSize is buffered or input ends."""
        while len(self.queue) < self.readBufferSize and not self.allReceived:
            self.getIter()
    def getIter(self):
        """Consume one input chunk: compress, update CRC, maybe spill to disk."""
        try:
            s = self.data.next()
            self.queue.write(self.compress(s))
            self.size += len(s)
            self.crc = self.crc32(s,self.crc)
            if self.tempFileTrigger:
                if self.size > self.tempFileTrigger and not self.usingTempFile:
                    # spill buffered output to a temp file to bound memory use
                    tmp = TemporaryFileQueue()
                    tmp.write(self.queue.read(None))
                    self.queue.close()
                    self.queue = tmp
                    self.usingTempFile = True
        # ValueError comes from spurious "I/O Operation on closed file"
        # not sure this is the fix that is true and pure...
        #except StopIteration:
        except (StopIteration, ValueError):
            self.endFile()
            self.allReceived = True
            if hasattr(self.data,"close"):
                self.data.close()
            self.data = None
    def endFile(self):
        """Flush the compressor and append the gzip CRC32/ISIZE trailer."""
        self.queue.write(self.compressor.flush())
        self.queue.write(struct.pack('<LL',
            self.crc & 0xFFFFFFFFL, self.size & 0xFFFFFFFFL))
    def next(self):
        if self.usingTempFile and self.inputIsNotIterator:
            # no point minimizing memory - get all the rest and release the
            # incoming object
            while not self.allReceived:
                self.getIter()
        queueLen = len(self.queue)
        if queueLen == 0 and self.allReceived:
            self.queue.close()
            raise StopIteration
        dataGetSize = min(queueLen,self.writeBufferSize)
        s = self.queue.read(dataGetSize)
        if s == '' and self.allReceived:
            # queue had less than requested; drain whatever remains
            s = self.queue.read(None)
        if not self.allReceived:
            self.getData()
        return s
    # uncompressed-bytes-consumed, exposed as a read-only property
    length = property(getLength)
class middleware(object):
    """WSGI filter that gzip-encodes responses for clients that accept it.

    Configuration values arrive as strings (paste.deploy convention)
    and are normalized to ints/lists here.
    """

    def __init__(self, application, compress_level=6, nocompress="",
                 tempfile="1048576", exclude=''):
        self.application = application
        self.compress_level = int(compress_level)
        # whitespace-separated content-type substrings that suppress gzip
        self.nocompress = nocompress.split()
        # byte count above which compressed output is spooled to disk
        self.tempFile = int(tempfile)
        # whitespace-separated path substrings that bypass the filter
        self.excludes = exclude.split()

    def __call__(self, environ, start_response):
        bypass = 'gzip' not in environ.get('HTTP_ACCEPT_ENCODING', '')
        path = environ.get('PATH_INFO')
        for fragment in self.excludes:
            if fragment in path:
                bypass = True
        if bypass:
            # hand the request straight through, untouched
            return self.application(environ, start_response)
        response = GzipResponse(start_response, self.compress_level,
                                self.nocompress, tempFileTrigger=self.tempFile)
        app_iter = self.application(environ, response.initial_decisions)
        if response.doProcessing:
            app_iter = response.finish_response(app_iter)
        return app_iter
class GzipResponse(object):
    """Collects response metadata and gzips the body when appropriate.

    initial_decisions() is handed to the application as its
    start_response callable; it inspects the headers, decides whether
    the body should be compressed (``self.doProcessing``) and rewrites
    the headers accordingly.  finish_response() then wraps the body
    iterable in a GZStreamIter.
    """

    def __init__(self, start_response, compress_level, nocompress=None,
                 tempFileTrigger=1048576):
        self.start_response = start_response
        self.doProcessing = False
        self.compress_level = compress_level
        # Fix: the original used a mutable default ([]) shared by every
        # instance; a None sentinel keeps the interface but is safe.
        self.nocompress = nocompress if nocompress is not None else []
        self.tempFileTrigger = tempFileTrigger

    def initial_decisions(self, status, headers, exc_info=None):
        """start_response wrapper: decide about compression, fix headers."""
        ct = None
        ce = None
        for name, value in headers:
            name = name.lower()
            if name == 'content-type':
                ct = value
            elif name == 'content-encoding':
                ce = value
        self.doProcessing = False
        if ct:
            self.doProcessing = True
            for k in self.nocompress:
                if k in ct:
                    # content-type matches an excluded substring
                    self.doProcessing = False
        if ce:
            # body is already encoded (gzip, deflate, ...): leave it alone
            self.doProcessing = False
        if self.doProcessing:
            # add gzip header
            headers.append(('content-encoding', 'gzip'))
            # zap any given content-length: it describes the uncompressed
            # body; the server must recompute it or transfer-encode.
            headers = [(name, value) for name, value in headers
                       if name.lower() != 'content-length']
        return self.start_response(status, headers, exc_info)

    def finish_response(self, app_iter):
        """Wrap *app_iter* in a gzipping iterator (falsy values pass through)."""
        if app_iter:
            try:
                output = GZStreamIter(app_iter, self.compress_level,
                                      tempFileTrigger=self.tempFileTrigger)
            finally:
                # GZStreamIter pre-reads in its constructor; close the
                # source regardless (it tolerates the resulting ValueError).
                try:
                    app_iter.close()
                except AttributeError:
                    pass
            if hasattr(app_iter, '__len__') and len(app_iter) == 1:
                # special case; a 1-tuple input yields a 1-tuple output
                s = ''.join([x for x in output])
                return (s,)
            return output
        else:
            return app_iter
def filter_factory(global_conf, compress_level="6", nocompress='',
                   tempfile='1048576', exclude=''):
    """paste.deploy entry point: build a gzip middleware wrapper."""
    def make_filter(application):
        return middleware(application, compress_level, nocompress,
                          tempfile, exclude)
    return make_filter
****************
zif.headincludes
****************
See 'src/zif/headincludes/README.txt' for more information.
Releases
********
================
0.4 (2010/03/12)
================
Added test and buildout configuration.
Properly does requirements-of-requirements now.
Many thanks to an anonymous contributor.
================
0.3 (2007/05/25)
================
Release to include zcml files that were missing from distribution.
================
0.2 (2007/04/13)
================
Correct releases README.
================
0.1 (2007/04/13)
================
Initial release.
| zif.headincludes | /zif.headincludes-0.4.tar.gz/zif.headincludes-0.4/README.txt | README.txt |
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
# Last Modified: 9 May 2006 Jim Washington
"""
useful queues for wsgi middleware
"""
import tempfile
class TemporaryFileQueue(object):
    """FIFO byte queue backed by an anonymous temporary file.

    Data is appended with write() and consumed with read(); separate
    read and write pointers let the two interleave on the single file
    object.  Useful for spooling large response bodies to disk instead
    of keeping them in memory.
    """

    def __init__(self):
        self.file = tempfile.TemporaryFile()
        self.readPointer = 0   # offset of the next byte read() will return
        self.writePointer = 0  # offset at which write() will append

    def read(self, bytes=None):
        """Return up to *bytes* unread bytes; all unread data when None.

        Fix: the original used a truth test on *bytes*, so read(0)
        behaved like read(None) and returned everything.  An explicit
        None check preserves the file-like contract that read(0)
        returns an empty result.
        """
        self.file.flush()
        self.file.seek(self.readPointer)
        if bytes is not None:
            s = self.file.read(bytes)
        else:
            s = self.file.read()
        self.readPointer = self.file.tell()
        return s

    def write(self, data):
        """Append *data* at the write pointer without disturbing reads."""
        self.file.seek(self.writePointer)
        self.file.write(data)
        self.writePointer = self.file.tell()

    def __len__(self):
        # Length of the *unread* portion of the queue, not the whole file.
        return self.writePointer - self.readPointer

    def close(self):
        """Release the backing temporary file; the queue is unusable after."""
        self.file.close()
        self.file = None
class StringQueue(object):
    # This is Python Licensed
    # http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/426060
    """FIFO queue of string data with amortized-cheap writes.

    Writes are appended to a list and only joined into the string
    buffer when a read actually needs them, avoiding quadratic ``+=``
    string building.  Python 2 only: relies on ``basestring`` and on
    ``None`` comparing less than any integer in :meth:`read`.
    """
    def __init__(self, data=""):
        self.l_buffer = []   # pending written chunks, not yet joined
        self.s_buffer = ""   # joined data awaiting read
        self.write(data)
    def write(self, data):
        """Append *data* (str/unicode) to the queue."""
        #check type here, as wrong data type will cause error on self.read,
        #which may be confusing.
        if not isinstance(data,basestring):
            raise TypeError, "argument 1 must be string, not %s" % \
                type(data).__name__
        #append data to list, no need to "".join just yet.
        self.l_buffer.append(data)
    def _build_str(self):
        """Fold all pending chunks from l_buffer into s_buffer."""
        #build a new string out of list
        new_string = "".join(self.l_buffer)
        #join string buffer and new string
        self.s_buffer = "".join((self.s_buffer, new_string))
        #clear list
        self.l_buffer = []
    def __len__(self):
        #calculate length without needing to _build_str
        return sum(len(i) for i in self.l_buffer) + len(self.s_buffer)
    def close(self):
        # Reset to an empty queue; the object stays usable afterwards.
        self.__init__()
    def read(self, count=None):
        """Return exactly *count* characters, or all data when None.

        Returns "" (never a partial result) when fewer than *count*
        characters are buffered.  The ``count > len(...)`` comparisons
        rely on Python 2 ordering where ``None > int`` is False, so
        ``count=None`` falls through to the read-everything slice.
        """
        #if string doesnt have enough chars to satisfy caller, or caller is
        #requesting all data
        if count > len(self.s_buffer) or count==None: self._build_str()
        #if i don't have enough bytes to satisfy caller, return nothing.
        if count > len(self.s_buffer): return ""
        #get data requested by caller
        result = self.s_buffer[:count]
        #remove requested data from string buffer
        self.s_buffer = self.s_buffer[len(result):]
        return result
from zif.headincludes.resourcelibrary import LibraryInfo, library_info
from zope.app import zapi
from zope.app.publisher.browser import directoryresource
from zope.app.publisher.browser.metadirectives import IBasicResourceInformation
from zope.app.publisher.browser.resourcemeta import allowed_names
from zope.configuration.exceptions import ConfigurationError
from zope.interface import Interface
from zope.publisher.interfaces.browser import IBrowserRequest
from zope.publisher.interfaces.browser import IDefaultBrowserLayer
from zope.security.checker import CheckerPublic, NamesChecker
import os.path
import zope.configuration.fields
#Unchanged from zc.resourcelibrary
class IResourceLibraryDirective(IBasicResourceInformation):
    """
    Defines a resource library
    """
    # ZCML schema for the <zope:resourceLibrary> directive, handled by
    # the ResourceLibrary class below.
    # NOTE(review): zope.schema is referenced here but never imported
    # directly in this module (only zope.configuration.fields is) —
    # confirm the attribute resolves in the target zope version.
    name = zope.schema.TextLine(
        title=u"The name of the resource library",
        description=u"""\
        This is the name used to disambiguate resource libraries. No two
        libraries can be active with the same name.""",
        required=True,
        )

    require = zope.configuration.fields.Tokens(
        title=u"Require",
        description=u"The resource libraries on which this library depends.",
        required=False,
        value_type=zope.schema.Text(),
        )
class IDirectoryDirective(Interface):
    """
    Identifies a directory to be included in a resource library
    """
    # ZCML schema for the nested <directory> sub-directive of
    # <zope:resourceLibrary>; handled by ResourceLibrary.directory below.
    source = zope.configuration.fields.Path(
        title=u"Source",
        description=u"The directory containing the files to add.",
        required=True,
        )

    include = zope.configuration.fields.Tokens(
        title=u"Include",
        description=u"The files which should be included in HTML pages which "
                    u"reference this resource library.",
        required=False,
        value_type=zope.schema.Text(),
        )
def handler(name, dependencies, required, provided, adapter_name, factory,
            info=''):
    """Configuration-action callback: check dependencies, register adapter.

    Raises ConfigurationError when *name* requires a library that was
    never registered in the global ``library_info`` registry.
    """
    for dep in (dependencies or ()):
        if dep in library_info:
            continue
        raise ConfigurationError(
            'Resource library "%s" has unsatisfied dependency on "%s".'
            % (name, dep))
    registry = zapi.getGlobalSiteManager()
    registry.registerAdapter(factory, required, provided, adapter_name, info)
# File extensions a resource library knows how to emit into the HTML <head>.
# NOTE(review): the name is misspelled ("EXTENTIONS"), but renaming would
# break any external importers, so it is left as-is.
INCLUDABLE_EXTENTIONS = ('.js', '.css')
class ResourceLibrary(object):
    """Handler for the <zope:resourceLibrary> ZCML directive.

    Records the library in the global ``library_info`` registry and, via
    the nested ``directory`` sub-directive, remembers which files the
    library injects and schedules a directory-resource registration.
    """
    def __init__(self, _context, name, require=(),
                 layer=IDefaultBrowserLayer, permission='zope.Public'):
        self.name = name
        self.layer = layer
        if permission == 'zope.Public':
            permission = CheckerPublic
        self.checker = NamesChecker(allowed_names, permission)
        # make note of the library in a global registry
        library_info[name] = LibraryInfo()
        library_info[name].required.extend(require)

    def directory(self, _context, source, include=()):
        """Handle the nested <directory> directive: validate and register."""
        if not os.path.isdir(source):
            raise ConfigurationError("Directory %r does not exist" % source)
        # refuse files the head-include machinery cannot render
        for file_name in include:
            ext = os.path.splitext(file_name)[1]
            if ext not in INCLUDABLE_EXTENTIONS:
                raise ConfigurationError(
                    'Resource library doesn\'t know how to include this '
                    'file: "%s".' % file_name)
        # remember which files should be included in the HTML when this library
        # is referenced
        library_info[self.name].included.extend(include)
        factory = directoryresource.DirectoryResourceFactory(
            source, self.checker, self.name)
        # defer the adapter registration to the configuration machinery;
        # ``handler`` re-checks dependencies at action-execution time
        _context.action(
            discriminator = ('resource', self.name, IBrowserRequest, self.layer),
            callable = handler,
            args = (self.name, library_info[self.name].required, (self.layer,),
                    Interface, self.name, factory, _context.info),
            )
================
zif.headincludes
================
This is a wsgi middleware application intended for use with
paste.deploy, zope.paste, and zope3.
It serves as a wsgi filter to create on-the-fly <script> and
<style> tags inside the <head> of HTML documents. It was designed
for output from a Zope3 application, but the wsgi filter itself, in
headincluder.py, has no Zope3 dependencies.
The idea is that subobjects of a document may separately need special
resources, but it is difficult to know whether a resource is asked for
multiple times when documents are dynamically generated. For Zope3,
headincludes replaces the functionality of zc.resourcelibrary, which
also implements this idea. In fact, mostly because headincludes
"borrows" code from zc.resourcelibrary, they may be installed
side-by-side, but only one can be used at a time, because they both
implement <zope:resourceLibrary> tags for zcml and the
<tal:resource_library> statement for PageTemplates.
headincludes works by creating a key in the wsgi environment,
'wsgi.html.head.includes' that is a list of urls that need to be
referenced in the head of the HTML document for the current request.
The application registers a need for the reference by appending the url
for the reference to the list. Urls that end in ".css" and ".js" in
that list trigger the middleware to insert <style> or <script>
tags into the <head> of the document after the application is done
creating the HTML.
headincludes tries to maintain as much compatibility as possible with
zc.resourcelibrary. The need() function has been rewritten, and using
the headincludes version will be a simple matter of changing the import
statement. The <tal:resource_library> statement is also still
functional.
One new thing headincludes allows is arbitrary includes without needing
to register the resource. urls can be placed in the includes list at
any time that request.environ can be accessed. Just append any desired
url to the list, e.g.,
::
try:
request.environ['wsgi.html.head.includes'].append('/scripts/my_url.js')
except KeyError:
(handle case when the filter is not available)
Alternatively, headincludes has a utility that provides IHeadIncludeRegistration:
::
from zope.component import getUtility
from zif.headincludes.interfaces import IHeadIncludeRegistration
registrar = getUtility(IHeadIncludeRegistration)
if registrar:
registrar.register('scripts/my_url.js')
Dependencies
------------
For zope3, headincludes requires Sidnei da Silva's zope.paste
zope.paste is available at http://svn.zope.org/zope.paste/trunk/
::
cd [path.to.zope3.src.directory]/zope
svn co http://svn.zope.org/zope.paste/trunk/ paste
Instructions for zope.paste are at http://awkly.org/
zope.paste requires paste.deploy. paste.deploy can be obtained from the cheese
shop. Presuming you have setuptools installed,
::
sudo easy_install.py PasteDeploy
This (headincludes) package can be unzipped and installed anywhere on the Python
path.
Setup
-----
Follow Sidnei's instructions for setting up zope.paste. It involves
putting the usual zope.paste-configure.zcml file in [zope3 instance]/etc/site-packages.
There is also a parameter to change in [zope3 instance]/etc/zope.conf.
The new twist is a paste.ini file in [zope3 instance]/etc
For Zope3, copy the headincludes-configure.zcml and
headincludes-meta.zcml files into [zope3 instance]/etc/package-includes
directory.
An example paste.ini file looks like:
::
[pipeline:Paste.Main]
pipeline = gzipper headincludes main
[app:main]
paste.app_factory = zope.paste.application:zope_publisher_app_factory
[filter:gzipper]
paste.filter_factory=gzipper.gzipper:filter_factory
compress_level=6
nocompress=jp gz zip
tempfile=0
[filter:headincludes]
paste.filter_factory=zif.headincludes.headincluder:filter_factory
location=top
Configuration
-------------
The paste.ini file above shows an example of the configuration option for
headincludes
- **location** is where in the <head> you want the new tags. "top" is the
default, and places the new script and/or style tags just after the <head>
element. Any other value will place the tags just before the </head> tag.
| zif.headincludes | /zif.headincludes-0.4.tar.gz/zif.headincludes-0.4/src/zif/headincludes/README.txt | README.txt |
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
# Last Modified: 16 Dec 2006 Jim Washington
# made stylesheet include a link tag instead of a style tag jmw 20061216
""" wsgi middleware for inserting script and style tags into the <head>
of an html file. It looks at environ['wsgi.html.head.includes'], which
is a list of the urls to be referenced. If the url ends in '.js', a script
tag is inserted. If it ends in ".css", a style tag is inserted.
this filter takes care of creating the 'wsgi.html.head.includes' key; the
application just needs to insert relative or absolute urls for the files that
need to be referenced. This filter will remove duplicates if the app does
not want to check before adding urls to the list.
urls can be placed in the list at any time that request.environ can be
accessed. Just append any desired url to the list, e.g.,
try:
request.environ['wsgi.html.head.includes'].append('/scripts/my_url.js')
except KeyError:
(handle case when the filter is not available)
parameters:
location - where in the head element to place the includes. 'top' is the
default. Anything else will place it at the bottom.
"""
from queues import StringQueue
class HeadIncludeIter(object):
    """Iterator that splices <script>/<link> tags into an HTML stream.

    Watches outgoing chunks for *tag* ('<head>' or '</head>'; the
    upper-case variant is also honoured) and replaces occurrences of it
    with the tag plus markup built from
    environ['wsgi.html.head.includes'].  Python 2 only (``basestring``,
    ``unicode``, ``.next()``).
    """
    def __init__(self,result,environ,tag,write=65536,read=65536):
        self.readBufferSize = read     # low-water mark for refilling the queue
        self.writeBufferSize = write   # max chunk size yielded by next()
        self.environ = environ
        self.tag = tag
        self.queue = StringQueue()
        # NOTE(review): madeUpdate is never read or written again — looks
        # like dead state; confirm before removing.
        self.madeUpdate = False
        if isinstance(result,basestring):
            result = (result,)
        self.data = iter(result)
        self.allReceived = False
        self.getData()
    def __iter__(self):
        return self
    def getData(self):
        """Refill the queue until readBufferSize is buffered or input ends."""
        while len(self.queue) < self.readBufferSize and not self.allReceived:
            self.getIter()
    def getIter(self):
        """Consume one chunk, rewriting it if it contains the anchor tag."""
        try:
            s = self.data.next()
            if (self.tag in s) or (self.tag.upper() in s):
                s = self.makeInsertion(s)
            self.queue.write(s)
        except StopIteration:
            self.allReceived = True
            if hasattr(self.data,"close"):
                self.data.close()
            self.data = None
    def makeInsertion(self,data):
        """Return *data* with include markup substituted at the anchor tag.

        Emits a <script> element per '.js' URL and a stylesheet <link>
        per '.css' URL; URLs with other extensions are silently skipped.
        For a '<head>' anchor (no '/') the markup follows the tag; for
        '</head>' it precedes it.
        """
        includes = self.environ.get('wsgi.html.head.includes','')
        if self.tag.upper() in data:
            #OK, we will go with upper-case.
            self.tag = self.tag.upper()
        if includes:
            s = ['<!--start headincludes-->']
            for incfile in includes:
                if isinstance(incfile,unicode):
                    # don't want the file to end up unicode. Bleah!
                    incfile = incfile.encode('ascii')
                if incfile.endswith('.js'):
                    s.append(
                    '<script type="text/javascript" src="%s"></script>' % incfile)
                elif incfile.endswith('.css'):
                    s.append(
#                    '<style type="text/css" src=>@import url(%s)</style>' % incfile)
                    '<link rel="stylesheet" type="text/css" href="%s" />' % incfile)
            s.append('<!--end headincludes-->')
            if not "/" in self.tag:
                s.insert(0,self.tag)
            else:
                s.append(self.tag)
            # str.replace rewrites *every* occurrence of the tag in this chunk
            updated = data.replace(self.tag,"\n".join(s))
        else:
            updated = data
        return updated
    def next(self):
        queueLen = len(self.queue)
        if queueLen == 0 and self.allReceived:
            self.queue.close()
            raise StopIteration
        dataGetSize = min(queueLen,self.writeBufferSize)
        s = self.queue.read(dataGetSize)
        if s == '' and self.allReceived:
            # queue had less than requested; drain whatever remains
            s = self.queue.read(None)
        if not self.allReceived:
            self.getData()
        return s
class middleware(object):
    """WSGI filter that injects <script>/<link> tags into the HTML <head>.

    Seeds environ['wsgi.html.head.includes'] with an empty list; the
    application appends URLs to it, and any collected URLs are spliced
    into the response by HeadChangeResponse/HeadIncludeIter.
    """

    def __init__(self, application, location="top"):
        self.application = application
        self.location = location
        # insertion anchor: just after <head> for "top", else before </head>
        self.tag = '<head>' if location == 'top' else '</head>'

    def __call__(self, environ, start_response):
        environ['wsgi.html.head.includes'] = []
        response = HeadChangeResponse(start_response, self.location)
        app_iter = self.application(environ, response.initial_decisions)
        includes = environ['wsgi.html.head.includes']
        if response.doProcessing and includes:
            app_iter = response.finish_response(app_iter, environ, self.tag)
        return app_iter
class HeadChangeResponse(object):
    """Decides whether a response body should get head-include rewriting.

    initial_decisions() stands in for start_response: it flags HTML and
    XHTML responses for processing and strips any content-length header
    (the rewritten body will have a different length).
    finish_response() then wraps the body in a HeadIncludeIter that
    performs the insertion.
    """

    def __init__(self, start_response, location):
        self.start_response = start_response
        self.location = location
        self.doProcessing = False

    def initial_decisions(self, status, headers, exc_info=None):
        """start_response wrapper: flag (x)html responses for rewriting."""
        for name, value in headers:
            if name.lower() == 'content-type' and \
                    (value.startswith('text/html') or
                     value.startswith('application/xhtml+xml')):
                self.doProcessing = True
                break
        if self.doProcessing:
            # any declared length describes the pre-insertion body
            headers = [(name, value) for name, value in headers
                       if name.lower() != 'content-length']
        return self.start_response(status, headers, exc_info)

    def finish_response(self, app_iter, environ, tag):
        """Wrap *app_iter* so include tags are inserted at *tag*."""
        if app_iter:
            try:
                output = HeadIncludeIter(app_iter, environ, tag)
            finally:
                try:
                    app_iter.close()
                except AttributeError:
                    pass
            # Fix: guard len() with hasattr, matching the gzipper twin of
            # this class; generators have no __len__ and the bare len()
            # call would raise TypeError.
            if hasattr(app_iter, '__len__') and len(app_iter) == 1:
                # input was a 1-tuple, so we will return one
                s = ''.join([x for x in output])
                return (s,)
            return output
        else:
            return app_iter
def filter_factory(global_conf, location="top"):
    """paste.deploy entry point for the headincludes middleware."""
    def make_filter(application):
        return middleware(application, location)
    return make_filter
# Last Modified: 10 May 2006 Jim Washington
"""
WSGI middleware
does js and css minimization.
"""
import css
import javascript
class middleware(object):
    """WSGI filter that minimizes javascript and css response bodies."""

    def __init__(self, application, compress_level='full',
                 compress_types="css js", exclude=''):
        # compress_level may be 'safe' or 'full'
        self.application = application
        self.compress_level = compress_level
        # which body kinds to minimize, e.g. ['css', 'js']
        self.compress_types = compress_types.split()
        # path substrings that disable minimization
        self.excludes = exclude.split()

    def __call__(self, environ, start_response):
        response = MinResponse(start_response, self.compress_level,
                               self.compress_types)
        app_iter = self.application(environ, response.initial_decisions)
        path = environ.get('PATH_INFO')
        for fragment in self.excludes:
            if fragment in path:
                response.doProcessing = False
        if response.doProcessing:
            app_iter = response.finish_response(app_iter)
        return app_iter
class MinResponse(object):
    """Decides whether and how a response body should be minimized.

    initial_decisions() is passed to the application as start_response;
    it inspects the headers and records whether to minimize and which
    compressor ('js' or 'css') applies.  finish_response() then runs
    the whole body through the matching compressor.
    """

    def __init__(self, start_response, compress_level, compress_types):
        self.start_response = start_response
        self.compress_level = compress_level
        self.compress_types = compress_types
        self.doProcessing = False
        self.compress_type = None

    def initial_decisions(self, status, headers, exc_info=None):
        """start_response wrapper: pick a compressor from the headers."""
        ct = None
        ce = None
        for name, value in headers:
            name = name.lower()
            if name == 'content-type':
                ct = value
            elif name == 'content-encoding':
                ce = value
        self.doProcessing = False
        if ct and (('javascript' in ct) or ('ecmascript' in ct) or ('css' in ct)):
            self.doProcessing = True
            self.compress_type = 'css' if 'css' in ct else 'js'
        if ce:
            # don't mess with anything already compressed/encoded
            self.doProcessing = False
        # honour the configured whitelist of body kinds
        if self.compress_type not in self.compress_types:
            self.doProcessing = False
        if self.doProcessing:
            # any declared length describes the unminimized body
            headers = [(name, value) for name, value in headers
                       if name.lower() != 'content-length']
        return self.start_response(status, headers, exc_info)

    def finish_response(self, app_iter):
        """Minimize the whole body; returns a 1-tuple with the result."""
        # the compressor expects one big string, so join the iterable first
        theString = ''.join([x for x in app_iter])
        if self.doProcessing:
            if self.compress_type == 'js':
                compress = javascript.compress
            else:
                compress = css.compress
            output = compress(theString, self.compress_level)
            if hasattr(app_iter, 'close'):
                app_iter.close()
            return (output,)
def filter_factory(global_conf, compress_level='safe', compress_types="js css",
                   exclude=''):
    """paste.deploy entry point for the js/css minimizing middleware."""
    def make_filter(application):
        return middleware(application, compress_level, compress_types, exclude)
    return make_filter
import re
class KeywordMapper:
    """Maps frequently-occurring keywords to short generated codes.

    Used by javascript packing: :meth:`analyse` builds a substitution
    mapping from keyword to encoded token, :meth:`sub` applies it, and
    :meth:`getKeywords` lists the vocabulary for the decoder.  Python 2
    only (``unicode``, ``iteritems``, ``list.sort`` of a zipped list).
    """
    def __init__(self, regexp, encoder):
        # accept either a pattern string or a precompiled regex object
        if isinstance(regexp, (str, unicode)):
            self.regexp = re.compile(regexp)
        else:
            self.regexp = regexp
        self.encoder = encoder   # callable: int index -> encoded token
        self.mapping = {}
    def analyseKeywords(self, input):
        """Return {keyword: (index, encoded)} for all regex matches.

        Encoded tokens that already occur verbatim in *input* are
        "protected" so decoding stays unambiguous; keywords whose total
        occurrence weight is no larger than their own length are left
        unencoded (index -1), since encoding them would not save bytes.
        """
        matches = self.regexp.findall(input)
        protected = {}
        keyword_count = {}
        index = 0
        # first pass: count occurrences and reserve one candidate code
        # per distinct keyword
        for match in matches:
            if match not in keyword_count:
                keyword_count[match] = 0
                protected[self.encoder(index)] = index
                index = index + 1
            keyword_count[match] = keyword_count[match] + 1
        # any keyword that collides with a candidate code must stay as-is
        for match in matches:
            if match in protected and keyword_count[match]:
                keyword_count[match] = 0
        protected = {}
        for match in keyword_count:
            if not keyword_count[match]:
                protected[match] = None
        ## sorted_matches = [(c,len(v),v) for v,c in keyword_count.iteritems()]
        # the above line implements the original behaviour, the code below
        # removes keywords which have not enough weight to be encoded, in total
        # this saves some bytes, because the total length of the generated
        # codes is a bit smaller. This needs corresponding code in the
        # fast_decode javascript function of the decoder, see comment there
        sorted_matches = []
        for value, count in keyword_count.iteritems():
            weight = count * len(value)
            if len(value) >= weight:
                # not worth encoding: zero it so the loop below skips it
                keyword_count[value] = 0
                sorted_matches.append((0, value))
            else:
                sorted_matches.append((weight, value))
        sorted_matches.sort()
        sorted_matches.reverse()
        # heaviest keywords first, so they get the shortest codes
        sorted_matches = [x[-1] for x in sorted_matches]
        index = 0
        mapping = {}
        for match in sorted_matches:
            if not keyword_count[match]:
                if match not in protected:
                    # unencoded keyword: keep it verbatim, flagged with -1
                    mapping[match] = (-1, match)
                continue
            while 1:
                encoded = self.encoder(index)
                index = index + 1
                if encoded in protected:
                    # code already appears in the source: map it to itself
                    mapping[encoded] = (index-1, encoded)
                    continue
                else:
                    break
            mapping[match] = (index-1, encoded)
        return mapping
    def analyse(self, input):
        """Build and store the keyword mapping for *input*."""
        self.mapping = self.analyseKeywords(input)
    def getKeywords(self):
        """Return the encoded vocabulary ordered by code index.

        Self-mapped (protected) codes contribute an empty slot.
        """
        sorted = zip(self.mapping.itervalues(), self.mapping.iterkeys())
        sorted.sort()
        keywords = []
        for (index, encoded), value in sorted:
            if index >= 0:
                if encoded != value:
                    keywords.append(value)
                else:
                    keywords.append('')
        return keywords
    def sub(self, input):
        """Return *input* with every matched keyword replaced by its code."""
        def repl(m):
            return self.mapping.get(m.group(0), ('', m.group(0)))[1]
        return self.regexp.sub(repl, input)
class JavascriptKeywordMapper(KeywordMapper):
    """KeywordMapper specialised for javascript.

    Defaults to matching word tokens and encoding indexes in base 62, and
    can also generate the javascript decoder source needed to unpack an
    encoded script in the browser.
    """
    def __init__(self, regexp=None, encoder=None):
        # accept a pattern string, a compiled pattern, or the defaults
        if regexp is None:
            self.regexp = re.compile(r'\w+')
        elif isinstance(regexp, (str, unicode)):
            self.regexp = re.compile(regexp)
        else:
            self.regexp = regexp
        if encoder is None:
            self.encoder = self._encode
        else:
            self.encoder = encoder
        self.mapping = {}

    def _encode(self, charCode,
                mapping="0123456789abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ"):
        """Encode an integer as base 62 (digits, lowercase, then uppercase)."""
        result = []
        quotient = charCode
        # always emit at least one character, even for charCode == 0
        while quotient or not len(result):
            quotient, remainder = divmod(quotient, 62)
            result.append(mapping[remainder])
        result.reverse()
        return "".join(result)

    def getDecodeFunction(self, fast=True, name=None):
        """Return javascript source for the decode function.

        With ``name=None`` an anonymous boot function is returned (used by
        getDecoder); otherwise a named ``var <name> = function...`` that is
        immediately bound to the keyword table.  When ``fast`` is true, a
        String.replace-based fast path is spliced in at the
        fastDecodePlaceholder marker.  The result is itself packed with the
        'full' javascript packer.
        """
        jspacker = JavascriptPacker('full')
        # fast boot function
        fast_decoder = r"""
            // does the browser support String.replace where the
            // replacement value is a function?
            if (!''.replace(/^/, String)) {
                // decode all the values we need
                // we have to add the dollar prefix, because $encoded can be
                // any keyword in the decode function below. For example
                // 'constructor' is an attribute of any object and it would
                // return a false positive match in that case.
                while ($count--) $decode["$"+$encode($count)] = $keywords[$count] || $encode($count);
                // global replacement function
                $keywords = [function($encoded){$result = $decode["$"+$encoded]; return $result!=undefined?$result:$encoded}];
                // generic match
                $encode = function(){return'\\w+'};
                // reset the loop counter - we are now doing a global replace
                $count = 1;
            };"""
        if name is None:
            # boot function
            decoder = r"""
                function($packed, $ascii, $count, $keywords, $encode, $decode) {
                    $encode = function($charCode) {
                        return ($charCode < $ascii ? "" : $encode(parseInt($charCode / $ascii))) +
                            (($charCode = $charCode % $ascii) > 35 ? String.fromCharCode($charCode + 29) : $charCode.toString(36));
                    };
                    // fastDecodePlaceholder
                    while ($count--)
                        if ($keywords[$count])
                            $packed = $packed.replace(new RegExp("\\b" + $encode($count) + "\\b", "g"), $keywords[$count]);
                    return $packed;
                }"""
            if fast:
                decoder = decoder.replace('// fastDecodePlaceholder', fast_decoder)
            decoder = jspacker.pack(decoder)
        else:
            decoder = r"""
                var %s = function($ascii, $count, $keywords, $encode, $decode) {
                    $encode = function($charCode) {
                        return ($charCode < $ascii ? "" : $encode(parseInt($charCode / $ascii))) +
                            (($charCode = $charCode %% $ascii) > 35 ? String.fromCharCode($charCode + 29) : $charCode.toString(36));
                    };
                    // fastDecodePlaceholder
                    var decoder = function($packed, $ascii1, $count1, $keywords1, $encode1, $decode1) {
                        $count1 = $count;
                        while ($count1--)
                            if ($keywords[$count1])
                                $packed = $packed.replace(new RegExp("\\b" + $encode($count1) + "\\b", "g"), $keywords[$count1]);
                        return $packed;
                    };
                    return decoder;
                }""" % name
            if fast:
                decoder = decoder.replace('// fastDecodePlaceholder', fast_decoder)
            decoder = jspacker.pack(decoder)
            keywords = self.getKeywords()
            # bind the named decoder factory to the actual keyword table
            decoder = "%s(62, %i, '%s'.split('|'), 0, {});" % (decoder, len(keywords), "|".join(keywords))
        return decoder

    def getDecoder(self, input, keyword_var=None, decode_func=None):
        """Wrap packed javascript in an eval() of the boot decode function."""
        if keyword_var is None:
            keywords = self.getKeywords()
            num_keywords = len(keywords)
            keywords = "|".join(keywords)
            keywords = "'%s'.split('|')" % keywords
        else:
            keywords = keyword_var
            num_keywords = len(self.getKeywords())
        if decode_func is None:
            decode_func = self.getDecodeFunction()
        # pick whichever quoting style yields the shorter escaped literal
        escaped_single = input.replace("\\","\\\\").replace("'","\\'").replace('\n','\\n')
        escaped_double = input.replace("\\","\\\\").replace('"','\\"').replace('\n','\\n')
        if len(escaped_single) < len(escaped_double):
            script = "'%s'" % escaped_single
        else:
            script = '"%s"' % escaped_double
        return "eval(%s(%s,62,%i,%s,0,{}))" % (decode_func, script,
                                               num_keywords,
                                               keywords)
class Packer:
    """Generic regex-pipeline text packer.

    Registered patterns are applied in order during :meth:`pack`.  A
    pattern is one of three kinds: a plain substitution, a keyword
    encoding, or a protection (group 1 of the match is swapped for a
    ``\\x00<n>`` placeholder and restored after all other passes).
    """

    def __init__(self):
        self.patterns = []

    def copy(self):
        """Return a new Packer with a shallow copy of the pattern list."""
        duplicate = Packer()
        duplicate.patterns = list(self.patterns)
        return duplicate

    def _repl(self, match):
        """Stash the protected group; return its 1-based placeholder."""
        self.replacelist.append(match.group(1))
        return "\x00%i" % len(self.replacelist)

    def pack(self, input):
        """Run every registered pattern over *input*; return the result."""
        # list of protected parts, filled by _repl during protection passes
        self.replacelist = []
        # escape any literal NUL so placeholders remain unambiguous
        text = input.replace('\x00', '\x00\x00')
        for regexp, replacement, keyword_encoder in self.patterns:
            if replacement is not None:
                # plain substitution
                text = regexp.sub(replacement, text)
            elif keyword_encoder is None:
                # protection pass: hide the matched parts
                text = regexp.sub(self._repl, text)
            else:
                # keyword-encoding pass
                mapper = KeywordMapper(regexp=regexp, encoder=keyword_encoder)
                mapper.analyse(text)
                text = mapper.sub(text)
        # restore protected parts from back to front, so placeholder 1
        # does not clobber placeholder 10 etc.
        for position in range(len(self.replacelist) - 1, -1, -1):
            stored = self.replacelist[position]
            marker = re.compile('(?<!\x00)\x00%i' % (position + 1))
            # use a callable so the stored text is inserted literally,
            # with no backslash-escape processing
            text = marker.sub(lambda m, s=stored: s, text)
        # unescape NULs and return
        return text.replace('\x00\x00', '\x00')

    def protect(self, pattern, flags=None):
        """Register a protection pattern (group 1 is hidden during pack)."""
        self.keywordSub(pattern, None, flags)

    def sub(self, pattern, replacement, flags=None):
        """Register a plain regex substitution."""
        if flags is None:
            compiled = re.compile(pattern)
        else:
            compiled = re.compile(pattern, flags)
        self.patterns.append((compiled, replacement, None))

    def keywordSub(self, pattern, keyword_encoder, flags=None):
        """Register a keyword-encoding pattern (a protection if the
        encoder is None)."""
        if flags is None:
            compiled = re.compile(pattern)
        else:
            compiled = re.compile(pattern, flags)
        self.patterns.append((compiled, None, keyword_encoder))
class JavascriptPacker(Packer):
    """Packer preconfigured with the substitution pipeline for javascript.

    Level 'safe' strips comments and conservative whitespace only; level
    'full' additionally shortens $-prefixed local names, encodes
    _-prefixed identifiers and removes nearly all remaining whitespace.
    """
    def __init__(self, level='safe'):
        Packer.__init__(self)
        if level == 'full':
            # encode local variables. those are preceded by dollar signs
            # the amount of dollar signs says how many characters are preserved
            # any trailing digits are preserved as well
            # $name -> n, $$name -> na, $top1 -> t1, $top2 -> t2
            def _dollar_replacement(match):
                length = len(match.group(2))
                start = length - max(length - len(match.group(3)), 0)
                result = match.group(1)[start:start+length] + match.group(4)
                return result
            self.sub(r"""((\$+)([a-zA-Z\$_]+))(\d*)\b""", _dollar_replacement)
            # encode _xxx identifiers with short generated names _0, _1, ...
            self.keywordSub(r"""\b_[A-Za-z\d]\w*""", lambda i: "_%i" % i)
            # protect strings
            # this is more correct, but needs more testing
            # it has to be more accurate because of the more aggressive packing later
            self.protect(r"""(?<=return|..case|.....[=\[|(,?:+])\s*((?P<quote>['"])(?:\\(?P=quote)|\\\n|.)*?(?P=quote))""", re.DOTALL)
        else:
            # protect strings
            # these sometimes catch too much, but in safe mode this doesn't hurt
            self.protect(r"""('(?:\\'|\\\n|.)*?')""")
            self.protect(r'''("(?:\\"|\\\n|.)*?")''')
        # protect regular expressions
        self.protect(r"""\s+(\/[^\/\n\r\*][^\/\n\r]*\/g?i?)""")
        self.protect(r"""([^\w\$\/'"*)\?:]\/[^\/\n\r\*][^\/\n\r]*\/g?i?)""")
        # multiline comments
        self.sub(r'/\*.*?\*/', '', re.DOTALL)
        # one line comments
        self.sub(r'\s*//.*$', '', re.MULTILINE)
        # strip whitespace at the beginning and end of each line
        self.sub(r'^[ \t\r\f\v]*(.*?)[ \t\r\f\v]*$', r'\1', re.MULTILINE)
        # whitespace after some special chars but not
        # before function declaration
        self.sub(r'([{;\[(,=&|\?:<>%!/])\s+(?!function)', r'\1')
        # after an equal sign a function definition is ok
        self.sub(r'=\s+(?=function)', r'=')
        if level == 'full':
            # whitespace after some more special chars
            self.sub(r'([};\):,])\s+', r'\1')
            # whitespace before some special chars
            self.sub(r'\s+([={},&|\?:\.()<>%!/\]])', r'\1')
            # whitespace before plus chars if no other plus char before it
            self.sub(r'(?<!\+)\s+\+', '+')
            # whitespace after plus chars if no other plus char after it
            self.sub(r'\+\s+(?!\+)', '+')
            # whitespace before minus chars if no other minus char before it
            self.sub(r'(?<!-)\s+-', '-')
            # whitespace after minus chars if no other minus char after it
            self.sub(r'-\s+(?!-)', '-')
            # remove redundant semi-colons
            self.sub(r';+\s*([};])', r'\1')
        # remove any excessive whitespace left except newlines
        self.sub(r'[ \t\r\f\v]+', ' ')
        # excessive newlines
        self.sub(r'\n+', '\n')
        # first newline
        self.sub(r'^\n', '')
class CSSPacker(Packer):
    """Packer preconfigured with the substitution pipeline for CSS.

    Level 'safe' empties comments and strips line-edge whitespace; level
    'full' also removes whitespace after block/rule separators.
    """
    def __init__(self, level='safe'):
        Packer.__init__(self)
        # protect strings
        # these sometimes catch too much, but in safe mode this doesn't hurt
        self.protect(r"""('(?:\\'|\\\n|.)*?')""")
        self.protect(r'''("(?:\\"|\\\n|.)*?")''')
        # strip whitespace
        self.sub(r'^[ \t\r\f\v]*(.*?)[ \t\r\f\v]*$', r'\1', re.MULTILINE)
        # remove comment contents (keeps the trailing hint chars + '*/')
        self.sub(r'/\*.*?( ?[\\/*]*\*/)', r'/*\1', re.DOTALL)
        # remove lines with comments only (consisting of stars only)
        self.sub(r'^/\*+\*/$', '', re.MULTILINE)
        # excessive newlines
        self.sub(r'\n+', '\n')
        # first newline
        self.sub(r'^\n', '')
        if level == 'full':
            #remove more whitespace
            self.sub(r'([{,;])\s+', r'\1')
## jspacker = JavascriptPacker('safe')
## jspacker_full = JavascriptPacker('full')
## def run():
## script = open('cssQuery.js').read()
## script = jspacker_full.pack(script)
## open('output.js','w').write(script)
## mapper = JavascriptKeywordMapper()
## mapper.analyse(script)
## keywords = mapper.getKeywords()
## script = mapper.sub(script)
## f = open('output1.js','w')
## #f.write("keywords='%s'.split('|');\n" % "|".join(keywords))
## #f.write(mapper.getDecodeFunction(name='__dEcOdE'))
## f.write(mapper.getDecoder(script))
## for index, keyword in enumerate(keywords):
## encoded = mapper._encode(index)
## if keyword == '':
## replacement = encoded
## else:
## replacement = keyword
## regexp = re.compile(r'\b%s\b' % encoded)
## script = regexp.sub(lambda m: replacement, script)
## open('output2.js','w').write(script)
## if __name__=='__main__':
## run() | zif.jsmin | /zif.jsmin-0.2.tar.gz/zif.jsmin-0.2/src/zif/jsmin/thirdparty/packer.py | packer.py |
****************
zif.jsonserver
****************
See 'src/zif/jsonserver/README.txt' for more information.
Releases
********
================
0.6 (2007/05/25)
================
Updated release notes.
================
0.5 (2007/05/25)
================
Corrections to setup.
================
0.4 (2007/05/25)
================
Incorporated jsolait into release.
================
0.3 (2007/05/24)
================
Repackaging to ensure zcml files are included for zope 3.
================
0.2 (2007/04/18)
================
Corrected setup configuration file to include zif namespace.
================
0.1 (2007/04/13)
================
Initial release.
| zif.jsonserver | /zif.jsonserver-0.6.tar.gz/zif.jsonserver-0.6/README.txt | README.txt |
#2005-06-26 removed JSONRPCNotifyResponse and got JSONRPCResponse to handle
# 'notify' events
#2005-06-26 let the publisher handle encoding
#2005-06-27 set outgoing content-type the same as incoming content-type.
#2005-09-08 updated to work with the new IResult idea (wsgi)
#2005-10-09 unicode handling update
#2006-03-09 enabled gzip compression for large responses
#2006-05-10 removed gzip compression and (prematurely) enabled json-rpc 1.1 jmw
#2006-06-19 updated with ctheune's xmlrpc solution for removing proxies jmw
#2006-09-27 added JSONView class
#2006-12-16 JSONView now uses render() instead of doResponse() jmw
__docformat__ = 'restructuredtext'
from zope.app.publication.http import BaseHTTPPublication
from interfaces import IMethodPublisher, IJSONRPCView, IJSONRPCPublisher,\
IJSONRPCRequest, IJSONReader, IJSONWriter, IJSONRPCPremarshaller, \
IJSONView
from zope.interface import implements
#from zope.publisher.http import IResult
from zope.location.location import Location
from zope.publisher.http import HTTPRequest, HTTPResponse, \
getCharsetUsingRequest, DirectResult
from zope.publisher.browser import BrowserRequest
from zope.security.proxy import isinstance
from zope.publisher.interfaces.browser import IBrowserRequest
from zope.publisher.interfaces.browser import IBrowserApplicationRequest
from zope.component import getUtility
from zope.publisher.browser import BrowserPage
try:
from cStringIO import StringIO
except ImportError:
from StringIO import StringIO
import traceback
import logging
# module-level logging shortcuts
DEBUG = logging.DEBUG
logger = logging.getLogger()

# marker key a JSON-RPC 1.0 client may use to smuggle keyword arguments
# inside the positional 'params' list (see JSONRPCRequest.processInputs)
keyword_key = "pythonKwMaRkEr"

# unicode transformation formats acceptable for the JSON response body
json_charsets = ('utf-8','utf-16', 'utf-32')

#writeProfileData transcribes reads and writes files in the zope3
# instance directory for profiling use.
# profiledata.py has response dicts that can be read as python
# profiledata.js has request javascript objects in javascript
writeProfileData = False
# NOTE(review): these two appear unused here since gzip compression was
# removed (see history comments above) -- verify before deleting
compressionTrigger = 1024
compressionLevel = 6
# JSON-RPC publication needs no behavior beyond the base HTTP publication
JSONRPCPublication = BaseHTTPPublication
class JSONRPCPublicationFactory(object):
    """a JSON-RPC Publication handler

    Creates a single publication object up front and hands the same one
    out on every call; modeled after
    zope.app.publication.xmlrpc.XMLRPCPublicationFactory.
    """
    def __init__(self, db):
        self.__publication = JSONRPCPublication(db)

    def __call__(self):
        return self.__publication
def intsort(item):
    """Sort key: the first element of *item*, converted to an int.

    Used to order JSON-RPC 1.1 positional parameters by their numeric
    string keys.
    """
    first = item[0]
    return int(first)
class JSONRPCRequest(BrowserRequest):
    """a JSON-RPC Request
    modeled after zope.publisher.xmlrpc.XMLRPCRequest

    REQUEST from JSON-RPC client
    should have
        method
        params
        id
    """
    # the 'id' member of the incoming JSON-RPC object; echoed in the response
    jsonID = 'dummy'
    # NOTE(review): class-level mutable attribute; processInputs mutates
    # self.form in place, so named parameters could leak between request
    # instances unless BrowserRequest rebinds 'form' per instance -- verify
    form = {}
    #IBrowserRequest is necessary because sometimes complete views may be
    #transported that may need to look-up e.g., icons.
    implements(IJSONRPCRequest,IBrowserRequest, IBrowserApplicationRequest)
    _args = ()

    def _createResponse(self):
        """return a response"""
        return JSONRPCResponse()

    def processInputs(self):
        """take the converted request and make useful args of it"""
        json = getUtility(IJSONReader)
        stream = self._body_instream
        input = []
        # read the request body in 1000-byte chunks
        incoming = stream.read(1000)
        while incoming:
            input.append(incoming)
            incoming = stream.read(1000)
        input = ''.join(input)
        #make it unicode; we are at the boundary
        input = self._decode(input)
        data = json.read(input)
        if writeProfileData:
            # append the raw request to the profiling transcript
            infile = 'profiledata.js'
            t = open(infile,'a')
            t.write('%s\n' % input)
            t.close()
        logger.log(DEBUG, "processing inputs (%s)" % data)
        #print "processing inputs (%s)" % data
        functionstr = data['method']
        # failure unless we split on '.' Why?
        function = functionstr.split('.')
        self.jsonID = data['id']
        params = data['params']
        if isinstance(params,list):
            # JSON-RPC 1.0: 'params' is a positional list
            args = params
            # now, look for keyword parameters, the old way:
            # a dict member keyed with keyword_key carries the kwargs
            kwargs = None
            notPositional = []
            for k in args:
                if isinstance(k,dict):
                    if k.has_key(keyword_key):
                        if isinstance(k[keyword_key],dict):
                            j = k[keyword_key]
                            kwargs= j
                        notPositional.append(k)
            if notPositional:
                # remove the kwarg markers from the positional args
                for k in notPositional:
                    args.remove(k)
            if kwargs:
                # named parameters go into request.form
                for m in kwargs.keys():
                    self.form[str(m)] = kwargs[m]
        elif isinstance(params,dict):
            #json-rpc 1.1 (to be proposed)
            #get the numeric params for positional params
            temp_positional = []
            for key in params:
                if key.isdigit():
                    temp_positional.append((key,params[key]))
            temp_positional.sort(key=intsort)
            args = []
            #make args from positional args and remove them from params
            for item in temp_positional:
                args.append(item[1])
                del params[item[0]]
            #drop remaining named params into request.form
            for named_param in params:
                #named_param is unicode; python needs string for param names
                self.form[str(named_param)] = params[named_param]
        else:
            raise TypeError, 'Unsupported type for JSON-RPC "params" (%s)' \
                % type(params)
        self._args = tuple(args)
        #make environment,cookies, etc., available to request.get()
        super(JSONRPCRequest,self).processInputs()
        self._environ['JSONRPC_MODE'] = True
        # the dotted method name becomes the traversal path suffix
        if function:
            self.setPathSuffix(function)

    def traverse(self,object):
        # skip BrowserRequest's own traverse (browser form handling);
        # delegate straight to the HTTP request implementation
        return super(BrowserRequest,self).traverse(object)

    def __getitem__(self,key):
        """dict-style access delegates to request.get()"""
        return self.get(key)
class JSONRPCResponse(HTTPResponse):
    """JSON-RPC Response
    modeled after zope.publisher.xmlrpc.XMLRPCResponse
    """

    #def getBase(self):
    #    return True

    def setResult(self,result):
        """return
        {
        'id' : matches id in request
        'result' : the result or null if error
        'error' : the error or null if result
        }
        """
        id = self._request.jsonID
        if id is not None:
            # strip security proxies before marshalling
            result = premarshal(result)
            wrapper = {'id':id}
            wrapper['result'] = result
            wrapper['error'] = None
            if writeProfileData:
                # append the response dict to the profiling transcript
                outfile = 'profiledata.py'
                t = open(outfile,'a')
                t.write('%s\n' % wrapper)
                t.close()
            json = getUtility(IJSONWriter)
            encoding = getCharsetUsingRequest(self._request)
            result = json.write(wrapper)
            #body = JSONResult(result, encoding)
            body = self._prepareResult(result)
            super(JSONRPCResponse,self).setResult(body)
            logger.log(DEBUG,"%s" % result)
        else:
            # a null id marks a JSON-RPC notification: no content returned
            self.setStatus(204)
            super(JSONRPCResponse,self).setResult('')

    def _prepareResult(self,result):
        """encode the unicode JSON text and set the content-type header"""
        #we've asked json to return unicode; result should be unicode
        encoding = getCharsetUsingRequest(self._request) or 'utf-8'
        enc = encoding.lower()
        if not enc in json_charsets:
            # only utf-8/16/32 are acceptable; fall back to utf-8
            encoding = 'utf-8'
        #at outgoing boundary; encode it.
        if isinstance(result,unicode):
            body = result.encode(encoding)
            charset = encoding
        else:
            #something's wrong. json did not return unicode.
            raise TypeError, "JSON did not return unicode (%s)" % type(result)
        #we used to gzip compress here, but that should be decided elsewhere
        self.setHeader('content-type',"application/x-javascript;charset=%s" \
            % charset)
        return body

    def handleException(self,exc_info):
        """log the traceback and report it as a JSON-RPC error response"""
        t, value = exc_info[:2]
        exc_data = []
        for file, lineno, function, text in traceback.extract_tb(exc_info[2]):
            exc_data.append("%s %s %s %s %s" % (file, "line",
                lineno, "in", function))
            exc_data.append("%s %s" % ( "=>", repr(text)))
        exc_data.append( "** %s: %s" % exc_info[:2])
        logger.log(logging.ERROR,"\n".join(exc_data))
        # short "ExceptionName: message" summary sent to the client
        s = '%s: %s' % (getattr(t, '__name__', t), value)
        wrapper = {'id':self._request.jsonID}
        wrapper['result'] = None
        wrapper['error'] = s
        json = getUtility(IJSONWriter)
        result = json.write(wrapper)
        body = self._prepareResult(result)
        super(JSONRPCResponse,self).setResult(body)
        logger.log(DEBUG,"Exception: %s" % result)
        # errors are reported in the JSON body, not via the HTTP status
        self.setStatus(200)
# premarshal code adapted from zope.publisher.xmlrpc.py 20060619
# should just use XMLRPC premarshaling after we do not have to worry about
# code not being in the standard library. This probably will originate in 3.3.
class PreMarshallerBase(object):
    """Abstract base class for pre-marshallers.

    Subclasses wrap a piece of data and, when called, return a copy of it
    suitable for JSON marshalling (security proxies removed).
    """
    implements(IJSONRPCPremarshaller)

    def __init__(self, data):
        self.data = data

    def __call__(self):
        # bug fix: was the Python 2-only statement
        # ``raise Exception, "Not implemented"``; NotImplementedError is the
        # conventional type and is still an Exception subclass, so any
        # existing handlers keep working
        raise NotImplementedError("Not implemented")
class DictPreMarshaller(PreMarshallerBase):
    """Pre-marshaller for dicts: premarshals every key and value."""
    def __call__(self):
        result = {}
        for key, value in self.data.items():
            result[premarshal(key)] = premarshal(value)
        return result
class ListPreMarshaller(PreMarshallerBase):
    """Pre-marshaller for sequences: premarshals each element into a list."""
    def __call__(self):
        return [premarshal(element) for element in self.data]
def premarshal(data):
    """Premarshal data before handing it to the JSON writer for marshalling.

    The initial purpose of this function is to remove security proxies
    without resorting to removeSecurityProxy, so that data which should be
    protected is not inadvertently exposed.  Objects with no premarshaller
    adapter are returned unchanged.
    """
    adapter = IJSONRPCPremarshaller(data, alternate=None)
    if adapter is None:
        return data
    return adapter()
#def premarshal_dict(data):
#"""return a non-proxied dict"""
#return dict([(premarshal(k), premarshal(v))
#for (k, v) in data.items()])
#def premarshal_list(data):
#"""return a non-proxied list"""
#return map(premarshal, data)
##note: no dates or datetimes in json, though supported by xmlrpc
#premarshal_dispatch_table = {
#dict: premarshal_dict,
#list: premarshal_list,
#tuple: premarshal_list,
#}
#premarshal_dispatch = premarshal_dispatch_table.get
#def premarshal(data):
#premarshaller = premarshal_dispatch(data.__class__)
#if premarshaller is not None:
#return premarshaller(data)
#return data
class JSONView(BrowserPage):
    """This is a base view class for 'ordinary' JSON methods.

    JSONViews are accessible by ordinary URLs and HTTP GETs.
    """
    implements(IJSONView)
    # default response mime-type; may be overridden in subclasses
    contentType = 'application/json'

    def render(self, *args, **kw):
        """return the python list or dict that will be the body of the response.
        This needs to be overridden in subclasses"""
        raise NotImplementedError("Subclasses should override render to "
            "provide a response body")

    def doCacheControl(self):
        """ at the moment, KHTML-based browsers do not handle cached JSON data
        properly. This may be Dojo-specific, and may be only necessary for
        a short time until Konq and Safari behave like other browsers in this
        respect.

        Default here is to send 'no-cache' header to KHTML browsers.
        For other user agents, a 1-hour public cache is specified.

        May be overridden/extended in subclasses.
        """
        agent = self.request.get('HTTP_USER_AGENT','')
        response = self.request.response
        if 'KHTML' in agent:
            response.setHeader('cache-control','no-cache')
        else:
            response.setHeader('cache-control',
                'public, must-revalidate, max-age=3600')

    def __call__(self, *args, **kw):
        """the render method is called.

        First, anything that matches the method signature in request.form is
        put in the method's **kw.

        After call, the response is JSONized and sent out with appropriate
        encoding.
        """
        request = self.request
        meth = self.render
        #introspect the method and set kw params if the arg is in request.form
        # (Python 2 introspection; co_varnames starts with the parameter
        #  names, and [1:] skips 'self')
        params = meth.im_func.func_code.co_varnames[1:]
        for key in request.form.keys():
            if key in params:
                kw[str(key)] = request.form.get(key)
        try:
            resp = premarshal(self.render(*args,**kw))
        except TypeError, err:
            # typically a mismatch between form variables and the render()
            # signature; report it as a server error in the JSON body
            request.response.setStatus('500')
            resp = {'error':'%s' % err}
        if not isinstance(resp,dict) and not isinstance(resp,list):
            raise ValueError("JSON responses must be dicts or lists")
        self.doCacheControl()
        encoding = getCharsetUsingRequest(self.request)
        enc = encoding.lower()
        if not enc in json_charsets:
            #we'll allow utf-32, utf-16 or utf-8; if not specified, use utf-8
            enc = 'utf-8'
        request.response.setHeader('content-type','%s;charset=%s' % (self.contentType,enc))
        json = getUtility(IJSONWriter)
        # encode at the outgoing boundary
        s = json.write(resp).encode(enc)
        return s
class JSONRPCView(object):
    """A base JSON-RPC view that can be used as mix-in for JSON-RPC views.

    like zope.app.publisher.xmlrpc.XMLRPCView
    """
    implements(IJSONRPCView)

    def __init__(self, context, request):
        # context: the published object; request: the JSON-RPC request
        self.context = context
        self.request = request
class MethodPublisher(JSONRPCView, Location):
    """Base class for JSON-RPC views that publish methods

    like zope.app.publisher.xmlrpc.MethodPublisher
    """
    implements(IMethodPublisher)

    def __getParent(self):
        # fall back to the context when no (truthy) parent has been set
        parent = getattr(self, '_parent', None)
        if parent:
            return parent
        return self.context

    def __setParent(self, parent):
        self._parent = parent

    __parent__ = property(__getParent, __setParent)
class MethodTraverser(object):
    """Publication traverser that resolves path names to methods
    (attributes) of the wrapped method publisher."""
    implements(IJSONRPCPublisher)

    __used_for__ = IMethodPublisher

    def __init__(self, context, request):
        self.context = context

    def publishTraverse(self, request, name):
        """look the name up as an attribute of the context"""
        return getattr(self.context, name)
class TestRequest(JSONRPCRequest):
    """JSONRPCRequest with a canned environment, for unit tests.

    modeled after zope.publisher.xmlrpc.TestRequest
    """
    def __init__(self, body_instream=None, environ=None,
            response=None, **kw):
        # minimal CGI-style environment; callers may extend/override it
        # via 'environ' or keyword arguments
        _testEnv = {
            'SERVER_URL': 'http://127.0.0.1',
            'HTTP_HOST': '127.0.0.1',
            'CONTENT_LENGTH': '0',
            'GATEWAY_INTERFACE': 'TestFooInterface/1.0',
            }
        if environ:
            _testEnv.update(environ)
        if kw:
            _testEnv.update(kw)
        if body_instream is None:
            # default to an empty request body
            body_instream = StringIO('')
        super(TestRequest, self).__init__(
            body_instream, _testEnv, response)
#2005-08-16 A few changes needed after a zope3 trunk change
#2005-11-07 Allowed IDefaultBrowserLayer in JSONRPCRequest. This permits skin
# lookups
#2006-06-19 Removed reference to IPresentation and added interface for
# Premarshaller jmw
from zope.publisher.interfaces import IPublishTraverse
from zope.publisher.interfaces.http import IHTTPApplicationRequest,\
IHTTPCredentials
from zope.interface import Interface
from zope.component.interfaces import IView
from zope.interface import Attribute
from zope.app.publisher.xmlrpc import IMethodPublisher
from zope.publisher.interfaces.xmlrpc import IXMLRPCPublication
from zope.app.publication.interfaces import IRequestFactory
from zope.publisher.interfaces.browser import IDefaultBrowserLayer, \
IBrowserPage
from zope.schema.interfaces import TextLine
class IJSONRPCRequestFactory(IRequestFactory):
    """Browser request factory"""


class IJSONRPCPublisher(IPublishTraverse):
    """JSON-RPC Publisher

    like zope.publisher.interfaces.xmlrpc.IXMLRPCPublisher
    """


class IJSONRPCPublication(IXMLRPCPublication):
    """Object publication framework.

    like zope.publisher.interfaces.xmlrpc.IXMLRPCPublication
    """


class IJSONRPCRequest(IHTTPApplicationRequest, IHTTPCredentials, IDefaultBrowserLayer):
    """JSON-RPC Request

    like zope.publisher.interfaces.xmlrpc.IXMLRPCRequest
    """
    # the 'id' member of the incoming JSON-RPC object; echoed in the response
    jsonID=Attribute("""JSON-RPC ID for the request""")
class IJSONReader(Interface):
    """utility interface for JSON deserialization"""
    def read(aString):
        """read and interpret a string in JSON as python"""


class IJSONWriter(Interface):
    """utility interface for JSON serialization"""
    def write(anObject, encoding=None):
        """return a JSON unicode string representation of a python object

        Encode if encoding is provided.
        """


class IJSON(IJSONReader,IJSONWriter):
    """read and write JSON"""
#class IMethodPublisher(Interface):
#
# """Marker interface for an object that wants to publish methods
# see zope.app.publisher.xmlrpc.IMethodPublisher
#
# it's commented here for completeness; actually, this uses
# the one in zope.app.publisher.xmlrpc
# """
class IJSONRPCView(IView):
    """JSONRPC View

    like zope.app.publisher.interfaces.xmlrpc.IXMLRPCView
    """


class IJSONRPCPremarshaller(Interface):
    """Premarshaller to remove security proxies"""
    def __call__():
        """return the object without proxies"""
class IJSONView(IBrowserPage):
    """A view that is a JSON representation of an object"""

    contentType = TextLine(title=u"content-type", default=u"application/json")

    def render(*args, **kw):
        """return the list or dict that is the response body for this view

        (bug fix: the JSONView implementation now calls render(); the
        interface previously only declared the older doResponse name)
        """

    def doResponse():
        """return the list or dict that is response for this view

        (deprecated older name, kept for backward compatibility)
        """

    def doCacheControl():
        """set any cache headers that may be needed. Default sends 'no-cache'
        to KHTML browsers. May be extended/overridden in subclasses"""
==============
zif.jsonserver
==============
JSON is javascript object notation. JSON-RPC performs the same service
as XML-RPC, except the transport is javascript objects instead of XML.
jsonserver Project:
This project provides the additional functionality of listening and responding
properly to requests of type "application/json-rpc".
Dependencies
------------
This package will work with Zope 3 version 3.3 or greater. The svn version of
jsonserver tries hard to keep up with Zope 3's development version available at
svn://svn.zope.org/repos/main/Zope3/trunk.
jsolait from http://jsolait.net is the recommended client-side javascript
library. Installation of jsolait is covered in the README.txt file in this
package's jsolait folder.
Installation
------------
Install this package in a location accessible to your zope3 instance. The
lib/python folder of the instance is a good choice.
The files in the etc folder should go into etc/package-includes.
A README.txt file in the jsolait folder has instructions for installing
a client javascript library.
Usage
-----
Similar to xmlrpc usage.
jsonserver looks for content-type "application/json-rpc", and handles those
requests as JSON-RPC. Other http requests are not affected and will
presumably work as expected. Now that there is an official mime-type for JSON,
jsonserver also supports "application/json". "application/json-rpc"
may be considered deprecated.
jsonrpc provides another namespace,
'http://namespaces.zope.org/jsonrpc' for zcml configuration.
Statements like
::
<jsonrpc:view for="" permission="" methods="" class="" />
are used in zcml to make jsonrpc methods viewable.
You may create views that appear only if a jsonrpc listener is installed:
::
<configure zcml:condition="have jsonrpc">
<jsonrpc:view
for="someInterface"
permission="zope.View"
methods="blah blecht"
class=".views.JsonViewClass"
/>
</configure>
To make a view class, subclass
zif.jsonserver.jsonrpc.MethodPublisher like this:
::
from zif.jsonserver import MethodPublisher
class MyClass(MethodPublisher):
def myOutput(self):
blah = 'something cool'
return blah
def myOutput1(self,param1):
blecht = self.context.something(param1)
return blecht
where the return value can be a python simple object
(int, long, float, string, etc.) or list or dictionary composed of
simple objects, lists, and/or dictionaries. Composite built-ins
like complex numbers, dates, or classes are not currently
supported. Decompose those, and send a list or dictionary instead.
Multiple returned values will be marshalled into a list.
For web pages, you will need to include a javascript library for the client side
in your page template:
::
<script type="text/javascript" src="/++resource++jsolait/jsolait.js"></script>
will bring in the recommended jsolait library, if it is installed here. The following javascript examples
are for jsolait, but any similar javascript library may be used, or you can write your own. The
xmlHTTPRequest POST must set a content-type of "application/json-rpc" for this package to invoke
json-rpc requests on the server.
From your client javascript code, import the jsonrpc module:
::
var jsonrpc = imprt('jsonrpc');
Then, make a jsolait connection proxy ("." often works fine for addr):
::
addr="address to server object providing jsonrpc view class";
//for better error handling, see http://jsolait.net/wiki/documentation
try{var aServer = new jsonrpc.ServiceProxy(addr, ["myOutput"]);
}catch(e){alert(e);}
then, for async communication, provide a callback function:
::
function doThis(resp,err){
if (!err) {do something with resp} else {do something with err}
}
and call the method:
::
aServer.myOutput(aparam,doThis);
If you want sync communication, call the method without
the name of a function as the last parameter.
For communication other than in a web browser (javascript), minjson.py
or other json implementations have functions for reading and writing
JSON objects.
The text of a JSON-RPC request (v1.0) looks like:
::
{"id":jsonid,"method":remotemethod,"params":methodparams}
where
- jsonid is a string or number that may identify this specific request
- remotemethod is the method to call on the server
- methodparams is a list (javascript Array) of parameters to the method
The text of a JSON-RPC response looks like:
::
{"id":jsonid,"result":returnedresult,"error":returnederr}
where
- jsonid is the same jsonid as sent in the request
- returnedresult is a javascript representation of the result or null
- returnederr is a javascript representation of an error state
Either returnedresult or returnederr will be the javascript null value.
Actual implementation using e.g., urllib is left as an exercise for the
reader. Hint: Use the minjson.write(object) and minjson.read(string)
methods for conversion before and after transport.
Dojo
----
JSON-RPC in Dojo should work out-of-the-box with jsonserver, since it provides a
content-type supported by jsonserver. A preliminary package that serves a
per-object ".smd" file is available at dojosupport.
Dojo is available at "http://dojotoolkit.org":http://dojotoolkit.org .
Simple JSON / Non-POST Views
----------------------------
JSON Views accessible to HTTP GET are also provided with zif.jsonserver.
See "JSONViews":/jsonviews_README.html .
Page Templates, Form Variables, and Named Parameters
----------------------------------------------------
jsonserver will work with page templates and similar
snippets of HTML. Most registered views (browser:page or similar)
are also accessible to json-rpc clients. The simplest way to use a
page template is to call it in javascript just as you would call a
jsonrpc:view. jsonserver sets a request variable, JSONRPC_MODE,
which will be True if a template is requested through json-rpc.
This may be useful if you need json-rpc-specific behavior.
If you need form data, jsonserver has a special facility for this. The
contents of any client object (dict) passed as a parameter to json-rpc
that is (cleverly) named "pythonKwMaRkEr" will be available in the request
as items in request.form. If you call methods with named parameters,
those items also will replace the named parameters as appropriate.
A pythonkw module is provided here for use with jsolait on the client side.
Code like
::
var pythonkw = imprt("pythonkw");
var kwparams = new pythonkw.PythonKw({'parm1': 'aaa', 'parm2': text_value})
var result = aServer.my_portlet(kwparams);
will do the marshalling so you do not have to type "pythonKwMaRkEr".
Here is an example of using a page template through a
jsonrpc:view method (ViewPageTemplateFile is in zope.app.pagetemplate)
::
def my_portlet(self,parm1='bbb',parm2=None):
date = datetime.now()
rand = random.randint(0,2000)
portlet = ViewPageTemplateFile("my_portlet.pt")
return portlet(self,date=date,random=rand,parm1=parm1)
In the above example, parm1 is available to the template as options/parm1
and as request/form/parm1. parm2 may be available to the template as
request/form/parm2 if provided in the request.
Debugging
---------
To get verbose output of requests, responses, and errors,
set level DEBUG for your event log in etc/zope.conf for your
instance. e.g.,
::
<eventlog>
level DEBUG
<logfile>
path $LOGDIR/z3.log
</logfile>
<logfile>
path STDOUT
</logfile>
</eventlog>
You can get pretty much the same results with tcpwatch, except you get the
entire request and response with tcpwatch.
Compatibility
-------------
Most compatibility issues are likely to involve client implementations rather than the server.
jsonserver will accept any valid JSON-RPC request that is a POST with
content-type "application/json-rpc" or "application/json". Output responses will be of
content-type "application/x-javascript" so that browser clients can know
that the response will be interpreted in javascript.
jsolait should work on any current browser with enabled javascript and
a functioning xmlHTTPRequest POST implementation. This includes most
gecko browsers (Firefox, Mozilla, and Netscape 6.1+), khtml browsers (Safari and
konqueror), recent IEs, and Opera 8.1+. If it will do Google maps, it probably
will do jsolait.
Unicode Support
---------------
jsonserver now supports unicode, though this support should still be considered
lightly tested. If you have a
project that depends on unicode, let me know if this does anything unexpected.
| zif.jsonserver | /zif.jsonserver-0.6.tar.gz/zif.jsonserver-0.6/src/zif/jsonserver/README.txt | README.txt |
# minjson.py
# reads minimal javascript objects.
# str's objects and fixes the text to write javascript.
#UNICODE USAGE: Minjson tries hard to accommodate naive usage in a
#"Do what I mean" manner. Real applications should handle unicode separately.
# The "right" way to use minjson in an application is to provide minjson a
# python unicode string for reading and accept a unicode output from minjson's
# writing. That way, the assumptions for unicode are yours and not minjson's.
# That said, the minjson code has some (optional) unicode handling that you
# may look at as a model for the unicode handling your application may need.
# Thanks to Patrick Logan for starting the json-py project and making so many
# good test cases.
# Additional thanks to Balazs Ree for replacing the writing module.
# Jim Washington 6 Dec 2006.
# 2006-12-06 Thanks to Koen van de Sande, now handles the case where someone
# might want e.g., a literal "\n" in text not a new-line.
# 2005-12-30 writing now traverses the object tree instead of relying on
# str() or unicode()
# 2005-10-10 on reading, looks for \\uxxxx and replaces with u'\uxxxx'
# 2005-10-09 now tries hard to make all strings unicode when reading.
# 2005-10-07 got rid of eval() completely, makes object as found by the
# tokenizer.
# 2005-09-06 imported parsing constants from tokenize; they changed a bit from
# python2.3 to 2.4
# 2005-08-22 replaced the read sanity code
# 2005-08-21 Search for exploits on eval() yielded more default bad operators.
# 2005-08-18 Added optional code from Koen van de Sande to escape
# outgoing unicode chars above 128
from re import compile, sub, search, DOTALL
from token import ENDMARKER, NAME, NUMBER, STRING, OP, ERRORTOKEN
from tokenize import tokenize, TokenError, NL
#Usually, utf-8 will work, set this to utf-16 if you dare.
# last-resort codec used by safeRead() when default unicode decoding fails
emergencyEncoding = 'utf-8'
class ReadException(Exception):
    """Raised by read()/safeRead() when the input is not acceptable JSON."""
    pass
class WriteException(Exception):
    """Raised by write()/JsonWriter when an object cannot be serialized."""
    pass
#################################
#      read JSON object         #
#################################
# C-style /* ... */ comments; DOTALL so they may span multiple lines
slashstarcomment = compile(r'/\*.*?\*/',DOTALL)
# C++-style // comments, through the end of the line
doubleslashcomment = compile(r'//.*\n')
# \uXXXX escapes: a backslash-u followed by exactly four hex digits
unichrRE = compile(r"\\u[0-9a-fA-F]{4,4}")
def unichrReplace(match):
    """Convert one matched \\uXXXX escape to the corresponding unicode char."""
    # match.group() is e.g. r'\u00e9'; strip the leading '\u' and parse hex
    return unichr(int(match.group()[2:],16))
# (real character, escaped form) pairs used when unescaping JSON strings
escapeStrs = (('\n',r'\n'),('\b',r'\b'),
    ('\f',r'\f'),('\t',r'\t'),('\r',r'\r'), ('"',r'\"')
    )
class DictToken:
    """Parse-stack marker pushed when '{' is read; resolveDict() pops back to it."""
    __slots__=[]
    pass
class ListToken:
    """Parse-stack marker pushed when '[' is read; resolveList() pops back to it."""
    __slots__=[]
    pass
class ColonToken:
    """Parse-stack marker for ':' separating a key and value inside an object."""
    __slots__=[]
    pass
class CommaToken:
    """Parse-stack marker for ',' separating items in an array or object."""
    __slots__=[]
    pass
class JSONReader(object):
    """raise SyntaxError if it is not JSON, and make the object available

    Parses a JSON text by feeding it through Python 2's tokenize module.
    Tokens are pushed onto self.objects as they arrive; closing ']' / '}'
    tokens pop back to the matching marker (ListToken / DictToken) and
    replace that span with the finished list or dict.
    """
    def __init__(self,data):
        self.stop = False
        #make an iterator of data so that next() works in tokenize.
        self._data = iter([data])
        # remembers the last operator token seen, so NUMBER handling can
        # detect a preceding unary '-'
        self.lastOp = None
        # the parse stack; on success it holds exactly one finished object
        self.objects = []
        self.tokenize()

    def tokenize(self):
        """Run the tokenizer over the input; any tokenize failure is JSON failure."""
        try:
            # Python 2 tokenize(readline, tokeneater) callback API
            tokenize(self._data.next,self.readTokens)
        except TokenError:
            raise SyntaxError

    def resolveList(self):
        """Pop items back to the nearest ListToken and replace it with a list."""
        #check for empty list
        if isinstance(self.objects[-1],ListToken):
            self.objects[-1] = []
            return
        theList = []
        commaCount = 0
        try:
            item = self.objects.pop()
        except IndexError:
            raise SyntaxError
        while not isinstance(item,ListToken):
            if isinstance(item,CommaToken):
                commaCount += 1
            else:
                theList.append(item)
            try:
                item = self.objects.pop()
            except IndexError:
                raise SyntaxError
        # a well-formed list has exactly one comma between adjacent items
        if not commaCount == (len(theList) -1):
            raise SyntaxError
        # items were popped in reverse order
        theList.reverse()
        item = theList
        self.objects.append(item)

    def resolveDict(self):
        """Pop key:value groups back to the nearest DictToken; push a dict."""
        theList = []
        #check for empty dict
        if isinstance(self.objects[-1], DictToken):
            self.objects[-1] = {}
            return
        #not empty; must have at least three values
        try:
            #value (we're going backwards!)
            value = self.objects.pop()
        except IndexError:
            raise SyntaxError
        try:
            #colon
            colon = self.objects.pop()
            if not isinstance(colon, ColonToken):
                raise SyntaxError
        except IndexError:
            raise SyntaxError
        try:
            #key
            key = self.objects.pop()
            # JSON object keys must be strings
            if not isinstance(key,basestring):
                raise SyntaxError
        except IndexError:
            raise SyntaxError
        #salt the while
        comma = value
        while not isinstance(comma,DictToken):
            # store the value
            theList.append((key,value))
            #do it again...
            try:
                #might be a comma
                comma = self.objects.pop()
            except IndexError:
                raise SyntaxError
            if isinstance(comma,CommaToken):
                #if it's a comma, get the values
                try:
                    value = self.objects.pop()
                except IndexError:
                    #print self.objects
                    raise SyntaxError
                try:
                    colon = self.objects.pop()
                    if not isinstance(colon, ColonToken):
                        raise SyntaxError
                except IndexError:
                    raise SyntaxError
                try:
                    key = self.objects.pop()
                    if not isinstance(key,basestring):
                        raise SyntaxError
                except IndexError:
                    raise SyntaxError
        theDict = {}
        for k in theList:
            theDict[k[0]] = k[1]
        self.objects.append(theDict)

    # Python 2 tuple-parameter syntax: tokenize passes (row, col) pairs
    def readTokens(self,type, token, (srow, scol), (erow, ecol), line):
        """Token callback for tokenize(); builds up self.objects."""
        # UPPERCASE consts from tokens.py or tokenize.py
        if type == OP:
            # only these operators can appear in JSON ('-' for negatives)
            if token not in "[{}],:-":
                raise SyntaxError
            else:
                self.lastOp = token
            if token == '[':
                self.objects.append(ListToken())
            elif token == '{':
                self.objects.append(DictToken())
            elif token == ']':
                self.resolveList()
            elif token == '}':
                self.resolveDict()
            elif token == ':':
                self.objects.append(ColonToken())
            elif token == ',':
                self.objects.append(CommaToken())
        elif type == STRING:
            # strip the quotes, then unescape; splitting on '\\\\' first
            # protects literal backslash pairs from the replacements below
            tok = token[1:-1]
            parts = tok.split("\\\\")
            for k in escapeStrs:
                if k[1] in tok:
                    parts = [part.replace(k[1],k[0]) for part in parts]
            self.objects.append("\\".join(parts))
        elif type == NUMBER:
            # a preceding '-' operator negates the number
            if self.lastOp == '-':
                factor = -1
            else:
                factor = 1
            try:
                self.objects.append(factor * int(token))
            except ValueError:
                self.objects.append(factor * float(token))
        elif type == NAME:
            # the only bare words JSON allows
            try:
                self.objects.append({'true':True,
                    'false':False,'null':None}[token])
            except KeyError:
                raise SyntaxError
        elif type == ENDMARKER:
            pass
        elif type == NL:
            pass
        elif type == ERRORTOKEN:
            if ecol == len(line):
                #it's a char at the end of the line. (mostly) harmless.
                pass
            else:
                raise SyntaxError
        else:
            raise SyntaxError

    def output(self):
        """Return the single parsed object, or raise SyntaxError if the
        parse did not reduce to exactly one value."""
        try:
            assert len(self.objects) == 1
        except AssertionError:
            raise SyntaxError
        return self.objects[0]
def safeRead(aString, encoding=None):
    """read the js, first sanitizing a bit and removing any c-style comments

    If the input is a unicode string, great. That's preferred. If the input
    is a byte string, strings in the object will be produced as unicode anyway.

    Raises ReadException if the cleaned-up text is not acceptable JSON.
    """
    # get rid of trailing null. Konqueror appends this.
    CHR0 = chr(0)
    while aString.endswith(CHR0):
        aString = aString[:-1]
    # strip leading and trailing whitespace
    aString = aString.strip()
    # zap /* ... */ comments
    aString = slashstarcomment.sub('',aString)
    # zap // comments
    aString = doubleslashcomment.sub('',aString)
    # detect and handle \\u unicode characters. Note: This has the side effect
    # of converting the entire string to unicode. This is probably OK.
    unicodechars = unichrRE.search(aString)
    if unicodechars:
        aString = unichrRE.sub(unichrReplace, aString)
    #if it's already unicode, we won't try to decode it
    if isinstance(aString, unicode):
        s = aString
    else:
        if encoding:
            # note: no "try" here. the encoding provided must work for the
            # incoming byte string. UnicodeDecode error will be raised
            # in that case. Often, it will be best not to provide the encoding
            # and allow the default
            s = unicode(aString, encoding)
            #print "decoded %s from %s" % (s,encoding)
        else:
            # let's try to decode to unicode in system default encoding
            try:
                s = unicode(aString)
                #import sys
                #print "decoded %s from %s" % (s,sys.getdefaultencoding())
            except UnicodeDecodeError:
                # last choice: handle as emergencyEncoding
                enc = emergencyEncoding
                s = unicode(aString, enc)
                #print "%s decoded from %s" % (s, enc)
    # parse and get the object.
    try:
        data = JSONReader(s).output()
    except SyntaxError:
        raise ReadException, 'Unacceptable JSON expression: %s' % aString
    return data
# public alias: minjson.read() is the documented parsing entry point
read = safeRead
#################################
# write object as JSON #
#################################
import re, codecs
from cStringIO import StringIO
### Codec error handler
def jsonreplace_handler(exc):
    '''Codec error handler for JSON output.

    When a character cannot be encoded, emit a \\uxxxx escape instead.
    This resembles the builtin "backslashreplace" handler, except that
    \\xnn forms are never produced, because they are not legal JSON.
    '''
    if not isinstance(exc, UnicodeEncodeError):
        # only encode errors are handled here; anything else propagates
        raise exc
    # repr(part) would yield u'u\\nnnn'; build the escape by hand instead
    offending = exc.object[exc.start]
    return u'\\u%04x' % ord(offending), exc.start + 1
# register the error handler so str.encode(..., 'jsonreplace') can find it
codecs.register_error('jsonreplace', jsonreplace_handler)
### Writer
def write(input, encoding='utf-8', outputEncoding=None):
    """Serialize *input* as JSON text.

    ``encoding`` is the assumed encoding of byte strings found inside
    *input*.  If ``outputEncoding`` is given the result is a byte string
    in that encoding; otherwise the result is unicode.
    """
    serializer = JsonWriter(input_encoding=encoding,
                            output_encoding=outputEncoding)
    serializer.write(input)
    return serializer.getvalue()
# characters that must be escaped inside a JSON string literal
re_strmangle = re.compile('"|\b|\f|\n|\r|\t|\\\\')
def func_strmangle(match):
    """Return the JSON escape sequence for one matched special character."""
    ch = match.group(0)
    escapes = {
        '"': '\\"',
        '\b': '\\b',
        '\f': '\\f',
        '\n': '\\n',
        '\r': '\\r',
        '\t': '\\t',
        '\\': '\\\\',
    }
    return escapes[ch]
def strmangle(text):
    """Escape every JSON-special character in *text* (see func_strmangle)."""
    return re_strmangle.sub(func_strmangle, text)
class JsonStream(object):
    """In-memory sink collecting JSON text fragments.

    Used by JsonWriter when the caller supplies no real stream; exposes
    the minimal write()/getvalue() file-like interface it needs.
    """

    def __init__(self):
        # fragments accumulate in a list and are joined lazily, which
        # avoids quadratic string concatenation
        self.buf = []

    def write(self, text):
        """Append one fragment of output text."""
        self.buf.append(text)

    def getvalue(self):
        """Return everything written so far as a single string."""
        return ''.join(self.buf)
class JsonWriter(object):
    """Recursive JSON serializer writing to a stream (or JsonStream)."""

    def __init__(self, stream=None, input_encoding='utf-8', output_encoding=None):
        '''
        - stream is optional, if specified must also give output_encoding
        - The input strings can be unicode or in input_encoding
        - output_encoding is optional, if omitted, result will be unicode
        '''
        if stream is not None:
            if output_encoding is None:
                raise WriteException, 'If a stream is given, output encoding must also be provided'
        else:
            # no stream supplied: collect output in memory
            stream = JsonStream()
        self.stream = stream
        self.input_encoding = input_encoding
        self.output_encoding = output_encoding

    def write(self, obj):
        """Write *obj* to the stream as JSON, recursing into containers.

        Note the bool/None checks precede the basestring check because in
        Python True/False would otherwise fall through to str().
        """
        if isinstance(obj, (list, tuple)):
            self.stream.write('[')
            first = True
            for elem in obj:
                if first:
                    first = False
                else:
                    self.stream.write(',')
                self.write(elem)
            # NOTE(review): trailing comma makes this a one-element tuple
            # expression; harmless, but presumably unintentional
            self.stream.write(']'),
        elif isinstance(obj, dict):
            self.stream.write('{')
            first = True
            for key, value in obj.iteritems():
                if first:
                    first = False
                else:
                    self.stream.write(',')
                self.write(key)
                self.stream.write(':')
                self.write(value)
            self.stream.write('}')
        elif obj is True:
            self.stream.write('true')
        elif obj is False:
            self.stream.write('false')
        elif obj is None:
            self.stream.write('null')
        elif not isinstance(obj, basestring):
            # if we are not baseobj, convert to it
            try:
                obj = str(obj)
            except Exception, exc:
                raise WriteException, 'Cannot write object (%s: %s)' % (exc.__class__, exc)
            # NOTE(review): the str() result is written unquoted, so this
            # branch only yields valid JSON for numbers — confirm intent
            self.stream.write(obj)
        else:
            # convert to unicode first
            if not isinstance(obj, unicode):
                try:
                    obj = unicode(obj, self.input_encoding)
                except (UnicodeDecodeError, UnicodeTranslateError):
                    obj = unicode(obj, 'utf-8', 'replace')
            # do the mangling
            obj = strmangle(obj)
            # make the encoding
            if self.output_encoding is not None:
                # 'jsonreplace' emits \uxxxx for unencodable characters
                obj = obj.encode(self.output_encoding, 'jsonreplace')
            self.stream.write('"')
            self.stream.write(obj)
            self.stream.write('"')

    def getvalue(self):
        """Return the accumulated output from the underlying stream."""
        return self.stream.getvalue()
import zope.interface
from zope.interface import Interface
from zope.security.checker import CheckerPublic, Checker
from zope.configuration.exceptions import ConfigurationError
from interfaces import IJSONRPCRequest
from zope.component.interface import provideInterface
from zope.app.component.metaconfigure import handler
from jsonrpc import MethodPublisher
def view(_context, for_=None, interface=None, methods=None,
        class_=None, permission=None, name=None):
    """Handler for the <jsonrpc:view> ZCML directive.

    Registers ``class_`` (or its individual methods) as json-rpc views for
    ``for_`` on IJSONRPCRequest.  If ``name`` is given, a single named view
    is registered; otherwise every method listed in ``methods`` and every
    field of each interface in ``interface`` becomes its own registration.
    """
    interface = interface or []
    methods = methods or []
    # If there were special permission settings provided, then use them
    if permission == 'zope.Public':
        permission = CheckerPublic
    # map of attribute/field name -> required permission
    require = {}
    for attr_name in methods:
        require[attr_name] = permission
    if interface:
        for iface in interface:
            for field_name in iface:
                require[field_name] = permission
    # NOTE(review): this provideInterface action runs even when for_ is
    # None, and is repeated below guarded by "for_ is not None" — confirm
    # the unconditional first registration is intentional.
    _context.action(
        discriminator = None,
        callable = provideInterface,
        args = ('', for_)
        )
    # Make sure that the class inherits MethodPublisher, so that the views
    # have a location
    if class_ is None:
        class_ = MethodPublisher
        original_class = class_
    else:
        original_class = class_
        # mix MethodPublisher into the user's class dynamically
        class_ = type(class_.__name__, (class_, MethodPublisher), {})
    if name:
        # Register a single view
        if permission:
            checker = Checker(require)
            # factory wrapping the view so it always carries its checker
            def proxyView(context, request, class_=class_, checker=checker):
                view = class_(context, request)
                # We need this in case the resource gets unwrapped and
                # needs to be rewrapped
                view.__Security_checker__ = checker
                return view
            class_ = proxyView
            class_.factory = original_class
        # Register the new view.
        _context.action(
            discriminator = ('view', for_, name, IJSONRPCRequest),
            callable = handler,
            args = ('registerAdapter',
                    class_,(for_, IJSONRPCRequest), Interface, name,
                    _context.info)
            )
    else:
        # no explicit name: register each required method as its own view
        if permission:
            checker = Checker({'__call__': permission})
        else:
            checker = None
        for name in require:
            # create a new callable class with a security checker;
            cdict = {'__Security_checker__': checker,
                     '__call__': getattr(class_, name)}
            new_class = type(class_.__name__, (class_,), cdict)
            _context.action(
                discriminator = ('view', for_, name, IJSONRPCRequest),
                callable = handler,
                args = ('registerAdapter',
                        new_class,(for_, IJSONRPCRequest), Interface, name,
                        _context.info)
                )
    # Register the used interfaces with the site manager
    if for_ is not None:
        _context.action(
            discriminator = None,
            callable = provideInterface,
            args = ('', for_)
            )
// Class(name?, bases..., classScope): jsolait's minimal class system.
// Builds a constructor from an optional name, optional base classes and a
// scope function that populates the prototype ("publ") in place.
Class=function(name,bases,classScope){
var args=[];
for(var i=0;i<arguments.length;i++){
args[i]=arguments[i];
}
// the last argument is always the class scope function
classScope=args.pop();
if((args.length>0)&&(typeof args[0]=='string')){
name=args.shift();
}else{
name="anonymous";
}
// any remaining arguments are base classes
var bases=args;
var __class__={__isArray__:false,
__name__:name,
__bases__:bases,
__id__:Class.__idcount__++,
__hash__:function(){
return this.__id__;
},
__str__:function(){
return "[class %s]".format(this.__name__);
}
};
// build the prototype; with no bases, start from a fresh object
var baseProtos=[];var proto;if(bases.length==0){proto={};
proto.__str__=function(){
return "[%s %s]".format(this.__class__.prototype.__call__===undefined?'object':'callable',this.__class__.__name__);
};
proto.toString=proto.__str__;
__class__.__bases__=[Object];
}else{var baseProto;
for(var i=0;i<bases.length;i++){
var baseClass=bases[i];
baseProtos.push(baseClass.prototype);
if(baseClass.__createProto__!==undefined){
baseProto=baseClass.__createProto__(bases);
}else{
// passing Class as the marker suppresses __init__ (see NewClass below)
baseProto=new baseClass(Class);
}
__class__.__isArray__=__class__.__isArray__||baseClass.__isArray__;
// first base wins on conflicts; later bases only fill gaps
if(i==0){proto=baseProto;
}else{for(var key in baseProto){
if(proto[key]===undefined){
proto[key]=baseProto[key];
}
}
}
// inherit class-level (static) members too
for(var key in baseClass){
if((key!='prototype')&&(__class__[key]===undefined)){
__class__[key]=baseClass[key];
}
}
}
proto.toString=proto.__str__;
}
if(proto.__hash__===undefined){
proto.__hash__=function(){
if(this.__id__===undefined){
this.__id__=Class.__idcount__++;
}
return this.__id__;
};
}
proto.__class__=__class__;
// per-class key for private member storage
var privId='__priv__'+__class__.__id__;
// invoke the scope fn as (proto[, privId], baseProto...) depending on arity
if(classScope.length-1>baseProtos.length){
classScope.apply(this,[proto,privId].concat(baseProtos));
}else{
classScope.apply(this,[proto].concat(baseProtos));
}
// choose a constructor flavour: callable, array-like, or plain object
if(proto.__call__){
var NewClass=function(calledBy){
if(calledBy!==Class){
// instances are functions delegating to __call__
var rslt=function(){
return rslt.__call__.apply(rslt,arguments);
};
var privId='__priv__'+arguments.callee.__id__;
rslt[privId]={};
var proto=arguments.callee.prototype;
for(var n in proto){
rslt[n]=proto[n];
}
rslt.constructor=arguments.callee;
rslt.toString=proto.__str__;
if(rslt.__init__){
rslt.__init__.apply(rslt,arguments);
}
return rslt;
}
};
}else if(__class__.__isArray__){
var NewClass=function(calledBy){
if(calledBy!==Class){
// instances are real arrays carrying the prototype's members
rslt=[];
var privId='__priv__'+arguments.callee.__id__;
rslt[privId]={};
var proto=arguments.callee.prototype;
for(var n in proto){
rslt[n]=proto[n];
}
rslt.constructor=proto;
rslt.toString=proto.__str__;
if(rslt.__init__){
rslt.__init__.apply(rslt,arguments);
// NOTE(review): 'lengt' below is presumably a typo for 'length';
// as written this branch can never run, so a single constructor
// argument is always pushed rather than treated as a length —
// confirm no caller depends on the push behavior before fixing.
}else{if(arguments.lengt==1){
rslt.length=arguments[0];
}else{
for(var i=0;i<arguments.length;i++){
rslt.push(arguments[i]);
}
}
}
return rslt;
}
};
}else{
var NewClass=function(calledBy){
if(calledBy!==Class){
var privId='__priv__'+arguments.callee.__id__;
this[privId]={};
if(this.__init__){
this.__init__.apply(this,arguments);
}}
};
}
proto.constructor=NewClass;
proto.__class__=NewClass;NewClass.prototype=proto;
// copy class-level members onto the constructor function
for(var key in __class__){
NewClass[key]=__class__[key];
}
NewClass.toString=__class__.__str__;
return NewClass;
};Class.__idcount__=0;
Class.toString=function(){
return "[object Class]";
};
Class.__createProto__=function(){throw "Can't use Class as a base class.";
};
// teach the builtins how to take part in the Class machinery
Array.__isArray__=true;
Array.__str__=Array.toString=function(){return "[class Array]";};
Array.__createProto__=function(){var r=[];r.__str__=Array.prototype.toString;return r;};
Object.__str__=Object.toString=function(){return "[class Object]";};
Function.__createProto__=function(){throw "Cannot inherit from Function. implement the callabel interface instead using YourClass::__call__.";};
// Module(name, version, moduleScope): create a jsolait module object,
// execute its scope function against it, and register it globally.
Module=function(name,version,moduleScope){
var newMod={};
newMod.name=name;
newMod.version=version;
// set by the import machinery while a module file is being evaluated
newMod.__sourceURI__=Module.currentURI;
newMod.toString=function(){
return "[module '%s' version: %s]".format(this.name,this.version);
};
// every module gets its own Exception subclass bound to it
newMod.Exception=Class(Module.Exception,function(publ,supr){
publ.module=newMod;
});
try{moduleScope.call(newMod,newMod);
}catch(e){
throw new Module.ModuleScopeExecFailed(newMod,e);
}
// record each exported function's name for nicer reprs
for(var n in newMod){
var obj=newMod[n];
if(typeof obj=='function'){
obj.__name__=n;
}
}
jsolait.registerModule(newMod);
return newMod;
};
Module.toString=function(){
return "[object Module]";
};
Module.__createProto__=function(){throw "Can't use Module as a base class.";
};
// base class for all module-level exceptions; supports cause chaining
Module.Exception=Class("Exception",function(publ){
publ.__init__=function(msg,trace){
this.name=this.constructor.__name__;
this.message=''+msg;
this.trace=trace;
};
publ.__str__=function(){
var s="%s %s".format(this.name,this.module);
return s;
};
// render this exception plus its chained causes as an indented trace
publ.toTraceString=function(indent){
indent=indent==null?0:indent;
var s="%s in %s:\n%s".format(this.name,this.module,this.message.indent(4)).indent(indent);
if(this.trace){
if(this.trace.toTraceString){
s+=('\n\nbecause:\n'+this.trace.toTraceString(indent+4));
}else{
s+=(this.trace+'\n').indent(indent+4);
}
}
return s;
};
publ.name;publ.message;
publ.module="jsolait";
publ.trace;});
// raised when a module's scope function throws during Module() setup
Module.ModuleScopeExecFailed=Class("ModuleScopeExecFailed",Module.Exception,function(publ,supr){
publ.__init__=function(module,trace){
supr.__init__.call(this,"Failed to run the module scope for %s".format(module),trace);
this.failedModule=module;
};
publ.module;
});
// The core "jsolait" module: module registry, synchronous loader/importer,
// and the String.prototype format/pad/indent/mul helpers used throughout.
Module("jsolait","$Revision: 80 $",function(mod){
jsolait=mod;
mod.modules={};
// URIs of the modules shipped with jsolait; %(baseURI)s is filled in
// via String.prototype.format from mod.baseURI
mod.knownModuleURIs={codecs:"%(baseURI)slib/codecs.js",
pythonkw:"%(baseURI)slib/pythonkw.js",
crypto:"%(baseURI)slib/crypto.js",
dom:"%(baseURI)slib/dom.js",
forms:"%(baseURI)slib/forms.js",
iter:"%(baseURI)slib/iter.js",
jsonrpc:"%(baseURI)slib/jsonrpc.js",
lang:"%(baseURI)slib/lang.js",
sets:"%(baseURI)slib/sets.js",
testing:"%(baseURI)slib/testing.js",
urllib:"%(baseURI)slib/urllib.js",
xml:"%(baseURI)slib/xml.js",
xmlrpc:"%(baseURI)slib/xmlrpc.js"};
mod.moduleSearchURIs=[".","%(baseURI)slib"];
// patched by install_jsolait.py to point at the Zope resource directory
mod.baseURI = "/++resource++jsolait";
// cross-browser XMLHttpRequest factory (falls back to ActiveX variants)
var getHTTP=function(){
var obj;
try{obj=new XMLHttpRequest();
}catch(e){
try{obj=new ActiveXObject("Msxml2.XMLHTTP.4.0");
}catch(e){
try{obj=new ActiveXObject("Msxml2.XMLHTTP");
}catch(e){
try{obj=new ActiveXObject("microsoft.XMLHTTP");}catch(e){
throw new mod.Exception("Unable to get an HTTP request object.");
}
}}
}
return obj;
};
// synchronous GET of a URI; returns a String tagged with __sourceURI__
mod.loadURI=function(uri,headers){
headers=(headers!==undefined)?headers:[];
try{
var xmlhttp=getHTTP();
xmlhttp.open("GET",uri,false);
for(var i=0;i<headers.length;i++){
xmlhttp.setRequestHeader(headers[i][0],headers[i][1]);}
xmlhttp.send("");
}catch(e){
throw new mod.LoadURIFailed(uri,e);
}
// status 0/null covers file:// and some local-resource responses
if(xmlhttp.status==200||xmlhttp.status==0||xmlhttp.status==null){
var s=new String(xmlhttp.responseText);
s.__sourceURI__=uri;
return s;
}else{
throw new mod.LoadURIFailed(uri,new mod.Exception("Server did not respond with 200"));
}
};
mod.LoadURIFailed=Class(mod.Exception,function(publ,supr){
publ.__init__=function(sourceURI,trace){
supr.__init__.call(this,"Failed to load file: '%s'".format(sourceURI.indent(2)),trace);
this.sourceURI=sourceURI;
};
publ.sourceURI;
});
// import implementation: look up the registry, then known URIs, then the
// search path; evaluate the fetched source, which registers the module
mod.__imprt__=function(name){
if(mod.modules[name]){return mod.modules[name];
}else{
var src,modPath;
if(mod.knownModuleURIs[name]!=undefined){
modPath=mod.knownModuleURIs[name].format(mod);
try{src=mod.loadURI(modPath);
}catch(e){
throw new mod.ImportFailed(name,[modPath],e);
}
}
if(src==null){var failedURIs=[];
for(var i=0;i<mod.moduleSearchURIs.length;i++){
modPath="%s/%s.js".format(mod.moduleSearchURIs[i].format(mod),name.split(".").join("/"));
try{
src=mod.loadURI(modPath);
break;
}catch(e){
failedURIs.push(e.sourceURI);
}
}
if(src==null){
throw new mod.ImportFailed(name,failedURIs);
}
}
// wrap the source so Module.currentURI is set while it executes
try{var srcURI=src.__sourceURI__;
src='Module.currentURI="%s";\n%s\nModule.currentURI=null;\n'.format(src.__sourceURI__.replace(/\\/g,'\\\\'),src);
var f=new Function("",src);f();
}catch(e){
throw new mod.ImportFailed(name,[srcURI],e);
}
return mod.modules[name];}
};
mod.ImportFailed=Class(mod.Exception,function(publ,supr){
publ.__init__=function(moduleName,moduleURIs,trace){
supr.__init__.call(this,"Failed to import module: '%s' from:\n%s".format(moduleName,moduleURIs.join(',\n').indent(2)),trace);
this.moduleName=moduleName;
this.moduleURIs=moduleURIs;
};
publ.moduleName;
publ.moduleURIs;
});
// the global import entry point used by client code
imprt=function(name){
return mod.__imprt__(name);
};
mod.__registerModule__=function(modObj,modName){
if(modName!='jsolait'){
return mod.modules[modName]=modObj;
}
};
mod.registerModule=function(modObj,modName){
modName=modName===undefined?modObj.name:modName;
return mod.__registerModule__(modObj,modName);
};
// parses one printf-style specifier like %(key)-8.2f into its parts
// (note: 'percision' is the original author's spelling of precision)
var FormatSpecifier=function(s){
var s=s.match(/%(\(\w+\)){0,1}([ 0-]){0,1}(\+){0,1}(\d+){0,1}(\.\d+){0,1}(.)/);
if(s[1]){
this.key=s[1].slice(1,-1);
}else{
this.key=null;
}
this.paddingFlag=s[2];
if(this.paddingFlag==""){
this.paddingFlag=" ";}
this.signed=(s[3]=="+");
this.minLength=parseInt(s[4]);
if(isNaN(this.minLength)){
this.minLength=0;
}
if(s[5]){
this.percision=parseInt(s[5].slice(1,s[5].length));
}else{
this.percision=-1;
}
this.type=s[6];
};
// python-style %-formatting for strings, e.g. "%s=%d".format(k,v)
String.prototype.format=function(){
var sf=this.match(/(%(\(\w+\)){0,1}[ 0-]{0,1}(\+){0,1}(\d+){0,1}(\.\d+){0,1}[dibouxXeEfFgGcrs%])|([^%]+)/g);
if(sf){
if(sf.join("")!=this){
throw new mod.Exception("Unsupported formating string.");
}
}else{
throw new mod.Exception("Unsupported formating string.");
}
var rslt="";
var s;
var obj;
var cnt=0;
var frmt;
var sign="";
for(var i=0;i<sf.length;i++){
s=sf[i];
if(s=="%%"){
s="%";
}else if(s=="%s"){if(cnt>=arguments.length){
throw new mod.Exception("Not enough arguments for format string.");
}else{
obj=arguments[cnt];
cnt++;
}
if(obj===null){
obj="null";
}else if(obj===undefined){
obj="undefined";
}
s=obj.toString();
}else if(s.slice(0,1)=="%"){
// a full specifier: %(key) looks the value up in a single object arg
frmt=new FormatSpecifier(s);if(frmt.key){if((typeof arguments[0])=="object"&&arguments.length==1){
obj=arguments[0][frmt.key];
}else{
throw new mod.Exception("Object or associative array expected as formating value.");
}
}else{if(cnt>=arguments.length){
throw new mod.Exception("Not enough arguments for format string.");
}else{
obj=arguments[cnt];
cnt++;
}
}
if(frmt.type=="s"){if(obj===null){
obj="null";
}else if(obj===undefined){
obj="undefined";
}
s=obj.toString().pad(frmt.paddingFlag,frmt.minLength);
}else if(frmt.type=="c"){if(frmt.paddingFlag=="0"){
frmt.paddingFlag=" ";}
if(typeof obj=="number"){s=String.fromCharCode(obj).pad(frmt.paddingFlag,frmt.minLength);
}else if(typeof obj=="string"){
if(obj.length==1){s=obj.pad(frmt.paddingFlag,frmt.minLength);
}else{
throw new mod.Exception("Character of length 1 required.");
}
}else{
throw new mod.Exception("Character or Byte required.");
}
}else if(typeof obj=="number"){
// numeric conversions; handle sign separately so zero-padding works
if(obj<0){
obj=-obj;
sign="-";}else if(frmt.signed){
sign="+";}else{
sign="";
}
switch(frmt.type){
case "f":case "F":
if(frmt.percision>-1){
s=obj.toFixed(frmt.percision).toString();
}else{
s=obj.toString();
}
break;
case "E":case "e":
if(frmt.percision>-1){
s=obj.toExponential(frmt.percision);
}else{
s=obj.toExponential();
}
s=s.replace("e",frmt.type);
break;
case "b":s=obj.toString(2);
s=s.pad("0",frmt.percision);
break;
case "o":s=obj.toString(8);
s=s.pad("0",frmt.percision);
break;
case "x":s=obj.toString(16).toLowerCase();
s=s.pad("0",frmt.percision);
break;
case "X":s=obj.toString(16).toUpperCase();
s=s.pad("0",frmt.percision);
break;
default:s=parseInt(obj).toString();
s=s.pad("0",frmt.percision);
break;
}
if(frmt.paddingFlag=="0"){s=s.pad("0",frmt.minLength-sign.length);
}
s=sign+s;s=s.pad(frmt.paddingFlag,frmt.minLength);}else{
throw new mod.Exception("Number required.");
}
}
rslt+=s;
}
return rslt;
};
// left-pad (or right-pad when flag is '-') a string to len characters
String.prototype.pad=function(flag,len){
var s="";
if(flag=="-"){
var c=" ";
}else{
var c=flag;
}
for(var i=0;i<len-this.length;i++){
s+=c;
}
if(flag=="-"){
s=this+s;
}else{
s+=this;
}
return s;
};
// indent every line of a multi-line string by the given number of spaces
String.prototype.indent=function(indent){
var out=[];
var s=this.split('\n');
for(var i=0;i<s.length;i++){
out.push(' '.mul(indent)+s[i]);
}
return out.join('\n');
};
// repeat this string l times (Array join trick)
String.prototype.mul=function(l){
var a=new Array(l+1);
return a.join(this);
};
mod.test=function(){
};
});
// legacy alias kept for code written against the older jsolait API
importModule=imprt
"""Download, verify, unpack and patch the jsolait javascript library.

Fetches a pinned jsolait release zip, checks its md5 signature, extracts
it into ./src, ./doc, ./lib and ./libws, then patches jsolait.js and
lib/jsonrpc.js so they work as a Zope resource directory with jsonserver
content types.  Python 2 only (print statement, md5/urllib modules, file()).
"""
import os, md5, urllib
#get the current, working version of jsolait
#this one (a beta of 1.1) works as of now (3 Jun 05)
loc = 'http://jsolait.net/download/'
filename = 'jsolait.2005-11-15.small.zip'
#md5sum = '69014dddf37fb7cb7b88b5eac84d1569'
#md5sum = '475b7a505d6ab911b25818831244bd43'
md5sum = 'c21c32a7a8756a35a0e48a30b710b3e1'
fileurl = loc + filename
#Assure the folders are there.
for directory in ['src','doc','lib','libws']:
    if not os.path.exists(directory):
        os.mkdir(directory)
zippedfile = os.path.join('src',filename)
#get file unless it is already there
if not os.path.exists(zippedfile):
    print "retrieving %s" % fileurl
    urllib.urlretrieve(fileurl,zippedfile)
else:
    print "%s exists; reprocessing. " % zippedfile
    print "to retrieve again, delete or rename %s." % zippedfile
#check file signature
print "checking md5"
filedata = file(zippedfile,'r').read()
check = md5.new(filedata).hexdigest()
if not check == md5sum:
    raise ValueError('md5 sums do not match')
#got the file; now, unzip it.
import zipfile
print "unzipping %s" % zippedfile
# NOTE(review): 'zip' shadows the builtin; harmless in this script
zip = zipfile.ZipFile(zippedfile,'r')
filesList = zip.namelist()
#put the files in the folders
for k in filesList:
    if not k.endswith('/'):
        # route each archive member into a folder by its path component
        f = os.path.split(k)
        #print f
        if 'doc' in f[0]:
            file(os.path.join('doc',f[1]),'wb').write(zip.read(k))
        elif 'libws' in f[0]:
            file(os.path.join('libws',f[1]),'wb').write(zip.read(k))
        elif 'lib' in f[0]:
            file(os.path.join('lib',f[1]),'wb').write(zip.read(k))
        else:
            file(f[-1],'wb').write(zip.read(k))
# patch to modify paths in init.js
# init.js is no more, replaced by jsolait.js, so fix those paths
linesep = os.linesep
mfile = 'jsolait.js'
print "patching %s" % mfile
t = file(mfile,'U')
lines = t.readlines()
t.close()
t = file(mfile,'w')
# NOTE(review): moda and modb appear to be unused leftovers — confirm
# before removing.
moda = False
modb = False
lineadded = False
for k in lines:
    d = k.rstrip()
    #set the installPath to the resource directory name
    if d.find('mod.baseURI="./jsolait"') >= 0:
        s = 'mod.baseURI = "/++resource++jsolait";'
    #this one works around some javascript clients not using js if it's already in cache
    #OK, we load the js each time it's used, but it still works if you refresh the page
    #in konqueror or safari.
    #elif d.find('xmlhttp.open("GET", url, false);') >= 0:
    #    s = 'var d = new Date();\nxmlhttp.open("GET", url +"?m="+d.getTime(), false);'
    #elif d.find('xmlhttp.open("GET", uri, false);') >= 0:
    #    s = 'var d = new Date();\nxmlhttp.open("GET", uri +"?m="+d.getTime(), false);'
    #elif d.find('xmlhttp.open("GET",uri,false);') >= 0:
    #    s = 'var d=new Date();%sxmlhttp.open("GET",uri +"?m="+d.getTime(),false);' % linesep
    elif d.find('baseURI)s') > 0:
        # drop the leading '/' from module URI templates; also insert the
        # pythonkw entry once, right after the first such line
        s = d.replace('/','',1)
        if lineadded == False:
            #add a line for pythonkw
            s = s + linesep + 'pythonkw:"%(baseURI)slib/pythonkw.js",'
            lineadded = True
    elif d.find('if(xmlhttp.status==200') == 0:
        # accept status 0/null so file:// and local resources load
        s = 'if(xmlhttp.status==200||xmlhttp.status==0||xmlhttp.status==null){'
    else:
        s = d
    t.write('%s%s' % (s,linesep))
t.write('importModule=imprt'+linesep)
t.close()
#for compat with prev version, copy jsolait.js to init.js
t = file(mfile,'r')
z = t.readlines()
t.close()
f = file('init.js','w')
for k in z:
    f.write(k)
f.close()
#patch text/plain and text/xml to text/x-json
mfile = 'jsonrpc.js'
os.chdir('lib')
print "patching %s" % mfile
t = file(mfile,'U')
lines = t.readlines()
t.close()
t = file(mfile,'w')
for k in lines:
    d = k.rstrip()
    if d.find('text/plain') >= 0:
        s = d.replace('text/plain','application/json-rpc')
    elif d.find('text/xml') >= 0:
        s = d.replace('text/xml','application/json-rpc')
    elif d.find('ImportFailed(') > 0 and not d.endswith(';'):
        # add missing semicolons so the patched file minifies safely
        s = d + ';'
    else:
        s = d
    t.write('%s%s' % (s,linesep))
t.close()
os.chdir('..')
print "done."
print "original zip file is %s" % zippedfile
Class=function(name,bases,classScope){
var args=[];
for(var i=0;i<arguments.length;i++){
args[i]=arguments[i];
}
classScope=args.pop();
if((args.length>0)&&(typeof args[0]=='string')){
name=args.shift();
}else{
name="anonymous";
}
var bases=args;
var __class__={__isArray__:false,
__name__:name,
__bases__:bases,
__id__:Class.__idcount__++,
__hash__:function(){
return this.__id__;
},
__str__:function(){
return "[class %s]".format(this.__name__);
}
};
var baseProtos=[];var proto;if(bases.length==0){proto={};
proto.__str__=function(){
return "[%s %s]".format(this.__class__.prototype.__call__===undefined?'object':'callable',this.__class__.__name__);
};
proto.toString=proto.__str__;
__class__.__bases__=[Object];
}else{var baseProto;
for(var i=0;i<bases.length;i++){
var baseClass=bases[i];
baseProtos.push(baseClass.prototype);
if(baseClass.__createProto__!==undefined){
baseProto=baseClass.__createProto__(bases);
}else{
baseProto=new baseClass(Class);
}
__class__.__isArray__=__class__.__isArray__||baseClass.__isArray__;
if(i==0){proto=baseProto;
}else{for(var key in baseProto){
if(proto[key]===undefined){
proto[key]=baseProto[key];
}
}
}
for(var key in baseClass){
if((key!='prototype')&&(__class__[key]===undefined)){
__class__[key]=baseClass[key];
}
}
}
proto.toString=proto.__str__;
}
if(proto.__hash__===undefined){
proto.__hash__=function(){
if(this.__id__===undefined){
this.__id__=Class.__idcount__++;
}
return this.__id__;
};
}
proto.__class__=__class__;
var privId='__priv__'+__class__.__id__;
if(classScope.length-1>baseProtos.length){
classScope.apply(this,[proto,privId].concat(baseProtos));
}else{
classScope.apply(this,[proto].concat(baseProtos));
}
if(proto.__call__){
var NewClass=function(calledBy){
if(calledBy!==Class){
var rslt=function(){
return rslt.__call__.apply(rslt,arguments);
};
var privId='__priv__'+arguments.callee.__id__;
rslt[privId]={};
var proto=arguments.callee.prototype;
for(var n in proto){
rslt[n]=proto[n];
}
rslt.constructor=arguments.callee;
rslt.toString=proto.__str__;
if(rslt.__init__){
rslt.__init__.apply(rslt,arguments);
}
return rslt;
}
};
}else if(__class__.__isArray__){
var NewClass=function(calledBy){
if(calledBy!==Class){
rslt=[];
var privId='__priv__'+arguments.callee.__id__;
rslt[privId]={};
var proto=arguments.callee.prototype;
for(var n in proto){
rslt[n]=proto[n];
}
rslt.constructor=proto;
rslt.toString=proto.__str__;
if(rslt.__init__){
rslt.__init__.apply(rslt,arguments);
}else{if(arguments.lengt==1){
rslt.length=arguments[0];
}else{
for(var i=0;i<arguments.length;i++){
rslt.push(arguments[i]);
}
}
}
return rslt;
}
};
}else{
var NewClass=function(calledBy){
if(calledBy!==Class){
var privId='__priv__'+arguments.callee.__id__;
this[privId]={};
if(this.__init__){
this.__init__.apply(this,arguments);
}}
};
}
proto.constructor=NewClass;
proto.__class__=NewClass;NewClass.prototype=proto;
for(var key in __class__){
NewClass[key]=__class__[key];
}
NewClass.toString=__class__.__str__;
return NewClass;
};Class.__idcount__=0;
Class.toString=function(){
return "[object Class]";
};
Class.__createProto__=function(){throw "Can't use Class as a base class.";
};
Array.__isArray__=true;
Array.__str__=Array.toString=function(){return "[class Array]";};
Array.__createProto__=function(){var r=[];r.__str__=Array.prototype.toString;return r;};
Object.__str__=Object.toString=function(){return "[class Object]";};
Function.__createProto__=function(){throw "Cannot inherit from Function. implement the callabel interface instead using YourClass::__call__.";};
// Module(): creates and registers a named module.  moduleScope is executed
// with the new module object as both `this` and its only argument; afterwards
// every function found on the module gets a __name__ attribute.
Module=function(name,version,moduleScope){
var newMod={};
newMod.name=name;
newMod.version=version;
// Module.currentURI is set by the import machinery while a module's
// source is being evaluated (see mod.__imprt__ in the jsolait module).
newMod.__sourceURI__=Module.currentURI;
newMod.toString=function(){
return "[module '%s' version: %s]".format(this.name,this.version);
};
// per-module Exception subclass so errors can report their module
newMod.Exception=Class(Module.Exception,function(publ,supr){
publ.module=newMod;
});
try{moduleScope.call(newMod,newMod);
}catch(e){
throw new Module.ModuleScopeExecFailed(newMod,e);
}
for(var n in newMod){
var obj=newMod[n];
if(typeof obj=='function'){
obj.__name__=n;
}
}
jsolait.registerModule(newMod);
return newMod;
};
Module.toString=function(){
return "[object Module]";
};
Module.__createProto__=function(){throw "Can't use Module as a base class.";
};
// Base exception class for all jsolait modules; carries a message and an
// optional nested `trace` exception for chained error reports.
Module.Exception=Class("Exception",function(publ){
publ.__init__=function(msg,trace){
this.name=this.constructor.__name__;
this.message=''+msg;
this.trace=trace;
};
publ.__str__=function(){
var s="%s %s".format(this.name,this.module);
return s;
};
// Render this exception and its chained causes as an indented trace.
publ.toTraceString=function(indent){
indent=indent==null?0:indent;
var s="%s in %s:\n%s".format(this.name,this.module,this.message.indent(4)).indent(indent);
if(this.trace){
if(this.trace.toTraceString){
s+=('\n\nbecause:\n'+this.trace.toTraceString(indent+4));
}else{
s+=(this.trace+'\n').indent(indent+4);
}
}
return s;
};
publ.name;publ.message;
publ.module="jsolait";
publ.trace;});
// Raised when executing a module's scope function fails.
Module.ModuleScopeExecFailed=Class("ModuleScopeExecFailed",Module.Exception,function(publ,supr){
publ.__init__=function(module,trace){
supr.__init__.call(this,"Failed to run the module scope for %s".format(module),trace);
this.failedModule=module;
};
publ.module;
});
// The jsolait core module: module registry, synchronous loader and import.
Module("jsolait","$Revision: 80 $",function(mod){
jsolait=mod;
mod.modules={};
// URIs (relative to baseURI) of the modules shipped with jsolait
mod.knownModuleURIs={codecs:"%(baseURI)slib/codecs.js",
pythonkw:"%(baseURI)slib/pythonkw.js",
crypto:"%(baseURI)slib/crypto.js",
dom:"%(baseURI)slib/dom.js",
forms:"%(baseURI)slib/forms.js",
iter:"%(baseURI)slib/iter.js",
jsonrpc:"%(baseURI)slib/jsonrpc.js",
lang:"%(baseURI)slib/lang.js",
sets:"%(baseURI)slib/sets.js",
testing:"%(baseURI)slib/testing.js",
urllib:"%(baseURI)slib/urllib.js",
xml:"%(baseURI)slib/xml.js",
xmlrpc:"%(baseURI)slib/xmlrpc.js"};
mod.moduleSearchURIs=[".","%(baseURI)slib"];
// patched by install_jsolait.py: serve modules from this zope3 resource dir
mod.baseURI = "/++resource++jsolait";
// Create an XMLHttpRequest, falling back through the IE ActiveX ids.
var getHTTP=function(){
var obj;
try{obj=new XMLHttpRequest();
}catch(e){
try{obj=new ActiveXObject("Msxml2.XMLHTTP.4.0");
}catch(e){
try{obj=new ActiveXObject("Msxml2.XMLHTTP");
}catch(e){
try{obj=new ActiveXObject("microsoft.XMLHTTP");}catch(e){
throw new mod.Exception("Unable to get an HTTP request object.");
}
}}
}
return obj;
};
// Synchronously GET uri and return its body as a String object with the
// source URI attached; status 0/null is accepted (file:// style responses).
mod.loadURI=function(uri,headers){
headers=(headers!==undefined)?headers:[];
try{
var xmlhttp=getHTTP();
xmlhttp.open("GET",uri,false);
for(var i=0;i<headers.length;i++){
xmlhttp.setRequestHeader(headers[i][0],headers[i][1]);}
xmlhttp.send("");
}catch(e){
throw new mod.LoadURIFailed(uri,e);
}
if(xmlhttp.status==200||xmlhttp.status==0||xmlhttp.status==null){
var s=new String(xmlhttp.responseText);
s.__sourceURI__=uri;
return s;
}else{
throw new mod.LoadURIFailed(uri,new mod.Exception("Server did not respond with 200"));
}
};
mod.LoadURIFailed=Class(mod.Exception,function(publ,supr){
publ.__init__=function(sourceURI,trace){
supr.__init__.call(this,"Failed to load file: '%s'".format(sourceURI.indent(2)),trace);
this.sourceURI=sourceURI;
};
publ.sourceURI;
});
// Import a module by name: return it if already loaded, otherwise fetch
// its source (known URI first, then the search path) and evaluate it.
mod.__imprt__=function(name){
if(mod.modules[name]){return mod.modules[name];
}else{
var src,modPath;
if(mod.knownModuleURIs[name]!=undefined){
modPath=mod.knownModuleURIs[name].format(mod);
try{src=mod.loadURI(modPath);
}catch(e){
throw new mod.ImportFailed(name,[modPath],e);
}
}
if(src==null){var failedURIs=[];
for(var i=0;i<mod.moduleSearchURIs.length;i++){
modPath="%s/%s.js".format(mod.moduleSearchURIs[i].format(mod),name.split(".").join("/"));
try{
src=mod.loadURI(modPath);
break;
}catch(e){
failedURIs.push(e.sourceURI);
}
}
if(src==null){
throw new mod.ImportFailed(name,failedURIs);
}
}
// evaluate the source with Module.currentURI set so the new module
// records where it came from
try{var srcURI=src.__sourceURI__;
src='Module.currentURI="%s";\n%s\nModule.currentURI=null;\n'.format(src.__sourceURI__.replace(/\\/g,'\\\\'),src);
var f=new Function("",src);f();
}catch(e){
throw new mod.ImportFailed(name,[srcURI],e);
}
return mod.modules[name];}
};
mod.ImportFailed=Class(mod.Exception,function(publ,supr){
publ.__init__=function(moduleName,moduleURIs,trace){
supr.__init__.call(this,"Failed to import module: '%s' from:\n%s".format(moduleName,moduleURIs.join(',\n').indent(2)),trace);
this.moduleName=moduleName;
this.moduleURIs=moduleURIs;
};
publ.moduleName;
publ.moduleURIs;
});
// global import entry point used by client code
imprt=function(name){
return mod.__imprt__(name);
};
mod.__registerModule__=function(modObj,modName){
if(modName!='jsolait'){
return mod.modules[modName]=modObj;
}
};
mod.registerModule=function(modObj,modName){
modName=modName===undefined?modObj.name:modName;
return mod.__registerModule__(modObj,modName);
};
// Parse one %-style format specifier (e.g. "%(key)06.2f") into its
// parts: key, paddingFlag, signed, minLength, percision and type.
var FormatSpecifier=function(spec){
var m=spec.match(/%(\(\w+\)){0,1}([ 0-]){0,1}(\+){0,1}(\d+){0,1}(\.\d+){0,1}(.)/);
this.key=m[1]?m[1].slice(1,-1):null;
this.paddingFlag=(m[2]=="")?" ":m[2];
this.signed=(m[3]=="+");
var width=parseInt(m[4]);
this.minLength=isNaN(width)?0:width;
this.percision=m[5]?parseInt(m[5].slice(1)):-1;
this.type=m[6];
};
// printf-style formatting: "%s", "%d", "%(key)06.2f", etc.  Positional
// values are consumed left to right; %(key)X specifiers pull named values
// from a single object argument.
String.prototype.format=function(){
var sf=this.match(/(%(\(\w+\)){0,1}[ 0-]{0,1}(\+){0,1}(\d+){0,1}(\.\d+){0,1}[dibouxXeEfFgGcrs%])|([^%]+)/g);
if(sf){
if(sf.join("")!=this){
throw new mod.Exception("Unsupported formating string.");
}
}else{
throw new mod.Exception("Unsupported formating string.");
}
var rslt="";
var s;
var obj;
var cnt=0;
var frmt;
var sign="";
for(var i=0;i<sf.length;i++){
s=sf[i];
if(s=="%%"){
s="%";
}else if(s=="%s"){if(cnt>=arguments.length){
throw new mod.Exception("Not enough arguments for format string.");
}else{
obj=arguments[cnt];
cnt++;
}
if(obj===null){
obj="null";
}else if(obj===undefined){
obj="undefined";
}
s=obj.toString();
}else if(s.slice(0,1)=="%"){
// full specifier: parse it, fetch the value, then render by type
frmt=new FormatSpecifier(s);if(frmt.key){if((typeof arguments[0])=="object"&&arguments.length==1){
obj=arguments[0][frmt.key];
}else{
throw new mod.Exception("Object or associative array expected as formating value.");
}
}else{if(cnt>=arguments.length){
throw new mod.Exception("Not enough arguments for format string.");
}else{
obj=arguments[cnt];
cnt++;
}
}
if(frmt.type=="s"){if(obj===null){
obj="null";
}else if(obj===undefined){
obj="undefined";
}
s=obj.toString().pad(frmt.paddingFlag,frmt.minLength);
}else if(frmt.type=="c"){if(frmt.paddingFlag=="0"){
frmt.paddingFlag=" ";}
if(typeof obj=="number"){s=String.fromCharCode(obj).pad(frmt.paddingFlag,frmt.minLength);
}else if(typeof obj=="string"){
if(obj.length==1){s=obj.pad(frmt.paddingFlag,frmt.minLength);
}else{
throw new mod.Exception("Character of length 1 required.");
}
}else{
throw new mod.Exception("Character or Byte required.");
}
}else if(typeof obj=="number"){
// numeric conversions: split off the sign, render the digits in the
// requested base/precision, then apply padding
if(obj<0){
obj=-obj;
sign="-";}else if(frmt.signed){
sign="+";}else{
sign="";
}
switch(frmt.type){
case "f":case "F":
if(frmt.percision>-1){
s=obj.toFixed(frmt.percision).toString();
}else{
s=obj.toString();
}
break;
case "E":case "e":
if(frmt.percision>-1){
s=obj.toExponential(frmt.percision);
}else{
s=obj.toExponential();
}
s=s.replace("e",frmt.type);
break;
case "b":s=obj.toString(2);
s=s.pad("0",frmt.percision);
break;
case "o":s=obj.toString(8);
s=s.pad("0",frmt.percision);
break;
case "x":s=obj.toString(16).toLowerCase();
s=s.pad("0",frmt.percision);
break;
case "X":s=obj.toString(16).toUpperCase();
s=s.pad("0",frmt.percision);
break;
default:s=parseInt(obj).toString();
s=s.pad("0",frmt.percision);
break;
}
if(frmt.paddingFlag=="0"){s=s.pad("0",frmt.minLength-sign.length);
}
s=sign+s;s=s.pad(frmt.paddingFlag,frmt.minLength);}else{
throw new mod.Exception("Number required.");
}
}
rslt+=s;
}
return rslt;
};
// Pad this string to len characters.  A flag of "-" left-justifies (pads
// with spaces on the right); any other flag character is used as the fill
// character on the left.
String.prototype.pad=function(flag,len){
var fill=(flag=="-")?" ":flag;
var padding="";
var count=len-this.length;
for(var i=0;i<count;i++){
padding+=fill;
}
return (flag=="-")?(this+padding):(padding+this);
};
// Prefix every line of this string with `indent` spaces.
String.prototype.indent=function(indent){
var prefix=' '.mul(indent);
var rows=this.split('\n');
for(var i=0;i<rows.length;i++){
rows[i]=prefix+rows[i];
}
return rows.join('\n');
};
// Repeat this string l times (string "multiplication").
String.prototype.mul=function(count){
var slots=new Array(count+1);
return slots.join(this);
};
// placeholder self-test hook
mod.test=function(){
};
// end of the "jsolait" module scope
});
importModule=imprt | zif.jsonserver | /zif.jsonserver-0.6.tar.gz/zif.jsonserver-0.6/src/zif/jsonserver/jsolait/init.js | init.js |
==========================
jsolait javascript library
==========================
zif.jsonserver is now bundled with jsolait javascript library.
Documentation for the library is available at:
http://jsolait.net/wiki/documentation
jsolait will be accessible as a resource::
'/@@/jsolait/...'
Historical manual installation notes
------------------------------------
The following manual installation notes have been left for reference. You
NO LONGER need to manually install jsolait for the zif.jsonserver package::
* obtain the library
* extract it
* fix the paths in jsolait.js so that all .js files are accessible from
the /@@/jsolait resourceDirectory, i.e., this folder
* fix the submodule GET so that things still work after page refresh on
konqueror and safari (just add a ?s="[timestamp]" so the file gets
refetched instead of mouldering in cache)
* fix content-types in jsonrpc.js from text/plain or text/xml to text/x-json
Executing the following script in the jsolait folder will do this for you.
python install_jsolait.py
Note, the script uses urllib and writes files in this directory, so expect
it to fail if you do not have write privileges or an accessible internet
connection.
| zif.jsonserver | /zif.jsonserver-0.6.tar.gz/zif.jsonserver-0.6/src/zif/jsonserver/jsolait/README.txt | README.txt |
// sets module: a Set container in the style of Python's sets module.
Module("sets","$Revision: 80 $",function(mod){
// Raised by Set.remove() when the item is not present.
mod.ItemNotFoundInSet=Class(mod.Exception,function(publ,supr){
publ.set;
publ.item;
publ.__init__=function(set,item){
this.set=set;
this.item=item;
};
});
// A collection of unique items, keyed by __hash__() id or string value.
mod.Set=Class(function(publ,supr){
// Build a set from: multiple arguments, a single Array, a string (split
// into characters), or any object implementing __iter__().
publ.__init__=function(elem){
this.items={};
var elems=[];
if(arguments.length>1){
elems=arguments;
}else if(arguments.length==1){
elems=arguments[0];
if(elems instanceof Array){
}else if(typeof elems=="string"){
elems=elems.split("");
}else if(elems.__iter__){
// fixed: this branch referenced an undefined `iterable` variable and
// compared before assigning (`item=i.next()!==undefined` stored a
// boolean), so constructing from an iterable never worked.
var it=elems.__iter__();
var item;
while((item=it.next())!==undefined){
this.add(item);
}
return ;
}else{
throw new mod.Exception("Array,String or iterable object expected but found %s".format(elems));
}
}
for(var i=0;i<elems.length;i++){
this.add(elems[i]);
}
};
// Storage key for an item: objects providing __hash__ are keyed by their
// hash id ('@'), everything else by its string form ('#').
var keyFor=function(item){
return item.__hash__?('@'+item.__hash__()):('#'+item);
};
// Add item to the set and return it.
publ.add=function(item){
this.items[keyFor(item)]=item;
return item;
};
// Remove and return item; throws ItemNotFoundInSet if absent.
publ.remove=function(item){
var h=keyFor(item);
if(this.items[h]===undefined){
throw new mod.ItemNotFoundInSet(this,item);
}
item=this.items[h];
delete this.items[h];
return item;
};
// Remove item if present; returns the stored item or undefined.
publ.discard=function(item){
var h=keyFor(item);
item=this.items[h];
delete this.items[h];
return item;
};
// True if item is in the set.
publ.contains=function(item){
return(this.items[keyFor(item)]!==undefined);
};
// True if every item of this set is also in setObj.
publ.isSubSet=function(setObj){
for(var key in this.items){
if(!setObj.contains(this.items[key])){
return false;
}
}
return true;
};
// True if setObj is a subset of this set.
publ.isSuperSet=function(setObj){
return setObj.isSubSet(this);
};
// Two sets are equal when each is a subset of the other.
publ.equals=function(setObj){
return(this.isSubSet(setObj)&&setObj.isSubSet(this));
};
// Equality hook used by the class framework; only compares against other
// instances of the same class.
publ.__equals__=function(setObj){
return(setObj instanceof publ.constructor)?this.equals(setObj):false;
};
// Return a new set with the items of both sets.
publ.union=function(setObj){
var result=this.copy();
result.unionUpdate(setObj);
return result;
};
// Return a new set with the items present in both sets.
publ.intersection=function(setObj){
var result=new mod.Set();
for(var key in this.items){
var item=this.items[key];
if(setObj.contains(item)){
result.add(item);
}
}
return result;
};
// Return a new set with the items of this set that are not in setObj.
publ.difference=function(setObj){
var result=new mod.Set();
for(var key in this.items){
var item=this.items[key];
if(!setObj.contains(item)){
result.add(item);
}
}
return result;
};
// Return a new set with items found in exactly one of the two sets.
publ.symmDifference=function(setObj){
var result=this.difference(setObj);
return result.unionUpdate(setObj.difference(this));
};
// In-place union: add all of setObj's items; returns this.
publ.unionUpdate=function(setObj){
for(var n in setObj.items){
this.add(setObj.items[n]);
}
return this;
};
// In-place intersection: drop items not also in setObj; returns this.
// NOTE(review): this (and differenceUpdate) deletes entries from
// this.items while enumerating it with for-in — relies on the engine
// tolerating delete-during-enumeration; confirm on target browsers.
publ.intersectionUpdate=function(setObj){
for(var n in this.items){
var item=this.items[n];
if(setObj.contains(item)==false){
this.remove(item);
}
}
return this;
};
// In-place difference: drop items that are in setObj; returns this.
publ.differenceUpdate=function(setObj){
for(var n in this.items){
var item=this.items[n];
if(setObj.contains(item)){
this.remove(item);
}
}
return this;
};
// In-place symmetric difference; returns this.
publ.symmDifferenceUpdate=function(setObj){
var union=setObj.difference(this);
this.differenceUpdate(setObj);
return this.unionUpdate(union);
};
// Return a shallow copy of this set.
publ.copy=function(){
var ns=new mod.Set();
return ns.unionUpdate(this);
};
// Remove all items.
publ.clear=function(){
this.items={};
};
publ.toArray=function(){
var a=[];
for(var n in this.items){
a.push(this.items[n]);
}
return a;
};
publ.toString=function(){
var items=[];
for(var n in this.items){
items.push(this.items[n]);
}
return "{"+items.join(",")+"}";
};
});
// Manual smoke test: exercises the set algebra via the testing module.
mod.__main__=function(){
var s1=new mod.Set("0123456");
var s2=new mod.Set("3456789");
var testing=imprt('testing');
print(testing.test(function(){
testing.assertEquals("checking %s | %s".format(s1,s2),new mod.Set("0123456789"),s1.union(s2));
testing.assertEquals("checking %s | %s".format(s2,s1),
new mod.Set("0123456789"),s2.union(s1));
testing.assertEquals("checking %s & %s".format(s1,s2),
new mod.Set("3456"),s1.intersection(s2));
testing.assertEquals("checking %s & %s".format(s2,s1),
new mod.Set("3456"),s2.intersection(s1));
testing.assertEquals("checking %s - %s".format(s1,s2),
new mod.Set("012"),s1.difference(s2));
testing.assertEquals("checking %s - %s".format(s2,s1),
new mod.Set("789"),s2.difference(s1));
testing.assertEquals("checking %s ^ %s".format(s1,s2),
new mod.Set("012789"),s1.symmDifference(s2));
testing.assertEquals("checking %s ^ %s".format(s2,s1),
new mod.Set("012789"),s2.symmDifference(s1));
}));
};
}); | zif.jsonserver | /zif.jsonserver-0.6.tar.gz/zif.jsonserver-0.6/src/zif/jsonserver/jsolait/lib/sets.js | sets.js |
// xmlrpc module: client-side XML-RPC marshalling, parsing and proxies.
Module("xmlrpc","$Revision: 80 $",function(mod){
var xmlext=imprt("xml");
var urllib=imprt("urllib");
// Raised when the HTTP layer returns anything but status 200.
mod.InvalidServerResponse=Class(mod.Exception,function(publ,supr){
publ.__init__=function(status){
supr.__init__.call(this,"The server did not respond with a status 200 (OK) but with: "+status);
this.status=status;
};
publ.status;
});
// Raised when a response cannot be parsed as XML-RPC; keeps the raw xml.
mod.MalformedXmlRpc=Class(mod.Exception,function(publ,supr){
publ.__init__=function(msg,xml,trace){
supr.__init__.call(this,msg,trace);
this.xml=xml;
};
publ.xml;
});
// An XML-RPC fault returned by the server.
mod.Fault=Class(mod.Exception,function(publ,supr){
publ.__init__=function(faultCode,faultString){
supr.__init__.call(this,"XML-RPC Fault: "+faultCode+"\n\n"+faultString);
this.faultCode=faultCode;
this.faultString=faultString;
};
publ.faultCode;
publ.faultString;
});
// Marshall obj to an XML-RPC value body.  Objects providing toXmlRpc()
// serialize themselves; anything else is treated as a struct of its
// non-function properties.  null/undefined become <nil/>, matching what
// this module's own parseValue() produces for a <nil> element.
mod.marshall=function(obj){
if(obj===null||obj===undefined){
// fixed: marshalling null/undefined used to throw on reading obj.toXmlRpc
return "<nil/>";
}
if(obj.toXmlRpc!=null){
return obj.toXmlRpc();
}else{
var s="<struct>";
for(var attr in obj){
if(typeof obj[attr]!="function"){
s+="<member><name>"+attr+"</name><value>"+mod.marshall(obj[attr])+"</value></member>";
}
}
s+="</struct>";
return s;
}
};
// Parse an XML-RPC document from a string.
mod.unmarshall=function(xml){
try{var doc=xmlext.parseXML(xml);
}catch(e){
throw new mod.MalformedXmlRpc("The server's response could not be parsed.",xml,e);
}
var rslt=mod.unmarshallDoc(doc,xml);
doc=null;
return rslt;
};
// Parse an already-parsed DOM document.  Returns the response value for a
// methodResponse (or throws the Fault), or [methodName, params] for a
// methodCall.
mod.unmarshallDoc=function(doc,xml){
try{
var node=doc.documentElement;
if(node==null){throw new mod.MalformedXmlRpc("No documentElement found.",xml);
}
switch(node.tagName){
case "methodResponse":
return parseMethodResponse(node);
case "methodCall":
return parseMethodCall(node);
default:throw new mod.MalformedXmlRpc("'methodCall' or 'methodResponse' element expected.\nFound: '"+node.tagName+"'",xml);
}
}catch(e){
if(e instanceof mod.Fault){throw e;
}else{
throw new mod.MalformedXmlRpc("Unmarshalling of XML failed.",xml,e);}
}
};
// Parse <methodResponse>: returns its single param value, or throws the
// contained Fault.
var parseMethodResponse=function(node){
try{
for(var i=0;i<node.childNodes.length;i++){
var child=node.childNodes.item(i);
if(child.nodeType==1){
switch(child.tagName){
case "fault":throw parseFault(child);
case "params":
var params=parseParams(child);
if(params.length==1){return params[0];
}else{
throw new mod.MalformedXmlRpc("'params' element inside 'methodResponse' must have exactly ONE 'param' child element.\nFound: "+params.length);
}
default:
throw new mod.MalformedXmlRpc("'fault' or 'params' element expected.\nFound: '"+child.tagName+"'");}
}
}
throw new mod.MalformedXmlRpc("No child elements found.");}catch(e){
if(e instanceof mod.Fault){
throw e;
}else{
throw new mod.MalformedXmlRpc("'methodResponse' element could not be parsed.",null,e);}
}
};
// Parse <methodCall>: returns [methodName, params].
var parseMethodCall=function(node){
try{
var methodName=null;
var params=new Array();for(var i=0;i<node.childNodes.length;i++){
var child=node.childNodes.item(i);
if(child.nodeType==1){
switch(child.tagName){
case "methodName":
methodName=new String(child.firstChild.nodeValue);
break;
case "params":
params=parseParams(child);
break;
default:
throw new mod.MalformedXmlRpc("'methodName' or 'params' element expected.\nFound: '"+child.tagName+"'");}
}
}
if(methodName==null){
throw new mod.MalformedXmlRpc("'methodName' element expected.");
}else{
return new Array(methodName,params);
}
}catch(e){
throw new mod.MalformedXmlRpc("'methodCall' element could not be parsed.",null,e);}
};
// Parse <params> into an array of values.
var parseParams=function(node){
try{
var params=new Array();
for(var i=0;i<node.childNodes.length;i++){
var child=node.childNodes.item(i);
if(child.nodeType==1){
switch(child.tagName){
case "param":
params.push(parseParam(child));
break;
default:
throw new mod.MalformedXmlRpc("'param' element expected.\nFound: '"+child.tagName+"'");}
}
}
return params;
}catch(e){
throw new mod.MalformedXmlRpc("'params' element could not be parsed.",null,e);}
};
// Parse a single <param>, returning its <value>.
var parseParam=function(node){
try{
for(var i=0;i<node.childNodes.length;i++){
var child=node.childNodes.item(i);
if(child.nodeType==1){
switch(child.tagName){
case "value":
return parseValue(child);
default:
throw new mod.MalformedXmlRpc("'value' element expected.\nFound: '"+child.tagName+"'");}
}
}
throw new mod.MalformedXmlRpc("'value' element expected.But none found.");
}catch(e){
throw new mod.MalformedXmlRpc("'param' element could not be parsed.",null,e);}
};
// Parse a <value> element into the corresponding JavaScript value.  A
// value element with no typed child element is treated as a string.
var parseValue=function(node){
try{
for(var i=0;i<node.childNodes.length;i++){
var child=node.childNodes.item(i);
if(child.nodeType==1){
switch(child.tagName){
case "string":
var s="";
for(var j=0;j<child.childNodes.length;j++){
s+=new String(child.childNodes.item(j).nodeValue);
}
return s;
case "int":
case "i4":
case "double":
return(child.firstChild)?Number(child.firstChild.nodeValue):0;
case "boolean":
return Boolean(isNaN(parseInt(child.firstChild.nodeValue))?(child.firstChild.nodeValue=="true"):parseInt(child.firstChild.nodeValue));
case "base64":
return parseBase64(child);
case "dateTime.iso8601":
return parseDateTime(child);
case "array":
return parseArray(child);
case "struct":
return parseStruct(child);
case "nil":return null;
default:
throw new mod.MalformedXmlRpc("'string','int','i4','double','boolean','base64','dateTime.iso8601','array' or 'struct' element expected.\nFound: '"+child.tagName+"'");}
}
}
if(node.firstChild){
var s="";
for(var j=0;j<node.childNodes.length;j++){
s+=new String(node.childNodes.item(j).nodeValue);
}
return s;
}else{
return "";
}
}catch(e){
throw new mod.MalformedXmlRpc("'value' element could not be parsed.",null,e);}
};
// Parse <base64>: decode via the codecs module's String.decode extension.
var parseBase64=function(node){
try{
var s=node.firstChild.nodeValue;
return s.decode("base64");
}catch(e){
throw new mod.MalformedXmlRpc("'base64' element could not be parsed.",null,e);}
};
// Parse <dateTime.iso8601> into a Date (value interpreted as UTC).
var parseDateTime=function(node){
try{
if(/^(\d{4})-?(\d{2})-?(\d{2})T(\d{2}):?(\d{2}):?(\d{2})/.test(node.firstChild.nodeValue)){
return new Date(Date.UTC(RegExp.$1,RegExp.$2-1,RegExp.$3,RegExp.$4,RegExp.$5,RegExp.$6));
}else{throw new mod.MalformedXmlRpc("Could not convert the given date.");
}
}catch(e){
throw new mod.MalformedXmlRpc("'dateTime.iso8601' element could not be parsed.",null,e);}
};
// Parse <array>: delegates to its <data> child.
var parseArray=function(node){
try{
for(var i=0;i<node.childNodes.length;i++){
var child=node.childNodes.item(i);
if(child.nodeType==1){
switch(child.tagName){
case "data":
return parseData(child);
default:
throw new mod.MalformedXmlRpc("'data' element expected.\nFound: '"+child.tagName+"'");}
}
}
throw new mod.MalformedXmlRpc("'data' element expected. But not found.");}catch(e){
throw new mod.MalformedXmlRpc("'array' element could not be parsed.",null,e);}
};
// Parse <data>: the sequence of <value> children of an array.
var parseData=function(node){
try{
var rslt=new Array();
for(var i=0;i<node.childNodes.length;i++){
var child=node.childNodes.item(i);
if(child.nodeType==1){
switch(child.tagName){
case "value":
rslt.push(parseValue(child));
break;
default:
throw new mod.MalformedXmlRpc("'value' element expected.\nFound: '"+child.tagName+"'");}
}}
return rslt;
}catch(e){
throw new mod.MalformedXmlRpc("'data' element could not be parsed.",null,e);}
};
// Parse a <struct> element into a plain object; members whose name is
// empty are skipped.
var parseStruct=function(node){
try{
var struct=new Object();
for(var i=0;i<node.childNodes.length;i++){
var child=node.childNodes.item(i);
if(child.nodeType==1){
switch(child.tagName){
case "member":
var member=parseMember(child);if(member[0]!=""){
struct[member[0]]=member[1];
}
break;
default:
// fixed: error message wrongly said 'data' (copy/paste from parseArray)
throw new mod.MalformedXmlRpc("'member' element expected.\nFound: '"+child.tagName+"'");}
}
}
return struct;
}catch(e){
throw new mod.MalformedXmlRpc("'struct' element could not be parsed.",null,e);}
};
// Parse a struct <member>, returning [name, value].
var parseMember=function(node){
try{
var name="";
var value=null;
for(var i=0;i<node.childNodes.length;i++){
var child=node.childNodes.item(i);
if(child.nodeType==1){
switch(child.tagName){
case "value":
value=parseValue(child);break;
case "name":
if(child.hasChildNodes()){
name=new String(child.firstChild.nodeValue);
}
break;
default:
throw new mod.MalformedXmlRpc("'value' or 'name' element expected.\nFound: '"+child.tagName+"'");}
}
}
return[name,value];
}catch(e){
throw new mod.MalformedXmlRpc("'member' element could not be parsed.",null,e);}
};
// Parse <fault> into a mod.Fault exception object (returned, not thrown).
var parseFault=function(node){
try{
for(var i=0;i<node.childNodes.length;i++){
var child=node.childNodes.item(i);
if(child.nodeType==1){
switch(child.tagName){
case "value":
var flt=parseValue(child);return new mod.Fault(flt.faultCode,flt.faultString);
default:
throw new mod.MalformedXmlRpc("'value' element expected.\nFound: '"+child.tagName+"'");}
}
}
throw new mod.MalformedXmlRpc("'value' element expected. But not found.");}catch(e){
throw new mod.MalformedXmlRpc("'fault' element could not be parsed.",null,e);}
};
// A callable proxy for one remote XML-RPC method.  Calling it with a
// trailing Function argument performs the request asynchronously.
mod.XMLRPCMethod=Class(function(publ){
// POST the request body; with a callback the request is asynchronous.
var postData=function(url,user,pass,data,callback){
if(callback==null){
var rslt=urllib.postURL(url,user,pass,data,[["Content-Type","text/xml"]]);
return rslt;
}else{
return urllib.postURL(url,user,pass,data,[["Content-Type","text/xml"]],callback);
}
};
// Turn an HTTP response into the unmarshalled return value (or throw).
var handleResponse=function(resp){
var status=null;
try{status=resp.status;
}catch(e){
}
if(status==200){
var respDoc=null;
try{
respDoc=resp.responseXML;
}catch(e){
}
var respTxt="";try{respTxt=resp.responseText;
}catch(e){
}
if(respDoc==null){
if(respTxt==null||respTxt==""){
throw new mod.MalformedXmlRpc("The server responded with an empty document.","");
}else{
return mod.unmarshall(respTxt);
}
}else{return mod.unmarshallDoc(respDoc,respTxt);
}
}else{
throw new mod.InvalidServerResponse(status);
}
};
// Build the <methodCall> request document for the given arguments.
var getXML=function(methodName,args){
var data='<?xml version="1.0"?><methodCall><methodName>'+methodName+'</methodName>';
if(args.length>0){
data+="<params>";
for(var i=0;i<args.length;i++){
data+='<param><value>'+mod.marshall(args[i])+'</value></param>';
}
data+='</params>';
}
data+='</methodCall>';
return data;
};
publ.__init__=function(url,methodName,user,pass){
this.methodName=methodName;
this.url=url;
this.user=user;
this.password=pass;
};
// Synchronous when called normally; if the last argument is a Function
// it is used as callback(result, exception) and the call is async.
publ.__call__=function(){
if(!(arguments[arguments.length-1] instanceof Function)){
var data=getXML(this.methodName,arguments);
var resp=postData(this.url,this.user,this.password,data);
return handleResponse(resp);
}else{
var args=new Array();
for(var i=0;i<arguments.length;i++){
args.push(arguments[i]);
}
var cb=args.pop();
var data=getXML(this.methodName,args);
return postData(this.url,this.user,this.password,data,function(resp){
var rslt=null;
var exc=null;
try{
rslt=handleResponse(resp);
}catch(e){
exc=e;
}
try{cb(rslt,exc);
}catch(e){
}
args=null;
resp=null;
});
}
};
// Package this method name plus arguments for a system.multicall request.
publ.toMulticall=function(){
var multiCallable=new Object();
multiCallable.methodName=this.methodName;
var params=[];
for(var i=0;i<arguments.length;i++){
params[i]=arguments[i];
}
multiCallable.params=params;
return multiCallable;
};
publ.setAuthentication=function(user,pass){
this.user=user;
this.password=pass;
};
publ.methodName;
publ.url;
publ.user;
publ.password;
});
// A proxy exposing a remote service's methods as attributes.  Method names
// may be given explicitly; otherwise system.listMethods introspection is
// attempted (introspection failures are silently ignored).
mod.ServiceProxy=Class(function(publ){
publ.__init__=function(url,methodNames,user,pass){
if(methodNames instanceof Array){
if(methodNames.length>0){
var tryIntrospection=false;
}else{
var tryIntrospection=true;
}
}else{
// methodNames omitted: shift the credential arguments into place
pass=user;
user=methodNames;
methodNames=[];
var tryIntrospection=true;
}
this._url=url;
this._user=user;
this._password=pass;
this._addMethodNames(methodNames);
if(tryIntrospection){
try{this._introspect();
}catch(e){
}
}
};
// Create XMLRPCMethod attributes, building nested objects for dotted names.
publ._addMethodNames=function(methodNames){
for(var i=0;i<methodNames.length;i++){
var obj=this;
var names=methodNames[i].split(".");
for(var n=0;n<names.length-1;n++){
var name=names[n];
if(obj[name]){
obj=obj[name];
}else{
obj[name]=new Object();
obj=obj[name];
}
}
var name=names[names.length-1];
if(obj[name]){
}else{
var mth=new mod.XMLRPCMethod(this._url,methodNames[i],this._user,this._password);
obj[name]=mth;
this._methods.push(mth);
}
}
};
// Set credentials on the proxy and all of its method objects.
publ._setAuthentication=function(user,pass){
this._user=user;
this._password=pass;
for(var i=0;i<this._methods.length;i++){
this._methods[i].setAuthentication(user,pass);
}
};
publ._introspect=function(){
this._addMethodNames(["system.listMethods","system.methodHelp","system.methodSignature"]);
var m=this.system.listMethods();
this._addMethodNames(m);
};
publ._url;
publ._user;
publ._password;
publ._methods=new Array();
});
// backwards-compatible alias
mod.ServerProxy=mod.ServiceProxy;
// Serialize as an XML-RPC <string>.  '&' and '<' must be entity-escaped to
// keep the payload well-formed XML.
// fixed: the replacements had degenerated into no-ops (replacing '&' with
// '&'), producing malformed XML for strings containing '&' or '<'.
String.prototype.toXmlRpc=function(){
return "<string>"+this.replace(/&/g,"&amp;").replace(/</g,"&lt;")+"</string>";
};
// Serialize as <int> for integral values, <double> otherwise.
// NOTE(review): NaN matches neither comparison and falls through to
// false.toXmlRpc(), i.e. NaN is sent as <boolean>0</boolean> — confirm
// this is the intended behavior.
Number.prototype.toXmlRpc=function(){
if(this==parseInt(this)){
return "<int>"+this+"</int>";
}else if(this==parseFloat(this)){
return "<double>"+this+"</double>";
}else{
return false.toXmlRpc();
}
};
// Serialize as an XML-RPC <boolean> (1 for true, 0 for false).
Boolean.prototype.toXmlRpc=function(){
var digit=(this==true)?"1":"0";
return "<boolean>"+digit+"</boolean>";
};
// Serialize as <dateTime.iso8601> in the form YYYYMMDDTHH:MM:SS (UTC).
Date.prototype.toXmlRpc=function(){
// zero-pad s to the width of p by prepending p and keeping the tail
var padd=function(s,p){
s=p+s;
return s.substring(s.length-p.length);
};
var y=padd(this.getUTCFullYear(),"0000");
var m=padd(this.getUTCMonth()+1,"00");
var d=padd(this.getUTCDate(),"00");
var h=padd(this.getUTCHours(),"00");
var min=padd(this.getUTCMinutes(),"00");
var s=padd(this.getUTCSeconds(),"00");
var isodate=y+m+d+"T"+h+":"+min+":"+s;
return "<dateTime.iso8601>"+isodate+"</dateTime.iso8601>";
};
Array.prototype.toXmlRpc=function(){
    /// Serializes the array as an XML-RPC <array>; every item is
    /// marshalled recursively and wrapped in a <value> element.
    var values=[];
    for(var i=0;i<this.length;i++){
        values.push("<value>"+mod.marshall(this[i])+"</value>");
    }
    return "<array><data>"+values.join("")+"</data></array>";
};
/// Self-test: round-trips sample data through a public echo service.
/// Performs network I/O; only useful when run interactively.
mod.__main__=function(){
    var s=new mod.ServiceProxy("http://jsolait.net/test.py",['echo']);
    print("creating ServiceProxy object using introspection for method construction...\n");
    print("%s created\n".format(s));
    print("creating and marshalling test data:\n");
    var o=[1.234,5,{a:"Hello & < ",b:new Date()}];
    print(mod.marshall(o));
    print("\ncalling echo() on remote service...\n");
    var r=s.echo(o);
    print("service returned data(marshalled again):\n");
    print(mod.marshall(r));
};
}); | zif.jsonserver | /zif.jsonserver-0.6.tar.gz/zif.jsonserver-0.6/src/zif/jsonserver/jsolait/lib/xmlrpc.js | xmlrpc.js |
Module("forms","$Revision: 80 $",function(mod){
mod.Form=Class(function(publ,supr){
    /// Client-side representation of an HTML form that can be encoded,
    /// submitted (page reload) or submitted via XMLHttpRequest.
    publ.elements=[];
    publ.action="";
    publ.method="GET";
    publ.__init__=function(action,method){
        this.elements=[];
        this.action=(action==null)?"":action;
        this.method=(method==null)?"GET":method;
    };
    /// Sets the value of the named element, creating it if needed;
    /// returns the Element.
    publ.set=function(name,value){
        var f=null;
        // BUG FIX: the loop condition was `i<this.elements` (comparing
        // against the array object itself), so an existing element was
        // never found and every set() appended a duplicate.
        for(var i=0;i<this.elements.length;i++){
            if(name==this.elements[i].name){
                f=this.elements[i];
                f.value=value;
            }
        }
        if(f==null){
            f=new mod.Element(name,value);
            this.elements.push(f);
        }
        if(this[name]==null){
            this[name]=f;
        }
        return f;
    };
    /// Returns all elements URL-encoded as "name=value&name=value...".
    publ.encode=function(){
        var data=[];
        for(var i=0;i<this.elements.length;i++){
            data.push(this.elements[i].encode());
        }
        return data.join("&");
    };
    /// Returns action + "?" + encoded elements.
    publ.queryString=function(){
        return this.action+"?"+this.encode();
    };
    /// Submits the form, replacing the current page.
    publ.submit=function(){
        if(this.method.toLowerCase()=="get"){
            try{
                location.href=this.queryString();
            }catch(e){
                // fallback for hosts where location.href is not assignable
                try{
                    var s='location="'+this.queryString().replace(/(["\\])/g,'\\$1')+'"';
                    browserEval(encodeURI(s));
                }catch(e){
                    throw "Cannot set new location.";
                }
            }
        }else{
            // non-GET: build a hidden DOM form and submit it
            var frm=document.createElement("form");
            frm.setAttribute("action",this.action);
            frm.setAttribute("method",this.method);
            document.getElementsByTagName("body")[0].appendChild(frm);
            for(var i=0;i<this.elements.length;i++){
                var elem=this.elements[i];
                var inp=document.createElement("input");
                inp.setAttribute("type","hidden");
                inp.setAttribute("name",elem.name);
                inp.setAttribute("value",elem.value);
                frm.appendChild(inp);
            }
            frm.submit();
        }
    };
    /// Submits via urllib (XMLHttpRequest) without reloading the page;
    /// callback makes the request asynchronous.
    publ.submitNoReload=function(callback){
        if(this.action&&this.method){
            var urllib=imprt("urllib");
            switch(this.method.toLowerCase()){
                case "get":
                    return urllib.getURL(this.queryString(),[["Content-Type","application/x-www-form-urlencoded"]],callback);
                case "post":
                    return urllib.postURL(this.action,this.encode(),[["Content-Type","application/x-www-form-urlencoded"]],callback);
                default:
                    throw "Method can only be POST or GET but is: "+this.method;
            }
        }else{
            throw "No action and/or method defined";
        }
    };
});
mod.Element=Class(function(publ,supr){
    /// A single name/value pair of a form.
    publ.name="";
    publ.value="";
    publ.__init__=function(name,value){
        this.name=name;
        this.value=value;
    };
    /// Returns "name=value" with both parts URI-component-encoded.
    publ.encode=function(){
        return encodeURIComponent(this.name)+"="+encodeURIComponent(this.value);
    };
});mod.__main__=function(){
    // Self-test: encodes characters that require escaping and submits the
    // form via GET and POST. Performs network I/O against localhost.
    var fm=new mod.Form("http://localhost/echoform.py","get");
    print("testing all sorts of chars, the should be encoded.");
    fm.set("testchars","abcdefghijklmnopqrstuvwxyz1234567890 \n\t!@#$%^&*()_+-=[]{};'\\:\"|,./<>?");
    print(fm.encode());
    try{
        print(fm.submitNoReload().responseText);
    }catch(e){
        print(e);
    }
    fm.method="post";
    print(fm.submitNoReload().responseText);
};
}); | zif.jsonserver | /zif.jsonserver-0.6.tar.gz/zif.jsonserver-0.6/src/zif/jsonserver/jsolait/lib/forms.js | forms.js |
Module("jsonrpc","$Revision: 80 $",function(mod){
var urllib=imprt("urllib");
/// Raised when the HTTP response status is anything other than 200 (OK).
mod.InvalidServerResponse=Class(mod.Exception,function(publ,supr){
    publ.__init__=function(status){
        supr.__init__.call(this,"The server did not respond with a status 200 (OK) but with: "+status);
        this.status=status;
    };
    publ.status;   // the offending HTTP status code
});
/// Raised when the response body is empty or cannot be parsed as JSON.
mod.MalformedJSONRpc=Class(mod.Exception,function(publ,supr){
    publ.__init__=function(msg,s,trace){
        supr.__init__.call(this,msg,trace);
        this.source=s;
    };
    publ.source;   // the unparsable response text
});
/// Raised when the JSON-RPC response carries a non-null "error" member.
mod.JSONRPCError=Class(mod.Exception,function(publ,supr){
    publ.__init__=function(err,trace){
        supr.__init__.call(this,err,trace);
    };
});
mod.marshall=function(obj){
    /// Serializes obj to JSON text. null and undefined map to "null";
    /// anything providing toJSON() delegates to it; every other object is
    /// written as {"key": value, ...}, skipping function-valued members.
    if(obj==null){
        return "null";
    }
    if(obj.toJSON){
        return obj.toJSON();
    }
    var members=[];
    for(var key in obj){
        if(typeof obj[key]!="function"){
            members.push('"'+key+'": '+mod.marshall(obj[key]));
        }
    }
    return "{"+members.join(", ")+"}";
};
/// Parses a JSON document and returns the resulting object.
// SECURITY NOTE(review): this eval()s text received from the server, so a
// malicious or compromised server can execute arbitrary script in the
// client. A real JSON parser would be safer.
mod.unmarshall=function(source){
    try{
        var obj;
        eval("obj="+source);
        return obj;
    }catch(e){
        throw new mod.MalformedJSONRpc("The server's response could not be parsed.",source,e);
    }
};
mod.JSONRPCMethod=Class(function(publ){
    /// A callable stub for one remote JSON-RPC method. Called with plain
    /// arguments it performs a synchronous call and returns the result; if
    /// the last argument is a Function it is used as callback(result, error)
    /// and the call is made asynchronously.
    // POSTs the request document; passing a callback makes the HTTP
    // request asynchronous.
    var postData=function(url,user,pass,data,callback){
        if(callback==null){var rslt=urllib.postURL(url,user,pass,data,[["Content-Type","application/json-rpc"]]);
            return rslt;
        }else{
            return urllib.postURL(url,user,pass,data,[["Content-Type","application/json-rpc"]],callback);
        }
    };
    // Unpacks a completed HTTP response: raises InvalidServerResponse on a
    // non-200 status, MalformedJSONRpc on an empty body, JSONRPCError when
    // the response carries an error member; otherwise returns rslt.result.
    var handleResponse=function(resp){
        var status=null;
        try{status=resp.status;
        }catch(e){
        }
        if(status==200){
            var respTxt="";try{respTxt=resp.responseText;
            }catch(e){
            }
            if(respTxt==null||respTxt==""){
                throw new mod.MalformedJSONRpc("The server responded with an empty document.","");
            }else{
                var rslt=mod.unmarshall(respTxt);
                if(rslt.error!=null){
                    throw new mod.JSONRPCError(rslt.error);
                }else{
                    return rslt.result;
                }
            }
        }else{
            throw new mod.InvalidServerResponse(status);
        }
    };
    // Builds the JSON-RPC request document {"id":..,"method":..,"params":..}.
    var jsonRequest=function(id,methodName,args){
        var p=[mod.marshall(id),mod.marshall(methodName),mod.marshall(args)];
        return '{"id":'+p[0]+', "method":'+p[1]+', "params":'+p[2]+"}";
    };
    publ.__init__=function(url,methodName,user,pass){
        this.methodName=methodName;
        this.url=url;
        this.user=user;
        this.password=pass;
    };
    publ.__call__=function(){
        var args=new Array();
        for(var i=0;i<arguments.length;i++){
            args.push(arguments[i]);
        }
        if(!(arguments[arguments.length-1] instanceof Function)){
            // synchronous call: block and return the unmarshalled result
            var data=jsonRequest("httpReq",this.methodName,args);
            var resp=postData(this.url,this.user,this.password,data);
            return handleResponse(resp);
        }else{
            // asynchronous call: pop the trailing callback(result, error)
            var cb=args.pop();var data=jsonRequest("httpReq",this.methodName,args);
            return postData(this.url,this.user,this.password,data,function(resp){
                var rslt=null;
                var exc=null;
                try{
                    rslt=handleResponse(resp);
                }catch(e){
                    exc=e;
                }
                try{cb(rslt,exc);
                }catch(e){
                    // exceptions raised inside the user callback are swallowed
                }
                args=null;
                resp=null;
            });
        }
    };
    /// Stores credentials used for subsequent requests of this stub.
    publ.setAuthentication=function(user,pass){
        this.user=user;
        this.password=pass;
    };
    // Fire-and-forget call: a null id marks a JSON-RPC notification and the
    // response is discarded.
    publ.notify=function(){
        var args=new Array();
        for(var i=0;i<arguments.length;i++){
            args.push(arguments[i]);
        }
        var data=jsonRequest(null,this.methodName,args);
        postData(this.url,this.user,this.password,data,function(resp){});
    };
    publ.methodName;
    publ.url;
    publ.user;
    publ.password;
});
mod.ServiceProxy=Class(function(publ){
    /// Proxy for a JSON-RPC service. Each method name (possibly dotted,
    /// e.g. "system.listMethods") becomes a callable JSONRPCMethod stub
    /// attached to the proxy.
    publ.__init__=function(url,methodNames,user,pass){
        this._url=url;
        this._user=user;
        this._password=pass;
        // BUG FIX: _methods was only ever created on the prototype, so every
        // proxy instance shared one array; make it per-instance.
        this._methods=[];
        this._addMethodNames(methodNames);
    };
    /// Creates a method stub for each name, building nested holder objects
    /// for dotted names; attributes that already exist are left untouched.
    publ._addMethodNames=function(methodNames){
        for(var i=0;i<methodNames.length;i++){
            var obj=this;
            var names=methodNames[i].split(".");
            for(var n=0;n<names.length-1;n++){
                var name=names[n];
                if(obj[name]){
                    obj=obj[name];
                }else{
                    obj[name]=new Object();
                    obj=obj[name];
                }
            }
            var name=names[names.length-1];
            if(!obj[name]){
                var mth=new mod.JSONRPCMethod(this._url,methodNames[i],this._user,this._password);
                obj[name]=mth;
                this._methods.push(mth);
            }
        }
    };
    /// Updates the credentials of the proxy and of every method stub.
    publ._setAuthentication=function(user,pass){
        this._user=user;
        this._password=pass;
        for(var i=0;i<this._methods.length;i++){
            this._methods[i].setAuthentication(user,pass);
        }
    };
    publ._url;
    publ._user;
    publ._password;
    publ._methods=new Array();
});
mod.ServerProxy=mod.ServiceProxy;
String.prototype.toJSON=function(){
    /// Serializes the string as a JSON string literal.
    var s='"'+this.replace(/(["\\])/g,'\\$1')+'"';
    s=s.replace(/\n/g,"\\n");
    // BUG FIX: carriage returns and tabs were left unescaped, which
    // produces invalid JSON (control characters must be escaped).
    s=s.replace(/\r/g,"\\r");
    s=s.replace(/\t/g,"\\t");
    return s;
};
Number.prototype.toJSON=function(){
    /// Numbers serialize via their default decimal representation.
    return String(this);
};
Boolean.prototype.toJSON=function(){
    /// Booleans serialize as the literals "true" / "false".
    return String(this);
};
Date.prototype.toJSON=function(){
    /// Serializes the date (in UTC) using jsolait's jsonclass extension,
    /// wrapping an ISO-8601-like timestamp in a sys.ISODate constructor.
    // Left-pads n with the zeros of pattern p.
    var pad=function(n,p){
        var s=p+n;
        return s.substring(s.length-p.length);
    };
    var isodate=pad(this.getUTCFullYear(),"0000")
        +pad(this.getUTCMonth()+1,"00")
        +pad(this.getUTCDate(),"00")
        +"T"+pad(this.getUTCHours(),"00")
        +":"+pad(this.getUTCMinutes(),"00")
        +":"+pad(this.getUTCSeconds(),"00");
    return '{"jsonclass":["sys.ISODate", ["'+isodate+'"]]}';
};
Array.prototype.toJSON=function(){
    /// Serializes the array as a JSON array; every item is marshalled
    /// recursively.
    var items=new Array(this.length);
    for(var i=this.length-1;i>=0;i--){
        items[i]=mod.marshall(this[i]);
    }
    return "["+items.join(", ")+"]";
};
/// Self-test: round-trips sample data through a public echo service.
/// Performs network I/O; only useful when run interactively.
mod.__main__=function(){
    print("creating ServiceProxy object using introspection for method construction...\n");
    var s=new mod.ServiceProxy("http://jsolait.net/testj.py",["echo"]);
    print("%s created\n".format(s));
    print("creating and marshalling test data:\n");
    var o=[1.234,5,{a:"Hello ' \" World",b:new Date()}];
    print(mod.marshall(o));
    print("\ncalling echo() on remote service...\n");
    var r=s.echo(o);
    print("service returned data(marshalled again):\n");
    print(mod.marshall(r));
};
}); | zif.jsonserver | /zif.jsonserver-0.6.tar.gz/zif.jsonserver-0.6/src/zif/jsonserver/jsolait/lib/jsonrpc.js | jsonrpc.js |
Module("lang","$Revision: 80 $",function(mod){
var sets=imprt('sets');
var extractQoutedText=function(s,startEndChar){
s=s.slice(startEndChar.length);
var rs=startEndChar;
var p=s.indexOf(startEndChar);
while(p>=0){if(s.charAt(p-1)=="\\"){
rs+=s.slice(0,p+1);s=s.slice(p+1);}else{
return rs+s.slice(0,p+1);
}
p=s.indexOf(startEndChar);
}
throw new mod.Exception(startEndChar+" expected.");
};
// Returns the single-line comment at the start of s, up to and including
// the terminating newline, or all of s when no newline follows.
var extractSLComment=function(s){
    var nl=s.search(/\n/);
    return (nl<0)?s:s.slice(0,nl+1);
};
// Returns the multi-line comment at the start of s, up to and including
// the closing "*/". Throws when the comment is unterminated.
var extractMLComment=function(s){
    var end=s.search(/\*\//);
    if(end<0){
        throw new mod.Exception("End of comment expected.");
    }
    return s.slice(0,end+2);
};
mod.Token=Class(function(publ,supr){
    /// Base class for all lexer tokens produced by mod.Tokenizer.
    publ.__init__=function(value,pos,err){
        this.value=value;   // the raw source text of the token
        this.pos=pos;       // character offset in the source, when provided
        this.err=err;
    };
    publ.toString=function(){
        return "["+this.constructor.__name__+" "+this.value+"]";
    };
});
// One Token subclass per lexical category; they differ only by type so the
// tokenizer's consumers can dispatch with instanceof.
mod.TokenWhiteSpace=Class(mod.Token,function(publ,supr){});
mod.TokenPunctuator=Class(mod.Token,function(publ,supr){});
mod.TokenNewLine=Class(mod.Token,function(publ,supr){});
mod.TokenNumber=Class(mod.Token,function(publ,supr){});
mod.TokenKeyword=Class(mod.Token,function(publ,supr){});
mod.TokenString=Class(mod.Token,function(publ,supr){});
mod.TokenRegExp=Class(mod.Token,function(publ,supr){});
mod.TokenIdentifier=Class(mod.Token,function(publ,supr){});
mod.TokenComment=Class(mod.Token,function(publ,supr){});
mod.TokenDocComment=Class(mod.TokenComment,function(publ,supr){});
var arithmaticOperators=new sets.Set(['/','+','-','*','%']);
var relationalOperators=new sets.Set(['<','>','<=','>=','instanceof']);
var equalityOperators=new sets.Set(['===','!==','==','!=']);var unaryPrefixOperators=new sets.Set(['!','++','--','-','~','typeof']);
var unaryPostfixOperators=new sets.Set(['++','--']);
var unaryOperators=unaryPrefixOperators;
var bitwiseShiftOperators=new sets.Set(['>>','<<','>>>']);
var binaryBitwiseOperaters=new sets.Set(['&','|','^']);
var binaryLogicalOperators=new sets.Set(['||','^^','&&']);
var conditionalOperators=new sets.Set(['?']);
var propertyOperators=new sets.Set(['.']);
var assignmentOperators=new sets.Set(['=','+=','-=','*=','%=','&=','|=','^=','/=','<<=','>>=','>>>=']);
var operators=(new sets.Set()).unionUpdate(
arithmaticOperators).unionUpdate(
relationalOperators).unionUpdate(
equalityOperators).unionUpdate(
unaryOperators).unionUpdate(
bitwiseShiftOperators).unionUpdate(
binaryBitwiseOperaters).unionUpdate(
binaryLogicalOperators).unionUpdate(
propertyOperators).unionUpdate(
conditionalOperators).unionUpdate(
assignmentOperators);
var punctuators=(new sets.Set(['{','}','(',')','[',']',';',',',':'])).unionUpdate(operators);
var valueKeywords=new sets.Set(['null','undefined','true','false','this']);var operatorKeywords=new sets.Set(['instanceof','typeof','new']);
var jsolaitStartStatementKeywords=new sets.Set(['Module','mod','publ']);
var startStatementKeywords=new sets.Set(['var','return','for','switch','while','continue','break','with','if','throw','delete','try','this','function']);
var subStatementKeywords=new sets.Set(['else','var','catch','case','default']);
var startStatementToken=startStatementKeywords.union(new sets.Set(['(']));
var keywords=(new sets.Set()).unionUpdate(
valueKeywords).unionUpdate(
operatorKeywords).unionUpdate(
startStatementKeywords).unionUpdate(
subStatementKeywords).unionUpdate(
startStatementToken);
var whiteSpace=/^[\s\t\f]+/;
var stringSQ=/^'((\\[^\x00-\x1f]|[^\x00-\x1f'\\])*)'/;
var stringDQ=/^"((\\[^\x00-\x1f]|[^\x00-\x1f"\\])*)"/;
var regExp=/^\/(\\[^\x00-\x1f]|\[(\\[^\x00-\x1f]|[^\x00-\x1f\\\/])*\]|[^\x00-\x1f\\\/\[])+\/[gim]*/;var identifiers=/^[a-zA-Z_$][\w_$]*\b/;
var intNumber=/^-?[1-9]\d*|0\b/;
var floatNumber=/^-?([1-9]\d*|0)\.\d+/;
var expNumber=/^-?([1-9]\d*|0)\.\d+e-?[1-9]\d*/;
var hexNumber=/^-?0x[0-9a-fA-F]+/;
mod.Tokenizer=Class(function(publ,supr){
    /// Splits JavaScript source text into Token objects.
    publ.__init__=function(s){
        this._working=s;   // the not-yet-consumed tail of the source
        this.source=s;
    };
    /// Returns the next token, or undefined at end of input.
    publ.next=function(){
        if(this._working==""){
            return undefined;
        }
        // BUG FIX: tkn was an implicit global in the original; declare it.
        var tkn;
        var s1=this._working.charAt(0);
        var s2=s1+this._working.charAt(1);
        var s3=s2+this._working.charAt(2);
        var isWS=false;
        if(s1==" "||s1=="\t"||s1=="\f"){
            tkn=new mod.TokenWhiteSpace(whiteSpace.exec(this._working)[0]);
            isWS=true;
        }else if(s1=="\n"||s1=="\r"){
            tkn=new mod.TokenNewLine(s1);
        }else if(s1=='"'||s1=="'"){
            if(tkn=(s1=="'"?stringSQ:stringDQ).exec(this._working)){
                tkn=new mod.TokenString(tkn[0]);
            }else{
                throw "String expected";
            }
        }else if(s3=="///"){
            tkn=new mod.TokenDocComment(extractSLComment(this._working),this.source.length-this._working.length);
        }else if(s3=="/**"){
            tkn=new mod.TokenDocComment(extractMLComment(this._working),this.source.length-this._working.length);
        }else if(s2=="//"){
            tkn=new mod.TokenComment(extractSLComment(this._working),this.source.length-this._working.length);
        }else if(s2=="/*"){
            tkn=new mod.TokenComment(extractMLComment(this._working),this.source.length-this._working.length);
        }else if(punctuators.contains(s3)){
            // longest-match first: 3-char, then 2-char, then 1-char punctuator
            tkn=new mod.TokenPunctuator(s3);
        }else if(punctuators.contains(s2)){
            tkn=new mod.TokenPunctuator(s2);
        }else if(punctuators.contains(s1)){
            // '/' begins a regex literal only after tokens like ',(=+[{'
            if(s1=="/"&&(",(=+[{".indexOf(this._lastNonWSTkn.value)>-1)){
                if(tkn=regExp.exec(this._working)){
                    tkn=new mod.TokenRegExp(tkn[0]);
                }else{
                    tkn=new mod.TokenPunctuator(s1);
                }
            }else{
                tkn=new mod.TokenPunctuator(s1);
            }
        }else if(tkn=identifiers.exec(this._working)){
            tkn=tkn[0];
            if(keywords.contains(tkn)){
                tkn=new mod.TokenKeyword(tkn);
            }else{
                tkn=new mod.TokenIdentifier(tkn);
            }
        }else if(tkn=hexNumber.exec(this._working)){
            tkn=new mod.TokenNumber(tkn[0],this.source.length-this._working.length);
        }else if(tkn=expNumber.exec(this._working)){
            tkn=new mod.TokenNumber(tkn[0],this.source.length-this._working.length);
        }else if(tkn=floatNumber.exec(this._working)){
            tkn=new mod.TokenNumber(tkn[0],this.source.length-this._working.length);
        }else if(tkn=intNumber.exec(this._working)){
            tkn=new mod.TokenNumber(tkn[0],this.source.length-this._working.length);
        }else{
            throw "Unrecognized token at char %s, near:\n%s".format(this.source.length-this._working.length,this._working.slice(0,50));
        }
        if(!isWS){
            // remembered so the '/' disambiguation above can look back
            this._lastNonWSTkn=tkn;
        }
        this._working=this._working.slice(tkn.value.length);
        return tkn;
    };
    /// Returns the next token that is not whitespace; when newLineIsWS is
    /// true, newlines are skipped as well. Returns undefined at end.
    publ.nextNonWhiteSpace=function(newLineIsWS){
        // BUG FIX: tkn was an implicit global in the original; declare it.
        var tkn;
        while(tkn=this.next()){
            if(!(tkn instanceof mod.TokenWhiteSpace)){
                if(!(newLineIsWS&&(tkn instanceof mod.TokenNewLine))){
                    break;
                }
            }
        }
        return tkn;
    };
    /// Iteration restarts from the beginning of the source.
    publ.__iter__=function(){
        return new mod.Tokenizer(this.source);
    };
    /// Returns [line, column] (1-based line) of the current position.
    publ.getPosition=function(){
        var a=this.source.split("\n");
        var p=this.source.length-this._working.length;
        for(var i=0;i<a.length;i++){
            p=p-(a[i].length+1);
            if(p<=0){
                return [i+1,a[i].length+p];
            }
        }
    };
});
var LookAhead=1;
mod.Script=Class(function(publ,supr){
    /// High-level source scanner collecting modules and publics.
    // NOTE(review): this class looks unfinished — it calls methods that are
    // not defined on it (parseDocComment, appendChild,
    // lookAheadNonWhitespace); mod.Parser below appears to supersede it.
    publ.__init__=function(source){
        this.publics=[];
        this.modules=[];
        this.tokens=new mod.Tokenizer(source);
    };
    publ.parse=function(){
        // BUG FIX: the original passed the undefined global
        // IgonreWSAndNewLine to next(), which raised a ReferenceError;
        // Tokenizer.next() takes no arguments.
        var tkn=this.tokens.next();
        while(tkn!==undefined){
            if(tkn instanceof mod.TokenDocComment){
                this.parseDocComment();
            }else if(this['parseStatement_'+tkn.value]){
                this['parseStatement_'+tkn.value].call(this);
            }else if(tkn instanceof mod.TokenIdentifier){
                this.parseStatement_callOrAssignment();
            }else{
                throw "Beginning of a statement expected but found %s".format(tkn);
            }
        }
    };
    publ.parseStatement_callOrAssignment=function(){
    };
    publ.parseStatement_Module=function(){
        var tkn=this.lookAheadNonWhitespace();
        // BUG FIX: was `tkn.value="("` — an assignment that always
        // succeeded, so "Module not allowed here" was unreachable.
        if(tkn.value=="("){
            var m=this.appendChild(new mod.ModuleNode(this));
            this.publics.push(m);
            m.parse();
        }else{
            throw "Module not allowed here";
        }
    };
});
mod.CodeNode=Class(function(publ,supr){
    /// Base node of the code tree built by mod.Parser.
    publ.__init__=function(){
        this.childNodes=[];
        this.dependencies=[];
    };
    /// Appends child, sets its parentNode back-reference and returns it.
    publ.appendChild=function(child){
        this.childNodes.push(child);
        child.parentNode=this;
        return child;
    };
    // prototype-level declarations; __init__ replaces the arrays per instance
    publ.dependencies=[];
    publ.parentNode;
    publ.childNodes=[];
});
mod.PropertyNode=Class(mod.CodeNode,function(publ,supr){
    /// A public property (name/value pair) of a scope.
    publ.name='';
    publ.value=null;
});
mod.ScopeBase=Class(mod.CodeNode,function(publ,supr){
    /// Base for nodes that open a scope (global, module, class, method).
    publ.__init__=function(parentScope){
        // NOTE(review): parentScope is accepted but never stored.
        this.childNodes=[];
        this.publics=[];
        this.parameters=[];
        this.dependencies=[];
    };
    /// Appends node both as a child and as a public member; returns it.
    publ.addPublic=function(node){
        this.appendChild(node);
        this.publics.push(node);
        return node;
    };
    publ.name='';
});
// Concrete scope kinds; they differ only by type (and MethodNode's name).
mod.GlobalNode=Class(mod.ScopeBase,function(publ,supr){
});mod.ModuleNode=Class(mod.ScopeBase,function(publ,supr){
});
mod.ClassNode=Class(mod.ScopeBase,function(publ,supr){
});
mod.MethodNode=Class(mod.ScopeBase,function(publ,supr){
    publ.name='';
});
mod.Parser=Class(mod.Tokenizer,function(publ,supr){
publ.__init__=function(s,globalNode){
supr.__init__.call(this,s);
globalNode=globalNode===undefined?new mod.GlobalNode():globalNode;
this.currentNode=globalNode;
this.globalNode=globalNode;
this.lastDoc='';
};
var isStatementStartToken=function(tkn){
return(tkn instanceof mod.TokenIdentifier)||(startStatementToken.contains(tkn.value));
};
publ.nextNonWhiteSpaceExpect=function(expected,nlIsWS){
var tkn=this.nextNonWhiteSpace();
return this.expect(expected,tkn);
};
publ.getDocumentation=function(){
var d=this.lastDoc;
this.lastDoc='';
return d;
};
publ.expect=function(expected,tkn){
if(typeof expected=='string'){if(tkn.value==expected){
return tkn;
}else{
throw "Expected '%s' but found %s".format(expected,tkn);
}
}else if(expected instanceof mod.Token){
if(tkn.value==expected.value){
return tkn;
}else{
throw "Expected %s but found %s".format(expected,tkn);
}
}else{
if(tkn instanceof expected){
return tkn;
}else{
throw "Expected token of type %s but found %s".format(expected,tkn);
}
}
};
publ.parseSource=function(){
var tkn=this.parseStatements(this.next());
if(tkn!==undefined){
throw mod.Expected("Expected end of source but found % .".format(tkn));
}
};
publ.parseStatements=function(tkn){
while(tkn!==undefined){
if(tkn instanceof mod.TokenDocComment){
tkn=this.parseDocComment(tkn);
}else if(isStatementStartToken(tkn)){tkn=this.parseStatement(tkn);
}else if((tkn instanceof mod.TokenWhiteSpace)||(tkn instanceof mod.TokenNewLine)||(tkn instanceof mod.TokenComment)){
tkn=this.nextNonWhiteSpace(true);
}else{
return tkn;
}
}
return tkn;
};
publ.parseDocComment=function(tkn){
if(tkn.value.charAt(2)=='*'){
this.lastDoc=tkn.value.slice(3,-3);
}else{
this.lastDoc=tkn.value.slice(3);
}
tkn=this.nextNonWhiteSpace(true);
return tkn;
};
publ.parseStatement=function(tkn){
if(this['parseStatement_'+tkn.constructor.__name__]){
tkn=this['parseStatement_'+tkn.constructor.__name__].call(this,tkn);
}else if(this['parseStatement_'+tkn.value]){
tkn=this['parseStatement_'+tkn.value].call(this,tkn);
}else if(tkn instanceof mod.TokenIdentifier){tkn=this.parseExpression(tkn);
tkn=this.parseEndOfStatement(tkn);
}else{
throw "Beginning of a statement expected but found %s".format(tkn);
}
return tkn;
};
publ.parseEndOfStatement=function(tkn){
if((tkn!==undefined)&&(tkn.value==";")){
return this.nextNonWhiteSpace(true);
}else{throw "Expected ';' at end of statement but found %s".format(tkn);
}
};
publ.parseStatement_this=function(tkn){
tkn=this.parseExpression(tkn);
tkn=this.parseEndOfStatement(tkn);
return tkn;
};
publ.parseStatement_var=function(tkn){
tkn=this.parseExpression_var(tkn);
tkn=this.parseEndOfStatement(tkn);
return tkn;
};
publ.parseStatement_break=function(tkn){
tkn=this.nextNonWhiteSpace();
return this.parseEndOfStatement(tkn);
};publ.parseStatement_return=function(tkn){
tkn=this.parseExpression(this.nextNonWhiteSpace());
return this.parseEndOfStatement(tkn);
};
publ.parseStatement_continue=function(tkn){
tkn=this.nextNonWhiteSpace();
return this.parseEndOfStatement(tkn);
};
publ.parseStatement_delete=function(tkn){
tkn=this.parseExpression_objectAccess(this.nextNonWhiteSpace());
return this.parseEndOfStatement(tkn);
};
publ.parseStatement_for=function(tkn){
tkn=this.nextNonWhiteSpaceExpect('(');
tkn=this.parseCommaExpressions(this.nextNonWhiteSpace());
if(tkn.value=='in'){
tkn=this.nextNonWhiteSpace();
tkn=this.parseExpression_objectAccess(tkn);
}else{
this.expect(';',tkn);
tkn=this.parseCommaExpressions(this.nextNonWhiteSpace(true));
this.expect(';',tkn);
tkn=this.parseCommaExpressions(this.nextNonWhiteSpace(true));
}
this.expect(')',tkn);
return this.parseBlock(this.nextNonWhiteSpace(true));
};
publ.parseCondition=function(tkn){
this.expect('(',tkn);
tkn=this.parseExpression(this.nextNonWhiteSpace());
this.expect(')',tkn);
return this.nextNonWhiteSpace(true);
};
publ.parseStatement_while=function(tkn){
tkn=this.parseCondition(this.nextNonWhiteSpace());
return this.parseBlock(tkn);
};
publ.parseStatement_if=function(tkn){
tkn=this.parseCondition(this.nextNonWhiteSpace());
tkn=this.parseBlock(tkn);
if(tkn.value=="else"){
tkn=this.nextNonWhiteSpace(true);
if(tkn.value=='if'){
tkn=this.parseStatement_if(tkn);
}else{
tkn=this.parseBlock(tkn);
}
}
return tkn;
};
publ.parseStatement_switch=function(tkn){
tkn=this.parseCondition(this.nextNonWhiteSpace());
this.expect('{',tkn);
tkn=this.nextNonWhiteSpace(true);
while((tkn.value=="}")||(tkn.value=="case")||(tkn.value=="default")){
if(tkn.value=="}"){
return this.nextNonWhiteSpace(true);
}else{
if(tkn.value=="case"){
tkn=this.parseExpression(this.nextNonWhiteSpace());
}else{
tkn=this.nextNonWhiteSpace();
}
this.expect(':',tkn);
tkn=this.parseStatements(this.nextNonWhiteSpace(true));
}
}
throw "Expected 'case', 'default' or '}' inside switch block but found %s".format(tkn);
};
publ.parseStatement_throw=function(tkn){
var tkn=this.parseExpression(this.nextNonWhiteSpace());
return this.parseEndOfStatement(tkn);
};
publ.parseStatement_try=function(tkn){
tkn=this.parseBlock(this.nextNonWhiteSpace(true));
this.expect('catch',tkn);
tkn=this.nextNonWhiteSpaceExpect('(');
tkn=this.nextNonWhiteSpaceExpect(mod.TokenIdentifier);
tkn=this.nextNonWhiteSpaceExpect(')');
tkn=this.nextNonWhiteSpace(true);
tkn=this.parseBlock(tkn);
return tkn;
};
publ.parseStatement_Module=function(tkn){
tkn=this.nextNonWhiteSpace();
if(tkn.value=='('){
this.currentNode=this.currentNode.addPublic(new mod.ModuleNode());
tkn=this.nextNonWhiteSpaceExpect(mod.TokenString,true);
this.currentNode.name=tkn.value.slice(1,-1);
tkn=this.nextNonWhiteSpaceExpect(',');
tkn=this.nextNonWhiteSpaceExpect(mod.TokenString,true);
this.currentNode.version=tkn.value.slice(1,-1);
this.currentNode.description=this.getDocumentation();
tkn=this.nextNonWhiteSpaceExpect(',');
tkn=this.nextNonWhiteSpaceExpect('function',true);
tkn=this.nextNonWhiteSpaceExpect('(');
tkn=this.nextNonWhiteSpaceExpect('mod',true);
tkn=this.nextNonWhiteSpaceExpect(')',true);
tkn=this.nextNonWhiteSpaceExpect('{');
tkn=this.parseBlock(tkn);
this.expect(")",tkn);
tkn=this.nextNonWhiteSpace();
tkn=this.parseEndOfStatement(tkn);
}else if(tkn.value=='='){
tkn=this.nextNonWhiteSpaceExpect('function');this.currentNode=this.currentNode.addPublic(new mod.MethodNode());
tkn=this.parseExpression_function(tkn);
tkn=this.parseEndOfStatement(tkn);
}else if(tkn.value=='.'){
tkn=this.next();
tkn=this.parseExpression(tkn);
tkn=this.parseEndOfStatement(tkn);
return tkn;
}else{
return tkn;
}
this.currentNode=this.currentNode.parentNode;
return tkn;
};
publ.parseStatement_Class=function(tkn){
tkn=this.nextNonWhiteSpace();
if(tkn.value=='='){
tkn=this.nextNonWhiteSpaceExpect('function');
tkn=this.parseExpression_function(tkn);
}else if(tkn.value=='.'){
tkn=this.next();
tkn=this.parseExpression(tkn);
}
tkn=this.parseEndOfStatement(tkn);
return tkn;
};
publ.parseStatement_imprt=function(tkn){
    /// Parses either the definition `imprt=function(...){...};` or a
    /// dependency statement `imprt("modulename");`.
    tkn=this.nextNonWhiteSpace();
    // BUG FIX: was `if(tkn.value='=')` — an assignment, always truthy, so
    // the else branch (the actual imprt("...") call) was unreachable.
    if(tkn.value=='='){
        tkn=this.nextNonWhiteSpaceExpect('function');
        tkn=this.parseExpression_function(tkn);
        tkn=this.parseEndOfStatement(tkn);
    }else{
        // BUG FIX: expect() needs the current token, and the module name
        // must be fetched with nextNonWhiteSpaceExpect (the original called
        // `this.expect('(')` and `this.nextNonWhiteSpace(mod.TokenString)`;
        // cf. parseExpression_imprt, which does this correctly).
        this.expect('(',tkn);
        tkn=this.nextNonWhiteSpaceExpect(mod.TokenString);
        this.currentNode.dependencies.push(tkn.value.slice(1,-1));
        tkn=this.nextNonWhiteSpaceExpect(')');
        tkn=this.nextNonWhiteSpace();
        tkn=this.parseEndOfStatement(tkn);
    }
    return tkn;
};
publ.parseStatement_mod=function(tkn){
if(this.currentNode instanceof mod.ModuleNode){
return this.parseStatement_publProp(tkn);
}else{
tkn=this.parseExpression(tkn);
return this.parseEndOfStatement(tkn);
}
};
publ.parseStatement_publ=function(tkn){
return this.parseStatement_publProp(tkn);
};
publ.parseStatement_publProp=function(tkn){
tkn=this.nextNonWhiteSpaceExpect('.');
tkn=this.nextNonWhiteSpaceExpect(mod.TokenIdentifier);
var name=tkn.value;
tkn=this.nextNonWhiteSpace();
if(tkn.value=="="){
tkn=this.nextNonWhiteSpace(true);
switch(tkn.value){
case 'function':
this.currentNode=this.currentNode.addPublic(new mod.MethodNode());
this.currentNode.name=name;
this.currentNode.description=this.getDocumentation();
tkn=this.parseExpression_function(tkn);
break;
case 'Class':
this.currentNode=this.currentNode.addPublic(new mod.ClassNode());
this.currentNode.name=name;
this.currentNode.description=this.getDocumentation();
tkn=this.parseExpression_Class(tkn);
break;
default:
this.currentNode=this.currentNode.addPublic(new mod.PropertyNode());
this.currentNode.name=name;
this.currentNode.description=this.getDocumentation();
tkn=this.parseExpression(tkn);
}
}else{
this.currentNode=this.currentNode.addPublic(new mod.PropertyNode());
this.currentNode.name=name;
this.currentNode.description=this.getDocumentation();
}
this.currentNode=this.currentNode.parentNode;
tkn=this.parseEndOfStatement(tkn);
return tkn;
};
publ.parseBlock=function(tkn){
this.expect('{',tkn);
tkn=this.parseStatements(this.nextNonWhiteSpace(true));
this.expect('}',tkn);
tkn=this.nextNonWhiteSpace(true);
return tkn;
};
publ.parseCommaExpressions=function(tkn){
tkn=this.parseExpression(tkn);
while(tkn.value==','){
tkn=this.parseExpression(this.nextNonWhiteSpace(true));
}
return tkn;
};
publ.parseExpression=function(tkn){
var objectAccessAllowed=true;var invokationAllowed=true;if(tkn.value=='imprt'){
tkn=this.parseExpression_imprt(tkn);
}else if((tkn instanceof mod.TokenIdentifier)||(tkn.value=='this')){
tkn=this.parseExpression_objectAccess(tkn);
}else if(tkn.value=='new'){
tkn=this.parseExpression_new(tkn);
invokationAllowed=false;
objectAccessAllowed=false;
}else if(tkn.value=='var'){
tkn=this.parseExpression_var(tkn);
return tkn;
}else if(tkn.value=="("){
tkn=this.parseExpression_parens(tkn);
}else if(tkn.value=='function'){
tkn=this.parseExpression_function(tkn);
objectAccessAllowed=false;
return tkn;
}else if(tkn.value=='{'){
tkn=this.parseExpression_object(tkn);
invokationAllowed=false;
}else if(tkn.value=='['){
tkn=this.parseExpression_array(tkn);
invokationAllowed=false;
}else if(tkn instanceof mod.TokenString){
tkn=this.nextNonWhiteSpace();
invokationAllowed=false;
}else if(tkn instanceof mod.TokenRegExp){
tkn=this.nextNonWhiteSpace();
invokationAllowed=false;
}else if(tkn instanceof mod.TokenNumber){
tkn=this.nextNonWhiteSpace();
objectAccessAllowed=false;
invokationAllowed=false;
}else if(valueKeywords.contains(tkn.value)){
tkn=this.nextNonWhiteSpace();
objectAccessAllowed=false;
invokationAllowed=false;
}else if(unaryPrefixOperators.contains(tkn.value)){
tkn=this.parseExpression_prefixOperator(tkn);
}else{
return tkn;
}
if(objectAccessAllowed||invokationAllowed){
while((tkn.value=='.')||(tkn.value=="[")||(tkn.value=="(")){
if((tkn.value=='.')||(tkn.value=="[")){
tkn=this.parsePropertyAccess(tkn);
}else if(tkn.value=="("){
tkn=this.parseInvokation(tkn);
}
}
}
if(tkn instanceof mod.TokenWhiteSpace){
tkn=this.nextNonWhiteSpace();
}
if(tkn.value=="?"){
tkn=this.parseExpression_conditional(tkn);
}else if(operators.contains(tkn.value)){
tkn=this.parseExpression_operator(tkn);
}else{
return tkn;
}
return tkn;
};
publ.parsePropertyAccess=function(tkn){
while((tkn!==undefined)&&((tkn.value=='.')||(tkn.value=='['))){
switch(tkn.value){
case '.':
tkn=this.nextNonWhiteSpaceExpect(mod.TokenIdentifier);
tkn=this.next();
break;
case '[':
tkn=this.parseExpression(this.nextNonWhiteSpace(true));
this.expect(']',tkn);
tkn=this.nextNonWhiteSpace();
break;
}
}
return tkn;
};
publ.parseExpression_Class=function(tkn){
tkn=this.nextNonWhiteSpace();
tkn=this.nextNonWhiteSpace();
if(tkn instanceof mod.TokenString){
this.currentNode.name=tkn.value.slice(1,-1);
tkn=this.nextNonWhiteSpaceExpect(',');
tkn=this.nextNonWhiteSpace(true);
}
if(tkn instanceof mod.TokenIdentifier){
while(tkn instanceof mod.TokenIdentifier){
tkn=this.parseExpression_objectAccess(tkn);
this.expect(',',tkn);
tkn=this.nextNonWhiteSpace(true);
}
}
this.expect('function',tkn);
tkn=this.nextNonWhiteSpaceExpect('(');
tkn=this.nextNonWhiteSpaceExpect('publ');
tkn=this.nextNonWhiteSpace();
if(tkn.value==','){
tkn=this.nextNonWhiteSpaceExpect('supr');
tkn=this.nextNonWhiteSpaceExpect(')');
}else{
this.expect(')',tkn);
}
tkn=this.nextNonWhiteSpaceExpect('{');
tkn=this.parseBlock(tkn);
this.expect(")",tkn);
tkn=this.nextNonWhiteSpace(true);
return tkn;
};
publ.parseExpression_imprt=function(tkn){
tkn=this.nextNonWhiteSpaceExpect('(');
tkn=this.nextNonWhiteSpaceExpect(mod.TokenString);
this.currentNode.dependencies.push(tkn.value.slice(1,-1));
tkn=this.nextNonWhiteSpaceExpect(')');
tkn=this.nextNonWhiteSpace(true);
return tkn;
};
publ.parseExpression_objectAccess=function(tkn){
tkn=this.next();
tkn=this.parsePropertyAccess(tkn);
return tkn;
};publ.parseExpression_prefixOperator=function(tkn){
tkn=this.nextNonWhiteSpace();
tkn=this.parseExpression(tkn);
return tkn;
};
publ.parseExpression_parens=function(tkn){
tkn=this.parseExpression(this.nextNonWhiteSpace());
this.expect(')',tkn);
return this.nextNonWhiteSpace(true);
};
publ.parseExpression_operator=function(tkn){
switch(tkn.value){
case "=":
tkn=this.parseExpression_assignment(tkn);
break;
default:
tkn=this.nextNonWhiteSpace(true);tkn=this.parseExpression(tkn);
}
return tkn;
};
publ.parseExpression_assignment=function(tkn){
tkn=this.nextNonWhiteSpace(true);
tkn=this.parseExpression(tkn);
return tkn;
};
publ.parseExpression_conditional=function(tkn){
tkn=this.nextNonWhiteSpace();
tkn=this.parseExpression(tkn);
this.expect(":",tkn);
tkn=this.parseExpression(this.nextNonWhiteSpace());
return tkn;
};
publ.parseExpression_array=function(tkn){
tkn=this.nextNonWhiteSpace(true);
if(tkn.value==']'){
return this.nextNonWhiteSpace();
}
tkn=this.parseCommaExpressions(tkn);
this.expect(']',tkn);
tkn=this.nextNonWhiteSpace();
return tkn;
};
publ.parseExpression_object=function(tkn){
tkn=this.nextNonWhiteSpace();
while(tkn.value!='}'){
if(tkn instanceof mod.TokenString){
}else{
this.expect(mod.TokenIdentifier,tkn);
}
tkn=this.nextNonWhiteSpaceExpect(':');
tkn=this.nextNonWhiteSpace(true);
tkn=this.parseExpression(tkn);
if(tkn.value==','){
tkn=this.nextNonWhiteSpace(true);
}
}
this.expect('}',tkn);
tkn=this.nextNonWhiteSpace();
return tkn;
};
publ.parseExpression_function=function(tkn){
tkn=this.nextNonWhiteSpaceExpect('(');
tkn=this.nextNonWhiteSpace(true);
while(tkn instanceof mod.TokenIdentifier){
try{
this.currentNode.parameters.push(tkn.value);
}catch(e){
}
tkn=this.nextNonWhiteSpace(true);
if(tkn.value==","){
tkn=this.nextNonWhiteSpace(true);
}else{
break;
}
}
this.expect(')',tkn);
tkn=this.parseBlock(this.nextNonWhiteSpace(true));
return tkn;
};
publ.parseExpression_new=function(tkn){
tkn=this.nextNonWhiteSpace(true);
tkn=this.parseExpression_objectAccess(tkn);
tkn=this.parseInvokation(tkn);
return tkn;
};
// Parse a 'var' statement: one or more identifiers, each optionally
// followed by '=' and an initializer expression, separated by ','.
// Returns the first token after the declaration list; throws when no
// identifier follows 'var' or a ','.
publ.parseExpression_var=function(tkn){
var tkn=this.nextNonWhiteSpace();
while(tkn instanceof mod.TokenIdentifier){
var varName=tkn.value;
tkn=this.nextNonWhiteSpace();
switch(tkn.value){
case ',':tkn=this.nextNonWhiteSpace(true);
break;
case '=':tkn=this.parseExpression(this.nextNonWhiteSpace(true));if(tkn.value==","){
tkn=this.nextNonWhiteSpace(true);
}else{
return tkn;}
break;
default:
return tkn;
}
}
throw "Identifier expected in 'var'-statement but found %s".format(tkn===undefined?'undefined':tkn);
};
// Parse a call's argument list: '(' comma-separated expressions ')'.
// Returns the token following the closing ')'.
publ.parseInvokation=function(tkn){
this.expect('(',tkn);
tkn=this.parseCommaExpressions(this.nextNonWhiteSpace(true));
this.expect(')',tkn);
tkn=this.nextNonWhiteSpace();
return tkn;
};});
// Compressor: a Tokenizer subclass that strips whitespace, comments and
// duplicate newlines from a token stream, re-inserting single spaces only
// where the grammar requires them (e.g. around 'instanceof'/'in', after
// 'var', 'return', ...).  Used to minify jsolait source.
mod.Compressor=Class(mod.Tokenizer,function(publ,supr){
publ.__init__=function(source){
supr.__init__.call(this,source);
// true when the previously emitted token demands a following space
this.wsNeeded=false;
};
// keywords needing a space on both sides / only on the right
var leftAndRightSpace=new sets.Set(['instanceof','in']);
var rightSpace=new sets.Set(['var','delete','throw','new','return','else','instanceof','in','case','typeof']);
publ.next=function(){
if(this.bufferedTkn){
// emit the token held back when a synthetic space was returned
var tkn=this.bufferedTkn;
this.bufferedTkn=null;
}else{
var tkn=supr.next.call(this);
// skip whitespace, comments, and runs of consecutive newlines
while((tkn instanceof mod.TokenWhiteSpace)||(tkn instanceof mod.TokenComment)||((tkn instanceof mod.TokenNewLine)&&(this.lastTkn instanceof mod.TokenNewLine))){
tkn=supr.next.call(this);
}
if(tkn===undefined){
return tkn;
}else{
if(leftAndRightSpace.contains(tkn.value)){
// emit a space first, buffer the keyword for the next call
this.wsNeeded=false;
this.bufferedTkn=tkn;
return new mod.TokenWhiteSpace(' ');
}
}
}
switch(tkn.value){
case '(':case '{':case '[':case '"':case "'":case "!":
// these delimiters make a pending space unnecessary
this.wsNeeded=false;
break;
}
if(this.wsNeeded){
// satisfy the pending space before emitting the buffered token
this.bufferedTkn=tkn;
var tkn=new mod.TokenWhiteSpace(' ');
this.wsNeeded=false;
}else{
if(rightSpace.contains(tkn.value)){
this.wsNeeded=true;
}
}
this.lastTkn=tkn;
return tkn;
};
});
// DocParser: walks a parsed documentation node tree (GlobalNode, ModuleNode,
// ClassNode, MethodNode, PropertyNode) and writes an indented XML outline
// of the public API to the given file-like object.
mod.DocParser=Class(function(publ,supr){
publ.__init__=function(file){
// file: object with a write(string) method
this.file=file;
};
// Write a (possibly multi-line) message at the current indent level.
// A positive `indent` increases indentation *after* printing, a negative
// one decreases it *before* printing (so closing tags line up).
publ.pprint=function(m,indent){
var m=m.split("\n");
indent=(indent===undefined)?0:indent;
if(indent<0){
this.pprintIndent+=indent;
}
var s=[];
for(var i=0;i<this.pprintIndent;i++){
s.push(' ');
}
s=s.join('');
for(var i=0;i<m.length;i++){
this.file.write(s+m[i]+'\n');
}
if(indent>0){
this.pprintIndent+=indent;
}
};
// current indentation depth, in spaces
publ.pprintIndent=0;
publ.printGlobalNode=function(n){
this.pprint('<global>',4);
this.pprint('<modules>',4);
for(var i=0;i<n.publics.length;i++){
var nn=n.publics[i];
if(nn instanceof mod.ModuleNode){
this.printModuleNode(nn);
}
}
this.pprint('</modules>',-4);
this.pprint('</global>',-4);
};publ.printModuleNode=function(n){
this.pprint('<module>',4);
this.pprint('<name>'+n.name+'</name>');
this.pprint('<description>',4);
this.pprint(n.description);
this.pprint('</description>',-4);
this.pprint('<dependencies>'+n.dependencies+'</dependencies>');
this.printPublics(n);
this.pprint('</module>',-4);
};
publ.printClassNode=function(n){
this.pprint('<class>',4);
this.pprint('<name>'+n.name+'</name>');
this.pprint('<description>',4);
this.pprint(n.description);
this.pprint('</description>',-4);
this.printPublics(n);
this.pprint('</class>',-4);
};
// Sort a node's public children into classes, methods and properties and
// print each group inside its own wrapper element.
publ.printPublics=function(n){
var classes=[];
var props=[];
var methods=[];
for(var i=0;i<n.publics.length;i++){
var nn=n.publics[i];
if(nn instanceof mod.ClassNode){
classes.push(nn);
}else if(nn instanceof mod.MethodNode){
methods.push(nn);
}else if(nn instanceof mod.PropertyNode){
props.push(nn);
}
}
if(n.publics.length>0){
this.pprint('<publics>',4);
if(classes.length>0){
this.pprint('<classes>',4);
for(var i=0;i<classes.length;i++){
this.printClassNode(classes[i]);
}
this.pprint('</classes>',-4);
}
if(methods.length>0){
this.pprint('<methods>',4);
for(var i=0;i<methods.length;i++){
this.printMethodNode(methods[i]);
}
this.pprint('</methods>',-4);
}
if(props.length>0){
this.pprint('<properties>',4);
for(var i=0;i<props.length;i++){
this.printPropertyNode(props[i]);
}
this.pprint('</properties>',-4);
}
this.pprint('</publics>',-4);
}
};
publ.printPropertyNode=function(n){
this.pprint('<property>',4);
this.pprint('<name>'+n.name+'</name>');
this.pprint('<description>',4);
this.pprint(n.description);
this.pprint('</description>',-4);
this.pprint('</property>',-4);
};
publ.printMethodNode=function(n){
this.pprint('<method>',4);
this.pprint('<name>'+n.name+'('+n.parameters.join(', ')+')</name>');
this.pprint('<description>',4);
this.pprint(n.description);
this.pprint('</description>',-4);
this.pprint('</method>',-4);
};
});
// Self-test: parse every jsolait library file into one GlobalNode.
// On a parse failure, rethrow with file name, line/column position and a
// snippet of the remaining source for diagnosis.
mod.__main__=function(){
var it=imprt('iter');
var c=imprt('codecs');
var filenames=['jsolait.js','lib/codecs.js','lib/crypto.js',
'lib/dom.js',
'lib/forms.js',
'lib/iter.js',
'lib/jsonrpc.js',
'lib/lang.js',
'lib/sets.js',
'lib/testing.js',
'lib/urllib.js',
'lib/xml.js',
'lib/xmlrpc.js'];
var gn=new mod.GlobalNode();
iter(filenames,function(fname){
fname=jsolait.baseURI+'/'+fname;
var s=jsolait.loadURI(fname);
var p=new mod.Parser(s,gn);
try{
p.parseStatements(p.next());
}catch(e){
var l=p.getPosition();
throw new mod.Exception(fname.slice('file://'.length)+'('+(l[0])+','+l[1]+') '+e+' near:\n'+p._working.slice(0,200));
}});
};
}); | zif.jsonserver | /zif.jsonserver-0.6.tar.gz/zif.jsonserver-0.6/src/zif/jsonserver/jsolait/lib/lang.js | lang.js |
// codecs module: pluggable string encoders/decoders installed as
// String.prototype.encode_<name> / decode_<name> methods.
Module("codecs","$Revision: 80 $",function(mod){
// Return the names of all installed encoders (the part after "encode_").
mod.listEncoders=function(){
var c=[];
for(var attr in String.prototype){
if(attr.slice(0,7)=="encode_"){
c.push(attr.slice(7));
}
}
return c;
};
// Return the names of all installed decoders (the part after "decode_").
mod.listDecoders=function(){
var c=[];
for(var attr in String.prototype){
if(attr.slice(0,7)=="decode_"){
c.push(attr.slice(7));
}
}
return c;
};
// Decode this string using the codec named `codec`.  Any extra arguments
// are forwarded to the decoder, which is looked up as
// String.prototype["decode_" + codec].  Throws mod.Exception when the
// decoder is not installed.
String.prototype.decode=function(codec){
var n="decode_"+codec;
if(String.prototype[n]){
var args=[];
for(var i=1;i<arguments.length;i++){
args[i-1]=arguments[i];
}
return String.prototype[n].apply(this,args);
}else{
throw new mod.Exception("Decoder '%s' not found.".format(codec));
}
};
// Encode this string using the codec named `codec`.  Any extra arguments
// are forwarded to the encoder, which is looked up as
// String.prototype["encode_" + codec].  Throws mod.Exception when the
// encoder is not installed.
String.prototype.encode=function(codec){
var n="encode_"+codec;
if(String.prototype[n]){
var args=[];
for(var i=1;i<arguments.length;i++){
args[i-1]=arguments[i];
}
return String.prototype[n].apply(this,args);
}else{
// BUGFIX: the error message previously read "Ecnoder".
throw new mod.Exception("Encoder '%s' not found.".format(codec));
}
};
// Base64-decode this string.  Uses the browser's native atob() when it
// exists, otherwise decodes 4-character groups into 3 bytes by hand and
// trims the output according to trailing '=' padding.  The input length
// must be a multiple of 4.
String.prototype.decode_base64=function(){
if((this.length%4)==0){
if(typeof(atob)!="undefined"){return atob(this);
}else{
var nBits;
var sDecoded=new Array(this.length/4);
var base64='ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/=';
for(var i=0;i<this.length;i+=4){
// pack four 6-bit indexes into one 24-bit value
nBits=(base64.indexOf(this.charAt(i))&0xff)<<18|
(base64.indexOf(this.charAt(i+1))&0xff)<<12|
(base64.indexOf(this.charAt(i+2))&0xff)<<6|
base64.indexOf(this.charAt(i+3))&0xff;
sDecoded[i]=String.fromCharCode((nBits&0xff0000)>>16,(nBits&0xff00)>>8,nBits&0xff);
}
// drop the bytes produced by '=' padding (char code 61) in the last group
sDecoded[sDecoded.length-1]=sDecoded[sDecoded.length-1].substring(0,3-((this.charCodeAt(i-2)==61)?2:(this.charCodeAt(i-1)==61?1:0)));
return sDecoded.join("");
}
}else{
throw new mod.Exception("String length must be divisible by 4.");
}
};
// Base64-encode this string.  Uses the browser's native btoa() when it
// exists, otherwise pads the input with NUL bytes to a multiple of 3,
// packs each 3-byte group into four 6-bit indexes, and finally replaces
// the characters produced by the padding with '='.
String.prototype.encode_base64=function(){
if(typeof(btoa)!="undefined"){return btoa(this);
}else{
var base64=['A','B','C','D','E','F','G','H','I','J','K','L','M','N','O','P','Q','R','S','T','U','V','W','X','Y','Z',
'a','b','c','d','e','f','g','h','i','j','k','l','m','n','o','p','q','r','s','t','u','v','w','x','y','z',
'0','1','2','3','4','5','6','7','8','9','+','/'];
var sbin;
var pad=0;
var s=""+this;
if((s.length%3)==1){
s+=String.fromCharCode(0);
s+=String.fromCharCode(0);
pad=2;
}else if((s.length%3)==2){
s+=String.fromCharCode(0);
pad=1;
}
var rslt=new Array(s.length/3);
var ri=0;
for(var i=0;i<s.length;i+=3){
// pack three bytes into 24 bits, then split into four base64 digits
sbin=((s.charCodeAt(i)&0xff)<<16)|((s.charCodeAt(i+1)&0xff)<<8)|(s.charCodeAt(i+2)&0xff);rslt[ri]=(base64[(sbin>>18)&0x3f]+base64[(sbin>>12)&0x3f]+base64[(sbin>>6)&0x3f]+base64[sbin&0x3f]);
ri++;
}
if(pad>0){
rslt[rslt.length-1]=rslt[rslt.length-1].substr(0,4-pad)+((pad==2)?"==":(pad==1)?"=":"");
}
return rslt.join("");
}
};
// URI codec: thin wrappers over the native encode/decodeURIComponent.
String.prototype.decode_uri=function(){
return decodeURIComponent(this);
};
String.prototype.encode_uri=function(){
return encodeURIComponent(this);
};
});
// urllib module: cross-browser HTTP requests (XMLHttpRequest, MSXML
// ActiveX, or Adobe SVG Viewer getURL/postURL).
Module("urllib","$Revision: 80 $",function(mod){
// Raised when no HTTP request implementation could be constructed.
mod.NoHTTPRequestObject=Class(mod.Exception,function(publ,supr){
publ.__init__=function(trace){
supr.__init__.call(this,"Could not create an HTTP request object",trace);
};
});
// Raised when opening the request (connect phase) fails.
mod.RequestOpenFailed=Class(mod.Exception,function(publ,supr){
publ.__init__=function(trace){
supr.__init__.call(this,"Opening of HTTP request failed.",trace);
};
});
// Raised when sending the request body fails (synchronous case).
mod.SendFailed=Class(mod.Exception,function(publ,supr){
publ.__init__=function(trace){
supr.__init__.call(this,"Sending of HTTP request failed.",trace);
};
});
// ASVRequest: adapts the Adobe SVG Viewer's getURL/postURL functions to a
// minimal XMLHttpRequest-like interface (open/setRequestHeader/send plus
// readyState/responseText/status).  Only asynchronous use is supported.
var ASVRequest=Class(function(publ){
publ.__init__=function(){
if((getURL==null)||(postURL==null)){
throw "getURL and postURL are not available!";
}else{
this.readyState=0;
this.responseText="";
this.__contType="";
// ASV gives no real status; pretend success
this.status=200;
}
};
publ.open=function(type,url,async){
if(async==false){
throw "Can only open asynchronous connections!";
}
this.__type=type;
this.__url=url;
this.readyState=0;
};
// Only the Content-Type header is honoured by postURL.
publ.setRequestHeader=function(name,value){
if(name=="Content-Type"){
this.__contType=value;
}
};
publ.send=function(data){
var self=this;
var cbh=new Object();
// ASV completion callback: mimic readyState 4 + onreadystatechange
cbh.operationComplete=function(rsp){
self.readyState=4;
self.responseText=rsp.content;
if(this.ignoreComplete==false){
if(self.onreadystatechange){
self.onreadystatechange();
}
}
};
cbh.ignoreComplete=false;
try{
if(this.__type=="GET"){
getURL(this.__url,cbh);
}else if(this.__type=="POST"){
postURL(this.__url,data,cbh,this.__contType);
}
}catch(e){
// suppress a late completion callback after a synchronous failure
cbh.ignoreComplete=true;
throw e;
}
};
});
// Construct an HTTP request object, trying native XMLHttpRequest first,
// then the various MSXML ActiveX versions, and finally the ASV adapter.
// Throws mod.NoHTTPRequestObject when nothing is available.
var getHTTP=function(){
var obj;
try{obj=new XMLHttpRequest();
}catch(e){
try{obj=new ActiveXObject("Msxml2.XMLHTTP.4.0");
}catch(e){
try{obj=new ActiveXObject("Msxml2.XMLHTTP");
}catch(e){
try{obj=new ActiveXObject("microsoft.XMLHTTP");}catch(e){
try{obj=new ASVRequest();
}catch(e){
throw new mod.NoHTTPRequestObject("Neither Mozilla, IE nor ASV found. Can't do HTTP request without them.");
}
}
}}
}
return obj;
};
// Send an HTTP request.  All parameters after `url` are optional and are
// recognised by position/type: a trailing Function makes the request
// asynchronous (the function becomes the completion callback), an Array
// before it supplies [name,value] header pairs, and user/pass are only
// treated as credentials when both are strings.  Returns the request
// object; for synchronous requests the response is available on it.
mod.sendRequest=function(type,url,user,pass,data,headers,callback){
var async=false;
if(arguments[arguments.length-1] instanceof Function){
var async=true;
callback=arguments[arguments.length-1];
}
// the headers array (if any) sits just before the callback/null slot
var headindex=arguments.length-((async||arguments[arguments.length-1]==null)?2:1);
if(arguments[headindex] instanceof Array){
headers=arguments[headindex];
}else{
headers=[];
}
if(typeof user=="string"&&typeof pass=="string"){
if(typeof data!="string"){
data="";
}
}else if(typeof user=="string"){
// only one string given: it is the request body, not credentials
data=user;
user=null;
pass=null;
}else{
user=null;
pass=null;
}
var xmlhttp=getHTTP();
try{
if(user!=null){
xmlhttp.open(type,url,async,user,pass);
}else{
xmlhttp.open(type,url,async);
}
}catch(e){
throw new mod.RequestOpenFailed(e);
}
for(var i=0;i<headers.length;i++){
try{xmlhttp.setRequestHeader(headers[i][0],headers[i][1]);}catch(e){
// some implementations reject certain headers - best effort only
}
}
if(async){xmlhttp.onreadystatechange=function(){
if(xmlhttp.readyState==4){
callback(xmlhttp);
xmlhttp=null;}else if(xmlhttp.readyState==2){
// Mozilla-only: probing .status at readyState 2 detects a failed
// connection early; on other browsers the netscape probe throws
try{var isNetscape=netscape;
try{var s=xmlhttp.status;
}catch(e){callback(xmlhttp);
xmlhttp=null;
}
}catch(e){
}
}
};
}
try{
xmlhttp.send(data);
}catch(e){if(async){
callback(xmlhttp,e);
xmlhttp=null;
}else{
throw new mod.SendFailed(e);
}
}
return xmlhttp;
};
mod.getURL=function(url,user,pass,headers,callback){var a=["GET"];
for(var i=0;i<arguments.length;i++){
a.push(arguments[i]);
}
return mod.sendRequest.apply(this,a);
};
mod.postURL=function(url,user,pass,data,headers,callback){var a=["POST"];
for(var i=0;i<arguments.length;i++){
a.push(arguments[i]);
}
return mod.sendRequest.apply(this,a);
};
}); | zif.jsonserver | /zif.jsonserver-0.6.tar.gz/zif.jsonserver-0.6/src/zif/jsonserver/jsolait/lib/urllib.js | urllib.js |
// xml module: cross-browser XML parsing, DOM node import and
// serialization back to markup.
Module("xml","$Revision: 80 $",function(mod){
// well-known namespace URIs and their fixed prefixes
mod.XMLNS="http://www.w3.org/2000/xmlns/";
mod.NSXML="http://www.w3.org/XML/1998/namespace";
mod.nsPrefixMap={"http://www.w3.org/2000/xmlns/":"xmlns","http://www.w3.org/XML/1998/namespace":"xml"};
// Raised when no XML parser implementation could be constructed.
mod.NoXMLParser=Class(mod.Exception,function(publ,supr){
publ.__init__=function(trace){
supr.__init__.call(this,"Could not create an XML parser.",trace);
};
});
// Raised when a document fails to parse; the offending source is kept
// on the `xml` property.
mod.ParsingFailed=Class(mod.Exception,function(publ,supr){
publ.__init__=function(xml,trace){
supr.__init__.call(this,"Failed parsing XML document.",trace);
this.xml=xml;
};
publ.xml;
});
// Parse an XML source string into a DOM document, trying ASV's
// window.parseXML, Mozilla's DOMParser, then the MSXML ActiveX variants.
// Throws NoXMLParser when no implementation exists and ParsingFailed when
// the source cannot be parsed.
mod.parseXML=function(xml){
var obj=null;
var isMoz=false;
var isIE=false;
var isASV=false;
try{var p=window.parseXML;
if(p==null){
throw "No ASV paseXML";
}
isASV=true;
}catch(e){
try{obj=new DOMParser();
isMoz=true;
}catch(e){
try{obj=new ActiveXObject("Msxml2.DomDocument.4.0");isIE=true;
}catch(e){
try{obj=new ActiveXObject("Msxml2.DomDocument");isIE=true;
}catch(e){
try{obj=new ActiveXObject("microsoft.XMLDOM");isIE=true;
}catch(e){
throw new mod.NoXMLParser(e);
}
}
}
}
}
try{
if(isMoz){
obj=obj.parseFromString(xml,"text/xml");
return obj;
}else if(isIE){
obj.loadXML(xml);
return obj;
}else if(isASV){
return window.parseXML(xml,null);
}
}catch(e){
throw new mod.ParsingFailed(xml,e);
}
};
// Re-create `importedNode` (from a foreign document) in the current
// `document`, as a fallback for DOM implementations lacking
// document.importNode.  When `deep` (default true), children are imported
// recursively.  Document, doctype, notation and entity nodes are not
// supported under DOM level 2 and raise an error.
mod.importNode=function(importedNode,deep){
deep=(deep==null)?true:deep;
// DOM nodeType constants, spelled out for pre-DOM2 browsers
var ELEMENT_NODE=1;
var ATTRIBUTE_NODE=2;
var TEXT_NODE=3;
var CDATA_SECTION_NODE=4;
var ENTITY_REFERENCE_NODE=5;
var ENTITY_NODE=6;
var PROCESSING_INSTRUCTION_NODE=7;
var COMMENT_NODE=8;
var DOCUMENT_NODE=9;
var DOCUMENT_TYPE_NODE=10;
var DOCUMENT_FRAGMENT_NODE=11;
var NOTATION_NODE=12;
var importChildren=function(srcNode,parent){
if(deep){
for(var i=0;i<srcNode.childNodes.length;i++){
var n=mod.importNode(srcNode.childNodes.item(i),true);
parent.appendChild(n);
}
}
};
var node=null;
switch(importedNode.nodeType){
case ATTRIBUTE_NODE:
node=document.createAttributeNS(importedNode.namespaceURI,importedNode.nodeName);
node.value=importedNode.value;
break;
case DOCUMENT_FRAGMENT_NODE:
node=document.createDocumentFragment();
importChildren(importedNode,node);
break;
case ELEMENT_NODE:
node=document.createElementNS(importedNode.namespaceURI,importedNode.tagName);
for(var i=0;i<importedNode.attributes.length;i++){
var attr=this.importNode(importedNode.attributes.item(i),deep);
node.setAttributeNodeNS(attr);
}
importChildren(importedNode,node);
break;
case ENTITY_REFERENCE_NODE:
// entity references are passed through unchanged
node=importedNode;
break;
case PROCESSING_INSTRUCTION_NODE:
node=document.createProcessingInstruction(importedNode.target,importedNode.data);
break;
case TEXT_NODE:
case CDATA_SECTION_NODE:
case COMMENT_NODE:
node=document.createTextNode(importedNode.nodeValue);
break;
case DOCUMENT_NODE:
case DOCUMENT_TYPE_NODE:
case NOTATION_NODE:
case ENTITY_NODE:
throw "not supported in DOM2";
break;
}
return node;
};
// Resolve the prefix (including trailing ':') to use for `namespaceURI`
// when serializing `node`: consult the fixed and caller-supplied prefix
// maps first, then look for an xmlns declaration on the node and its
// ancestors.  Throws when no prefix can be determined.
var getNSPrefix=function(node,namespaceURI,nsPrefixMap){
if(!namespaceURI){
return "";
}else if(mod.nsPrefixMap[namespaceURI]){
return mod.nsPrefixMap[namespaceURI]+":";
}else if(nsPrefixMap[namespaceURI]!=null){
return nsPrefixMap[namespaceURI]+":";
}
if(node.nodeType==1){
// scan the element's attributes for a matching xmlns:* declaration
for(var i=0;i<node.attributes.length;i++){
var attr=node.attributes.item(i);
if(attr.namespaceURI==mod.XMLNS&&attr.value==namespaceURI){
return attr.localName+":";
}
}
}else{
throw new Error("Cannot find a namespace prefix for "+namespaceURI);
}
if(node.parentNode){return getNSPrefix(node.parentNode,namespaceURI,nsPrefixMap);}else{
throw new Error("Cannot find a namespace prefix for "+namespaceURI);
}
};
// Serialize a DOM node (and, for elements/documents, its subtree) back to
// markup text.  `nsPrefixMap` maps extra namespace URIs to prefixes;
// `attrParent` is the owner element used to resolve an attribute's
// namespace prefix.  Doctype, notation, entity and fragment nodes are
// unsupported and raise mod.Exception.
mod.node2XML=function(node,nsPrefixMap,attrParent){
nsPrefixMap=(nsPrefixMap==null)?{}:nsPrefixMap;
// DOM nodeType constants, spelled out for pre-DOM2 browsers
var ELEMENT_NODE=1;
var ATTRIBUTE_NODE=2;
var TEXT_NODE=3;
var CDATA_SECTION_NODE=4;
var ENTITY_REFERENCE_NODE=5;
var ENTITY_NODE=6;
var PROCESSING_INSTRUCTION_NODE=7;
var COMMENT_NODE=8;
var DOCUMENT_NODE=9;
var DOCUMENT_TYPE_NODE=10;
var DOCUMENT_FRAGMENT_NODE=11;
var NOTATION_NODE=12;
var s="";
switch(node.nodeType){
case ATTRIBUTE_NODE:
try{
var nsprefix=getNSPrefix(attrParent,node.namespaceURI,nsPrefixMap);
}catch(e){
alert(node.namespaceURI+"\n"+e.message);
}
// avoid emitting the degenerate "xmlns:xmlns" prefix
if(nsprefix+node.localName=="xmlns:xmlns"){
nsprefix="";
}s+=nsprefix+node.localName+'="'+node.value+'"';
break;
case DOCUMENT_NODE:
s+=this.node2XML(node.documentElement,nsPrefixMap);
break;
case ELEMENT_NODE:
s+="<"+node.tagName;
for(var i=0;i<node.attributes.length;i++){
s+=" "+this.node2XML(node.attributes.item(i),nsPrefixMap,node);
}
if(node.childNodes.length==0){
s+="/>\n";
}else{
s+=">";
for(var child=node.firstChild;child!=null;child=child.nextSibling){
s+=this.node2XML(child,nsPrefixMap);
}
s+="</"+node.tagName+">\n";
}
break;
case PROCESSING_INSTRUCTION_NODE:
s+="<?"+node.target+" "+node.data+" ?>";
break;
case TEXT_NODE:
s+=node.nodeValue;
break;
case CDATA_SECTION_NODE:
s+="<"+"![CDATA["+node.nodeValue+"]"+"]>";
break;
case COMMENT_NODE:
s+="<!--"+node.nodeValue+"-->";
break;
case ENTITY_REFERENCE_NODE:
case DOCUMENT_FRAGMENT_NODE:
case DOCUMENT_TYPE_NODE:
case NOTATION_NODE:
case ENTITY_NODE:
throw new mod.Exception("Nodetype(%s) not supported.".format(node.nodeType));
break;
}
return s;
};
});
// iter module: python-style iteration over arrays, objects and custom
// iterators, with optional timeout-driven asynchronous stepping.
Module("iter","$Revision: 80 $",function(mod){
// Base iterator protocol: next() yields values until it returns
// undefined; __iter__() returns the iterator itself.
mod.Iterator=Class(function(publ,supr){
publ.next=function(){
return undefined;
};
publ.__iter__=function(){
return this;
};
});
// Range(end) / Range(start,end) / Range(start,end,step): iterator over a
// numeric sequence.  Note that `end` is reached inclusively (iteration
// stops only once current+step would exceed end).
mod.Range=Class(mod.Iterator,function(publ,supr){
publ.__init__=function(start,end,step){
switch(arguments.length){
case 1:
this.start=0;
this.end=start;
this.step=1;
break;
case 2:
this.start=start;
this.end=end;
this.step=1;
break;
default:
this.start=start;
this.end=end;
this.step=step;
break;
}
// primed one step before start so the first next() yields start
this.current=this.start-this.step;
};
publ.next=function(){
if(this.current+this.step>this.end){
// exhausted: rewind so the range can be iterated again
this.current=this.start;
return undefined;
}else{
this.current=this.current+this.step;
return this.current;
}
};
});
// also exported as a global for convenience
Range=mod.Range;
// Iterator over an Array's elements by index.  (The misspelled name
// "ArrayItereator" is kept: it is part of the module's public interface.)
mod.ArrayItereator=Class(mod.Iterator,function(publ,supr){
publ.__init__=function(array){
this.array=array;
this.index=-1;
};
publ.next=function(){
this.index+=1;
if(this.index>=this.array.length){
return undefined;
}else{
return this.array[this.index];
}
};
});
// Iterator over an object's enumerable properties: yields {key, value}
// records.  The key list is snapshotted at construction time.
mod.ObjectIterator=Class(mod.Iterator,function(publ,supr){
publ.__init__=function(obj){
this.obj=obj;
this.keys=[];
for(var n in obj){
this.keys.push(n);
}
this.index=-1;
};
publ.next=function(){
this.index+=1;
if(this.index>=this.keys.length){
return undefined;
}else{
var key=this.keys[this.index];
var rslt={key:key};
try{
rslt.value=this.obj[key];
}catch(e){
// property access may throw (e.g. host objects) - leave value unset
}
return rslt;
}
};
});
// Make plain Arrays iterable via the module's iterator protocol.
Array.prototype.__iter__=function(){
return new mod.ArrayItereator(this);
};
// Documentation stub describing the callback signature used by iter().
mod.IterationCallback=function(item,iteration){};
// Synchronous iteration: start()/resume() loops over the iterable,
// invoking callback(item, iteration) for each item until the iterator is
// exhausted or the callback calls iteration.stop().
mod.Iteration=Class(function(publ,supr){
publ.__init__=function(iterable,thisObj,callback){
this.doStop=false;
// `this` for the callback; null means the Iteration itself
this.thisObj=thisObj;
if(iterable.__iter__!==undefined){
this.iterator=iterable.__iter__();
}else{
// plain objects are iterated as {key, value} records
this.iterator=new mod.ObjectIterator(iterable);
}
this.callback=callback;
};
publ.resume=function(){
this.doStop=false;
var item;
while(!this.doStop){
item=this.iterator.next();
if(item===undefined){
this.stop();
}else{
this.callback.call(this.thisObj==null?this:this.thisObj,item,this);
}
}
};
publ.stop=function(){
this.doStop=true;
};
publ.start=function(){
this.resume();
};
});
// Asynchronous iteration: each step is scheduled with setTimeout so the
// host application stays responsive between callback invocations.
// Running instances register themselves in the global iter.iterations
// table under a numeric id, which the timer callback uses to find them.
mod.AsyncIteration=Class(mod.Iteration,function(publ,supr){
    // iterable: object with __iter__ or a plain object; interval: delay in
    // ms between steps; thisObj: `this` for the callback (may be null);
    // callback: function(item, iteration).
    publ.__init__=function(iterable,interval,thisObj,callback){
        this.doStop=false;
        this.thisObj=thisObj;
        if(iterable.__iter__!==undefined){
            this.iterator=iterable.__iter__();
        }else{
            this.iterator=new mod.ObjectIterator(iterable);
        }
        this.interval=interval;
        this.callback=callback;
        this.isRunning=false;
    };
    // Cancel the pending timer and deregister from the global table.
    publ.stop=function(){
        if(this.isRunning){
            this.isRunning=false;
            clearTimeout(this.timeout);
            delete iter.iterations[this.id];
        }
    };
    // Register under the first free numeric id and schedule the first step.
    publ.resume=function(){
        if(this.isRunning==false){
            this.isRunning=true;
            var id=0;
            while(iter.iterations[id]!==undefined){
                // BUGFIX: was `this.id++`, which never advanced `id` and
                // looped forever once any iteration was registered.
                id++;
            }
            this.id=""+id;
            iter.iterations[this.id]=this;
            this.timeout=setTimeout("iter.handleAsyncStep('"+this.id+"')",this.interval);
        }
    };
    // One timer tick: fetch the next item, dispatch it, reschedule.
    publ.handleAsyncStep=function(){
        if(this.isRunning){
            // BUGFIX: was `tem=this.iterator.next()`, which left `item`
            // undefined (stopping immediately) and leaked a global `tem`.
            var item=this.iterator.next();
            if(item===undefined){
                this.stop();
            }else{
                this.callback.call(this.thisObj==null?this:this.thisObj,item,this);
                this.timeout=setTimeout("iter.handleAsyncStep('"+this.id+"')",this.interval);
            }
        }
    };
});
// Global convenience entry point: iter(iterable[, delay][, thisObj], cb).
// The last argument is always the callback; a numeric `delay` > -1 makes
// the iteration asynchronous; a non-numeric second argument of 3 is
// treated as thisObj.  Returns the started Iteration object.
iter=function(iterable,delay,thisObj,cb){
cb=arguments[arguments.length-1];
if((arguments.length==3)&&(typeof delay=='object')){
thisObj=delay;
delay=-1;
}else{
thisObj=null;
}
if(delay>-1){
var it=new mod.AsyncIteration(iterable,delay,thisObj,cb);}else{
var it=new mod.Iteration(iterable,thisObj,cb);
}
it.start();
return it;
};
// Timer callback target: forwards a tick to the registered iteration.
iter.handleAsyncStep=function(id){
if(iter.iterations[id]){
iter.iterations[id].handleAsyncStep();
}
};
// registry of running AsyncIteration instances, keyed by id
iter.iterations={};
mod.__main__=function(){
var testing=imprt('testing');
var task=function(){
var s='';
for(var i=0;i<10;i++){
s+=i;
}
};
r=[];
for(var i=0;i<100;i++){
r[i]=i;
}
print("for loop \t\t\t"+testing.timeExec(100,function(){
var s='';
for(var i=0;i<100;i++){
s+=r[i];
task();
}
}));
print("Range iter \t\t"+testing.timeExec(100,function(){
var s='';
iter(new mod.Range(100),function(item,i){
s+=r[item];
task();
});
}));
print("Array iter \t\t\t"+testing.timeExec(100,function(){
var s='';
iter(r,function(item,i){
s+=item;
task();
});
}));
print("for in on Array \t\t"+testing.timeExec(100,function(){
var s='';
for(var i in r){
s+=r[i];
task();
}
}));
r=[];
for(var i=0;i<100;i++){
r["k"+i]=i;
}
print("for in on as.Array \t"+testing.timeExec(100,function(){
var s='';
for(var i in r){
s+=r[i];
task();
}
}));
r={};
for(var i=0;i<100;i++){
r["k"+i]=i;
}
print("for in on dictionary \t"+testing.timeExec(100,function(){
var s='';
for(var i in r){
s+=r[i];
task();
}
}));
r=[];
for(var i=0;i<100;i++){
r[i]=i;
}
print("for on Array + iter \t"+testing.timeExec(100,function(){
var s='';
for(i=r.__iter__();item=i.next()!==undefined;){
s+=item;
task();
}
}));
};
}); | zif.jsonserver | /zif.jsonserver-0.6.tar.gz/zif.jsonserver-0.6/src/zif/jsonserver/jsolait/lib/iter.js | iter.js |
****************
zif.sedna
****************
Sedna, available at http://modis.ispras.ru/sedna/, under Apache 2.0 license, is
a transactional native XML database operating over TCP/IP. Sedna is open
source and has excellent documentation. The mailing list, [Sedna-discussion],
is responsive to questions. Sedna currently runs on Windows 2000/XP and
Linux x86, available in source and binary formats.
Analogous to an SQL database, a Sedna database is a set of related XML
documents and collections of XML documents. Documents hold data in an XML
structure, not restricted to any particular (e.g., tabular) format. Collections
are used to organize documents with similar schemas so that those documents may
be queried together or separately.
A Sedna server may handle multiple databases. A database may contain multiple
XML documents and multiple collections of multiple XML documents. Data size is
unrestricted.
Analogous to an SQL database, data access is through a query language. Sedna's
query language is XQuery, http://www.w3.org/TR/xquery/. XQuery is more
like python or C or perl than like XML. Particularly, XPath expressions are
like list generators, obtaining data elements by their type, value, and/or
path, and FLOWR expressions are like list comprehensions. There are several
XQuery tutorials on the web. Like SQL, XQuery may get complicated, but the
easy stuff is often powerful enough for your needs.
Sedna has extensions to XQuery for inserting, updating, deleting, etc., which
makes Sedna a worthy option for object persistence. ZODB can store anything
picklable. Similarly, Sedna can store anything that can be represented in XML.
Sedna has ACID transactions, triggers, indexes, support for ODBC within XQuery,
SQL database-like user/permission management, and many other interesting and
useful features.
zif.sedna provides
a connection and query interface to a Sedna server
a dbapi-like interface (e.g., connections and cursors)
a database adapter for zope(3) with connection pooling and (provisional)
thread safety.
sednaobject, which provides pythonic interfaces to the Sedna server for
CRUD operations. It abstracts read-only query results into python
sequence-like items and also provides a read-write elementtree-like
interface for managing individual elements and their descendents.
See 'src/zif/sedna/README.txt' for more information and doctest examples.
See 'src/zif/sedna/README_da.txt' to use the zope3 database adapter in zope.
See 'src/zif/sedna/README_sednaobject.txt' for sednaobject usage and doctests.
See 'src/zif/sedna/README_pylons.txt' to use the zope3 database adapter in
pylons.
Releases
********
================
0.9 beta (2008/02/07)
================
Initial release
================
0.9 beta2 (2008/02/15)
================
Support pyformat %(var)s for atomic values in BasicCursor.
Improved thread friendliness.
Preliminary instructions for use with pylons.
================
0.10 alpha1 (2008/03/23)
================
sednaobject provided. Abstractions for pythonic CRUD operations.
lxml now required
| zif.sedna | /zif.sedna-0.10alpha1.tar.gz/zif.sedna-0.10alpha1/README.txt | README.txt |
# altered 2 feb08 to remove explicit dependencies on SQLAlchemy by jwashin
# specific alterations annotated with __jmw__
"""Connection pooling for DB-API connections.
Provides a number of connection pool implementations for a variety of
usage scenarios and thread behavior requirements imposed by the
application, DB-API or database itself.
Also provides a DB-API 2.0 connection proxying mechanism allowing
regular DB-API connect() methods to be transparently managed by a
SQLAlchemy connection pool.
"""
import weakref, time
# __jmw__ use local dbapiexceptions instead of sqlalchemy's
from dbapiexceptions import *
# __jmw__ use python logging instead of sqlalchemy's
import logging
# __jmw__ use python's queue instead of sqlalchemy's
#import queue as Queue
import Queue
# __jmw__ use python cPickle instead of sqlalchemy's
import cPickle as pickle
# __jmw__ use python thread and threading instead of sqlalchemy's
try:
import threading as _threading
except ImportError:
import dummy_threading as _threading
try:
import thread as _thread
except ImportError:
import dummy_thread as _thread
proxies = {}
def manage(module, **params):
    """Return a proxy for a DB-API module that automatically pools connections.

    Given a DB-API 2.0 module and pool management parameters, returns
    a proxy for the module that will automatically pool connections,
    creating new connection pools for each distinct set of connection
    arguments sent to the decorated module's connect() function.

    Arguments:

    module
      A DB-API 2.0 database module.

    poolclass
      The class used by the pool module to provide pooling.  Defaults
      to ``QueuePool``.

    See the ``Pool`` class for options.
    """
    existing = proxies.get(module)
    if existing is not None:
        return existing
    # setdefault keeps the race benign: concurrent callers all end up
    # sharing whichever proxy wins the insertion.
    return proxies.setdefault(module, _DBProxy(module, **params))
def clear_managers():
    """Remove all current DB-API 2.0 managers.

    All pools and connections are disposed.
    """
    for manager in list(proxies.values()):
        manager.close()
    proxies.clear()
class Pool(object):
    """Base class for connection pools.

    This is an abstract class, implemented by various subclasses
    including:

    QueuePool
      Pools multiple connections using ``Queue.Queue``.

    SingletonThreadPool
      Stores a single connection per execution thread.

    NullPool
      Doesn't do any pooling; opens and closes connections.

    AssertionPool
      Stores only one connection, and asserts that only one connection
      is checked out at a time.

    The main argument, `creator`, is a callable function that returns
    a newly connected DB-API connection object.

    Options that are understood by Pool are:

    echo
      If set to True, connections being pulled and retrieved from/to
      the pool will be logged to the standard output, as well as pool
      sizing information.  Echoing can also be achieved by enabling
      logging for the "sqlalchemy.pool" namespace.  Defaults to False.

    use_threadlocal
      If set to True, repeated calls to ``connect()`` within the same
      application thread will be guaranteed to return the same
      connection object, if one has already been retrieved from the
      pool and has not been returned yet.  This allows code to retrieve
      a connection from the pool, and then while still holding on to
      that connection, to call other functions which also ask the pool
      for a connection of the same arguments; those functions will act
      upon the same connection that the calling method is using.
      Defaults to True.

    recycle
      If set to non -1, a number of seconds between connection
      recycling, which means upon checkout, if this timeout is
      surpassed the connection will be closed and replaced with a
      newly opened connection. Defaults to -1.

    listeners
      A list of ``PoolListener``-like objects that receive events when
      DB-API connections are created, checked out and checked in to
      the pool.
    """

    def __init__(self, creator, recycle=-1, echo=None, use_threadlocal=True,
                 listeners=None):
        self.logger = logging.getLogger()
        # the WeakValueDictionary works more nicely than a regular dict
        # of weakrefs. the latter can pile up dead reference objects which don't
        # get cleaned out. WVD adds from 1-6 method calls to a checkout operation.
        self._threadconns = weakref.WeakValueDictionary()
        self._creator = creator
        self._recycle = recycle
        self._use_threadlocal = use_threadlocal
        self.echo = echo
        # listener lists, partitioned by which events each listener handles
        self.listeners = []
        self._on_connect = []
        self._on_checkout = []
        self._on_checkin = []
        self._should_log_info = False
        if listeners:
            for l in listeners:
                self.add_listener(l)

    def unique_connection(self):
        # Always hand out a fresh checkout, bypassing thread-local reuse.
        return _ConnectionFairy(self).checkout()

    def create_connection(self):
        # Open a brand-new DB-API connection wrapped in a _ConnectionRecord.
        return _ConnectionRecord(self)

    def recreate(self):
        """Return a new instance with identical creation arguments."""
        raise NotImplementedError()

    def dispose(self):
        """Dispose of this pool.

        This method leaves the possibility of checked-out connections
        remaining open, It is advised to not reuse the pool once dispose()
        is called, and to instead use a new pool constructed by the
        recreate() method.
        """
        raise NotImplementedError()

    def connect(self):
        # Check out a connection; with use_threadlocal, repeated calls from
        # the same thread share one _ConnectionFairy until it is returned.
        if not self._use_threadlocal:
            return _ConnectionFairy(self).checkout()
        try:
            # __jmw__ _thread instead of thread
            return self._threadconns[_thread.get_ident()].checkout()
        except KeyError:
            agent = _ConnectionFairy(self)
            # __jmw__ _thread instead of thread
            self._threadconns[_thread.get_ident()] = agent
            return agent.checkout()

    def return_conn(self, record):
        # Drop the thread-local association (if any) before checking in.
        # __jmw__ _thread instead of thread
        if self._use_threadlocal and _thread.get_ident() in self._threadconns:
            # __jmw__ _thread instead of thread
            del self._threadconns[_thread.get_ident()]
        self.do_return_conn(record)

    def get(self):
        return self.do_get()

    def do_get(self):
        # subclass hook: produce a _ConnectionRecord for checkout
        raise NotImplementedError()

    def do_return_conn(self, conn):
        # subclass hook: accept a returned _ConnectionRecord
        raise NotImplementedError()

    def status(self):
        raise NotImplementedError()

    def add_listener(self, listener):
        """Add a ``PoolListener``-like object to this pool."""
        self.listeners.append(listener)
        if hasattr(listener, 'connect'):
            self._on_connect.append(listener)
        if hasattr(listener, 'checkout'):
            self._on_checkout.append(listener)
        if hasattr(listener, 'checkin'):
            self._on_checkin.append(listener)

    def log(self, msg):
        self.logger.info(msg)
class _ConnectionRecord(object):
    # Holds one raw DB-API connection on behalf of a Pool, together with
    # its creation time (for recycling) and a per-connection info dict
    # for listeners.

    def __init__(self, pool):
        self.__pool = pool
        self.connection = self.__connect()
        self.info = {}
        if pool._on_connect:
            for l in pool._on_connect:
                l.connect(self.connection, self)

    def close(self):
        # Close the underlying connection, logging (but swallowing) any
        # error except process-exit signals.
        if self.connection is not None:
            if self.__pool._should_log_info:
                self.__pool.log("Closing connection %r" % self.connection)
            try:
                self.connection.close()
            except (SystemExit, KeyboardInterrupt):
                raise
            except:
                if self.__pool._should_log_info:
                    self.__pool.log("Exception closing connection %r" %
                                    self.connection)

    def invalidate(self, e=None):
        # Mark the current connection unusable; a fresh one will be opened
        # lazily by the next get_connection() call.
        if self.__pool._should_log_info:
            if e is not None:
                self.__pool.log("Invalidate connection %r (reason: %s:%s)" %
                                (self.connection, e.__class__.__name__, e))
            else:
                self.__pool.log("Invalidate connection %r" % self.connection)
        self.__close()
        self.connection = None

    def get_connection(self):
        # Return the live DB-API connection, reconnecting if it was
        # invalidated or if the pool's recycle timeout has elapsed.
        if self.connection is None:
            self.connection = self.__connect()
            self.info.clear()
            if self.__pool._on_connect:
                for l in self.__pool._on_connect:
                    l.connect(self.connection, self)
        elif (self.__pool._recycle > -1 and time.time() - self.starttime > self.__pool._recycle):
            if self.__pool._should_log_info:
                self.__pool.log("Connection %r exceeded timeout; recycling" %
                                self.connection)
            self.__close()
            self.connection = self.__connect()
            self.info.clear()
            if self.__pool._on_connect:
                for l in self.__pool._on_connect:
                    l.connect(self.connection, self)
        return self.connection

    def __close(self):
        # Best-effort close used by invalidate/recycle; never raises except
        # for process-exit signals.
        try:
            if self.__pool._should_log_info:
                self.__pool.log("Closing connection %r" % self.connection)
            self.connection.close()
        except Exception, e:
            if self.__pool._should_log_info:
                self.__pool.log("Connection %r threw an error on close: %s" %
                                (self.connection, e))
            if isinstance(e, (SystemExit, KeyboardInterrupt)):
                raise

    def __connect(self):
        # Open a new DB-API connection via the pool's creator, stamping the
        # creation time used for recycle checks.
        try:
            self.starttime = time.time()
            connection = self.__pool._creator()
            if self.__pool._should_log_info:
                self.__pool.log("Created new connection %r" % connection)
            return connection
        except Exception, e:
            if self.__pool._should_log_info:
                self.__pool.log("Error on connect(): %s" % e)
            raise

    properties = property(lambda self: self.info,
                          doc="A synonym for .info, will be removed in 0.5.")

    # __jmw__ added __del__ to nicely close connections when threads die
    def __del__(self):
        self.close()
def _finalize_fairy(connection, connection_record, pool, ref=None):
    """Return *connection* to *pool* when a fairy is closed or collected.

    Called explicitly from _ConnectionFairy._close(), and also as the
    weakref callback installed on the fairy, in which case *ref* is the
    dead weakref and must still match the record's backref (otherwise the
    record has already been handed to a newer fairy and this callback is
    stale).
    """
    if ref is not None and connection_record.backref is not ref:
        return
    if connection is not None:
        try:
            # __jmw__ Zope takes care of rollback, and not nice to do it twice.
            #connection.rollback()
            # Immediately close detached instances
            if connection_record is None:
                connection.close()
        except Exception, e:
            if connection_record is not None:
                connection_record.invalidate(e=e)
            if isinstance(e, (SystemExit, KeyboardInterrupt)):
                connection.close()
            raise
    if connection_record is not None:
        # Detach the weakref, notify 'checkin' listeners, and hand the
        # record back to the pool.
        connection_record.backref = None
        if pool._should_log_info:
            pool.log("Connection %r being returned to pool" % connection)
        if pool._on_checkin:
            for l in pool._on_checkin:
                l.checkin(connection, connection_record)
        pool.return_conn(connection_record)
class _ConnectionFairy(object):
    """Proxies a DB-API connection and provides return-on-dereference support."""

    def __init__(self, pool):
        self._pool = pool
        # checkout()/close() reference count for this fairy
        self.__counter = 0
        try:
            rec = self._connection_record = pool.get()
            conn = self.connection = self._connection_record.get_connection()
            # If this fairy is garbage collected while checked out, the
            # weakref callback returns the connection to the pool
            # (see _finalize_fairy).
            self._connection_record.backref = weakref.ref(self, lambda ref:_finalize_fairy(conn, rec, pool, ref))
        except:
            self.connection = None # helps with endless __getattr__ loops later on
            self._connection_record = None
            raise
        if self._pool._should_log_info:
            self._pool.log("Connection %r checked out from pool" %
                           self.connection)

    _logger = property(lambda self: self._pool.logger)

    # True while a live DB-API connection is attached
    is_valid = property(lambda self:self.connection is not None)

    def _get_info(self):
        """An info collection unique to this DB-API connection."""
        try:
            return self._connection_record.info
        except AttributeError:
            if self.connection is None:
                # __jmw__ originally sqlalchemy.InvalidRequestError
                raise DatabaseError("This connection is closed")
            # Detached fairies keep their own private info dict.
            try:
                return self._detached_info
            except AttributeError:
                self._detached_info = value = {}
                return value

    info = property(_get_info)
    properties = property(_get_info)

    def invalidate(self, e=None):
        """Mark this connection as invalidated.

        The connection will be immediately closed. The containing
        ConnectionRecord will create a new connection when next used.
        """
        if self.connection is None:
            # __jmw__ originally sqlalchemy.InvalidRequestError
            raise DatabaseError("This connection is closed")
        if self._connection_record is not None:
            self._connection_record.invalidate(e=e)
        self.connection = None
        self._close()

    def cursor(self, *args, **kwargs):
        # Wrap the DB-API cursor; any failure invalidates this connection.
        try:
            c = self.connection.cursor(*args, **kwargs)
            return _CursorFairy(self, c)
        except Exception, e:
            self.invalidate(e=e)
            raise

    def __getattr__(self, key):
        # Delegate unknown attributes to the raw DB-API connection.
        return getattr(self.connection, key)

    def checkout(self):
        # Increment the checkout counter; on the *first* checkout give
        # 'checkout' listeners a chance to veto/reconnect (two attempts).
        if self.connection is None:
            # __jmw__ originally sqlalchemy.InvalidRequestError
            raise DatabaseError("This connection is closed")
        self.__counter +=1
        if not self._pool._on_checkout or self.__counter != 1:
            return self
        # Pool listeners can trigger a reconnection on checkout
        attempts = 2
        while attempts > 0:
            try:
                for l in self._pool._on_checkout:
                    l.checkout(self.connection, self._connection_record, self)
                return self
            # __jmw__ ??? sqlalchemy.exceptions.DisconnectionError
            # this is apparently never raised in sqlalchemy. Leave as is.
            except DatabaseError, e:
                if self._pool._should_log_info:
                    self._pool.log(
                        "Disconnection detected on checkout: %s" % e)
                self._connection_record.invalidate(e)
                self.connection = self._connection_record.get_connection()
                attempts -= 1
        if self._pool._should_log_info:
            self._pool.log("Reconnection attempts exhausted on checkout")
        self.invalidate()
        # __jmw__ originally sqlalchemy.InvalidRequestError
        raise DatabaseError("This connection is closed")

    def detach(self):
        """Separate this connection from its Pool.

        This means that the connection will no longer be returned to the
        pool when closed, and will instead be literally closed. The
        containing ConnectionRecord is separated from the DB-API connection,
        and will create a new connection when next used.

        Note that any overall connection limiting constraints imposed by a
        Pool implementation may be violated after a detach, as the detached
        connection is removed from the pool's knowledge and control.
        """
        if self._connection_record is not None:
            self._connection_record.connection = None
            self._connection_record.backref = None
            self._pool.do_return_conn(self._connection_record)
            self._detached_info = \
                self._connection_record.info.copy()
            self._connection_record = None

    def close(self):
        # Decrement the checkout counter; only the last close() actually
        # returns the connection to the pool.
        self.__counter -=1
        if self.__counter == 0:
            self._close()

    def _close(self):
        _finalize_fairy(self.connection, self._connection_record, self._pool)
        self.connection = None
        self._connection_record = None
class _CursorFairy(object):
def __init__(self, parent, cursor):
self.__parent = parent
self.cursor = cursor
def invalidate(self, e=None):
self.__parent.invalidate(e=e)
def close(self):
try:
self.cursor.close()
except Exception, e:
self.__parent._logger.warn("Error closing cursor: " + e)
if isinstance(e, (SystemExit, KeyboardInterrupt)):
raise
def __getattr__(self, key):
return getattr(self.cursor, key)
class SingletonThreadPool(Pool):
    """A Pool that maintains one connection per thread.

    Maintains one connection per each thread, never moving a connection to a
    thread other than the one which it was created in.

    This is used for SQLite, which both does not handle multithreading by
    default, and also requires a singleton connection if a :memory: database
    is being used.

    Options are the same as those of Pool, as well as:

    pool_size: 5
      The number of threads in which to maintain connections at once.
    """

    def __init__(self, creator, pool_size=5, **params):
        # Thread-local checkout is forced on: each thread must always see
        # its own connection.
        params['use_threadlocal'] = True
        Pool.__init__(self, creator, **params)
        self._conns = {}
        self.size = pool_size

    def recreate(self):
        """Return a new, empty pool with this pool's configuration."""
        self.log("Pool recreating")
        return SingletonThreadPool(self._creator, pool_size=self.size, recycle=self._recycle, echo=self._should_log_info, use_threadlocal=self._use_threadlocal, listeners=self.listeners)

    def dispose(self):
        """Dispose of this pool.

        this method leaves the possibility of checked-out connections
        remaining opened, so it is advised to not reuse the pool once
        dispose() is called, and to instead use a new pool constructed
        by the recreate() method.
        """
        for key, conn in self._conns.items():
            try:
                conn.close()
            except (SystemExit, KeyboardInterrupt):
                raise
            except:
                # sqlite won't even let you close a conn from a thread
                # that didn't create it
                pass
            del self._conns[key]

    def dispose_local(self):
        # Drop (without closing) the calling thread's cached connection.
        try:
            #__jmw__ _thread instead of thread
            del self._conns[_thread.get_ident()]
        except KeyError:
            pass

    def cleanup(self):
        # Evict cached connections until no more than `size` remain.
        # NOTE(review): eviction order is arbitrary and the evicted
        # records are not close()d here -- presumably their __del__
        # handles that; confirm before relying on it.
        for key in self._conns.keys():
            try:
                del self._conns[key]
            except KeyError:
                pass
            if len(self._conns) <= self.size:
                return

    def status(self):
        #__jmw__ _thread instead of thread
        return "SingletonThreadPool id:%d thread:%d size: %d" % (id(self), _thread.get_ident(), len(self._conns))

    def do_return_conn(self, conn):
        # Connections stay bound to their creating thread; nothing to do.
        pass

    def do_get(self):
        # Return this thread's connection record, creating and caching one
        # on first use; trim the cache when it outgrows `size`.
        try:
            #__jmw__ _thread instead of thread
            return self._conns[_thread.get_ident()]
        except KeyError:
            c = self.create_connection()
            #__jmw__ _thread instead of thread
            self._conns[_thread.get_ident()] = c
            if len(self._conns) > self.size:
                self.cleanup()
            return c
class QueuePool(Pool):
    """A Pool that imposes a limit on the number of open connections.

    Arguments include all those used by the base Pool class, as well
    as:

    pool_size
      The size of the pool to be maintained. This is the largest
      number of connections that will be kept persistently in the
      pool. Note that the pool begins with no connections; once this
      number of connections is requested, that number of connections
      will remain. Defaults to 5.

    max_overflow
      The maximum overflow size of the pool. When the number of
      checked-out connections reaches the size set in pool_size,
      additional connections will be returned up to this limit. When
      those additional connections are returned to the pool, they are
      disconnected and discarded. It follows then that the total
      number of simultaneous connections the pool will allow is
      pool_size + `max_overflow`, and the total number of "sleeping"
      connections the pool will allow is pool_size. `max_overflow` can
      be set to -1 to indicate no overflow limit; no limit will be
      placed on the total number of concurrent connections. Defaults
      to 10.

    timeout
      The number of seconds to wait before giving up on returning a
      connection. Defaults to 30.
    """

    def __init__(self, creator, pool_size = 5, max_overflow = 10, timeout=30, **params):
        Pool.__init__(self, creator, **params)
        self._pool = Queue.Queue(pool_size)
        # _overflow counts connections beyond the queue; it starts at
        # -pool_size so that checkedout() = maxsize - qsize + overflow.
        self._overflow = 0 - pool_size
        self._max_overflow = max_overflow
        self._timeout = timeout
        #__jmw__ _threading instead of threading
        # The lock guards _overflow only when an overflow limit is set.
        self._overflow_lock = self._max_overflow > -1 and _threading.Lock() or None

    def recreate(self):
        """Return a new QueuePool configured identically to this one."""
        self.log("Pool recreating")
        return QueuePool(self._creator, pool_size=self._pool.maxsize, max_overflow=self._max_overflow, timeout=self._timeout, recycle=self._recycle, echo=self._should_log_info, use_threadlocal=self._use_threadlocal, listeners=self.listeners)

    def do_return_conn(self, conn):
        # Put the record back in the queue; if the queue is full this was
        # an overflow connection, so just drop it and decrement the count.
        try:
            self._pool.put(conn, False)
        except Queue.Full:
            if self._overflow_lock is None:
                self._overflow -= 1
            else:
                self._overflow_lock.acquire()
                try:
                    self._overflow -= 1
                finally:
                    self._overflow_lock.release()

    def do_get(self):
        # Take a pooled record, blocking (up to _timeout) only when the
        # overflow allowance is already exhausted.
        try:
            wait = self._max_overflow > -1 and self._overflow >= self._max_overflow
            return self._pool.get(wait, self._timeout)
        except Queue.Empty:
            if self._max_overflow > -1 and self._overflow >= self._max_overflow:
                if not wait:
                    # We raced past the limit without waiting; retry and wait.
                    return self.do_get()
                else:
                    raise exceptions.TimeoutError("QueuePool limit of size %d overflow %d reached, connection timed out, timeout %d" % (self.size(), self.overflow(), self._timeout))
            # Create an overflow connection, re-checking the limit under
            # the lock since another thread may have taken the last slot.
            if self._overflow_lock is not None:
                self._overflow_lock.acquire()
            if self._max_overflow > -1 and self._overflow >= self._max_overflow:
                if self._overflow_lock is not None:
                    self._overflow_lock.release()
                return self.do_get()
            try:
                con = self.create_connection()
                self._overflow += 1
            finally:
                if self._overflow_lock is not None:
                    self._overflow_lock.release()
            return con

    def dispose(self):
        """Close every idle pooled connection and reset the overflow count."""
        while True:
            try:
                conn = self._pool.get(False)
                conn.close()
            except Queue.Empty:
                break
        self._overflow = 0 - self.size()
        if self._should_log_info:
            self.log("Pool disposed. " + self.status())

    def status(self):
        tup = (self.size(), self.checkedin(), self.overflow(), self.checkedout())
        return "Pool size: %d Connections in pool: %d Current Overflow: %d Current Checked out connections: %d" % tup

    def size(self):
        return self._pool.maxsize

    def checkedin(self):
        # Number of idle records currently sitting in the queue.
        return self._pool.qsize()

    def overflow(self):
        return self._overflow

    def checkedout(self):
        # maxsize - qsize gives checked-out pooled conns; add overflow.
        return self._pool.maxsize - self._pool.qsize() + self._overflow
class NullPool(Pool):
    """A no-op pool: nothing is ever actually pooled.

    Each checkout opens a brand-new DB-API connection, and each checkin
    literally closes it.
    """

    def do_get(self):
        # Every request gets a freshly created connection record.
        return self.create_connection()

    def do_return_conn(self, conn):
        # Returned connections are closed immediately, never retained.
        conn.close()

    def do_return_invalid(self, conn):
        # Invalidated connections need no bookkeeping here.
        pass

    def status(self):
        return "NullPool"
class StaticPool(Pool):
    """A Pool of exactly one connection, used for all requests."""

    def __init__(self, creator, **params):
        Pool.__init__(self, creator, **params)
        # The single DB-API connection handed out by create_connection().
        self._conn = creator()
        # NOTE(review): _ConnectionRecord obtains its connection from the
        # pool's _creator() directly rather than via create_connection(),
        # so this record may wrap a *second*, distinct connection --
        # confirm against the base Pool implementation before relying on
        # "exactly one connection".
        self.connection = _ConnectionRecord(self)

    def status(self):
        return "StaticPool"

    def create_connection(self):
        # Always return the one shared connection.
        return self._conn

    def do_return_conn(self, conn):
        # The single record is never really relinquished; nothing to do.
        pass

    def do_return_invalid(self, conn):
        pass

    def do_get(self):
        return self.connection
class AssertionPool(Pool):
    """A Pool that allows at most one checked out connection at any given time.

    This will raise an exception if more than one connection is checked out
    at a time.  Useful for debugging code that is using more connections
    than desired.
    """
    ## TODO: modify this to handle an arbitrary connection count.

    def __init__(self, creator, **params):
        Pool.__init__(self, creator, **params)
        # The one-and-only record; set to None while checked out.
        self.connection = _ConnectionRecord(self)
        self._conn = self.connection

    def status(self):
        return "AssertionPool"

    def create_connection(self):
        # Bug fix: the original used `raise "Invalid"`; raising a string
        # has been illegal since Python 2.6 (TypeError at raise time).
        raise AssertionError("Invalid")

    def do_return_conn(self, conn):
        # Only the original record may be returned, and only while it is
        # actually checked out.
        assert conn is self._conn and self.connection is None
        self.connection = conn

    def do_return_invalid(self, conn):
        # Bug fix: same string-exception problem as create_connection().
        raise AssertionError("Invalid")

    def do_get(self):
        # Fails loudly if a second concurrent checkout is attempted.
        assert self.connection is not None
        c = self.connection
        self.connection = None
        return c
class _DBProxy(object):
    """Layers connection pooling behavior on top of a standard DB-API module.

    Proxies a DB-API 2.0 connect() call to a connection pool keyed to the
    specific connect parameters. Other functions and attributes are delegated
    to the underlying DB-API module.
    """

    def __init__(self, module, poolclass=QueuePool, **params):
        """Initializes a new proxy.

        module
          a DB-API 2.0 module

        poolclass
          a Pool class, defaulting to QueuePool

        Other parameters are sent to the Pool object's constructor.
        """
        self.module = module
        self.params = params
        self.poolclass = poolclass
        # one pool per distinct (args, kwargs) connect signature
        self.pools = {}

    def close(self):
        """Discard every pool held by this proxy.

        Bug fix: the original deleted keys while iterating over
        self.pools.keys(), which raises RuntimeError under Python 3;
        clear() is the equivalent, safe operation.
        """
        self.pools.clear()

    def __del__(self):
        self.close()

    def __getattr__(self, key):
        # Delegate everything else to the wrapped DB-API module.
        return getattr(self.module, key)

    def get_pool(self, *args, **params):
        """Return (creating on first use) the pool for these connect args."""
        key = self._serialize(*args, **params)
        try:
            return self.pools[key]
        except KeyError:
            pool = self.poolclass(lambda: self.module.connect(*args, **params), **self.params)
            self.pools[key] = pool
            return pool

    def connect(self, *args, **params):
        """Activate a connection to the database.

        Connect to the database using this DBProxy's module and the given
        connect arguments. If the arguments match an existing pool, the
        connection will be returned from the pool's current thread-local
        connection instance, or if there is no thread-local connection
        instance it will be checked out from the set of pooled connections.
        If the pool has no available connections and allows new connections
        to be created, a new database connection will be made.
        """
        return self.get_pool(*args, **params).connect()

    def dispose(self, *args, **params):
        """Dispose the connection pool referenced by the given connect arguments."""
        key = self._serialize(*args, **params)
        try:
            del self.pools[key]
        except KeyError:
            pass

    def _serialize(self, *args, **params):
        # Pools are keyed on the pickled connect arguments.
        return pickle.dumps([args, params])
README_pylons.txt
These are simple (preliminary) instructions for using the Sedna Zope 3 database adapter (da) in
Pylons. Read through once before proceeding. Suggestions for being more
"pylons-like" will be graciously accepted.
You need zope.rdb
# easy_install zope.rdb
This has a lot of recursive dependencies and will download a large chunk of the
zope world, but the important parts are ZODB (only the transaction part),
zope.rdb itself, zope.component, and zope.interface. Use a virtualenv, if
you are not already.
You need zif.sedna
# easy_install zif.sedna
For transaction support, you want repoze.tm
Yes, you want transaction support. If you do not do this, you will need to be
very diligent with begin() and commit()/rollback() on connections. For thread
safety, the adapter blocks beginning transactions in a thread until the current
transaction is complete.
# easy_install -i http://dist.repoze.org/simple repoze.tm
NOTE: When I did this, setuptools complained about differing versions of ZODB.
I think this is harmless, but maybe installing repoze.tm before zope.rdb may
silence the complaint.
Do the "Configuring the Application" part of
http://blog.repoze.org/repoze.tm_with_pylons-20071218.html
For example, an app named testapp would have something like the following
stanzas in the paste .ini file
[pipeline:main]
pipeline=
egg:Paste#cgitb
egg:Paste#httpexceptions
egg:repoze.tm#tm
test
[app:test]
use = egg:testapp
full_stack = false
cache_dir = %(here)s/data
beaker.session.key = testapp
beaker.session.secret = somesecret
Somewhere in a module that executes once at start-up (maybe environment.py),
you want the following:
# imports
import zope.component
from zope.rdb.interfaces import IZopeDatabaseAdapter
from zif.sedna.da import SednaAdapter
# get the zope.component site manager for component registration
sm = zope.component.getSiteManager()
Then, in the same file, register utilities for Sedna database access. Here, we
register a database adapter utility named "testsedna". You may register more
than one, maybe one with read-write and another with readonly, and maybe another
with a different Sedna database. The last parameter is the name you will
access the particular connection by.
# sedna database adapter registration
# always "dbi://", then username:password@host:port/dbname
sm.registerUtility(SednaAdapter("dbi://SYSTEM:MANAGER@localhost:5050/test"),
IZopeDatabaseAdapter,'testsedna')
In your application's controller code, use the following
# imports at module level
from zope.rdb.interfaces import IZopeDatabaseAdapter
from zope.component import getUtility
Put the following in a method that accesses the database. Note the call () at
the end. 'testsedna' is the utility name by which you registered the connection,
above.
sednaConn = getUtility(IZopeDatabaseAdapter,'testsedna')()
This is a connection, much like any other database adapter.
Obtain a cursor
c = sednaConn.cursor()
and do some queries. Here, we use elementtree syntax to put Chapter 1 of
Genesis into a page. body is the 'body' element of the page we are generating.
res = c.execute(u'doc("ot")/tstmt/bookcoll[1]/book[1]/chapter[1]/v/text()')
theList = c.fetchall()
ol = SubElement(body,'ol')
for k in theList:
p = SubElement(ol,'li')
p.text = k.strip()
fetchall() is one way of doing this; you may also iterate the result directly.
res = c.execute(u'doc("ot")/tstmt/bookcoll[1]/book[1]/chapter[1]/v/text()')
ol = SubElement(body,'ol')
for k in res:
p = SubElement(ol,'li')
p.text = k.strip()
A query result may be a boolean for updates, inserts, etc. Otherwise, it is
an iterable that produces python unicode strings. Here, the xquery obtained
the text content, but we could have written the query without "text()" and
returned the full "v" elements and parsed them with an XML parser.
Generally, failing queries will raise an exception. The database adapter
takes care of begin(). repoze.tm takes care of commit() and rollback().
Generally, commit() is called by default, and rollback() is called when there
is an exception.
| zif.sedna | /zif.sedna-0.10alpha1.tar.gz/zif.sedna-0.10alpha1/src/zif/sedna/README_pylons.txt | README_pylons.txt |
import time, random
try:
import threading as _threading
except ImportError:
import dummy_threading as _threading
try:
import thread as _thread
except ImportError:
import dummy_thread as _thread
from zope.interface import implements
from zope.rdb import parseDSN, ZopeConnection, ZopeCursor
from zope.rdb.interfaces import IManageableZopeDatabaseAdapter
import pool
import dbapi
# use a module-level connection pool so the connections may survive when
# the thread dies. Under Paste, threads die periodically.
#local = threading.local()
# Shared, module-wide pool of Sedna connections, keyed by DSN.
connectionPool = pool.manage(dbapi)
#connectionPool = pool.manage(dbapi,poolclass=pool.SingletonThreadPool)
# Guards connection checkout so two threads never receive the same one
# (see SednaAdapter.__call__).
lock = _threading.Lock()
DEFAULT_ENCODING = 'utf-8'
class SednaTypeInfo(object):
    """Fixed type information for the Sedna adapter.

    Sedna always speaks UTF-8 and performs no result conversion, so every
    value here is a constant.
    """

    paramstyle = 'pyformat'
    threadsafety = 1
    encoding = 'utf-8'

    def getEncoding(self):
        """Return the (fixed) connection encoding."""
        return self.encoding

    def setEncoding(self, encoding):
        """Always refuse: Sedna's encoding is not configurable."""
        raise RuntimeError('Cannot set Sedna encoding.')

    def getConverter(self, anything):
        """Return the no-op converter for any type."""
        return identity
def identity(x):
    """No-op converter: hand *x* back unchanged."""
    return x
class SednaCursor(ZopeCursor):
    """A zope.rdb cursor with result-type conversion disabled.

    Sedna results are already unicode strings, so conversion is a no-op.
    """

    def _convertTypes(self, results):
        # Pass results straight through, unconverted.
        return results

    def execute(self, operation, parameters=None):
        """Executes an operation, registering the underlying
        connection with the transaction system. """
        # Note: zope.rdb's _prepareOperation() step is intentionally
        # skipped here.
        #operation, parameters = self._prepareOperation(operation, parameters)
        self.connection.registerForTxn()
        if parameters is not None:
            return self.cursor.execute(operation, parameters)
        return self.cursor.execute(operation)
class SednaConnection(ZopeConnection):
    """A zope.rdb.ZopeConnection whose result conversion is disabled.

    Also forwards the debug/trace switches of the underlying Sedna
    protocol connection.
    """

    def getTypeInfo(self):
        """Return Sedna's fixed, conversion-free type info."""
        return SednaTypeInfo()

    def registerForTxn(self):
        """Join the current transaction, starting a Sedna txn first."""
        if not self._txn_registered:
            self.conn.begin()
        super(SednaConnection, self).registerForTxn()

    def cursor(self):
        """Return a conversion-free SednaCursor for this connection."""
        wrapped = SednaCursor(self.conn.cursor(), self)
        return wrapped

    def debugOn(self):
        self.conn.debugOn()

    def debugOff(self):
        self.conn.debugOff()

    def traceOn(self):
        self.conn.traceOn()

    def traceOff(self):
        self.conn.traceOff()
class SednaAdapter(object):
    """This is zope.rdb.ZopeDatabaseAdapter, but not Persistent

    Since Sedna Adapter does not want any results conversion,
    A SednaConnection is returned instead of a
    ZopeConnection.
    """
    implements(IManageableZopeDatabaseAdapter)

    # Pessimistic defaults
    paramstyle = 'pyformat'
    threadsafety = 0
    encoding = DEFAULT_ENCODING

    def __init__(self, dsn):
        self.setDSN(dsn)
        # Bug fix: initialize .connection so isConnected()/disconnect()
        # are safe to call before the first connect(); the original left
        # the attribute unset, causing AttributeError.
        self.connection = None
        self._unique_id = '%s.%s.%s' % (
            time.time(), random.random(), _thread.get_ident()
            )

    def _connection_factory(self):
        # Check a raw protocol connection out of the shared module pool.
        return connectionPool.connect(self.dsn)

    def setDSN(self, dsn):
        assert dsn.startswith('dbi://'), "The DSN has to start with 'dbi://'"
        self.dsn = dsn

    def getDSN(self):
        return self.dsn

    def connect(self):
        self.connection = SednaConnection(self._connection_factory(), self)

    def disconnect(self):
        # Bug fix: the original tested the *bound method* itself
        # ("if self.isConnected:"), which is always true; call it.
        if self.isConnected():
            self.connection.close()
            self.connection = None

    def isConnected(self):
        return self.connection

    def __call__(self):
        """
        we lock so other threads cannot get a connection while this
        thread is getting a connection. Two threads will not get the same
        connection, presumably.
        """
        lock.acquire()
        try:
            self.connect()
        finally:
            lock.release()
        return self.connection

    def setEncoding(self, encoding):
        # Validate the codec name; raises LookupError for unknown codecs.
        # (Replaces the Python-2-only probe `"".decode(encoding)` with
        # codecs.lookup(), which works on both Python 2 and 3.)
        import codecs
        codecs.lookup(encoding)
        self.encoding = encoding

    def getEncoding(self):
        return self.encoding

    def getConverter(self, type):
        'See IDBITypeInfo'
        return identity
Sedna is a read-write XML storage system. The interface is a network socket,
using message-passing for queries and updates. Queries follow the
W3C XQuery standard. Updates are an extension of XQuery.
Installing Sedna and XQuery syntax is beyond the scope of this document. Sedna
has Apache 2.0 license and may be obtained from
http://modis.ispras.ru/sedna/
The tests here assume a running Sedna database on localhost named 'test' with
the default login, 'SYSTEM' and passwd, 'MANAGER'
1. start sedna governor
$ se_gov
2. create database 'test' if necessary
$ se_cdb test
3. start database 'test'
$ se_sm test
Change these if necessary to match your system.
On \*nix you can also $ tailf [sedna-directory]/data/event.log to
monitor what the Sedna server is doing.
>>> login = 'SYSTEM'
>>> passwd = 'MANAGER'
>>> db = 'test'
>>> port = 5050
>>> host = 'localhost'
Ordinarily, this statement will be "from zif.sedna import protocol"
>>> import protocol
We open and close a connection:
>>> conn = protocol.SednaProtocol(host,db,login,passwd,port)
>>> conn.close()
If login fails, you get an OperationalError.
>>> bad_passwd = 'hello'
>>> conn = protocol.SednaProtocol(host,db,login,bad_passwd,port)
Traceback (most recent call last):
...
OperationalError: [226] SEDNA Message: ERROR SE3053
Authentication failed.
Let's start with an xquery that does not need to access any documents. The
result of a query is an iterator that may only be accessed once. result.value
empties that iterator into a single python unicode string. You may iterate the
result and hold it in a list, or use the items as they are generated. Items
in a result are python unicode strings, unless it makes sense for the result
to be a boolean, e.g., updates, inserts, deletes. zif.sedna's protocol will
send begin() to start a transaction automatically if necessary. You may
execute multiple queries in a transaction, but transactions must be committed
or rolled back before the connection is closed.
>>> conn = protocol.SednaProtocol(host,db,login,passwd,port)
>>> result = conn.execute(u'for $i in (1,2,3) return <z>{$i}</z>')
>>> print result.value
<z>1</z>
<z>2</z>
<z>3</z>
>>> result.value
u''
>>> result = conn.execute(u'for $i in (1,2,3) return <z>{$i}</z>')
>>> res = list(result)
>>> print res
[u'<z>1</z>', u'\n<z>2</z>', u'\n<z>3</z>']
>>> conn.commit()
True
Internally, Sedna stores documents and processes queries in utf-8. The
zif.sedna protocol expects queries to be python unicode strings, which are
converted to utf-8 for processing. Any query other than a python unicode string
will raise a ProgrammingError.
>>> result = conn.execute('for $i in (1,2,3) return <z>{$i}</z>')
Traceback (most recent call last):
...
ProgrammingError: Expected unicode, got <type 'str'>.
Let's bulk load a file so we have some data to work with. Since the "region"
folder is local to this module, a relative path will work to specify this file.
In practice, we will need to use an absolute path. If loading fails, it
raises a DatabaseError.
For the list of documents and collections in the current database, we use
connection.documents
>>> conn = protocol.SednaProtocol(host,db,login,passwd,port)
>>> db_docs = conn.documents
>>> if not 'testx_region' in db_docs:
... z = conn.execute(u'LOAD "example/region.xml" "testx_region"')
>>> conn.commit()
True
Equivalently, this could have been written:
conn.loadFile('example/region.xml','testx_region')
If we try to load this file again with the same name, we get an error.
>>> z = conn.loadFile('example/region.xml','testx_region')
Traceback (most recent call last):
...
DatabaseError: [163] SEDNA Message: ERROR SE2001
Document with the same name already exists.
Let's see what's in the document. Note that the resulting output is nicely
formatted. This is done with leading space and following newline ('\\n')
characters in each line of the result. Since this is XML, they are just there
for output formatting and are not really in the document.
>>> result = conn.execute(u'doc("testx_region")/*/*')
>>> print result.value
<africa>
<id_region>afr</id_region>
</africa>
<asia>
<id_region>asi</id_region>
</asia>
<australia>
<id_region>aus</id_region>
</australia>
<europe>
<id_region>eur</id_region>
</europe>
<namerica>
<id_region>nam</id_region>
</namerica>
<samerica>
<id_region>sam</id_region>
</samerica>
Extra spaces and newlines may be turned off inside a query with a declaration
provided by Sedna.
>>> ns = u'declare namespace se = "http://www.modis.ispras.ru/sedna";'
>>> newquery=ns+'declare option se:output "indent=no";'
>>> newquery = newquery + 'document("testx_region")/*/asia'
>>> result = conn.execute(newquery)
>>> print result.value
<asia><id_region>asi</id_region></asia>
XQuery lets you get just part of the document. Note that 'doc' and 'document'
are synonymous.
>>> data = conn.execute(u'doc("testx_region")//*[id_region="eur"]')
>>> print data.value
<europe>
<id_region>eur</id_region>
</europe>
Let's store a new document from just a string. 'BS' stands for 'bookstore'.
We shortened it for readability in this document.
>>> mytext = """<?xml version="1.0" encoding="ISO-8859-1"?>
... <BS>
...
... <book category="COOKING">
... <title lang="en">Everyday Italian</title>
... <author>Giada De Laurentiis</author>
... <year>2005</year>
... <price>30.00</price>
... </book>
...
... <book category="CHILDREN">
... <title lang="en">Harry Potter</title>
... <author>J K. Rowling</author>
... <year>2005</year>
... <price>29.99</price>
... </book>
...
... <book category="WEB">
... <title lang="en">XQuery Kick Start</title>
... <author>James McGovern</author>
... <author>Per Bothner</author>
... <author>Kurt Cagle</author>
... <author>James Linn</author>
... <author>Vaidyanathan Nagarajan</author>
... <year>2003</year>
... <price>49.99</price>
... </book>
...
... <book category="WEB">
... <title lang="en">Learning XML</title>
... <author>Erik T. Ray</author>
... <year>2003</year>
... <price>39.95</price>
... </book>
...
... </BS>"""
>>> string1 = mytext
>>> result = conn.loadText(string1, 'BS')
If we did not get any exceptions, the document is loaded. Let's do a query
for books with price > 30. We'll iterate the result and print the items. We
strip() the individual results to remove trailing newline characters.
>>> result = conn.execute(u'document("BS")//book[price>30]')
>>> for item in result:
... print item.strip()
<book category="WEB">
<title lang="en">XQuery Kick Start</title>
<author>James McGovern</author>
<author>Per Bothner</author>
<author>Kurt Cagle</author>
<author>James Linn</author>
<author>Vaidyanathan Nagarajan</author>
<year>2003</year>
<price>49.99</price>
</book>
<book category="WEB">
<title lang="en">Learning XML</title>
<author>Erik T. Ray</author>
<year>2003</year>
<price>39.95</price>
</book>
We can get a book by its index. XQuery indices are 1 based; 2 means second book.
>>> result = conn.execute(u'document("BS")/BS/book[2]')
>>> print result.value
<book category="CHILDREN">
<title lang="en">Harry Potter</title>
<author>J K. Rowling</author>
<year>2005</year>
<price>29.99</price>
</book>
We can get the last book.
>>> result = conn.execute(u'doc("BS")/BS/book[last()]')
>>> print result.value
<book category="WEB">
<title lang="en">Learning XML</title>
<author>Erik T. Ray</author>
<year>2003</year>
<price>39.95</price>
</book>
We can get the count of the books.
>>> query = u"""let $items := doc('BS')/BS/book
... return <count>{count($items)}</count>"""
>>> result = conn.execute(query)
>>> print result.value
<count>4</count>
Empty results return an empty string.
>>> result = conn.execute(u'document("BS")//book[price>300]')
>>> result.value
u''
Querying for an element that does not exist returns an empty result, not an
exception.
>>> result = conn.execute(u'document("BS")/BS/book[9]')
>>> result.value
u''
Hmmm. Can we retrieve an item from a list based on a previous selection?
Yes, we can. This is interesting, since this means we can get back to this
item if we want to update it.
Let's get the second book with a price greater than 30.
>>> prevQuery = u'document("BS")//book[price>30]'
>>> query = prevQuery + '[2]'
>>> result = conn.execute(query)
>>> print result.value
<book category="WEB">
<title lang="en">Learning XML</title>
<author>Erik T. Ray</author>
<year>2003</year>
<price>39.95</price>
</book>
Let's see how long that took.
>>> z = result.time
Sorry, can't show you the value here. You will have to try it yourself.
>>> z.endswith('secs')
True
>>> conn.commit()
True
Here's a query longer than 10240 bytes. It will go through anyway.
>>> result = conn.execute(
... u'document("BS")//book[price>300]'+' '*15000)
>>> result.value
u''
Let's try an update
>>> qry = u'document("BS")//book[title="Learning XML"]'
>>> data = conn.execute(qry)
>>> print data.value
<book category="WEB">
<title lang="en">Learning XML</title>
<author>Erik T. Ray</author>
<year>2003</year>
<price>39.95</price>
</book>
The above "book" element is the item we want to change. We use the same xpath
to identify the item and do an "UPDATE insert" to put a new "quality" element
into the item. We also look at mixed-mode element handling here.
>>> ins = '<quality>Great <i>happy </i>quality</quality>'
>>> qry2 = 'UPDATE insert %s into %s' % (ins,qry)
>>> update = conn.execute(qry2)
>>> print update
True
OK. Update succeeded. Let's see the new item.
>>> check = conn.execute(qry)
>>> print check.value
<book category="WEB">
<quality>Great <i>happy </i>quality</quality>
<title lang="en">Learning XML</title>
<author>Erik T. Ray</author>
<year>2003</year>
<price>39.95</price>
</book>
>>> conn.commit()
True
>>> conn.close()
What about rollbacks? Let's try one.
>>> conn = protocol.SednaProtocol(host,db,login,passwd,port)
>>> conn.begin()
>>> qry = u'document("BS")//book[title="Learning XML"]/quality'
>>> result = conn.execute(qry)
We have a <quality> element in the book. Let's delete it.
>>> print result.value
<quality>Great <i>happy </i>quality</quality>
>>> qry2 = u'UPDATE delete %s' % qry
>>> result = conn.execute(qry2)
>>> data = conn.execute(qry)
Now, it's gone
>>> data.value
u''
We rollback
>>> conn.rollback()
True
>>> conn.close()
We reopen the database just to be sure that we are not looking at a cache.
>>> conn = protocol.SednaProtocol(host,db,login,passwd,port)
>>> conn.begin()
>>> data = conn.execute(qry)
The <quality> element is back! Rollback successful!
>>> print data.value
<quality>Great <i>happy </i>quality</quality>
We've done update and delete. Now, let's do a "replace". We raise the price
10% on all of the books.
>>> qry0 = u'document("BS")//book/price'
>>> qry = "UPDATE replace $price in " + qry0 +\
... """ with
... <price>{round-half-to-even($price * 1.1,2)}</price>
... """
>>> data = conn.execute(qry)
>>> data
True
>>> data = conn.execute(qry0)
>>> print data.value
<price>33</price>
<price>32.99</price>
<price>54.99</price>
<price>43.95</price>
Sedna also provides statements for "UPDATE delete_undeep" and "UPDATE rename".
Consult the Sedna documentation for instructions on these and for additional
information about Sedna - indexing, ODBC inside XQuery, user management,
database exports, triggers, etc.
Before closing this connection, let's see what the other output format looks
like. The default format is 0, XML. 1 gives us SXML. Maybe useful if you have
an SXML parser. It is smaller...
>>> qry = u'document("BS")//book[title="Learning XML"]'
>>> data = conn.execute(qry,format=1)
>>> print data.value
(book(@ (category "WEB"))
(quality"Great "(i"happy ")"quality")
(title(@ (lang "en"))"Learning XML")
(author"Erik T. Ray")
(year"2003")
(price"43.95")
)
>>> conn.commit()
True
>>> conn.close()
>>> conn.closed
True
Starting a new connection here.
>>> conn = protocol.SednaProtocol(host,db,login,passwd,port)
Error handling. Let's try to catch a DatabaseError. Error definitions are
available in the connection object per PEP-249. This query should be an XQuery
syntax error, so will be caught right when the query is sent.
>>> try:
... result = conn.execute(u'hello world')
... except conn.DatabaseError,e:
... print e
[3] SEDNA Message: ERROR XPST0003
It is a static error if an expression is not a valid instance of the grammar defined in A.1 EBNF.
Details: syntax error at token: 'world', line: 1
Now for errors in 'valid' but troublesome queries, errors that happen while the
result is being generated.
Here's a query that fails at run-time.
>>> qry = u'''(: In this query dynamic error will be raised :)
... (: due to "aaaa" is not castable to xs:integer. :)
... declare function local:f()
... {
... "aaaa" cast as xs:integer
... };
... local:f()
... '''
>>> result = conn.execute(qry)
Traceback (most recent call last):
...
DatabaseError: [112] SEDNA Message: ERROR FORG0001
Invalid value for cast/constructor.
Details: Cannot convert to xs:integer type
We get an error, but this is not as helpful as it can be. We set debugOn to
get a bit more info.
We turn on debug messages.
>>> conn.debugOn()
Retry the same query. Now, when we get the traceback, there is a bit more info
that is maybe helpful.
>>> result = conn.execute(qry)
Traceback (most recent call last):
...
DatabaseError: PPCast : 1
PPFunCall : 1 : http://www.w3.org/2005/xquery-local-functions:f
[112] SEDNA Message: ERROR FORG0001
Invalid value for cast/constructor.
Details: Cannot convert to xs:integer type
>>> conn.debugOff()
For a full idea of the client-server communication, we can do tracing. First,
we need to configure logging. We'll log to stdout here.
>>> import logging
>>> import sys
>>> logging.basicConfig(stream=sys.stdout)
>>> log = logging.getLogger()
>>> log.setLevel(logging.INFO)
Tracing gives a representation of the internal client-server interaction.
Tracing happens at logging.INFO level. (C) messages are sent by the client,
and (S) messages are the server's response. We see the client sending
the query, and the server's response. Here, we are seeing what the price would
look like if we raise it another 10% on the "Learning XML" book.
>>> conn.traceOn()
>>> qry = u'''for $item in document("BS")//book
... let $price := round-half-to-even($item/price * 1.1,2)
... where $item/title = "Learning XML"
... return <price>{$price}</price>'''
>>> data = conn.execute(qry)
INFO:root:(C) SEDNA_BEGIN_TRANSACTION
INFO:root:(S) SEDNA_BEGIN_TRANSACTION_OK
INFO:root:(C) SEDNA_EXECUTE for $item in document("BS")//book
let $price := round-half-to-even($item/price * 1.1,2)
where $item/title = "Learning XML"
return <price>{$price}</price>
INFO:root:(S) SEDNA_QUERY_SUCCEEDED
INFO:root:(S) SEDNA_ITEM_PART <price>48.35</price>
INFO:root:(S) SEDNA_ITEM_END
>>> print data.value
INFO:root:(C) SEDNA_GET_NEXT_ITEM
INFO:root:(S) SEDNA_RESULT_END
<price>48.35</price>
We turn tracing off and commit the session.
>>> conn.traceOff()
>>> conn.commit()
True
>>> conn.close()
Reset the log level.
>>> log.setLevel(logging.ERROR)
Final cleanup. We'll remove the documents we created.
>>> conn = protocol.SednaProtocol(host,db,login,passwd,port)
>>> conn.begin()
>>> for doc in ['testx_region','BS']:
... rs = conn.execute(u'DROP DOCUMENT "%s"' % doc)
>>> conn.commit()
True
>>> conn.close()
| zif.sedna | /zif.sedna-0.10alpha1.tar.gz/zif.sedna-0.10alpha1/src/zif/sedna/README.txt | README.txt |
=====================
zif.sedna.sednaobject
=====================
sednaobject has a couple of classes, SednaXPath and SednaElement, that abstract
fetches and updates for a Sedna server.
SednaXpath is for readonly query results. It provides list-like behavior.
Access query result items by index, slice, or iteration.
SednaElement provides a read-write elementtree-like interface to a single
element and its children. For container-like elements, it provides mutable
access to children by index. For more object-like elements, the "replace"
method is likely to be useful.
sednaobject requires lxml. Items based on lxml Element are supported, so
functionality provided by lxml.etree and lxml.objectify may be used for item
creation and editing. Plain-text xml may also be used.
sednaobject is blithely ignorant of namespaces, which is presumably OK for most
applications. Namespace-awareness will be added as necessary.
We'll start with the usual test document in the test database:
>>> login = 'SYSTEM'
>>> passwd = 'MANAGER'
>>> db = 'test'
>>> port = 5050
>>> host = 'localhost'
>>> import protocol
>>> conn = protocol.SednaProtocol(host,db,login,passwd,port)
>>> db_docs = conn.documents
>>> if not 'testx_region' in db_docs:
... z = conn.execute(u'LOAD "example/region.xml" "testx_region"')
>>> conn.commit()
True
zif.sedna.sednaobject.SednaXPath
--------------------------------
SednaXPath is a class intended to abstract XQuery results to provide pythonic
sequence methods. XPath results are readonly, so this class mainly provides
length, indexed access, and slicing.
Initialize a SednaXPath with a cursor, an XQuery expression, and an optional
parser:
>>> from sednaobject import SednaXPath
>>> curs = conn.cursor()
>>> expr = u"doc('testx_region')/regions/*"
>>> z = SednaXPath(curs,expr)
Get the length of the result:
>>> len(z)
6
>>> z.count()
6
Print the result in one shot. To capture the result in a variable, use str().
>>> print z
<africa>
<id_region>afr</id_region>
</africa>
<asia>
<id_region>asi</id_region>
</asia>
<australia>
<id_region>aus</id_region>
</australia>
<europe>
<id_region>eur</id_region>
</europe>
<namerica>
<id_region>nam</id_region>
</namerica>
<samerica>
<id_region>sam</id_region>
</samerica>
Access by index:
>>> z[0]
u'<africa><id_region>afr</id_region></africa>'
>>> z[-1]
u'<samerica><id_region>sam</id_region></samerica>'
>>> z[9]
Traceback (most recent call last):
...
IndexError: list index out of range
Do the "in" thing:
>>> z[0] in z
True
>>> from lxml.etree import tostring, fromstring
>>> d = fromstring(z[2])
>>> d in z
True
>>> tostring(d) in z
True
>>> "<arbitrary>tag</arbitrary>" in z
False
If we use lxml and parse to an Element, "in" and "index" still work.
>>> from lxml import objectify
>>> item = objectify.fromstring(z[2])
>>> item in z
True
>>> z.index(item)
2
>>> from lxml.etree import fromstring
>>> item = fromstring(z[3])
>>> item in z
True
>>> z.index(item)
3
Slice:
>>> for item in z[2:4]:
... print item
<australia><id_region>aus</id_region></australia>
<europe><id_region>eur</id_region></europe>
>>> for item in z[4:]:
... print item
<namerica><id_region>nam</id_region></namerica>
<samerica><id_region>sam</id_region></samerica>
>>> for item in z[:2]:
... print item
<africa><id_region>afr</id_region></africa>
<asia><id_region>asi</id_region></asia>
>>> z[-2:] == z[4:]
True
Do list comprehension. Note that this retrieves the entire set from the
server while iterating. Provide an XQuery with a "where" clause if you want
the server to do the "if" for you.
>>> y = [item for item in z if 'samerica' in item]
>>> print y[0].lstrip()
<samerica>
<id_region>sam</id_region>
</samerica>
Get the index of an item:
>>> u = z[3]
>>> z.index(u)
3
Enumerate:
>>> s = [(idx,value) for idx,value in enumerate(z)]
>>> s[-1]
(5, u'\n<samerica>\n <id_region>sam</id_region>\n</samerica>')
This abstraction uses 0-based indexes. XQuery uses 1-based indexes. You can
get the server's index by using xindex. This is handy if you need to construct
an expression for the server from the current path and the index:
>>> u = z[3]
>>> z.xindex(u)
4
xenumerate similarly provides server indices. Note that the semantics are a
bit different, since this is a method, not a built-in function.
>>> s = [(idx,value) for idx,value in z.xenumerate()]
>>> s[-1]
(6, u'\n<samerica>\n <id_region>sam</id_region>\n</samerica>')
You are not restricted to pure XPath expressions; most XQuery expressions that
return iterables will work fine:
>>> q = u" for $i in doc('testx_region')/regions/* "
>>> q += u" return $i/id_region/text() "
>>> z = SednaXPath(curs,q)
>>> len(z)
6
>>> z[0]
u'afr'
>>> z[1] in z
True
>>> z[-2:]
[u'nam', u'sam']
If you init the SednaXQuery object with a parser, results will be returned
parsed with that parser if possible. Good choices are lxml.etree.fromstring
and lxml.objectify.fromstring. If parsing fails, the text representation will
be returned.
>>> z = SednaXPath(curs,q,parser=fromstring)
>>> type(z[0])
<type 'unicode'>
>>> z[-2:]
[u'nam', u'sam']
>>> z[0]
u'afr'
>>> expr = u"doc('testx_region')/regions"
>>> z = SednaXPath(curs,expr,parser=fromstring)
>>> z[0].tag
'regions'
>>> expr = u"doc('testx_region')/regions/*"
>>> z = SednaXPath(curs,expr,parser=fromstring)
>>> type(z[0])
<type 'lxml.etree._Element'>
>>> [item.tag for item in z]
['africa', 'asia', 'australia', 'europe', 'namerica', 'samerica']
>>> [item.tag for item in z[2:4]]
['australia', 'europe']
zif.sedna.sednaobject.SednaElement
----------------------------------
SednaElement is a class intended to abstract an Element on the server to
provide elementtree-like methods, particularly element information and
modification for persistence. This is a read-write interface and very handy for
container elements. We do commits periodically here. We want to show error
messages, and Sedna seems to be rolling back state on certain errors.
Initialize a SednaElement with a cursor and an XPath expression:
>>> from sednaobject import SednaElement
>>> curs = conn.cursor()
>>> path = u"doc('testx_region')/regions"
>>> z = SednaElement(curs,path)
It is an error if the expression returns more than one element.
>>> path = u"doc('testx_region')/regions/*"
>>> t = SednaElement(curs,path)
Traceback (most recent call last):
...
ValueError: Cannot init SednaElement with multiple elements.
Len provides the number of child elements.
>>> len(z)
6
Obtain the element in one shot:
>>> k = str(z)
>>> print k
<regions>
<africa>
<id_region>afr</id_region>
</africa>
<asia>
<id_region>asi</id_region>
</asia>
<australia>
<id_region>aus</id_region>
</australia>
<europe>
<id_region>eur</id_region>
</europe>
<namerica>
<id_region>nam</id_region>
</namerica>
<samerica>
<id_region>sam</id_region>
</samerica>
</regions>
Item access works as with SednaXPath, except you get the items within the
Element instead of the items of the list returned by the query:
>>> z[0]
u'<africa><id_region>afr</id_region></africa>'
>>> z[-1] in z
True
>>> z[0] in z
True
>>> z[3:4]
[u'<europe><id_region>eur</id_region></europe>']
>>> z.xindex(z[-2])
5
Some elementtree functions work. Setting an attribute reads and rewrites the
entire item, so do this sparingly:
>>> z.tag
'regions'
>>> z.attrib
{}
>>> z.set('attr', 'something')
>>> z.attrib
{'attr': 'something'}
>>> z.get('attr')
u'something'
>>> z.cursor.connection.commit()
True
Sometimes, you have a somewhat atomic element, and just want to replace the
entire item with an update.
>>> idx = z.xindex(z[0])
>>> p = z.path + '/' + '*[%s]' % idx
>>> t = SednaElement(z.cursor,p)
>>> print t
<africa>
<id_region>afr</id_region>
</africa>
>>> t.replace('bob')
Traceback (most recent call last):
...
ValueError: Item is not well-formed.
>>> item = fromstring(str(t))
>>> from lxml.etree import SubElement
>>> new_element = SubElement(item,'v',{'attr' : 'val'})
>>> new_element.text = 'txt'
>>> t.replace(item)
>>> print t
<africa>
<id_region>afr</id_region>
<v attr="val">txt</v>
</africa>
>>> print z[0]
<africa><id_region>afr</id_region><v attr="val">txt</v></africa>
The list of subelements is mutable. Assign a new item at an index. Subelements
must be well-formed.
>>> t = fromstring(z[1])
>>> t.xpath('id_region')[0].text = 'asia'
>>> z[1] = t
>>> print z[1]
<asia><id_region>asia</id_region></asia>
>>> for idx, item in list(enumerate(z)):
... t = fromstring(item)
... t.xpath('id_region')[0].tag = 'region_id'
... z[idx] = t
>>> print z[2]
<australia><region_id>aus</region_id></australia>
>>> z.cursor.connection.commit()
True
>>> z[0] = 'fred'
Traceback (most recent call last):
...
ValueError: Item is not well-formed.
Append, insert, and remove work. Note that "remove" removes only the first
child whose normalized text representation matches the normalized text
representation of the item provided.
>>> t = '<antarctica><region_id>ant</region_id></antarctica>'
>>> len(z)
6
>>> z.append(t)
>>> len(z)
7
>>> z[-1]
u'<antarctica><region_id>ant</region_id></antarctica>'
>>> z.cursor.connection.commit()
True
>>> z.append('hello')
Traceback (most recent call last):
...
ValueError: Item is not well-formed.
>>> z.remove('hello')
Traceback (most recent call last):
...
ValueError: Item is not well-formed.
>>> z.remove(t)
>>> len(z)
6
>>> z[-1]
u'<samerica><region_id>sam</region_id></samerica>'
>>> s = z[3]
>>> print s
<europe><region_id>eur</region_id></europe>
>>> z.remove(s)
>>> len(z)
5
>>> z.insert(0,s)
>>> len(z)
6
>>> z[0]
u'<europe><region_id>eur</region_id></europe>'
>>> z[1]
u'<africa><region_id>afr</region_id><v attr="val">txt</v></africa>'
>>> j = z[:]
>>> len(j)
6
>>> isinstance(j,list)
True
These functions work for lxml.etree Elements.
>>> s = fromstring(s)
>>> z.remove(s)
>>> z[0]
u'<africa><region_id>afr</region_id><v attr="val">txt</v></africa>'
>>> z.insert(-1,s)
>>> len(z)
6
>>> z[-2]
u'<europe><region_id>eur</region_id></europe>'
del works.
>>> t = z[-1]
>>> t
u'<samerica><region_id>sam</region_id></samerica>'
>>> z.index(t)
5
>>> del z[0]
>>> z[0]
u'<asia><region_id>asia</region_id></asia>'
>>> z.index(t)
4
>>> del z[-1]
>>> z[-1]
u'<europe><region_id>eur</region_id></europe>'
>>> len(z)
4
Slice modification is unsupported.
>>> del z[:]
Traceback (most recent call last):
...
TypeError: unsupported operand type(s) for +: 'slice' and 'int'
Extend works.
>>> len(z)
4
>>> t = [z[0],z[1],z[2]]
>>> z.extend(t)
>>> len(z)
7
Note that "index" refers to the first appearance of an item by value, so the
following is correct.
>>> z.index(z[-1])
2
You may obtain the path SednaElement was initialized with.
>>> z.path
u"doc('testx_region')/regions"
It is sometimes handy to obtain the parent of an element. When getparent()
returns None, you are at root. .parent is a synonym. The parent returned
is a SednaElement.
>>> p = z.path + '/' + '*[1]'
>>> t = SednaElement(z.cursor,p)
>>> t.tag
'asia'
>>> s = t.parent
>>> s.tag
'regions'
>>> s = t.getparent()
>>> s.tag
'regions'
>>> isinstance(s,SednaElement)
True
>>> f = s.getparent()
>>> f is None
True
If you init a SednaElement with a parser, returned items will be parsed with
that parser. "fromstring" here is lxml.etree.fromstring
>>> path = u"doc('testx_region')/regions"
>>> z = SednaElement(curs,path,parser=fromstring)
>>> [item.tag for item in z]
['asia', 'australia', 'namerica', 'europe', 'asia', 'australia', 'namerica']
>>> z[0].tag
'asia'
Here, we use lxml.objectify.fromstring. Just trying a bunch of things...
>>> z = SednaElement(curs,path,parser=objectify.fromstring)
>>> [item.tag for item in z]
['asia', 'australia', 'namerica', 'europe', 'asia', 'australia', 'namerica']
>>> z[1].tag
'australia'
>>> z[1].region_id
'aus'
>>> t = z[1]
>>> t.region_id = 'aut'
>>> t.city = "Canberra"
>>> t['animal'] = 'kangaroo'
>>> t.fun_words = ["g'day","barbie", "sheila"]
>>> t['arb_list'] = [u'\u20ac (euro symbol)',3,False,4.0,-25, None]
>>> t.arb_list = t.arb_list[:] + [True, 'true']
>>> if not len(t.xpath('contact')):
... dummy = SubElement(t,'contact')
>>> if not len(t.xpath("contact/name")):
... dummy = SubElement(t.contact,'name')
>>> t.contact.name.last = 'Hogan'
>>> t.contact.name.first = 'Paul'
>>> z[1] = t
>>> m = z[1]
>>> m.region_id
'aut'
>>> m.city
'Canberra'
>>> m.fun_words
"g'day"
>>> list(m['fun_words'])
["g'day", 'barbie', 'sheila']
>>> m.fun_words[:]
["g'day", 'barbie', 'sheila']
>>> list(m.fun_words)
["g'day", 'barbie', 'sheila']
>>> m['city']
'Canberra'
>>> m.animal
'kangaroo'
>>> m.arb_list[:]
[u'\u20ac (euro symbol)', 3, False, 4.0, -25, None, True, 'true']
>>> '%s, %s' % (m.contact.name.last, m.contact.name.first)
'Hogan, Paul'
>>> t = objectify.Element('australia')
>>> t.region_id = 'aus'
>>> t.city = "Canberra"
>>> t.fun_words = ["g'day","barbie", "sheila"]
>>> if not len(t.xpath('contact')):
... dummy = SubElement(t,'contact')
>>> if not len(t.xpath("contact/name")):
... dummy = SubElement(t.contact,'name')
>>> t.contact.name.last = 'Hogan'
>>> t.contact.name.first = 'Paul'
>>> z[1] = t
>>> print tostring(z[1], pretty_print=True).strip()
<australia xmlns:py="http://codespeak.net/lxml/objectify/pytype" xmlns:xsd="http://www.w3.org/2001/XMLSchema" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" py:pytype="TREE">
<region_id py:pytype="str">aus</region_id>
<city py:pytype="str">Canberra</city>
<fun_words py:pytype="str">g'day</fun_words>
<fun_words py:pytype="str">barbie</fun_words>
<fun_words py:pytype="str">sheila</fun_words>
<contact>
<name>
<last py:pytype="str">Hogan</last>
<first py:pytype="str">Paul</first>
</name>
</contact>
</australia>
Cleanup. We delete the previously-created documents and close the connection.
>>> for doc in ['testx_region']:
... rs = conn.execute(u'DROP DOCUMENT "%s"' % doc)
>>> conn.commit()
True
>>> conn.close()
| zif.sedna | /zif.sedna-0.10alpha1.tar.gz/zif.sedna-0.10alpha1/src/zif/sedna/README_sednaobject.txt | README_sednaobject.txt |
from lxml import objectify
from lxml.etree import _Element, tounicode, tostring,fromstring, XMLSyntaxError
from lxml.doctestcompare import norm_whitespace
from dbapiexceptions import DatabaseError
def checkobj(obj, wf=True):
    """
    Convert obj to a string suitable for interpolation into a query.

    Accepts an lxml Element, a SednaElement, or a plain (unicode) string.
    When wf is True, plain strings are additionally checked for XML
    well-formedness.

    Raises ValueError if wf is True and the string is not well-formed.
    """
    if isinstance(obj, _Element):
        item = tounicode(obj)
    elif isinstance(obj, SednaElement):
        item = str(obj)
    else:
        if wf:
            # parse only to validate; the parsed tree is not needed
            try:
                fromstring(obj)
            except XMLSyntaxError:
                raise ValueError("Item is not well-formed.")
        item = obj
    return item
class SednaXPath(object):
    """Sequence-like wrapper for a read-only XPath/XQuery result.

    Supports len(), indexed access, slicing, iteration, "in", and index
    lookup.  Items are fetched from the server on demand.

    Initialize with a cursor, an XQuery/XPath expression, and an optional
    parser (e.g., lxml.etree.fromstring or lxml.objectify.fromstring).
    When a parser is given, returned items are parsed when possible;
    otherwise the text representation is returned.
    """
    def __init__(self, cursor, path, parser=None):
        self.cursor = cursor
        # strip trailing slashes so generated queries like "path/*"
        # remain well-formed
        while path.endswith('/'):
            path = path[:-1]
        self.path = path
        self._count = None   # cached result of count()
        self._attrib = None  # attribute cache, used by the subclass
        self.parser = parser

    def count(self):
        """
        Return (and cache) a count of the items returned by the query.
        """
        if self._count is not None:
            return self._count
        q = u'let $i := %s' % (self.path)
        q += u' return <i><c>{count($i)}</c></i>'
        s = self.cursor.execute(q)
        f = objectify.fromstring(s.value)
        self._count = int(f.c)
        return self._count

    def xpath(self, path):
        """
        Send another query to the server for the current document.

        An absolute path (leading '/') is applied to the document root;
        a relative path is appended to the current path.
        """
        if path.startswith('/'):
            base = self.path.split('/')[0]
            return SednaXPath(self.cursor, base + path)
        else:
            return SednaXPath(self.cursor, self.path + '/' + path)

    def _localKey(self, key):
        """
        Convert a 0-based index to the 1-based index XQuery expects,
        accounting for negative indices.

        Raises IndexError if the index is out of range.
        """
        local = key + 1
        count = self.count()
        if local < 1:
            local = count + local
        if local > count or local < 1:
            raise IndexError('list index out of range')
        return local

    def __getitem__(self, index):
        """
        Retrieve the item at index; slice objects are also accepted.
        """
        if isinstance(index, slice):
            # mirror SednaElement.__getitem__ so extended-slice syntax
            # does not fall through to the integer arithmetic below
            start, stop, step = index.indices(self.count())
            if step != 1:
                raise NotImplementedError('cannot do steps')
            return self.__getslice__(start, stop)
        if index < 0:
            index += self.count()
        # delegate to the slice path, which applies the parser if set
        return self[index:index + 1][0]

    def index0(self, obj):
        """
        Get the (0-based) index of the item in the list.

        This uses a brute-force technique - iterating the whole result
        and comparing whitespace-normalized text - and may not be
        suitable for large items in long lists.
        """
        item = checkobj(obj, wf=False)
        normed = norm_whitespace(item)
        q = u'declare namespace se = "http://www.modis.ispras.ru/sedna";'
        q += 'declare option se:output "indent=no";'
        q += u' %s' % self.path
        s = self.cursor.execute(q)
        count = -1
        for k in s:
            count += 1
            if norm_whitespace(k) == normed:
                return count
        raise ValueError('item not in list')

    def index(self, obj):
        """
        Get the (0-based) index of the item in the list.

        Uses the server-side index-of() function; falls back to
        brute-force iteration (index0) when the path turns out to be an
        XQuery expression rather than a plain XPath.
        """
        item = checkobj(obj, wf=False)
        q = u'declare namespace se = "http://www.modis.ispras.ru/sedna";'
        q += 'declare option se:output "indent=no";'
        q += u' let $p := %s ,' % self.path
        q += u' $i := %s ' % item
        q += u' return <i>{index-of($p,$i)}</i>'
        try:
            s = self.cursor.execute(q)
        except DatabaseError:
            # this is probably an XQuery expression, not an XPath, so
            # do brute-force evaluation
            return self.index0(obj)
        if s:
            y = fromstring(s.value)
            if y.text is not None:
                # the server's index is 1-based
                return int(y.text) - 1
        raise ValueError('item not in list')

    def xenumerate(self):
        """Like enumerate(), but yields the server's 1-based indices."""
        for idx, value in enumerate(self):
            yield idx + 1, value

    def xindex(self, obj):
        """Get the server's (1-based) index of the item."""
        return self.index(obj) + 1

    def __contains__(self, obj):
        item = checkobj(obj, wf=False)
        # choose the query form depending on whether the item is
        # well-formed XML or an atomic value
        try:
            fromstring(item)
            wf = True
        except XMLSyntaxError:
            wf = False
        if wf:
            q = u' for $i in %s ' % (self.path,)
            q += u' where $i = %s ' % item
            q += u' return $i'
            s = self.cursor.execute(q)
        else:
            # atomic value: let the cursor quote and escape it
            q = u' for $i in %s ' % (self.path,)
            q += u' where $i = %(item)s '
            q += u' return $i'
            s = self.cursor.execute(q, {'item': item})
        return bool(s.value)

    def __getslice__(self, start, stop):
        """Retrieve items [start:stop) with a server-side subsequence()."""
        rlen = stop - start
        rstart = self._localKey(start)
        q = u'declare namespace se = "http://www.modis.ispras.ru/sedna";'
        q += u'declare option se:output "indent=no";'
        q += u'for $i in subsequence(%s,%s,%s) ' % (self.path, rstart, rlen)
        q += u'return $i'
        s = list(self.cursor.execute(q))
        if self.parser:
            try:
                return [self.parser(item) for item in s]
            except XMLSyntaxError:
                # not parseable (e.g., atomic values): fall back to text
                pass
        return s

    def _iterparse(self, s):
        """Yield items from s, parsed where possible."""
        for item in s:
            try:
                i = self.parser(item)
            except XMLSyntaxError:
                i = item
            yield i

    def __iter__(self):
        q = u' %s' % self.path
        s = self.cursor.execute(q)
        if self.parser:
            return self._iterparse(s)
        return s

    def __str__(self):
        q = u'%s' % self.path
        s = self.cursor.execute(q)
        return s.value

    def __len__(self):
        return self.count()
class SednaElement(SednaXPath):
    """Read-write, elementtree-like interface to a single element in the
    Sedna database.

    The element's children are accessible by index, and the child list
    is mutable: append, insert, remove, extend, __setitem__, and
    __delitem__ issue the corresponding "update ..." statements on the
    server.  Attribute access (attrib, get, set, keys, values, items)
    is also provided.

    Initialize with a cursor and an XPath expression that selects
    exactly one element.
    """
    def __init__(self, cursor, path, parser=None, check=True):
        """
        Init the class with cursor and path.

        Set check to False to eliminate a server request (the
        single-element sanity check), but only if you know what you are
        doing...
        """
        super(SednaElement, self).__init__(cursor, path, parser)
        if check:
            self._checkElement()

    def _checkElement(self):
        """
        Do a check to see that self.path selects a single element.
        """
        q = u'let $i := %s' % (self.path,)
        q += u' return <i><c>{count($i)}</c></i>'
        s = self.cursor.execute(q)
        f = objectify.fromstring(s.value)
        c = int(f.c)
        if c == 1:
            return
        elif c == 0:
            raise ValueError(
            'The path did not return an element. ([0] might need to be [1]?)')
        else:
            raise ValueError(
                'Cannot init SednaElement with multiple elements.')

    def getparent(self):
        """
        Return the parent as a SednaElement, or None if at root.
        """
        c = self.path + '/..'
        t = SednaElement(self.cursor, c, self.parser)
        if t.tag is None:
            # the parent of the root element has no name
            return None
        return t
    parent = property(getparent)

    def count(self, tag=None):
        """
        Return the number of child elements; when tag is given, count
        only children with that tag.
        """
        if self._count is not None and tag is None:
            return self._count
        if tag:
            pt = tag
        else:
            pt = '*'
        q = u'let $i := %s/%s' % (self.path, pt)
        q += u' return <i><c>{count($i)}</c></i>'
        s = self.cursor.execute(q)
        f = objectify.fromstring(s.value)
        result = int(f.c)
        if tag is None:
            # cache only the all-children count; caching a tag-specific
            # count here would corrupt what __len__ and the mutators use
            self._count = result
        return result

    def append(self, obj):
        """Append a child after the last existing child."""
        item = checkobj(obj)
        if self.count() > 0:
            q = u'update insert %s following %s/*[last()]' % (item, self.path)
        else:
            q = u'update insert %s into %s' % (item, self.path)
        self.cursor.execute(q)
        # child list changed: invalidate the cached count
        self._count = None

    def __contains__(self, obj):
        try:
            self.index(obj)
            return True
        except ValueError:
            return False

    def index(self, obj):
        """
        Get the first (0-based) index of the item in the child list,
        matched by value.

        Raises ValueError when no child matches.
        """
        item = checkobj(obj)
        q = u'declare namespace se = "http://www.modis.ispras.ru/sedna";'
        q += 'declare option se:output "indent=no";'
        q += u' let $p := %s/* ,' % self.path
        q += u' $i := %s ' % item
        q += u' return <i>{index-of($p,$i)}</i>'
        try:
            s = self.cursor.execute(q)
        except DatabaseError:
            raise ValueError('item not in list')
        if s:
            y = fromstring(s.value)
            if y.text is not None:
                # index-of() may return several (1-based) indices
                # separated by whitespace; take the first
                return int(y.text.split()[0]) - 1
        raise ValueError('item not in list')

    def extend(self, items):
        """Append each item in items."""
        for item in items:
            self.append(item)
        self._count = None

    def insert(self, key, item):
        """Insert item before the child at (0-based) index key."""
        local = key + 1
        count = self.count()
        self._count = None
        if local < 1:
            local = count + local
        if local > count:
            # past the end: behave like append
            self.append(item)
            return
        elif local < 1:
            # far-negative index: clamp to the front
            local = 1
        item = checkobj(item)
        if count > 0:
            q = u'update insert %s preceding %s/*[%s]' % (item, self.path, local)
            self.cursor.execute(q)
        else:
            self.append(item)

    def remove(self, obj):
        """
        Remove the first child whose value matches obj.

        Raises ValueError when no child matches.
        """
        index = self.index(obj) + 1
        q = u'update delete %s/*[%s]' % (self.path, index)
        self.cursor.execute(q)
        self._count = None

    def __iter__(self):
        q = u' %s/*' % self.path
        s = self.cursor.execute(q)
        if self.parser:
            return self._iterparse(s)
        return s

    def __delitem__(self, key):
        local = self._localKey(key)
        q = u'update delete %s/*[%s]' % (self.path, local)
        self.cursor.execute(q)
        self._count = None

    @property
    def tag(self):
        """The element's tag name, obtained from the server."""
        q = u"let $i := %s return <t>{$i/name()}</t>" % self.path
        t1 = self.cursor.execute(q)
        r = fromstring(t1.value)
        return r.text

    def __getitem__(self, key):
        """Get the child at (0-based) index key; slices also work."""
        if isinstance(key, slice):
            start, stop, step = key.indices(self.count())
            if step != 1:
                raise NotImplementedError('cannot do steps')
            return self.__getslice__(start, stop)
        local = self._localKey(key)
        q = u'declare namespace se = "http://www.modis.ispras.ru/sedna";'
        q += 'declare option se:output "indent=no";'
        q += u' %s/*[%s]' % (self.path, local)
        self.cursor.execute(q)
        z = self.cursor.fetchone()
        if self.parser:
            return self.parser(z)
        return z

    def __setitem__(self, key, value):
        item = checkobj(value)
        local = self._localKey(key)
        q = u'update replace $i in %s/*[%s] ' % (self.path, local)
        q += ' with %s' % item
        self.cursor.execute(q)

    def replace(self, obj):
        """Replace the whole element at self.path with obj."""
        item = checkobj(obj)
        q = u'update replace $i in %s ' % (self.path,)
        q += ' with %s' % (item,)
        self.cursor.execute(q)
        # the element changed wholesale: drop cached attributes and count
        self._attrib = None
        self._count = None

    def __getslice__(self, start, stop):
        """Get children [start:stop) with a server-side subsequence()."""
        rlen = stop - start
        rstart = self._localKey(start)
        q = u'declare namespace se = "http://www.modis.ispras.ru/sedna";'
        q += 'declare option se:output "indent=no";'
        q += u'for $i in subsequence(%s/*,%s,%s) ' % (self.path, rstart, rlen)
        q += 'return $i'
        s = list(self.cursor.execute(q))
        if self.parser:
            return [self.parser(item) for item in s]
        return s

    # Attribute access

    @property
    def attrib(self):
        """Get the attributes dict for the element (cached).

        Do not directly modify this; use set('attr','value').
        If you need to remove an attribute, str -> edit -> replace is
        the best option.
        """
        if self._attrib is not None:
            return self._attrib
        q = u' for $i in %s/@* ' % (self.path)
        q += u' let $nm := name($i), '
        q += u' $vl:= data($i)'
        q += u' return <d><k>{$nm}</k><v>{$vl}</v></d>'
        s = self.cursor.execute(q)
        attrs = {}
        for k in s:
            t = objectify.fromstring(k)
            attrs[str(t.k)] = str(t.v)
        self._attrib = attrs
        return self._attrib

    def set(self, key, value):
        """
        Set an attribute.

        This reads and rewrites the entire element, so do it sparingly.
        """
        q = u'%s' % self.path
        self.cursor.execute(q)
        fromdb = self.cursor.fetchone()
        item = objectify.fromstring(fromdb)
        item.set(key, value)
        self.replace(item)

    def get(self, key):
        """
        Obtain the value of an attribute.

        Raises KeyError if the attribute is absent (or empty).
        """
        q = u'%s/data(@%s)' % (self.path, key)
        s = self.cursor.execute(q)
        t = s.value.strip()
        if t:
            return t
        else:
            raise KeyError("KeyError %s" % key)

    def keys(self):
        """Get the names of the attributes."""
        return self.attrib.keys()

    def values(self):
        """Get the values of the attributes."""
        return self.attrib.values()

    def items(self):
        """Get the attributes as (name, value) pairs."""
        return self.attrib.items()
import socket
from struct import pack, unpack, calcsize
import time
from StringIO import StringIO
try:
import threading as _threading
except ImportError:
import dummy_threading as _threading
import logging
#logger = logging.getLogger()
#we want an elementtree impl, but only for non-essential stuff. non-fatal
try:
import lxml.etree as ET
except ImportError:
try:
import xml.etree.ElementTree as ET
except ImportError:
try:
import cElementTree as ET
except ImportError:
try:
import elementtree.ElementTree as ET
except ImportError:
logger = logging.getLogger()
logger.error(
'zif.sedna protocol wants an elementtree implementation for some functions.')
# Sedna token constants
from msgcodes import *
# standard errors from PEP-249
from dbapiexceptions import Error, Warning, InterfaceError, DatabaseError,\
InternalError, OperationalError, ProgrammingError, IntegrityError,\
DataError, NotSupportedError
SEDNA_VERSION_MAJOR = 3
SEDNA_VERSION_MINOR = 0
SEDNA_MAX_BODY_LENGTH = 10240
LOAD_BUFFER_SIZE = SEDNA_MAX_BODY_LENGTH / 2
# local utility functions
def zString(aString):
    """
    Encode aString for the wire: a null byte, then the length as a
    network-order integer, then the string itself.
    """
    return pack('!bi%ds' % len(aString), 0, len(aString), aString)
def splitString(text, length):
    """
    Yield consecutive pieces of text, each no longer than length.
    """
    for start in range(0, len(text), length):
        yield text[start:start + length]
def normalizeMessage(message):
    """
    Decode a utf-8 server message and normalize its whitespace.

    Tabs become single spaces and each line is right-stripped; this keeps
    informational messages stable for doctests.
    """
    decoded = message.decode('utf-8')
    normalized = [line.rstrip().replace('\t',' ')
        for line in decoded.split('\n')]
    return u'\n'.join(normalized)
def escapeAndQuote(aDict):
    """
    Prepare a pyformat parameters dict for interpolation into a query.

    String values are wrapped in single quotes, with any single or double
    quote characters inside them doubled.  Non-string values pass through
    unchanged.
    """
    quoted = {}
    for key in aDict:
        try:
            value = aDict[key]
        except TypeError:
            raise ProgrammingError(
            'expected a parameters dict. Use pyformat %(var)s for constants.')
        if isinstance(value,basestring):
            for quote in ('"',"'"):
                if quote in value:
                    # double every occurrence of this quote character
                    value = (2*quote).join(value.split(quote))
            value = "'"+ value + "'"
        quoted[key] = value
    return quoted
class BasicCursor(object):
    """a PEP-249-like cursor to a zif.sedna protocol object

    You may override this by setting the connection's cursorFactory to
    some other implementation.

    If you use pyformat parameters in a statement, strings are quoted and
    quotation marks are doubled within the string. Numbers are left unquoted.
    Only for use with atomic values.
    """
    arraysize = 1
    rowcount = -1
    lastrowid = None
    def __init__(self,connection):
        self.connection = connection
    def execute(self,statement, parameters=None):
        """execute a statement, interpolating pyformat parameters if given"""
        if parameters:
            statement = statement % escapeAndQuote(parameters)
        self.result = self.connection.execute(statement)
        return self.result
    def executemany(self,statements,parameters=None):
        """execute several statements, sharing one parameters dict"""
        for statement in statements:
            if parameters:
                statement = statement % escapeAndQuote(parameters)
            self.execute(statement)
    def __iter__(self):
        return iter(self.result)
    def fetchall(self):
        """return all remaining items of the current result as a list"""
        return [item for item in self.result]
    def fetchone(self):
        """return the next item of the current result, or None when done"""
        try:
            # use the next() builtin instead of the py2-only .next() method
            return next(self.result)
        except StopIteration:
            return None
    def fetchmany(self,size=None):
        """return up to `size` items (default: self.arraysize) as a list

        BUG FIX: previously the result list was only created in the
        `else:` branch, so fetchmany() with no argument raised NameError.
        Per PEP 249, a shorter (possibly empty) list is returned when the
        result is exhausted.
        """
        if size is None:
            size = self.arraysize
        theList = []
        for counter in range(size):
            item = self.fetchone()
            if item is None:
                # result exhausted; do not pad with None
                break
            theList.append(item)
        return theList
    def setinputsizes(self,sizes):
        pass
    def setoutputsize(self,size,column=None):
        pass
    def close(self):
        """drop the reference to the connection"""
        self.connection = None
class Result(object):
    """Iterable wrapper around the outcome of a Sedna query.

    Iterating yields one utf-8-decoded unicode string per result item.
    `time` exposes the server-side processing time, which may help when
    optimizing queries.  `value` concatenates every remaining item into a
    single unicode string.
    """
    def __init__(self,conn):
        self.conn = conn
        self._time = None
        self.more = True
        self.item = '_DUMMY_'
    def __iter__(self):
        return self
    def getTime(self):
        """fetch (and cache) the server's reported query time"""
        if not self._time:
            raw = self.conn._send_string(token=SEDNA_SHOW_TIME)
            self._time = raw.decode('utf-8')
        return self._time
    time = property(getTime)
    def next(self):
        """return the current item and request the next one from the server"""
        pending = self.item
        if pending == '_DUMMY_':
            # the server has not sent anything for this result yet
            raise DatabaseError('Item not sent')
        if self.more:
            # prefetch the next item so it is ready for the next call
            self.conn._send_string(token=SEDNA_GET_NEXT_ITEM)
        if pending is None:
            raise StopIteration
        return pending.decode('utf-8')
    def _get_value(self):
        return u''.join(list(self))
    value = property(_get_value)
class ErrorInfo(object):
    """Parsed Sedna error message.

    `code` is the numeric error code from the leading network-order Int;
    `info` is a display string of the form "[code] message", normalized
    so it is stable in doctests.
    """
    def __init__(self,msg):
        # the first network-order Int is the error code
        code = unpack('!I',msg[:4])[0]
        self.code = code
        # skip two Ints and a byte (9 bytes) to reach the message text
        self.info = "[%s] %s" % (code, normalizeMessage(msg[9:].strip()))
class DebugInfo(ErrorInfo):
    """Debug message from the server: no numeric code, only normalized
    message text."""
    def __init__(self,msg):
        self.code = None
        self.info = "%s" % normalizeMessage(msg[9:].strip())
class SednaError(object):
    # NOTE(review): this is not an Exception subclass; instantiating it
    # raises DatabaseError from __init__, so `raise SednaError(item)` in
    # the handlers actually propagates DatabaseError to callers.  If item
    # is not an ErrorInfo, self.info is never set and this raises
    # AttributeError instead -- presumably all call sites pass ErrorInfo;
    # confirm before changing.
    def __init__(self,item):
        if isinstance(item,ErrorInfo):
            self.code = item.code
            self.info = item.info
        raise DatabaseError(self.info)
class DatabaseRuntimeError(SednaError):
    # distinct name kept for API compatibility; behaves exactly like
    # SednaError (instantiation raises DatabaseError)
    pass
class SednaProtocol(object):
    """Sedna protocol

    init with
    host    string  host name or ip address
    db      string  sedna database name to connect to
    login   string  user name
    passwd  string  user password
    port    int     port for connection default:5050

    Exceptions are raised when operations fail.
    Query execution must take place within a transaction.
    The result of a query will be in self.result; there is only one query
    and one result available at a time, though sedna's ACID properties will
    allow multiple instances to be employed concurrently.
    successful updates return True
    """
    # wire format: every message is prefixed with two network-order Ints,
    # (token, body length)
    headerFormat = '!II'
    prefixLength = calcsize(headerFormat)
    maxDataLength = SEDNA_MAX_BODY_LENGTH - prefixLength
    receiveBuffer = ''
    result = None
    error = None
    closed = True
    maxQueryLength = SEDNA_MAX_BODY_LENGTH
    notabs = False
    nonewlines = False
    doTrace = False
    _inTransaction = False
    ermsgs = None
    cursorFactory = BasicCursor
    # error exposition (PEP-249)
    Error = Error
    Warning = Warning
    InterfaceError = InterfaceError
    DatabaseError = DatabaseError
    InternalError = InternalError
    OperationalError = OperationalError
    ProgrammingError = ProgrammingError
    IntegrityError = IntegrityError
    DataError = DataError
    NotSupportedError = NotSupportedError
    # Public interfaces
    # queries
    def execute(self,query,format=0):
        """
        Send query to the Sedna server.

        query should be unicode or otherwise encodable to utf-8
        format is 0 for XML
                  1 for SXML
        """
        # first, clear out previous stuff in case we are in a LRP
        self.ermsgs = []
        self.currItem = []
        self.result = None
        self.receiveBuffer = ''
        if isinstance(query,unicode):
            query = query.encode('utf-8')
        else:
            raise ProgrammingError("Expected unicode, got %s." % type(query))
        if not self.inTransaction:
            # queries must run inside a transaction; start one implicitly
            self.begin()
        self.error = None
        self._send_string(query,token=SEDNA_EXECUTE,format=format)
        return self.result
    query = execute
    def close(self):
        """close the connection"""
        if self.socket and not self.closed:
            self._send_string(token=SEDNA_CLOSE_CONNECTION)
            self.closed = True
    # dbi wants a cursor
    def cursor(self):
        """return a cursor from cursorFactory"""
        # reset per-query state before handing out a cursor
        self.ermsgs = []
        self.currItem = []
        self.result = None
        self.receiveBuffer = ''
        return self.cursorFactory(self)
    # transactions
    def begin(self):
        """
        start transaction
        """
        # always acquire instance lock on begin.
        # the lock is released on error or when transaction ends.
        self.lock.acquire()
        self._send_string(token=SEDNA_BEGIN_TRANSACTION)
    beginTransaction = begin
    def commit(self):
        """
        commit transaction
        """
        self.receiveBuffer = ''
        res = self._send_string(token=SEDNA_COMMIT_TRANSACTION)
        return res
    def rollback(self):
        """
        rollback transaction
        """
        self.receiveBuffer = ''
        res = self._send_string(token=SEDNA_ROLLBACK_TRANSACTION)
        return res
    def endTransaction(self,how):
        """endTransaction from Sedna pydriver API

        `how` is 'commit' or 'rollback'
        """
        if how == 'commit':
            self.commit()
        elif how == 'rollback':
            self.rollback()
        else:
            raise ProgrammingError(
                "Expected 'commit' or 'rollback', got '%s'" % how)
    def transactionStatus(self):
        """transactionStatus from Sedna pydriver API

        returns 'active' or 'none'
        """
        if self.inTransaction:
            return 'active'
        else:
            return 'none'
    # Miscellaneous public methods
    # sometimes, you just want to upload a document...
    def loadText(self,text,document_name,collection_name=None):
        """
        load a string or stringio into the database as document_name

        if collection_name is provided, document will go in that
        collection.

        Just in case there is an <?xml preamble with an encoding, we run it
        through an elementtree parser and presumably get unicode back.
        If it's already unicode, no big deal...
        """
        if not isinstance(text,unicode):
            text = ET.tostring(ET.XML(text))
        self._inputBuffer = StringIO(text)
        s = u'LOAD STDIN "%s"' % document_name
        if collection_name:
            s += ' "%s"' % collection_name
        try:
            res = self.execute(s)
        finally:
            #always clear input buffer
            self._inputBuffer = ''
        return res
    def loadFile(self,filename,document_name,collection_name=None):
        """
        load a file by name into the database as document_name

        if the file is not ascii or utf-8 encoded, assure that the
        XML header indicates the correct encoding.
        if collection_name is provided, document will go in that
        collection.
        """
        s = u'LOAD "%s" "%s"' % (filename, document_name)
        if collection_name:
            s += u' "%s"' % collection_name
        return self.execute(s)
    # database metadata sugar
    @property
    def documents(self):
        # names of documents in the database
        return self._listMetadata('$documents')
    @property
    def modules(self):
        # names of loaded modules
        return self._listMetadata('$modules')
    @property
    def collections(self):
        # names of collections in the database
        return self._listMetadata('$collections')
    @property
    def indexes(self):
        # names of defined indexes
        return self._listMetadata('$indexes')
    @property
    def schema(self):
        # the full database schema document, as a unicode string
        return self.execute(u'doc("$schema")').value
    def _listMetadata(self,loc):
        """query a metadata document and return the name attributes found"""
        s = self.execute(u'doc("%s")' % loc)
        theList = []
        z = s.value
        t = ET.XML(z)
        for item in t:
            name = item.get('name')
            theList.append(name)
        return theList
    def getSchema(self,doc_or_collection_name):
        """return the schema for the named document or collection"""
        return self.execute(u'doc("$schema_%s")' % doc_or_collection_name).value
    def getDocumentStats(self,doc_name):
        """return server statistics for the named document"""
        return self.execute(u'doc("$document_%s")' % doc_name).value
    def getCollectionStats(self,collection_name):
        """return server statistics for the named collection"""
        return self.execute(u'doc("$collection_%s")' % collection_name).value
    def collectionDocuments(self,collection_name):
        """return the names of the documents in the named collection"""
        t = u"document('$documents')/documents/collection[@name='%s']/document"
        st = t % collection_name
        res = self.execute(st)
        theList = []
        for doc in res:
            item = ET.XML(doc)
            name = item.get('name')
            theList.append(name)
        return theList
    # debug helpers
    def debugOn(self):
        """
        Sedna should send debugging info.

        Set this within a transaction.
        """
        token = SEDNA_SET_SESSION_OPTIONS
        data = pack("!I",DEBUG_ON)+zString('')
        self._send_string(data,token)
    def setDebugHandler(self,fn):
        """replace the handleDebug callback with `fn`"""
        self.handleDebug = fn
    def handleDebug(self,debugInfo):
        """Handle debug information.

        if you want to deal with debug info, override this or
        use setDebugHandler, above.

        This method will be called with a DebugInfo object when debug info is
        available as part of a query result.

        you only need to handle this if you call debugOn()
        a DebugInfo object has .code and .info members
        """
        raise NotImplementedError
    def debugOff(self):
        """
        Sedna stops sending debugging info

        Also sent within a transaction.
        """
        token = SEDNA_SET_SESSION_OPTIONS
        data = pack("!I",DEBUG_OFF)+zString('')
        self._send_string(data,token)
    def traceOn(self):
        """log a line for each message sent and received"""
        self.doTrace = True
    def traceOff(self):
        """stop logging protocol messages"""
        self.doTrace = False
    def resetSessionOptions(self):
        """
        Put session options back to default.
        """
        self._send_string(token=SEDNA_RESET_SESSION_OPTIONS)
    # init
    def __init__(self,host='localhost',db="test",login="SYSTEM",
            passwd="MANAGER",port=5050,trace=False):
        self.host = host
        self.port = port
        self.username = login
        self.password = passwd
        self.database = db
        self.ermsgs = []
        # handlers.  left side is a response token from Sedna.
        # right side is the local callback for the body associated
        # with that token.
        self.handlers = {
            SEDNA_SEND_SESSION_PARAMETERS : self._sendSessionParameters,
            SEDNA_SEND_AUTH_PARAMETERS : self._sendAuthParameters,
            SEDNA_AUTHENTICATION_OK : self._authenticationOK,
            SEDNA_AUTHENTICATION_FAILED : self._authenticationFailed,
            SEDNA_ERROR_RESPONSE : self._errorResponse,
            SEDNA_QUERY_SUCCEEDED : self._querySucceeded,
            SEDNA_QUERY_FAILED : self._queryFailed,
            SEDNA_UPDATE_SUCCEEDED : self._updateSucceeded,
            SEDNA_UPDATE_FAILED : self._updateFailed,
            SEDNA_BULKLOAD_FILENAME : self._bulkloadFilename,
            SEDNA_BULKLOAD_FROMSTREAM : self._bulkloadFromstream,
            SEDNA_BULKLOAD_SUCCEEDED : self._bulkloadSucceeded,
            SEDNA_BULKLOAD_FAILED : self._bulkloadFailed,
            SEDNA_BEGIN_TRANSACTION_OK : self._beginTransactionOK,
            SEDNA_BEGIN_TRANSACTION_FAILED : self._beginTransactionFailed,
            SEDNA_COMMIT_TRANSACTION_OK : self._commitTransactionOK,
            SEDNA_COMMIT_TRANSACTION_FAILED : self._commitTransactionFailed,
            SEDNA_ROLLBACK_TRANSACTION_OK : self._rollbackTransactionOK,
            SEDNA_ROLLBACK_TRANSACTION_FAILED : self._rollbackTransactionFailed,
            SEDNA_DEBUG_INFO : self._debugInfo,
            SEDNA_ITEM_PART : self._itemPart,
            SEDNA_ITEM_END : self._itemEnd,
            SEDNA_RESULT_END : self._resultEnd,
            SEDNA_LAST_QUERY_TIME : self._lastQueryTime,
            SEDNA_CLOSE_CONNECTION_OK : self._closeConnectionOK,
            SEDNA_TRANSACTION_ROLLBACK_BEFORE_CLOSE : \
                self._transactionRollbackBeforeClose,
            SEDNA_SET_SESSION_OPTIONS_OK : self._setSessionOptionsOK,
            SEDNA_RESET_SESSION_OPTIONS_OK : self._resetSessionOptionsOK
            }
        self.openSocket(host,port)
        if trace:
            self.traceOn()
        # START_UP kicks off the handshake/authentication sequence
        self._send_string(token=SEDNA_START_UP)
        self.lock = _threading.Lock()
    # the rest of the module is non-public methods
    # socket opening
    def openSocket(self,host,port):
        """open the TCP connection to the Sedna server"""
        try:
            self.socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        except socket.error,e:
            raise InterfaceError(u"Could not create socket: %s" % e)
        try:
            self.socket.connect((host,port))
        except socket.error,e:
            if self.socket:
                self.socket.close()
            raise InterfaceError(
                u'Server connection failed. Is Sedna server running? %s' % e)
        if self.socket:
            # found this on the net, It's supposed to be faster than default?
            self.socket.setsockopt(socket.SOL_TCP,socket.TCP_NODELAY,0)
            # start handshaking and authenticating
            self.closed = False
    # communication with the server
    def _send_string(self,data='',token=0,format=0,respond=True):
        """
        send a message to the server

        data         a string
        token        a defined message id
        format       desired return format for queries
        respond      internal flag indicating whether a particular send is
                     the final send of a request

        The message is token|length prefixed.
        If it is a query message, and it is too long, we split it into smaller
        messages with execute_long...long_query_end

        format is the requested return format for queries.
        format = 0 : XML
        format = 1 : SXML - see Sedna docs - it's the format with parens
        """
        # just a bit of sanity. data at this point should be a
        # utf-8 encoded string
        if not isinstance(data,str):
            raise InterfaceError (u"Expected string, got %s." % data)
        if token in (SEDNA_EXECUTE, SEDNA_EXECUTE_LONG):
            self.result = None
            datalen = len(data)
            if datalen+self.prefixLength > self.maxQueryLength:
                #if it is a really long request, split it into smaller requests
                for split in splitString(data,LOAD_BUFFER_SIZE):
                    # each of these this is not a final request, so we
                    # set respond to False
                    self._send_string(split,token=SEDNA_EXECUTE_LONG,
                        format=format,respond=False)
                # send a message to end the request
                self._send_string(token=SEDNA_LONG_QUERY_END)
                # return here to prevent endless recursion...
                return
            # if we are doing EXECUTE or EXECUTE_LONG, we need to prefix the
            # request with the byte indicating the desired output format
            data = pack('!b',format) + zString(data)
        elif len(data)+self.prefixLength > self.maxDataLength:
            raise InterfaceError(u"Message is too long.")
        self._sendSocketData(pack(self.headerFormat,int(token),len(data)
            ) + data)
        if self.doTrace:
            # log the client side of the conversation
            logger = logging.getLogger()
            if token in (SEDNA_EXECUTE, SEDNA_EXECUTE_LONG):
                trace = data[6:]
            elif token == SEDNA_SET_SESSION_OPTIONS:
                trace = ''
            else:
                trace = data[5:]
            if trace:
                logger.info("(C) %s %s" % (codes[token],
                    trace.strip()))
            else:
                logger.info("(C) %s" % codes[token])
        if respond:
            return self._get_response()
    def _get_response(self):
        """get the response

        first, get enough of the response to determine its length,
        then obtain the remainder of the response based on the length.

        dispatch response to appropriate handler
        """
        prefixLen = self.prefixLength
        #get the header, two ints
        header = self._getSocketData(prefixLen)
        token,length = unpack(self.headerFormat,header)
        msg = self._getSocketData(length)
        # handlers are call-backs after the data are received
        if self.doTrace:
            # log the server side of the conversation
            logger = logging.getLogger()
            if token in (SEDNA_ERROR_RESPONSE, SEDNA_DEBUG_INFO):
                z = msg[9:]
            else:
                z = msg[5:]
            if z:
                logger.info("(S) %s %s" % (codes[token], normalizeMessage(z)))
            else:
                logger.info("(S) %s" % codes[token])
        return self.handlers[token](msg)
    def _getInTransaction(self):
        return self._inTransaction
    def _setInTransaction(self,bool):
        """Cannot enter a transaction without the instance lock

        Presumably, this enforces one transaction at a time...
        """
        # NOTE(review): the lock is acquired in begin(), not here; setting
        # inTransaction = False releases it.  Confirm every False-setting
        # path was preceded by begin(), otherwise release() will fail.
        if bool:
            # block until lock is available
            # lock acquisition is in self.begin()
            #self.lock.acquire()
            self._inTransaction = bool
        else:
            self._inTransaction = bool
            # release lock. Transaction is complete.
            self.lock.release()
    inTransaction = property(_getInTransaction,_setInTransaction)
    # communications at a bit lower level
    def _sendSocketData(self,data):
        """
        send data to the socket, trying to make sure it is all sent
        """
        datalen = len(data)
        totalsent = 0
        while totalsent < datalen:
            try:
                sent = self.socket.send(data[totalsent:])
            except socket.error,e:
                raise InterfaceError('Error writing to socket: %s' % e)
            if sent == 0:
                raise InterfaceError("Socket connection broken.")
            totalsent += sent
    def _getSocketData(self,length):
        """
        get 'length' bytes from the socket
        """
        bufferLen = len(self.receiveBuffer)
        while bufferLen < length:
            if bufferLen == 0:
                # We don't have anything yet.
                # Yield this processing time-slice to other threads.
                time.sleep(0)
            try:
                data = self.socket.recv(length-bufferLen)
            except socket.error,e:
                raise InterfaceError('Error reading from socket: %s' % e)
            self.receiveBuffer += data
            bufferLen += len(data)
        data, self.receiveBuffer = self.receiveBuffer[:length], \
            self.receiveBuffer[length:]
        return data
    # handlers
    # start-up
    def _sendSessionParameters(self,msg):
        """server asked for session parameters: protocol version, user, db"""
        token = SEDNA_SESSION_PARAMETERS
        msg = pack('!bb',SEDNA_VERSION_MAJOR,SEDNA_VERSION_MINOR) \
            + zString(self.username.encode('utf-8')) \
            + zString(self.database.encode('utf-8'))
        self._send_string(msg, token)
    # authentication
    def _sendAuthParameters(self,msg):
        """server asked for authentication: send the password"""
        token = SEDNA_AUTHENTICATION_PARAMETERS
        msg = zString(self.password.encode('utf-8'))
        self._send_string(msg,token)
    def _authenticationOK(self,msg):
        """authentication succeeded; nothing to do"""
        pass
    def _authenticationFailed(self,msg):
        """authentication failed; close the socket and raise"""
        error = ErrorInfo(msg)
        self.socket.close()
        raise OperationalError(error.info)
    # protocol error noticed by the server
    def _errorResponse(self,msg):
        """server reported an error; end the transaction and raise"""
        error = ErrorInfo(msg)
        if self.inTransaction:
            self.inTransaction = False
        self.ermsgs.append(error.info)
        error.info = '\n'.join(self.ermsgs)
        raise SednaError(error)
    # transactions - receivers
    def _beginTransactionOK(self,msg):
        self.inTransaction = True
    def _beginTransactionFailed(self,msg):
        error = ErrorInfo(msg)
        if self.inTransaction:
            self.inTransaction = False
        raise SednaError(error)
    def _commitTransactionOK(self,msg):
        self.inTransaction = False
        return True
    def _commitTransactionFailed(self,msg):
        error = ErrorInfo(msg)
        self.inTransaction = False
        raise SednaError(error)
    def _rollbackTransactionOK(self,msg):
        if self.inTransaction:
            self.inTransaction = False
        return True
    def _rollbackTransactionFailed(self,msg):
        if self.inTransaction:
            self.inTransaction = False
        error = ErrorInfo(msg)
        raise SednaError(error)
    # queries - receivers
    def _querySucceeded(self,msg):
        """query accepted; wrap the incoming items in a Result"""
        self.result = Result(self)
        # sedna immediately sends the first part of the result, so get it.
        self._get_response()
        return self.result
    def _queryFailed(self,msg):
        error = ErrorInfo(msg)
        raise ProgrammingError(error.info)
    def _updateSucceeded(self,msg):
        # successful updates simply return True
        self.result = True
        return self.result
    def _updateFailed(self,msg):
        error = ErrorInfo(msg)
        raise SednaError(error)
    def _bulkloadFilelike(self,filelike):
        """
        general internal method for bulk-loading filelikes

        used in _bulkloadFilename and _bulkloadFromstream
        """
        data = filelike.read(LOAD_BUFFER_SIZE)
        token = SEDNA_BULKLOAD_PORTION
        while data:
            if isinstance(data,unicode):
                # this should be acceptable. sockets cannot handle
                # python unicodes, and sedna is always utf-8
                data = data.encode('utf-8')
            data = zString(data)
            self._send_string(data,token,respond=False)
            data = filelike.read(LOAD_BUFFER_SIZE)
        filelike.close()
        self._send_string(token=SEDNA_BULKLOAD_END)
    def _bulkloadFilename(self,msg):
        """
        upload the file we asked to upload
        """
        # Int and a byte = 5
        theFile = open(msg[5:],'r')
        self._bulkloadFilelike(theFile)
    def _bulkloadFailed(self,msg):
        error = ErrorInfo(msg)
        raise SednaError(error)
    def _bulkloadFromstream(self,msg):
        """server is ready for the buffered stream set up by loadText"""
        self._bulkloadFilelike(self._inputBuffer)
    def _bulkloadSucceeded(self,msg):
        self._inputBuffer = ''
        return True
    def _lastQueryTime(self,msg):
        #Int-and-a-byte = 5
        return msg[5:]
    # Results processing
    def _itemPart(self,msg):
        """
        part of a response is available
        """
        c = self.currItem
        # 5 is Int + byte
        c.append(msg[5:])
        # this is not the final answer, so ask for more...
        self._get_response()
    def _itemEnd(self,msg):
        """one complete result item has arrived; hand it to the Result"""
        item = ''.join(self.currItem)
        self.currItem = []
        self.result.item = item
    def _resultEnd(self,msg):
        """the server has sent the last item of the result"""
        self.result.more = False
        if self.currItem:
            item = ''.join(self.currItem)
            self.currItem = None
            self.result.item = item
        else:
            self.result.item = None
    # debug info
    def _debugInfo(self,msg):
        """
        package a SEDNA_DEBUG_INFO message for client handler.

        client may provide a handleDebug method, using setDebugHandler(fn)
        regardless, debug info ends up in the traceback if enabled.
        """
        di = DebugInfo(msg)
        try:
            self.handleDebug(di)
        except NotImplementedError:
            pass
        self.ermsgs.append(di.info)
        self._get_response()
    # Connection and transaction feedback
    def _closeConnectionOK(self,msg):
        self.socket.close()
    def _transactionRollbackBeforeClose(self,msg):
        self.socket.close()
        raise Warning("Transaction rolled back when connection closed")
    # setting session options
    def _setSessionOptionsOK(self,msg):
        pass
    def _resetSessionOptionsOK(self,msg):
        pass
README_da.txt
These are simple instructions for using the database adapter (da) in a Zope 3 installation.
You need to require zope.rdb in your app's configure.zcml.
<include package="zope.rdb" file="meta.zcml" />
<include package="zope.rdb" />
Add a namespace to your <configure directive:
xmlns:rdb="http://namespaces.zope.org/rdb"
Then, you can do an rdb connection to sedna by dsn. For example:
<rdb:provideConnection
name="testsedna"
component="zif.sedna.da.SednaAdapter"
dsn="dbi://SYSTEM:MANAGER@localhost:5050/test"
/>
From there, in application code, use
from zope.rdb.interfaces import IZopeDatabaseAdapter
from zope.component import getUtility
sedna = getUtility(IZopeDatabaseAdapter,'testsedna')()
to obtain a handle, just like any other database adapter.
Obtain a cursor
c = sedna.cursor()
and do some queries. Here, we use elementtree syntax to put Chapter 1 of
Genesis into a page. self.body is the 'body' element of the page.
res = c.execute(u'doc("ot")/tstmt/bookcoll[1]/book[1]/chapter[1]/v/text()')
theList = c.fetchall()
ol = SubElement(self.body,'ol')
for k in theList:
p = SubElement(ol,'li')
p.text = k.strip()
fetchall() is not necessary; you may iterate the result directly.
res = c.execute(u'doc("ot")/tstmt/bookcoll[1]/book[1]/chapter[1]/v/text()')
ol = SubElement(self.body,'ol')
for k in res:
p = SubElement(ol,'li')
p.text = k.strip()
query result may be a boolean for updates, inserts, etc. Otherwise, it is
an iterable that produces python unicode strings. Here, the xquery obtained
the text content, but we could have written the query without "text()" and
returned the full "v" elements and parsed them with an XML parser.
Generally, failing queries will raise an exception. Zope takes care of
pooling connections and begin(), commit() / rollback().
| zif.sedna | /zif.sedna-0.10alpha1.tar.gz/zif.sedna-0.10alpha1/src/zif/sedna/README_da.txt | README_da.txt |
HTMLSanitizer
HTMLSanitizer removes potentially hazardous code from untrusted HTML.
This reduces the potential for cross-site scripting (XSS) and other
security exploits.
To use, import it from zif.xtemplate
::
>>> from zif.xtemplate import HTMLSanitizer
and create the object.
::
>>> sanitizer = HTMLSanitizer()
HTMLSanitizer has default sets of tags and attributes it allows.
::
>>> tags = list(sanitizer.allowedTags)
>>> tags.sort()
>>> tags == ['a', 'abbr', 'acronym', 'address', 'area', 'b', 'big',
... 'blockquote', 'br', 'button', 'caption', 'center', 'cite', 'code',
... 'col', 'colgroup', 'dd', 'del', 'dfn', 'dir', 'div', 'dl', 'dt', 'em',
... 'fieldset', 'font', 'form', 'h1', 'h2', 'h3', 'h4', 'h5', 'h6', 'hr',
... 'i', 'img', 'input', 'ins', 'kbd', 'label', 'legend', 'li', 'map',
... 'menu', 'ol', 'optgroup', 'option', 'p', 'pre', 'q', 's', 'samp',
... 'select', 'small', 'span', 'strike', 'strong', 'sub', 'sup', 'table',
... 'tbody', 'td', 'textarea', 'tfoot', 'th', 'thead', 'tr', 'tt', 'u',
... 'ul', 'var']
True
>>> attrs = list(sanitizer.allowedAttributes)
>>> attrs.sort()
>>> attrs == ['abbr', 'accept', 'accept-charset', 'accesskey', 'action',
... 'align', 'alt', 'axis', 'border', 'cellpadding', 'cellspacing', 'char',
... 'charoff', 'charset', 'checked', 'cite', 'class', 'clear', 'color',
... 'cols', 'colspan', 'compact', 'coords', 'datetime', 'dir', 'disabled',
... 'enctype', 'for', 'frame', 'headers', 'height', 'href', 'hreflang',
... 'hspace', 'id', 'ismap', 'label', 'lang', 'longdesc', 'maxlength',
... 'media', 'method', 'multiple', 'name', 'nohref', 'noshade', 'nowrap',
... 'prompt', 'readonly', 'rel', 'rev', 'rows', 'rowspan', 'rules', 'scope',
... 'selected', 'shape', 'size', 'span', 'src', 'start', 'summary',
... 'tabindex','target', 'title', 'type', 'usemap', 'valign', 'value',
... 'vspace', 'width']
True
To sanitize an lxml Element and its subElements, ask the sanitizer to sanitize
it, using the sanitize method.
::
>>> from lxml.etree import fromstring, tounicode
Let's make a snippet of HTML that has a script tag. Scripts are bad if
they come from an untrusted source, like the web. The bad tag and its contents
are removed. Any text following it is added to the tail of the previous tag or
the text of the containing element as appropriate.
::
>>> html = """<p><script src="somewhere/badscript.js"></script>
... OK code here.</p>"""
>>> output = sanitizer.sanitize(fromstring(html))
>>> tounicode(output)
u'<p>\nOK code here.</p>'
Style attributes can be bad, too, because browsers will execute javascript in
them.
::
>>> html = """<p style="color:blue;">OK code here.</p>"""
>>> output = sanitizer.sanitize(fromstring(html))
>>> tounicode(output)
u'<p>OK code here.</p>'
If we want to allow an attribute, tell the sanitizer before sanitizing.
::
>>> sanitizerWithStyle = HTMLSanitizer()
>>> sanitizerWithStyle.allowAttribute('style')
>>> html = """<p style="color:blue;">OK code here.</p>"""
>>> output = sanitizerWithStyle.sanitize(fromstring(html))
>>> tounicode(output)
u'<p style="color:blue;">OK code here.</p>'
We can allow tags for special purposes, too.
::
>>> sanitizerWithBody = HTMLSanitizer()
>>> sanitizerWithBody.allowTag('body')
>>> html = """<body><p style="color:blue;">OK code here.</p></body>"""
>>> output = sanitizerWithBody.sanitize(fromstring(html))
>>> tounicode(output)
u'<body><p>OK code here.</p></body>'
We can also deny tags and attributes.
::
>>> sanitizerWithoutTextArea = HTMLSanitizer()
>>> sanitizerWithoutTextArea.denyTag('textarea')
>>> html = """<form><textarea>Something here</textarea><input name="spam"
... type="submit" />
... </form>"""
>>> output = sanitizerWithoutTextArea.sanitize(fromstring(html))
>>> tounicode(output)
u'<form><input name="spam" type="submit"/>\n</form>'
>>> sanitizerWithoutTextArea.denyAttribute('name')
>>> output = sanitizerWithoutTextArea.sanitize(fromstring(html))
>>> tounicode(output)
u'<form><input type="submit"/>\n</form>'
Alternatively, we can initialize the HTMLSanitizer with a custom set of
allowed tags and/or attributes.
::
>>> sanitizerX = HTMLSanitizer(tags=['form','textarea'],attributes=['type'])
>>> html = """<form><textarea>Something here</textarea><input name="spam"
... type="input" />
... </form>"""
>>> output = sanitizerX.sanitize(fromstring(html))
>>> tounicode(output)
u'<form><textarea>Something here</textarea>\n</form>'
If there is nothing remaining after sanitizing, sanitizer returns None. Note
for this example that "body" is not ordinarily an allowed tag.
::
>>> html = """<body><p>Something here</p><input name="spam"
... type="input" />
... </body>"""
>>> output = sanitizer.sanitize(fromstring(html))
>>> output is None
True
We can give the sanitizer a text snippet and get unicode back.
::
>>> snippet = """<p style="color:blue;">OK code here.</p> And I want a <b>
... blue</b> pony for Christmas."""
>>> sanitizer.sanitizeString(snippet)
u'<p>OK code here.</p> And I want a <b>\nblue</b> pony for Christmas.'
We can also extract text (remove tags) from an element. This also removes
newlines.
::
>>> snippet = """<p style="color:blue;">I have a dog. <script exploit="true">
... </script>And I want a <b>
... blue</b> pony for Christmas.</p>"""
>>> sanitizer.extractText(fromstring(snippet))
u'I have a dog. And I want a blue pony for Christmas.'
| zif.xtemplate | /zif.xtemplate-0.2.2.tar.gz/zif.xtemplate-0.2.2/src/zif/xtemplate/sanitizer_README.txt | sanitizer_README.txt |
import logging
from lxml.etree import XMLSyntaxError, fromstring, HTML, _Element, tostring
from sanitizer import HTMLSanitizer
def getElementById(element,anId):
    """Return the first element in the tree whose id attribute equals anId,
    or None when no element matches.

    >>> from lxml.etree import tostring,fromstring,Element,SubElement
    >>> s = '<div><p id="myId">Some text</p></div>'
    >>> elt = fromstring(s)
    >>> e = getElementById(elt,'myId')
    >>> tostring(e)
    '<p id="myId">Some text</p>'
    >>> e = getElementById(elt,'anotherId')
    >>> e is None
    True
    """
    matches = element.xpath("//*[@id='%s']" % (anId,))
    if matches:
        return matches[0]
    return None
def appendSnippet(target,s,sanitize=True, bad='remove'):
    """apppend the snippet at target

    target is an Element in the document.
    s is the snippet, a string of xml. It does not need to have any tags,

    if the snippet is otherwise not well-formed or understood as XML,
    it will be parsed by lxml.HTML as tag soup.

    snippet will be appended to text and/or children of the target Element.

    The "bad" parameter is the sanitizer policy for handling bad tags.
    If bad is 'remove', the default, any offending tag and its contents
    will be removed.
    If bad is 'span', offending tag will be replaced with <span> elements.
    If bad is 'comment', offending tag will be commented.

    >>> t = fromstring('<div><p id="p"></p></div>')
    >>> p = getElementById(t,'p')
    >>> appendSnippet(p,'Now is the time ')
    >>> appendSnippet(p,'for <b>all</b> ')
    >>> appendSnippet(p,'<i>good men</i> ')
    >>> appendSnippet(p,'to come to the aid of their country.')
    >>> tostring(t)
    '<div><p id="p">Now is the time for <b>all</b> <i>good men</i> to come to the aid of their country.</p></div>'

    Testing sanitizing

    >>> t = fromstring('<div><p id="p"></p></div>')
    >>> p = getElementById(t,'p')
    >>> appendSnippet(p,'Now is the time <script src="http:/bad.com">')
    >>> tostring(t)
    '<div><p id="p">Now is the time </p></div>'
    >>> t = fromstring('<p><ul id="ul1"></ul></p>')
    >>> ul = getElementById(t,'ul1')
    >>> appendSnippet(ul,'<li>Now is the time <script src="http:/bad.com"> for all good men</li>',bad="span")
    >>> tostring(t)
    '<p><ul id="ul1"><li>Now is the time <span src="http:/bad.com"> for all good men</span></li></ul></p>'
    >>> t = fromstring('<p><ul id="ul1"></ul></p>')
    >>> ul = getElementById(t,'ul1')
    >>> appendSnippet(ul,'<li>Now is the time <script src="http:/bad.com"> for all good men</li>',bad="comment")
    >>> tostring(t)
    '<p><ul id="ul1"><li>Now is the time <!--sanitized: [script] for all good men--></li></ul></p>'
    >>> t = fromstring('<p><ul id="ul1"></ul></p>')
    >>> ul = getElementById(t,'ul1')
    >>> appendSnippet(ul,'<li>Now is the time <script>for (i=12;i<0,i--){}</script> for all good men</li>',bad="comment")
    >>> tostring(t)
    '<p><ul id="ul1"><li>Now is the time <!--sanitized: [script] for (i=12;i--> for all good men</li></ul></p>'
    >>> t = fromstring('<div><p id="p"></p></div>')
    >>> p = getElementById(t,'p')
    >>> appendSnippet(p,'Now is the time <script src="http:/bad.com">', sanitize=False)
    >>> tostring(t)
    '<div><p id="p">Now is the time <script src="http:/bad.com"/></p></div>'

    Testing some bad html lxml's HTML parser does this for us, Don't
    complain if it does something unexpected.

    >>> t = fromstring('<div><p id="p"></p></div>')
    >>> p = getElementById(t,'p')
    >>> appendSnippet(p,'Now is the time <i><b>for</i></b> ', sanitize=False)
    >>> tostring(t)
    '<div><p id="p">Now is the time <i><b>for</b></i> </p></div>'
    """
    # wrap the snippet so a bare-text or multi-root snippet parses
    t = u'<div>%s</div>' % s
    try:
        parsed = fromstring(t)
    except XMLSyntaxError:
        logger = logging.getLogger()
        logger.log(logging.DEBUG,"Snippet (%s) parsed as tag soup." % s)
        # let's parse this as tag soup!
        parsed = HTML(t)
        parsed = parsed.xpath('//div')[0]
    if sanitize:
        # NOTE(review): sanitize() may return None when everything is
        # stripped, which would make the xpath call below raise
        # AttributeError -- confirm whether that can happen here.
        sanitizer = HTMLSanitizer()
        parsed = sanitizer.sanitize(parsed, bad=bad)
    e = parsed.xpath('//div')[0]
    # leading text of the snippet must be merged into the document:
    # onto the tail of target's last child if there is one, otherwise
    # onto target's own text (lxml text/tail model)
    startText = e.text
    locationChildren = target.getchildren()
    if startText:
        if locationChildren:
            textloc = locationChildren[-1]
            try:
                textloc.tail += startText
            except TypeError:
                # tail was None; start it
                textloc.tail = startText
        else:
            try:
                target.text += startText
            except TypeError:
                # text was None; start it
                target.text = startText
    # element children of the snippet (with their tails) move wholesale
    children = e.getchildren()
    target.extend(children)
def appendWidget(target, widget):
    """Append *widget* to *target*.

    A widget may be a single Element, a list of Elements, or an HTML
    snippet string; this hopefully helps with widget use.  Attempts are
    made in that order, mirroring lxml's TypeError signalling.
    """
    try:
        # single Element?
        target.append(widget)
        return
    except TypeError:
        pass
    try:
        # a sequence of Elements?
        target.extend(widget)
        return
    except TypeError:
        pass
    if isinstance(widget, basestring):
        # an HTML snippet; parse and append it unsanitized
        appendSnippet(target, widget, sanitize=False)
    else:
        raise TypeError('widget (%s) could not be appended' % widget)
def fixEmptyElements(element):
    """globally make a fix on empty elements for the dtd.
    lxml likes to xml-minimize if possible. Here we assign some
    (empty) text so this does not happen when it should not.
    >>> t = fromstring('<div><p id="p"></p></div>')
    >>> p = getElementById(t,'p')
    >>> appendSnippet(p,'Now is the time <script src="http:/bad.com">', sanitize=False)
    >>> fixEmptyElements(t)
    >>> tostring(t)
    '<div><p id="p">Now is the time <script src="http:/bad.com"></script></p></div>'
    """
    # every element in the document with no child elements
    for childless in element.xpath('//*[count(*)=0]'):
        if not childless.text and not mayBeEmpty(childless.tag):
            # assigning empty text forces lxml to emit an explicit
            # closing tag instead of xml-minimizing the element
            childless.text = ''
def fixTDs(element):
    """set td text non-None so pretty-print does not add extra space.
    This probably won't be used much.
    >>> t = fromstring('<table><tr><td><img src="blah"/></td></tr></table>')
    >>> tostring(t,pretty_print=True)
    '<table>\\n <tr>\\n <td>\\n <img src="blah"/>\\n </td>\\n </tr>\\n</table>'
    The above makes space before the image, which makes a table of images render
    incorrectly.
    >>> fixTDs(t)
    >>> s = tostring(t,pretty_print=True)
    >>> "<td><img src" in s
    True
    """
    for cell in element.xpath('//td'):
        if not cell.text:
            cell.text = ''
        # also pin each child's tail so pretty-print cannot indent it
        for child in cell.getchildren():
            if not child.tail:
                child.tail = ''
"""
dtd location is 'http://www.w3.org/TR/xhtml1/DTD'
or similar for other dtds.
This probably ought to be in a utility eventually. Global doc_type is
maybe a bad thing. What if we want a different doc_type?
"""
import os
from urlparse import urlparse
try:
from xml.parsers.xmlproc import xmldtd
except ImportError:
print """This package uses PyXML.( http://pyxml.sourceforge.net/)
It may be installed as "pyxml" or python-xml" in Linux distributions.
"""
raise
#temporarily, we will deal with only one doc_type.
doc_type="""<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Strict//EN"
"http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd">"""
sdtdl = doc_type.split('"')
for k in sdtdl:
if 'w3.org' in k:
dtduri = k
upath = urlparse(dtduri)[2]
dtdfile = os.path.split(upath)[1]
thisfilepath = os.path.dirname(__file__)
localfile = os.path.join(thisfilepath,'dtds',dtdfile)
if os.path.isfile(localfile):
dtdInfo = xmldtd.load_dtd(localfile)
else:
logger = logging.getLogger()
logger.log(logging.INFO,"loading external DTD. Please place %s in %s." \
% (dtdfile,os.path.join(thisfilepath,'dtds')))
dtdInfo = xmldtd.load_dtd(dtduri)
def html_attrs(tag):
    """get the legal attributes of a tag according to the DTD
    >>> s = html_attrs('body')
    >>> s == [u'id', u'class', u'style', u'title', u'lang', u'xml:lang', u'dir',
    ... u'onclick', u'ondblclick', u'onmousedown', u'onmouseup', u'onmouseover',
    ... u'onmousemove', u'onmouseout', u'onkeypress', u'onkeydown', u'onkeyup',
    ... u'onload', u'onunload']
    True
    >>> s = html_attrs('em')
    >>> s == [u'id', u'class', u'style', u'title', u'lang', u'xml:lang', u'dir',
    ... u'onclick', u'ondblclick', u'onmousedown', u'onmouseup', u'onmouseover',
    ... u'onmousemove', u'onmouseout', u'onkeypress', u'onkeydown', u'onkeyup']
    True
    Unknown tags get the defaults
    >>> s = html_attrs('zope')
    >>> s == ['accesskey', 'class', 'dir', 'id', 'lang', 'style', 'tabindex',
    ... 'title']
    True
    """
    try:
        return dtdInfo.get_elem(tag).get_attr_list()
    except KeyError:
        # tag unknown to this DTD: log it and fall back to a small set
        # of universally reasonable attributes
        logging.getLogger().log(logging.INFO,
            "Invalid tag (%s) for DTD %s." % (tag,dtdfile))
        return ['accesskey','class','dir','id','lang','style','tabindex','title']
def mayBeEmpty(tag):
    """return True or False depending on whether the DTD allows these tags to be
    empty. We ask this to see if a tag may be xml-minimized according to the
    DTD.
    >>> mayBeEmpty('body')
    False
    >>> mayBeEmpty('img')
    True
    >>> mayBeEmpty('base')
    True
    """
    try:
        content_model = dtdInfo.elems[tag].get_content_model()
    except KeyError:
        # we don't know nothin' about this tag, don't mess with it.
        return True
    # xmldtd returns the content model as a 3-tuple; an empty second
    # member means the element has no required content, so the tag may
    # be minimized
    return not content_model[1]
=============
zif.xtemplate
=============
zif.xtemplate provides a base class for browser views. It descends from
zope.publisher.browser.BrowserPage to provide the base information you need
to create a data-centric page, namely context and request. zif.XTemplate
leverages lxml's elementtree interface, fast serializing of XML, and XPath
support to generate HTML in a fast, safe, and pythonic fashion.
It is fairly simple to create a basic page. Just make a view class that is a
descendent of zif.xtemplate.xtemplate.XTemplate.
::
>>> from zif.xtemplate import XTemplate
>>> class TestViewClass(XTemplate):
... pass
The base class does not put out a particularly interesting page, but the output
is a well-formed, perfectly functional, if empty, page of HTML. Let's register
the page in zcml and see what it looks like.
::
>>> from zope.configuration import xmlconfig
>>> ignored = xmlconfig.string("""
... <configure
... xmlns="http://namespaces.zope.org/zope"
... xmlns:browser="http://namespaces.zope.org/browser"
... >
... <!-- allow browser directives here -->
... <include package="zope.app.publisher.browser" file="meta.zcml" />
... <browser:page
... name="testview.html"
... for="*"
... class="zif.xtemplate.README.TestViewClass"
... permission="zope.Public"
... />
... </configure>
... """)
Start a browser.
::
>>> from zope.testbrowser.testing import Browser
>>> browser = Browser('http://localhost/')
>>> #browser.handleErrors = False
>>> browser.open('/testview.html')
The page has a DOCTYPE string:
::
>>> 'DOCTYPE' in browser.contents
True
The page has a <head> element.
::
>>> '<head>' in browser.contents
True
The page has a <body> element.
::
>>> '<body>' in browser.contents
True
>>> browser.contents=='''<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN"
... "http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd">
... <html xmlns="http://www.w3.org/1999/xhtml" xml:lang="en" lang="en">
... <head>
... <title>Untitled</title>
... <meta content="text/html;charset=utf-8" http-equiv="content-type" />
... <meta content="zif.xtemplate" name="generator" />
... <meta content="text/css" http-equiv="Content-Style-Type" />
... </head>
... <body></body>
... </html>'''
True
Let's do a view class that is a bit more interesting. We will start from the
base class and add some paragraphs to the body. The elementtree API is flexible;
p0, p1, and p2 are similar paragraphs, but constructed in different ways.
Notice that we do not have to close the tags. Elementtree takes care of that for
us. We always use lxml.etree. (c)elementtree is not compatible here, because
xpath is necessary to process the document before it is serialized. Some of
that processing assures proper output of empty elements that should not be
XML-minimized. For example, <div class="blah" /> is not valid HTML, and an
empty div needs to be output as, for this example, <div class="blah"></div>.
This class obtains context and request, which are the same context and request
you are familiar with. The render() method is the only one actually needed in
a subclass. Additional methods may be used to refactor repetitive operations.
They are also recommended just to make the code more readable. Blank lines also
help. A render() method can turn into a huge block of grey fairly easily.
::
>>> from lxml.etree import Element, SubElement, Comment
>>> class TestViewClass2(XTemplate):
... title="Test2"
...
... def getHelloWorld(self):
... return "Hello, World!"
...
... def putInSnippet(self):
... snippet = '<p id="newp1">This is <em>fun</em>.</p>'
... self.appendSnippet(self.body,snippet,sanitize=False)
...
... def render(self):
... context = self.context
... request = self.request
...
... self.addStyleSheet('/@@/resource1/mystylesheet.css')
... hwtext = self.getHelloWorld()
...
... p0 = SubElement(self.body,'p',{'style':'color:red;','id':'p0'})
... p0.text = hwtext
...
... p1 = Element('p')
... p1.set('style','color:blue;')
... p1.text = hwtext
... self.body.append(p1)
...
... p2 = SubElement(self.body,'p',style="color:green;")
... p2.text = hwtext
...
... # a <span> element goes after p0's text. We still have a pointer
... # to p0, so it does not matter that this is out-of-order.
... span0 = SubElement(p0,'span')
... # text inside the span
... span0.text = ' And Hello to '
... # text that follows the span, still in the same paragraph
... span0.tail = 'other worlds, too!'
...
... self.putInSnippet()
... self.body.append(Comment("The <p> above is a parsed snippet."))
...
... self.getElementById('newp1').set('class','paragraph1')
So, we register this view:
::
>>> ignored = xmlconfig.string("""
... <configure
... xmlns="http://namespaces.zope.org/zope"
... xmlns:browser="http://namespaces.zope.org/browser"
... >
... <!-- allow browser directives here -->
... <include package="zope.app.publisher.browser" file="meta.zcml" />
... <browser:page
... name="testview2.html"
... for="*"
... class="zif.xtemplate.README.TestViewClass2"
... permission="zope.Public"
... />
... </configure>
... """)
And let's look at the output a bit.
::
>>> browser = Browser('http://localhost/')
>>> #browser.handleErrors = False
>>> browser.open('/testview2.html')
>>> browser.contents.count('Hello, World!') == 3
True
>>> browser.contents.count('<em>fun</em>') == 1
True
>>> browser.contents =='''<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN"
... "http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd">
... <html xmlns="http://www.w3.org/1999/xhtml" xml:lang="en" lang="en">
... <head>
... <title>Test2</title>
... <meta content="text/html;charset=utf-8" http-equiv="content-type" />
... <meta content="zif.xtemplate" name="generator" />
... <meta content="text/css" http-equiv="Content-Style-Type" />
... <link href="/@@/resource1/mystylesheet.css" type="text/css" rel="stylesheet" />
... </head>
... <body>
... <p style="color:red;" id="p0">Hello, World!<span> And Hello to </span>other worlds, too!</p>
... <p style="color:blue;">Hello, World!</p>
... <p style="color:green;">Hello, World!</p>
... <p id="newp1" class="paragraph1">This is <em>fun</em>.</p>
... <!--The <p> above is a parsed snippet.-->
... </body>
... </html>'''
True
It's not easy to see what's going on here, so we will operate the class directly
to show how it works.
Since XTemplate descends from BrowserView, it needs to be instantiated with
context and request.
::
>>> class TestResponse(object):
... def setHeader(self,key,value):
... pass
>>> class TestRequest(dict):
... def __init__(self):
... self.environment = {}
... self.response = TestResponse()
>>> context = None
>>> request = TestRequest()
>>> myDoc = TestViewClass2(context,request)
The base lxml element is myDoc.document. The instance variables, myDoc.body and
myDoc.head correspond to the <body> and <head> elements of myDoc.document.
Adding to and manipulating the document are ordinarily done using lxml's
elementtree API, using self.body and self.head as the initial hooks into the
document. Most of what you need to know to use xtemplates is knowing a bit
about lxml.
::
>>> type(myDoc.document)
<type 'etree._ElementTree'>
>>> type(myDoc.body)
<type 'etree._Element'>
>>> type(myDoc.head)
<type 'etree._Element'>
Default body is empty at instantiation.
::
>>> [item for item in myDoc.body]
[]
Nothing happens with the document until it is called. XTemplate's __call__
method calls render(), then calls finalizePage(), which in turn calls a few
methods that post-process the document. XTemplate uses lxml's xpath
functionality internally, so (c)elementtree will not work here. Ultimately, the
ElementTree in the .document instance variable is serialized to unicode and
returned to Zope for output.
::
>>> myDoc.render()
>>> [item.text for item in myDoc.body.xpath('p')]
['Hello, World!', 'Hello, World!', 'Hello, World!', 'This is ']
A note about the xpath expression in the code above. All lxml Elements have
xpath functionality. Above, we asked for the list of all "p" elements in
myDoc.body. xpath expressions can be used to obtain lists of Elements of a
particular description. To find the list of empty elements for postprocessing,
xtemplate uses the xpath expression "//*[count(*)=0]". You may tailor an xpath
expression to find practically any subset of elements in a document. For
example, if you need to access an Element with an id of "bob", the xpath
expression would be "//*[@id='bob']". XTemplate uses this internally in its
getElementById method. This only scratches the surface of what is possible with
xpath expressions. You do not need to be an xpath expert to use xpath
expressions. I am certainly not one. To find the above, I Googled.
Just a bit about text and tail. It takes a bit of getting used to the "text"
and "tail" instance variables in an elementtree Element. Particularly when an
Element allows mixed content (text and tags interspersed), it can be difficult
to determine exactly which tag gets what text. As a hopefully clear
illustration, consider the following. How would you create this with lxml?
::
<c><a>text_a<b>text_b</b>tail_b</a>tail_a</c>
Here's some code that creates the above. An element's "text" comes after the
element's start tag and before the next tag. An element's "tail" comes after
the element's closing tag and before the next tag.
::
>>> from lxml.etree import Element, SubElement, tostring
>>> c = Element('c')
>>> a = SubElement(c,'a')
>>> a.text = 'text_a'
>>> a.tail = 'tail_a'
>>> b = SubElement(a,'b')
>>> b.text = 'text_b'
>>> b.tail = 'tail_b'
>>> tostring(c)
'<c><a>text_a<b>text_b</b>tail_b</a>tail_a</c>'
It would be a chore to make users do this, and we probably do not want users
directly accessing the lxml code. Fortunately, xtemplate has a helper method,
appendSnippet, that takes care of the fuss. Let's say we want to append a
snippet to myDoc. Let's pretend the user-generated string comes from a form.
::
>>> snippet = '<p>This is <em>user-generated</em> HTML </p>'
The wrong way to do this is to make a <div> and make the snippet its text.
::
>>> div = SubElement(myDoc.body,'div')
>>> div.text = snippet
>>> tostring(div)
'<div><p>This is <em>user-generated</em> HTML </p></div>'
Ick! The tags were HTML-escaped! This is better...
::
>>> #first clear out the bad text
>>> div.text = None
>>> myDoc.appendSnippet(div,snippet)
>>> tostring(div)
'<div><p>This is <em>user-generated</em> HTML </p></div>'
One important thing that zif.xtemplate does for you is assuring that the HTML
is valid. It does not bother parsing the document for complete DTD compliance,
but it does some work to assure that for the most part, your HTML is
syntactically correct.
::
>>> newDoc = TestViewClass(context,request)
>>> mainDiv = Element('div', {'id':'main'})
>>> newDoc.body.append(mainDiv)
>>> tostring(newDoc.document)
'<html><head/><body><div id="main"/></body></html>'
We know that <div id="main"/> is not valid html, because a div tag must have
a closing div tag. The XML-minimized <head> is also a problem. One of the
methods that is called in finalizePage() is fixEmptyElements. After this is
called, the document is more like HTML than XML.
::
>>> newDoc.fixEmptyElements()
>>> tostring(newDoc.document)
'<html><head></head><body><div id="main"></div></body></html>'
zif.xtemplate won't mess with elements that are allowed to be minimized. Note that the
<input> tag is left alone below.
::
>>> form = SubElement(mainDiv,'form')
>>> input = SubElement(form,'input',{'type':'submit','value':'OK'})
>>> newDoc.fixEmptyElements()
>>> tostring(newDoc.document)
'<ht...<form><input type="submit" value="OK"/></form></div></body></html>'
zif.xtemplate will fill out "action" and "method" in <form> elements if you
leave them out. Let's call a document and see how the postprocessing alters it.
Note that empty, xml-minimized tags have an XHTML 1.0 appendix c correction: an
extra space before the slash ending the tag.
::
>>> #First, we trick TestRequest into having PATH_INFO
>>> request['PATH_INFO'] = 'http://localhost/index.html'
>>> newDoc = TestViewClass(context,request)
>>> #charset is ordinarily gotten from request, but we have to set it here
>>> newDoc.charset = 'utf-8'
>>> mainDiv = Element('div', {'id':'main'})
>>> newDoc.body.append(mainDiv)
>>> form = SubElement(mainDiv,'form')
>>> input = SubElement(form,'input',{'type':'submit','value':'OK'})
>>> newDoc()
'<!DOC...<form action="http://localhost/index.html" method="post">...<input type="submit" value="OK" />...'
Templating, making several pages have the same look, is easy if you create one
class that does the common boilerplate, then create subclasses of that class.
You will want to super() the render method. Note that self.body, self.head,
and self.docElement are available to all subclasses, as well as a DOM-ish
method, getElementById(). If your template classes or base templates have
elements with "id" attributes, you can access those elements with the
getElementById method. lxml's element.xpath("XPathExpr") and XSLT facilities
are also available to use for page generation.
The appendSnippet method has sanitize=True by default, which means that only
tags and elements in a white list are included in the output. This should
reduce the chance of cross-site scripting exploits, and is the recommended way
to output user-provided text if HTML is allowed. Simply setting the text or
tail properties of an element will always HTML-escape the text. See
"HTMLSanitizer":/sanitizer_README.html for more info about how it works.
Inline javascript is likely to be troublesome; it is difficult to get "<" and
"&" in text. The work-around is to use external files for javascript. It is a
good idea anyway. Use the addScript(url) method.
There are some class variables / switches that affect output and processing.
For reference, they are provided here. Override in subclasses as needed.
- **defaultTitle** - a default title. This is the ultimate fallback when no title can
be found. Default: u'Untitled'
- **lang** - page language. Overridable in subclasses. Default: u'en'
- **strictXHTML** - try to do XHTML strict where supported by browser. Default: True
- **useMetaContentTypeTag** - include a meta tag with content-type and charset.
Default: True
- **cacheControl** - invoke the (overridable) doCacheControl method. Default: True
- **title** - the title. Just one of many ways to set title. Default: u''
- **docTypeHeader** - do a <!DOCTYPE... statement at the beginning. Default: True
- **prettyPrint** - indent where feasible. Default: True
- **tdFix** - do not indent content inside td tags when pretty printing.
Default: False
- **benchmark** - put a note about page generation time into log. Default: True
- **generatorTag** - a bit of advertising, but can be overridden. Default: True
- **template** - a string or unicode of well-formed HTML we wish to start with
as a template. Namespace declarations and METAL/TAL are not currently
supported. Default: the contents of the basetemplate.html file in the
distribution folder. You may override the default behavior by overriding the
getTemplate method.
If your class provides a postProcess() method, this will be called just before
the output is serialized. A postProcess() method provides a final opportunity to
apply global changes to the document. For ultimate control, override the
__call__ method, and return a unicode string.
zif.xtemplate contains no harmful chemicals.
| zif.xtemplate | /zif.xtemplate-0.2.2.tar.gz/zif.xtemplate-0.2.2/src/zif/xtemplate/README.txt | README.txt |
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
from zope.publisher.browser import BrowserPage
from zope.traversing.browser.absoluteurl import absoluteURL
import logging
from zope.publisher.http import getCharsetUsingRequest, IResult
from zope.publisher.interfaces.browser import IBrowserRequest
import os
#cStringIO does not do unicode properly, apparently
from StringIO import StringIO
import time
from xml.dom import XHTML_NAMESPACE, XML_NAMESPACE
logger = logging.getLogger()
from sanitizer import HTMLSanitizer
from interfaces import ILXMLHTMLPage
from zope.interface import implements
from zope.component import adapts
try:
from lxml.etree import Element,SubElement, tounicode, ElementTree, \
fromstring, XMLSyntaxError, HTML, Comment, parse, HTMLParser, XMLParser
except ImportError:
print """This package uses lxml.(http://codespeak.net/lxml/)
It may be installed as "lxml" or "python-lxml" in Linux distributions.
easy_install also works. You want version 1.0+
"""
raise
from lxmlhtmlutils import getElementById, appendSnippet, appendWidget, \
fixEmptyElements, fixTDs
# directory of this package and the bundled fallback template
localfilepath = os.path.dirname(__file__)
baseTemplate = os.path.join(localfilepath,'basetemplate.html')
# transitional doctype: used when the client does not accept
# application/xhtml+xml
t_doc_type="""<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN"
"http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd">"""
# strict doctype: used for XHTML-capable clients
doc_type="""<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Strict//EN"
"http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd">"""
def readLocalFile(filename):
    """Return the lines of *filename*, relative to this package's directory.

    Raises ValueError if the file does not exist.
    """
    filepath = os.path.join(localfilepath, filename)
    if not os.path.isfile(filepath):
        raise ValueError("Could not read file %s" % filepath)
    # open() instead of the py2-only file() builtin, and close the
    # handle explicitly instead of leaking it
    f = open(filepath)
    try:
        return f.readlines()
    finally:
        f.close()
#class LXMLHTMLResult(object):
#implements(IResult)
#adapts(ILXMLHTMLPage, IBrowserRequest)
#def __init__(self,context,request):
#self.context = context
#self.request = request
#def __call__(self):
#request = self.request
#context = self.context
#charset = getCharsetUsingRequest(request)
class XTemplate(BrowserPage):
    """lxml-generated page.

    Base class for browser views that build HTML with lxml's elementtree
    API.  Subclasses override render() and populate self.body / self.head;
    calling the instance runs the post-processing pipeline (title, doctype,
    form defaults, empty-element fixes) and serializes self.document to an
    (X)HTML string.  The class attributes below are switches that may be
    overridden in subclasses.
    """
    implements(ILXMLHTMLPage)
    #set a default title. This is the ultimate fallback when
    # no title can be found
    defaultTitle = u'Untitled'
    #page language. Overridable in subclasses.
    lang = u'en'
    #try to do XHTML strict where supported by browser.
    # doctype, xml declaration with charset
    strictXHTML = True
    #put in a meta tag with content-type and charset
    useMetaContentTypeTag = True
    #set a cache control header
    cacheControl = True
    #the title. Just one of many ways to set title
    title = u''
    #do a <!DOCTYPE... statement at the beginning
    docTypeHeader = True
    #indent where feasible
    prettyPrint = True
    #do not indent content inside td tags when pretty printing
    tdFix = False
    #throw a note about page generation time into log
    benchmark = True
    #a bit of advertising, but can be overridden
    generatorTag = True
    #browser-sniffing flags, set per-request in __init__; sometimes useful
    IE = False
    KHTML = False
    Gecko = False

    def __init__(self, context, request):
        """Initialize the page: sniff the user agent, load the template,
        and locate the document's <head> and <body> elements."""
        super(XTemplate, self).__init__(context, request)
        agent = self.request.get('HTTP_USER_AGENT', '')
        if 'MSIE' in agent:
            self.IE = True
        if 'KHTML' in agent:
            self.KHTML = True
        if 'Gecko' in agent:
            self.Gecko = True
        self.agent = agent
        # used by the 'benchmark' switch to log total generation time
        self.startTime = time.time()
        template = self.getTemplate()
        if template is None:
            # no subclass-provided template: use the packaged base template.
            # Close the file handle explicitly (was leaked before).
            f = open(baseTemplate)
            try:
                template = f.read()
            finally:
                f.close()
        if template:
            s = StringIO(template)
            # remove_blank_text lets pretty_print re-indent cleanly later
            parser = XMLParser(remove_blank_text=True)
            self.document = parse(s, parser)
            self.docElement = self.document.getroot()
            self.head = self.document.xpath('//head')[0]
            self.body = self.document.xpath('//body')[0]
        else:
            # empty template string: build a minimal document from scratch
            self.docElement = Element('html')
            self.document = ElementTree(self.docElement)
            self.head = SubElement(self.docElement, 'head')
            self.body = SubElement(self.docElement, 'body')
        self.scripts = []
        self.styleSheets = []
        self.charset = getCharsetUsingRequest(request)

    def getTemplate(self):
        """return a string or unicode representation of the base HTML template
        for this template. Override this if you want to use a different
        template provider. Base implementation here returns None
        """
        return None

    def renderDocBoilerPlate(self):
        """Set namespace/lang attributes on <html>, negotiate the
        content-type with the client, and emit the standard meta tags."""
        self.docElement.set('xmlns', XHTML_NAMESPACE)
        # xml:lang must be set through the XML namespace URI, not as the
        # literal attribute name 'xml:lang'
        self.docElement.set('{%s}lang' % XML_NAMESPACE, self.lang)
        self.docElement.set('lang', self.lang)
        ca = self.request.environment.get('HTTP_ACCEPT', '')
        if self.strictXHTML:
            #let's see if the client accepts xhtml+xml...
            if 'application/xhtml+xml' in ca:
                ct = 'application/xhtml+xml'
                self.request.response.setHeader('content-type', '%s' % (ct))
            else:
                #client does not accept xhtml+xml
                self.strictXHTML = False
        if not self.strictXHTML:
            ct = 'text/html'
            self.request.response.setHeader('content-type', '%s;charset=%s' % (ct, self.charset))
        if self.useMetaContentTypeTag:
            self.addMetaTag({'http-equiv': 'content-type',
                'content': '%s;charset=%s' % (ct, self.charset)})
        self.renderMetaTags()
        if self.cacheControl:
            self.doCacheControl()

    def doCacheControl(self):
        """cache control.
        may be overriden in descendents.
        set cacheControl to True to invoke"""
        self.request.response.setHeader('cache-control', 'no-cache')

    def renderTitle(self, getTitle=None):
        """Obtain the page title and append a <title> tag to <head>.

        Precedence: the getTitle argument (callable or string), then
        self.title, then self.getTitle(), then context.getTitle(), and
        finally self.defaultTitle."""
        tt = SubElement(self.head, 'title')
        # is getTitle provided in the call?
        if getTitle:
            try:
                #try to call it
                tt.text = getTitle()
            except TypeError:
                #maybe it is a string
                tt.text = getTitle
        elif self.title:
            #maybe this template has a title?
            tt.text = self.title
        else:
            try:
                #maybe this template has a getTitle method?
                tt.text = self.getTitle()
            except AttributeError:
                try:
                    #maybe context has a getTitle method?
                    tt.text = self.context.getTitle()
                except AttributeError:
                    tt.text = self.defaultTitle

    def renderMetaTags(self):
        """Emit the generator and Content-Style-Type meta tags."""
        if self.generatorTag:
            self.addMetaTag({'name': 'generator', 'content': 'zif.xtemplate'})
        self.addMetaTag({'http-equiv': 'Content-Style-Type', 'content': 'text/css'})

    def addMetaTag(self, attribs):
        """ add a meta tag. attribs is a dict"""
        tag = Element('meta', attribs)
        self.head.append(tag)

    def render(self):
        """Descendent classes should override this.
        available elements;
        self.body
        self.head
        self.docElement
        These are lxml.etree.Elements that have the elementtree api.
        """

    def getElementById(self, anId):
        """return the first element with this id attribute.
        Return None if not available"""
        return getElementById(self.docElement, anId)

    def appendSnippet(self, target, s, sanitize=True):
        """append the snippet at target
        target is an Element in the document.
        s is the snippet, a string of xml. It does not need to have any tags,
        if the snippet is otherwise not well-formed or understood as XML,
        it will be parsed by lxml.HTML as tag soup.
        snippet will be appended to text and/or children of the location Element.
        """
        # pass the caller's flag through; previously this was hard-coded
        # to sanitize=True, silently ignoring sanitize=False
        appendSnippet(target, s, sanitize=sanitize)

    def appendWidget(self, target, widget):
        """Append a widget at target.
        a widget may be an Element or list of Elements, or an html snippet.
        this hopefully helps with widget use."""
        appendWidget(target, widget)

    def renderStyleSheetsAndScripts(self):
        """do script and style tags for the urls registered via
        addScript() / addStyleSheet()"""
        for k in self.scripts:
            s = SubElement(self.head, 'script', {'type': 'text/javascript'})
            s.set('src', k)
        for k in self.styleSheets:
            s = SubElement(self.head, 'link', {'rel': 'stylesheet',
                'type': 'text/css', 'href': k})

    def postProcess(self):
        """perform subclass-specific post-processing"""

    def fixEmptyElements(self):
        """globally make a fix on empty elements for the dtd.
        lxml likes to xml-minimize if possible. Here we assign some
        (empty) text so this does not happen when it should not."""
        fixEmptyElements(self.docElement)

    def fixTDs(self):
        """set td text non-none so pretty-print does not add extra space"""
        fixTDs(self.body)

    def fixForms(self):
        """set action and method on forms if missing"""
        forms = self.body.xpath('//form')
        for form in forms:
            action = form.get('action')
            if not action:
                # default the action to the current request path
                action = self.request['PATH_INFO']
                form.set('action', action)
            method = form.get('method')
            if not method:
                form.set('method', 'post')

    def renderEndComment(self):
        """override this if want something different..."""
        s = Comment('Created with Zope 3 XTemplate')
        self.docElement.append(s)

    def finalizePage(self):
        """Run the standard post-processing pipeline before serialization.
        Note: scripts/stylesheets are rendered before fixEmptyElements so
        empty <script> tags get explicit closing tags."""
        self.renderTitle()
        self.renderDocBoilerPlate()
        self.fixForms()
        self.renderStyleSheetsAndScripts()
        self.fixEmptyElements()
        self.postProcess()
        if self.tdFix and self.prettyPrint:
            self.fixTDs()
        # end comment is disabled by default; subclasses may re-enable
        #self.renderEndComment()

    def addScript(self, url):
        """add a script url"""
        if not url in self.scripts:
            self.scripts.append(url)

    def addStyleSheet(self, url):
        """add a style sheet url"""
        if not url in self.styleSheets:
            self.styleSheets.append(url)

    def __call__(self):
        """Render, post-process, and serialize the page; return the
        encoded page text for publication."""
        self.render()
        self.finalizePage()
        doc = tounicode(self.document, pretty_print=self.prettyPrint)
        if not self.strictXHTML:
            dt = t_doc_type
        else:
            dt = doc_type
        txt = doc.encode(self.charset)
        if not self.strictXHTML:
            #XHTML1.0 Appendix C: space before the slash in minimized tags
            replacements = (
                ('/>', ' />'),
                # NOTE(review): the entry below evaluates to the single
                # string ',' so its replace is a no-op; it looks garbled
                # and the original intent should be confirmed
                (''',''')
                )
            for m in replacements:
                if m[0] in txt:
                    txt = txt.replace(m[0], m[1])
        # build the pieces in reverse order, then join back-to-front:
        # [xml header,] [doctype,] page text
        output = [txt]
        if self.docTypeHeader:
            output.append(dt)
        if self.strictXHTML:
            xmlheader = '<?xml version="1.0" encoding="%s"?>' % self.charset
            output.append(xmlheader)
        e = '\n'.join(reversed(output))
        if self.benchmark:
            logger.log(logging.INFO, "Page generated in %01.4f s." % (time.time() - self.startTime,))
        return e

    def asSnippet(self, element=None, pretty_print=True):
        """return an element and its contents as unicode string.
        no post-processing here. Subclasses are free to first call
        other routines in e.g., finalizePage, if desired.
        """
        # explicit None check: a childless lxml Element is falsy, so the
        # previous 'if not element' wrongly fell back to self.body for
        # empty elements
        if element is None:
            element = self.body
        doc = tounicode(element, pretty_print=pretty_print)
        return doc
from lxml.etree import HTML, XMLSyntaxError, fromstring, tounicode, Comment, \
tostring
import logging
#thanks to Mark Pilgrim for these lists
#http://feedparser.org/docs/html-sanitization.html
# whitelist of element names that survive sanitization; anything else is
# stripped or commented-out by HTMLSanitizer
allowed_tags = set(('a', 'abbr', 'acronym', 'address', 'area', 'b', 'big',
    'blockquote', 'br', 'button', 'caption', 'center', 'cite', 'code', 'col',
    'colgroup', 'dd', 'del', 'dfn', 'dir', 'div', 'dl', 'dt', 'em', 'fieldset',
    'font', 'form', 'h1', 'h2', 'h3', 'h4', 'h5', 'h6', 'hr', 'i', 'img',
    'input', 'ins', 'kbd', 'label', 'legend', 'li', 'map', 'menu', 'ol',
    'optgroup', 'option', 'p', 'pre', 'q', 's', 'samp', 'select', 'small',
    'span', 'strike', 'strong', 'sub', 'sup', 'table', 'tbody', 'td',
    'textarea', 'tfoot', 'th', 'thead', 'tr', 'tt', 'u', 'ul', 'var'))
# whitelist of attribute names permitted on allowed tags (note: no
# event-handler on* attributes and no script-capable values)
allowed_attributes = set(('abbr', 'accept', 'accept-charset', 'accesskey',
    'action','align', 'alt', 'axis', 'border', 'cellpadding', 'cellspacing',
    'char','charoff', 'charset', 'checked', 'cite', 'class', 'clear', 'cols',
    'colspan', 'color', 'compact', 'coords', 'datetime', 'dir', 'disabled',
    'enctype', 'for', 'frame', 'headers', 'height', 'href', 'hreflang',
    'hspace', 'id', 'ismap', 'label', 'lang', 'longdesc', 'maxlength',
    'media', 'method', 'multiple', 'name', 'nohref', 'noshade', 'nowrap',
    'prompt', 'readonly', 'rel', 'rev', 'rows', 'rowspan', 'rules',
    'scope', 'selected', 'shape', 'size', 'span', 'src', 'start',
    'summary', 'tabindex', 'target', 'title', 'type', 'usemap', 'valign',
    'value', 'vspace', 'width'))
class HTMLSanitizer(object):
    """Remove unwanted tags and attributes from an lxml Element and its
    children.

    The tag/attribute whitelists default to the module-level
    ``allowed_tags`` / ``allowed_attributes`` sets (after Mark Pilgrim's
    HTML-sanitization lists) and can be tuned per instance with the
    allow*/deny* methods.
    """
    def __init__(self, tags=allowed_tags, attributes=allowed_attributes):
        # Copy into fresh sets so per-instance changes never mutate the
        # shared module-level defaults.
        self.allowedAttributes = set(attributes)
        self.allowedTags = set(tags)
    def allowAttribute(self, attr):
        """Allow *attr* (stored lower-cased)."""
        self.allowedAttributes.add(attr.lower())
    def allowTag(self, tag):
        """Allow *tag* (stored lower-cased)."""
        self.allowedTags.add(tag.lower())
    def denyAttribute(self, attr):
        """Disallow *attr*; raises KeyError if it was not allowed."""
        self.allowedAttributes.remove(attr)
    def denyTag(self, tag):
        """Disallow *tag*; raises KeyError if it was not allowed."""
        self.allowedTags.remove(tag)
    def sanitize(self, element, bad='remove'):
        """Return *element* and its subelements with unwanted tags and
        attributes removed.

        ``bad`` controls what happens to a disallowed element:
        'remove' drops it (its tail text is returned so the caller can
        splice it back in), 'span' renames it to a <span>, and
        'comment' replaces it with an XML comment noting what was
        sanitized.  Returns the (possibly replaced) element, a bare
        string (tail text of a removed element), or None.
        """
        # Strip disallowed attributes first; lxml's keys() returns a
        # list, so deleting from element.attrib while iterating is safe.
        for attr in element.keys():
            if not attr.lower() in self.allowedAttributes:
                del element.attrib[attr]
        if not element.tag.lower() in self.allowedTags:
            if bad == 'remove':
                logger = logging.getLogger()
                logger.log(logging.DEBUG,'element removed (%s)' % tostring(element))
                # Preserve any tail text so surrounding prose survives.
                if element.tail:
                    return element.tail
                return None
            elif bad == 'span':
                element.tag = 'span'
                # nasty hack, but apparently needs to happen: at this
                # point element.text can contain the raw markup to the
                # end of the snippet, so truncate at the first '<'.
                # NOTE(review): assumes element.text is not None here --
                # confirm behavior for empty disallowed elements.
                element.text = element.text[:element.text.find('<')]
            elif bad == 'comment':
                t = Comment()
                # same hack as above. apparently element.text at this
                # point can have the text to the end of the snippet.
                t.text = "sanitized: [%s] %s" % (element.tag, element.text[:element.text.find('<')])
                t.tail = element.tail
                return t
        # Recurse into children; a child may come back as a replacement
        # node, as bare tail text (a string), or as None (drop it).
        for elt in element:
            s = self.sanitize(elt, bad=bad)
            if s is not None:
                # ``basestring`` marks this as Python 2 code; under
                # Python 3 this would need ``str`` instead.
                if isinstance(s, basestring):
                    # The child was removed but had tail text: splice
                    # that text onto the previous sibling's tail (or the
                    # parent's text if it was the first child).
                    prev = elt.getprevious()
                    if prev is not None:
                        if prev.tail:
                            prev.tail = prev.tail + s
                        else:
                            prev.tail = s
                    else:
                        if element.text:
                            element.text = element.text + s
                        else:
                            element.text = s
                    element.remove(elt)
                else:
                    element.replace(elt, s)
            else:
                element.remove(elt)
        return element
    def sanitizeString(self, aString, bad="remove"):
        """Sanitize an HTML snippet given as a string; returns unicode.

        The snippet is wrapped in a <div> for parsing and falls back to
        lxml's tag-soup HTML parser if it is not well-formed XML.
        """
        t = u'<div>%s</div>' % aString
        try:
            parsed = fromstring(t)
        except XMLSyntaxError:
            logger = logging.getLogger()
            # bug fix: the original interpolated an undefined name ``s``
            # here, which raised NameError on the tag-soup path.
            logger.log(logging.DEBUG, "Snippet (%s) parsed as tag soup." % aString)
            # let's parse this as tag soup!
            parsed = HTML(t)
            parsed = parsed.xpath('//div')[0]
        parsed = self.sanitize(parsed, bad=bad)
        # Strip the wrapper: '<div>' is 5 chars, '</div>' is 6.
        return tounicode(parsed)[5:-6]
    def extractText(self, element, bad="remove"):
        """Return the whitespace-normalized text content of *element*
        after sanitizing it."""
        t = self.sanitize(element, bad=bad)
        accum = []
        for k in t.xpath('//*'):
            text = k.text
            tail = k.tail
            if text:
                # collapse runs of whitespace to single spaces
                text = text.split()
                text = ' '.join(text)
                accum.append(text)
            if tail:
                tail = tail.split()
                tail = ' '.join(tail)
                accum.append(tail)
        return u' '.join(accum)
# Ziffers for Python 3.10+
This repo hosts an experimental parser that brings the [Ziffers](https://github.com/amiika/ziffers) numbered musical notation to Python 3.10+. The library uses the Lark LALR(1) parser with an EBNF grammar to parse the notation.
## Supported environments
Ziffers python supports following live coding and computer-aided composition environments:
* [Sardine](https://github.com/Bubobubobubobubo/sardine)
* [Music21](https://github.com/cuthbertLab/music21)
# Status:
**Supported:**
```
Pitches: -2 -1 0 1 2
Chords: 0 024 2 246
Note lengths: w 0 h 1 q 2 e 3 s 4
Subdivision: [1 2 [3 4]]
Decimal durations: 0.25 0 1 <0.333>2 3
Octaves: ^ 0 ^ 1 _ 2 _ 3
Escaped octave: <2> 1 <1>1<-2>3
Named chords: i i i i
Randoms: % ? % ? % ?
Random between: (-3,6)
Repeat: [: 1 (2,6) 3 :4]
Repeat cycles: [: <q e> (1,4) <(2 3) (3 (1,7))> :]
Lists: h 1 q(0 1 2 3) 2
List cycles: (: <q e> (1,4) <(2 3) (3 (1,7))> :)
Loop cycles: <0 <1 <2 <3 <4 5>>>>>
Basic operations: (1 2 (3 4)+2)*2 ((1 2 3)+(0 9 13))-2 ((3 4 10)*(2 9 3))%7
Product operations: (0 1 2 3)+(1 4 2 3) (0 1 2)-(0 2 1)+2
Euclid cycles: (q1)<6,7>(q4 (e3 e4) q2) (q1)<6,7>(q4 q3 q2)
Transformations: (0 1 2)<r> (0 1 2)<i>(-2 1)
List assignation: A=(0 (1,6) 3) B=(3 ? 2) B A B B A
Random repeat: (: 1 (2,6) 3 :4)
```
**New features:**
```
Shorthand for random repeat: (2 5):3 [2 5 1]:4 (1,6):6
```
**Partial support:**
```
Escape/eval: {10 11} {3+1*2} // {1.2 2.43} NOT SUPPORTED YET.
Roman chords: i ii iii i^maj i^7
```
**TBD:**
```
Random selections: [q 1 2, q 3 e 4 6]
Conditionals: 1 {%<0.5?3} 3 4 (: 1 2 {%<0.2?3:2} :3)
Functions: (0 1 2 3){x%3==0?x-2:x+2}
Polynomials: (-10..10){(x**3)*(x+1)%12}
Modal interchange (a-g): iiia ig ivf^7
``` | ziffers | /ziffers-0.0.1.tar.gz/ziffers-0.0.1/README.md | README.md |
import math
import matplotlib.pyplot as plt
from .Generaldistribution import Distribution
class Gaussian(Distribution):
    """ Gaussian distribution class for calculating and
    visualizing a Gaussian distribution.

    Attributes:
        mean (float) representing the mean value of the distribution
        stdev (float) representing the standard deviation of the distribution
        data_list (list of floats) a list of floats extracted from the data file
    """
    def __init__(self, mu=0, sigma=1):
        Distribution.__init__(self, mu, sigma)

    def calculate_mean(self):
        """Function to calculate the mean of the data set.

        Args:
            None

        Returns:
            float: mean of the data set
        """
        avg = 1.0 * sum(self.data) / len(self.data)
        self.mean = avg
        return self.mean

    def calculate_stdev(self, sample=True):
        """Function to calculate the standard deviation of the data set.

        Args:
            sample (bool): whether the data represents a sample or population

        Returns:
            float: standard deviation of the data set
        """
        if sample:
            # Bessel's correction for a sample standard deviation
            n = len(self.data) - 1
        else:
            n = len(self.data)
        mean = self.calculate_mean()
        sigma = 0
        for d in self.data:
            sigma += (d - mean) ** 2
        sigma = math.sqrt(sigma / n)
        self.stdev = sigma
        return self.stdev

    def plot_histogram(self):
        """Function to output a histogram of the instance variable data using
        matplotlib pyplot library.

        Args:
            None

        Returns:
            None
        """
        plt.hist(self.data)
        plt.title('Histogram of Data')
        plt.xlabel('data')
        plt.ylabel('count')

    def pdf(self, x):
        """Probability density function calculator for the gaussian distribution.

        Args:
            x (float): point for calculating the probability density function

        Returns:
            float: probability density function output
        """
        return (1.0 / (self.stdev * math.sqrt(2*math.pi))) * math.exp(-0.5*((x - self.mean) / self.stdev) ** 2)

    def plot_histogram_pdf(self, n_spaces = 50):
        """Function to plot the normalized histogram of the data and a plot of the
        probability density function along the same range.

        Args:
            n_spaces (int): number of data points

        Returns:
            list: x values for the pdf plot
            list: y values for the pdf plot
        """
        mu = self.mean
        sigma = self.stdev
        min_range = min(self.data)
        max_range = max(self.data)

        # calculates the interval between x values
        interval = 1.0 * (max_range - min_range) / n_spaces

        x = []
        y = []

        # calculate the x values to visualize
        for i in range(n_spaces):
            tmp = min_range + interval*i
            x.append(tmp)
            y.append(self.pdf(tmp))

        # make the plots
        fig, axes = plt.subplots(2, sharex=True)
        fig.subplots_adjust(hspace=.5)
        axes[0].hist(self.data, density=True)
        axes[0].set_title('Normed Histogram of Data')
        axes[0].set_ylabel('Density')
        axes[1].plot(x, y)
        axes[1].set_title('Normal Distribution for \n Sample Mean and Sample Standard Deviation')
        # bug fix: the original relabeled axes[0] a second time here,
        # leaving the pdf subplot (axes[1]) without a y-axis label
        axes[1].set_ylabel('Density')
        plt.show()
        return x, y

    def __add__(self, other):
        """Function to add together two Gaussian distributions.

        Args:
            other (Gaussian): Gaussian instance

        Returns:
            Gaussian: Gaussian distribution
        """
        result = Gaussian()
        result.mean = self.mean + other.mean
        # variances add, so standard deviations combine in quadrature
        result.stdev = math.sqrt(self.stdev ** 2 + other.stdev ** 2)
        return result

    def __repr__(self):
        """Function to output the characteristics of the Gaussian instance.

        Args:
            None

        Returns:
            string: characteristics of the Gaussian
        """
        return "mean {}, standard deviation {}".format(self.mean, self.stdev)
import math
import matplotlib.pyplot as plt
from .Generaldistribution import Distribution
class Binomial(Distribution):
    """ Binomial distribution class for calculating and
    visualizing a Binomial distribution.

    Attributes:
        mean (float) representing the mean value of the distribution
        stdev (float) representing the standard deviation of the distribution
        data_list (list of floats) a list of floats to be extracted from the data file
        p (float) representing the probability of an event occurring
        n (int) number of trials
    """
    def __init__(self, prob=.5, size=20):
        self.n = size
        self.p = prob
        Distribution.__init__(self, self.calculate_mean(), self.calculate_stdev())

    def calculate_mean(self):
        """Function to calculate the mean from p and n.

        Args:
            None

        Returns:
            float: mean of the data set
        """
        self.mean = self.p * self.n
        return self.mean

    def calculate_stdev(self):
        """Function to calculate the standard deviation from p and n.

        Args:
            None

        Returns:
            float: standard deviation of the data set
        """
        self.stdev = math.sqrt(self.n * self.p * (1 - self.p))
        return self.stdev

    def replace_stats_with_data(self):
        """Function to calculate p and n from the data set.

        Args:
            None

        Returns:
            float: the p value
            float: the n value
        """
        self.n = len(self.data)
        self.p = 1.0 * sum(self.data) / len(self.data)
        self.mean = self.calculate_mean()
        self.stdev = self.calculate_stdev()
        # bug fix: the docstring promises (p, n) but the original
        # returned None
        return self.p, self.n

    def plot_bar(self):
        """Function to output a bar chart of the instance variable data using
        matplotlib pyplot library.

        Args:
            None

        Returns:
            None
        """
        plt.bar(x = ['0', '1'], height = [(1 - self.p) * self.n, self.p * self.n])
        plt.title('Bar Chart of Data')
        plt.xlabel('outcome')
        plt.ylabel('count')

    def pdf(self, k):
        """Probability density function calculator for the binomial distribution.

        Args:
            k (int): number of successes for calculating the probability
                density function

        Returns:
            float: probability density function output
        """
        # binomial coefficient C(n, k) ...
        a = math.factorial(self.n) / (math.factorial(k) * (math.factorial(self.n - k)))
        # ... times the probability of k successes and n-k failures
        b = (self.p ** k) * (1 - self.p) ** (self.n - k)
        return a * b

    def plot_bar_pdf(self):
        """Function to plot the pdf of the binomial distribution.

        Args:
            None

        Returns:
            list: x values for the pdf plot
            list: y values for the pdf plot
        """
        x = []
        y = []

        # calculate the x values to visualize
        for i in range(self.n + 1):
            x.append(i)
            y.append(self.pdf(i))

        # make the plots
        plt.bar(x, y)
        plt.title('Distribution of Outcomes')
        plt.ylabel('Probability')
        plt.xlabel('Outcome')
        plt.show()
        return x, y

    def __add__(self, other):
        """Function to add together two Binomial distributions with equal p.

        Args:
            other (Binomial): Binomial instance

        Returns:
            Binomial: Binomial distribution

        Raises:
            AssertionError: if the two distributions have different p values
        """
        # an assert raises AssertionError directly; the original wrapped
        # it in a redundant try/except that simply re-raised
        assert self.p == other.p, 'p values are not equal'
        result = Binomial()
        result.n = self.n + other.n
        result.p = self.p
        result.calculate_mean()
        result.calculate_stdev()
        return result

    def __repr__(self):
        """Function to output the characteristics of the Binomial instance.

        Args:
            None

        Returns:
            string: characteristics of the Binomial
        """
        return "mean {}, standard deviation {}, p {}, n {}".\
        format(self.mean, self.stdev, self.p, self.n)
# zigate

[](https://pypi.python.org/pypi/zigate)
[](http://isitmaintained.com/project/doudz/zigate "Average time to resolve an issue")
[](http://isitmaintained.com/project/doudz/zigate "Percentage of issues still open")
[](https://paypal.me/sebramage)
[](https://en.cryptobadges.io/donate/3DHvPBWyf5Vsp485tGFu7WfYSd6r5qgZdH)
Python library for [ZiGate](http://zigate.fr/).
This library manages communication between Python and the ZiGate key; both USB and WiFi keys are supported.
ZiGate is a universal gateway compatible with many Zigbee devices (like Xiaomi, Philips Hue, Ikea, etc.).
## Getting Started
### Installation
To install simply do:
```bash
pip3 install zigate
```
Or if you've planned to use mqtt
```bash
pip3 install zigate[mqtt]
```
### Usage
```python
# if you want logging
import logging
logging.basicConfig()
logging.root.setLevel(logging.DEBUG)
import zigate
z = zigate.connect(port=None) # Leave None to auto-discover the port
print(z.get_version())
OrderedDict([('major', 1), ('installer', '30c'), ('rssi', 0), ('version', '3.0c')])
print(z.get_version_text())
3.0c
# refresh devices list
z.get_devices_list()
# start inclusion mode
>>> z.permit_join()
>>> z.is_permitting_join()
True
# list devices
>>> z.devices
[Device 677c , Device b8ce , Device 92a7 , Device 59ef ]
>>> z.devices[0].addr
'677c'
# get all discovered endpoints
>>> z.devices[0].endpoints
{1: {
'clusters': {0: Cluster 0 General: Basic,
1026: Cluster 1026 Measurement: Temperature,
1027: Cluster 1027 Measurement: Atmospheric Pressure,
1029: Cluster 1029 Measurement: Humidity},
}}
# get well known attributes
>>> for attribute in z.devices[0].properties:
print(attribute)
{'data': 'lumi.weather', 'name': 'type', 'attribute': 5, 'value': 'lumi.weather'}
{'data': '0121c70b0421a8010521090006240100000000642932096521851c662bd87c01000a210000', 'name': 'battery', 'value': 3.015, 'unit': 'V', 'attribute': 65281}
{'data': -1983, 'name': 'temperature', 'value': -19.83, 'unit': '°C', 'attribute': 0}
{'data': 9779, 'name': 'pressure2', 'value': 977.9, 'unit': 'mb', 'attribute': 16}
{'data': 977, 'name': 'pressure', 'value': 977, 'unit': 'mb', 'attribute': 0}
{'data': 4484, 'name': 'humidity', 'value': 44.84, 'unit': '%', 'attribute': 0}
# get specific property
>>> z.devices[0].get_property('temperature')
{'data': -1983,
'name': 'temperature',
'value': -19.83,
'unit': '°C',
'attribute': 0}
# call action on devices
z.action_onoff('b8ce', 1, zigate.ON)
# or from devices
z.devices[1].action_onoff(zigate.ON)
# OTA process
# Load image and send headers to ZiGate
z.ota_load_image('path/to/ota/image_file.ota')
# Tell client that image is available
z.ota_image_notify('addr')
# It will take client usually couple seconds to query headers
# from server. Upgrade process start automatically if correct
# headers are loaded to ZiGate. If you have logging level debug
# enabled you will get automatically progress updates.
# Manually check ota status - logging level INFO
z.get_ota_status()
# Whole upgrade process time depends on device and ota image size
# Upgrading ikea bulb took ~15 minutes
# Upgrading ikea remote took ~45 minutes
```
### OTA Update
Some devices (like Ikea Tradfri) could be updated.
For Ikea, you could download available OTA files using the following command :
`python3 zigate.ikea_ota_download`
```python
# OTA process
# Load image and send headers to ZiGate
z.ota_load_image('path/to/ota/image_file.ota')
# Tell client that image is available
z.ota_image_notify('addr')
# It will take client usually couple seconds to query headers
# from server. Upgrade process start automatically if correct
# headers are loaded to ZiGate. If you have logging level debug
# enabled you will get automatically progress updates.
# Manually check ota status - logging level INFO
z.get_ota_status()
# Whole upgrade process time depends on device and ota image size
# Upgrading ikea bulb took ~15 minutes
# Upgrading ikea remote took ~45 minutes
```
### Callback
We use pydispatcher for callback
```python
from zigate import dispatcher
def my_callback(sender, signal, **kwargs):
print(sender) # zigate instance
print(signal) # one of EVENT
print(kwargs) # contains device and/or attribute changes, etc
dispatcher.connect(my_callback, zigate.ZIGATE_ATTRIBUTE_UPDATED)
z = zigate.connect()
# or
z = zigate.connect(port='/dev/ttyUSB0')
# to catch any events
dispatcher.connect(my_callback, dispatcher.Any)
```
event can be :
```python
zigate.ZIGATE_DEVICE_ADDED
zigate.ZIGATE_DEVICE_UPDATED
zigate.ZIGATE_DEVICE_REMOVED
zigate.ZIGATE_DEVICE_ADDRESS_CHANGED
zigate.ZIGATE_ATTRIBUTE_ADDED
zigate.ZIGATE_ATTRIBUTE_UPDATED
zigate.ZIGATE_DEVICE_NEED_DISCOVERY
```
kwargs depends of the event type:
* for `zigate.ZIGATE_DEVICE_ADDED` kwargs contains device.
* for `zigate.ZIGATE_DEVICE_UPDATED` kwargs contains device.
* for `zigate.ZIGATE_DEVICE_REMOVED` kwargs contains addr (the device short address).
* for `zigate.ZIGATE_DEVICE_ADDRESS_CHANGED` kwargs contains old_addr and new_addr (used when re-pairing an already known device).
* for `zigate.ZIGATE_ATTRIBUTE_ADDED` kwargs contains device and discovered attribute.
* for `zigate.ZIGATE_ATTRIBUTE_UPDATED` kwargs contains device and updated attribute.
* for `zigate.ZIGATE_DEVICE_NEED_DISCOVERY` kwargs contains device.
## Wifi ZiGate
WiFi ZiGate is also supported:
```python
import zigate
z = zigate.connect(host='192.168.0.10')
# or if you want to set the port
z = zigate.connect(host='192.168.0.10:1234')
```
## PiZiGate
PiZiGate (ZiGate module for raspberry pi) is also supported:
```python
import zigate
z = zigate.connect(gpio=True)
# or if you want to set the port
z = zigate.connect(port='/dev/serial0', gpio=True)
```
To be able to use the PiZiGate on Rpi3 you need to disable the bluetooth module.
To disable bluetooth:
* Add `dtoverlay=pi3-disable-bt` in `/boot/config.txt`
* Remove `console=serial0,115200` from `/boot/cmdline.txt`
* Disable hciuart `sudo systemctl disable hciuart`
* Add user to gpio group, example with pi user `sudo usermod -aG gpio pi`
* and `reboot`
Alternatively, you could assign the mini UART to Bluetooth or to the ZiGate, but be aware that there are performance issues.
## MQTT Broker
This requires paho-mqtt. It could be install as a dependency with `pip3 install zigate[mqtt]`
```bash
python3 -m zigate.mqtt_broker --device auto --mqtt_host localhost:1883
```
Add `--mqtt_username` and `--mqtt_password` as arguments and allow them to be used to establish connection to the MQTT broker.
The broker publishes the following topics: zigate/device_changed/[addr]
Payload example :
```python
'zigate/device_changed/522a'
{"addr": "522a", "endpoints": [{"device": 0, "clusters": [{"cluster": 1026, "attributes": [{"value": 22.27, "data": 2227, "unit": "\u00b0C", "name": "temperature", "attribute": 0}]}, {"cluster": 1027, "attributes": [{"value": 977, "data": 977, "unit": "mb", "name": "pressure", "attribute": 0}, {"value": 977.7, "data": 9777, "unit": "mb", "name": "pressure2", "attribute": 16}, {"data": -1, "attribute": 20}]}, {"cluster": 1029, "attributes": [{"value": 35.03, "data": 3503, "unit": "%", "name": "humidity", "attribute": 0}]}], "profile": 0, "out_clusters": [], "in_clusters": [], "endpoint": 1}], "info": {"power_source": 0, "ieee": "158d0002271c25", "addr": "522a", "id": 2, "rssi": 255, "last_seen": "2018-02-21 09:41:27"}}
```
zigate/device_removed.
Payload example :
```python
{"addr": "522a"}
```
zigate/attribute_changed/[addr]/[endpoint]/[cluster]/[attribute] payload is changed attribute.
Payload example :
```python
'zigate/attribute_changed/522a/01/0403/0010'
{"cluster": 1027, "value": 978.5, "data": 9785, "attribute": 16, "unit": "mb", "endpoint": 1, "addr": "522a", "name": "pressure2"}
```
You can send command to zigate using the topic zigate/command payload should be:
```python
{"function": "function_name", "args": ["optional","args","list"]}
# example to start permit join
payload = '{"function": "permit_join"}'
client.publish('zigate/command', payload)
```
The broker will publish the result using the topic "zigate/command/result".
Payload example :
```python
{"function": "permit_join", "result": 0}
```
All the zigate functions can be call:
```python
# turn on endpoint 1
payload = '{"function": "action_onoff", "args": ["522a", 1, 1]}'
client.publish('zigate/command', payload)
# turn off endpoint 1
payload = '{"function": "action_onoff", "args": ["522a", 1, 0]}'
client.publish('zigate/command', payload)
```
## Flasher
Python tool to flash your Zigate (Jennic JN5168)
Thanks to Sander Hoentjen (tjikkun) we now have a flasher !
[Original repo](https://github.com/tjikkun/zigate-flasher)
### Flasher Usage
```bash
usage: python3 -m zigate.flasher [-h] -p {/dev/ttyUSB0} [-w WRITE] [-s SAVE] [-u] [-d] [--gpio] [--din]
optional arguments:
-h, --help show this help message and exit
-p {/dev/ttyUSB0}, --serialport {/dev/ttyUSB0}
Serial port, e.g. /dev/ttyUSB0
-w WRITE, --write WRITE
Firmware bin to flash onto the chip
-s SAVE, --save SAVE File to save the currently loaded firmware to
-u, --upgrade Download and flash the lastest available firmware
-d, --debug Set log level to DEBUG
--gpio Configure GPIO for PiZiGate flash
--din Configure USB for ZiGate DIN flash
```
## Command Line Interface
```bash
usage: python3 -m zigate [-h] [--port PORT] [--host HOST] [--path PATH] [--gpio]
[--channel CHANNEL] [--admin_panel]
optional arguments:
-h, --help show this help message and exit
--port PORT ZiGate usb port
--host HOST Wifi ZiGate host:port
--path PATH ZiGate state file path
--gpio Enable PiZigate
--channel CHANNEL Zigbee channel
--admin_panel Enable Admin panel
```
## How to contribute
If you are looking to make a contribution to this project we suggest that you follow the steps in these guides:
* <https://github.com/firstcontributions/first-contributions/blob/master/README.md>
* <https://github.com/firstcontributions/first-contributions/blob/master/github-desktop-tutorial.md>
Some developers might also be interested in receiving donations in the form of hardware such as Zigbee modules or devices, and even if such donations are most often donated with no strings attached it could in many cases help the developers motivation and indirect improve the development of this project.
## Comment contribuer
Si vous souhaitez apporter une contribution à ce projet, nous vous suggérons de suivre les étapes décrites dans ces guides:
* <https://github.com/firstcontributions/first-contributions/blob/master/README.md>
* <https://github.com/firstcontributions/first-contributions/blob/master/github-desktop-tutorial.md>
Certains développeurs pourraient également être intéressés par des dons sous forme de matériel, tels que des modules ou des dispositifs Zigbee, et même si ces dons sont le plus souvent donnés sans aucune condition, cela pourrait dans de nombreux cas motiver les développeurs et indirectement améliorer le développement de ce projet.
| zigate | /zigate-0.40.12.tar.gz/zigate-0.40.12/README.md | README.md |
# MIT License
Copyright (c) 2021 Franck Nijhof
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
| zigbee2mqtt | /zigbee2mqtt-0.0.1.tar.gz/zigbee2mqtt-0.0.1/LICENSE.md | LICENSE.md |
# Python: Client for Zigbee2MQTT
[![GitHub Release][releases-shield]][releases]
[![Python Versions][python-versions-shield]][pypi]
![Project Stage][project-stage-shield]
![Project Maintenance][maintenance-shield]
[![License][license-shield]](LICENSE.md)
[![Build Status][build-shield]][build]
[![Code Coverage][codecov-shield]][codecov]
[![Code Quality][code-quality-shield]][code-quality]
[![Sponsor Frenck via GitHub Sponsors][github-sponsors-shield]][github-sponsors]
[![Support Frenck on Patreon][patreon-shield]][patreon]
Python client for Zigbee2MQTT.
## About
Work-in-progress Python client library for Zigbee2MQTT.
## Installation
```bash
pip install zigbee2mqtt
```
## Usage
```python
# TODO: Need example
```
## Changelog & Releases
This repository keeps a change log using [GitHub's releases][releases]
functionality. The format of the log is based on
[Keep a Changelog][keepchangelog].
Releases are based on [Semantic Versioning][semver], and use the format
of `MAJOR.MINOR.PATCH`. In a nutshell, the version will be incremented
based on the following:
- `MAJOR`: Incompatible or major changes.
- `MINOR`: Backwards-compatible new features and enhancements.
- `PATCH`: Backwards-compatible bugfixes and package updates.
## Contributing
This is an active open-source project. We are always open to people who want to
use the code or contribute to it.
We've set up a separate document for our
[contribution guidelines](CONTRIBUTING.md).
Thank you for being involved! :heart_eyes:
## Setting up development environment
This Python project is fully managed using the [Poetry][poetry] dependency
manager. But also relies on the use of NodeJS for certain checks during
development.
You need at least:
- Python 3.8+
- [Poetry][poetry-install]
- NodeJS 14+ (including NPM)
To install all packages, including all development requirements:
```bash
npm install
poetry install
```
As this repository uses the [pre-commit][pre-commit] framework, all changes
are linted and tested with each commit. You can run all checks and tests
manually, using the following command:
```bash
poetry run pre-commit run --all-files
```
To run just the Python tests:
```bash
poetry run pytest
```
## Authors & contributors
The original setup of this repository is by [Franck Nijhof][frenck].
For a full list of all authors and contributors,
check [the contributor's page][contributors].
## License
MIT License
Copyright (c) 2021 Franck Nijhof
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
[build-shield]: https://github.com/frenck/python-zigbee2mqtt/actions/workflows/tests.yaml/badge.svg
[build]: https://github.com/frenck/python-zigbee2mqtt/actions/workflows/tests.yaml
[code-quality-shield]: https://img.shields.io/lgtm/grade/python/g/frenck/python-zigbee2mqtt.svg?logo=lgtm&logoWidth=18
[code-quality]: https://lgtm.com/projects/g/frenck/python-zigbee2mqtt/context:python
[codecov-shield]: https://codecov.io/gh/frenck/python-zigbee2mqtt/branch/master/graph/badge.svg
[codecov]: https://codecov.io/gh/frenck/python-zigbee2mqtt
[contributors]: https://github.com/frenck/python-zigbee2mqtt/graphs/contributors
[frenck]: https://github.com/frenck
[github-sponsors-shield]: https://frenck.dev/wp-content/uploads/2019/12/github_sponsor.png
[github-sponsors]: https://github.com/sponsors/frenck
[keepchangelog]: http://keepachangelog.com/en/1.0.0/
[license-shield]: https://img.shields.io/github/license/frenck/python-zigbee2mqtt.svg
[maintenance-shield]: https://img.shields.io/maintenance/yes/2021.svg
[patreon-shield]: https://frenck.dev/wp-content/uploads/2019/12/patreon.png
[patreon]: https://www.patreon.com/frenck
[poetry-install]: https://python-poetry.org/docs/#installation
[poetry]: https://python-poetry.org
[pre-commit]: https://pre-commit.com/
[project-stage-shield]: https://img.shields.io/badge/Project%20Stage-Concept-red.svg
[pypi]: https://pypi.org/project/zigbee2mqtt/
[python-versions-shield]: https://img.shields.io/pypi/pyversions/zigbee2mqtt
[releases-shield]: https://img.shields.io/github/release/frenck/python-zigbee2mqtt.svg
[releases]: https://github.com/frenck/python-zigbee2mqtt/releases
[semver]: http://semver.org/spec/v2.0.0.html
| zigbee2mqtt | /zigbee2mqtt-0.0.1.tar.gz/zigbee2mqtt-0.0.1/README.md | README.md |
# Apache License
Version 2.0, January 2004
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
## 1. Definitions.
"License" shall mean the terms and conditions for use, reproduction, and distribution as defined by Sections 1
through 9 of this document.
"Licensor" shall mean the copyright owner or entity authorized by the copyright owner that is granting the
License.
"Legal Entity" shall mean the union of the acting entity and all other entities that control, are controlled
by, or are under common control with that entity. For the purposes of this definition, "control" means
(i) the power, direct or indirect, to cause the direction or management of such entity, whether by contract
or otherwise, or (ii) ownership of fifty percent (50%) or more of the outstanding shares, or (iii) beneficial
ownership of such entity.
"You" (or "Your") shall mean an individual or Legal Entity exercising permissions granted by this License.
"Source" form shall mean the preferred form for making modifications, including but not limited to software
source code, documentation source, and configuration files.
"Object" form shall mean any form resulting from mechanical transformation or translation of a Source form,
including but not limited to compiled object code, generated documentation, and conversions to other media
types.
"Work" shall mean the work of authorship, whether in Source or Object form, made available under the License,
as indicated by a copyright notice that is included in or attached to the work (an example is provided in the
Appendix below).
"Derivative Works" shall mean any work, whether in Source or Object form, that is based on (or derived from)
the Work and for which the editorial revisions, annotations, elaborations, or other modifications represent,
as a whole, an original work of authorship. For the purposes of this License, Derivative Works shall not
include works that remain separable from, or merely link (or bind by name) to the interfaces of, the Work
and Derivative Works thereof.
"Contribution" shall mean any work of authorship, including the original version of the Work and any
modifications or additions to that Work or Derivative Works thereof, that is intentionally submitted to
Licensor for inclusion in the Work by the copyright owner or by an individual or Legal Entity authorized to
submit on behalf of the copyright owner. For the purposes of this definition, "submitted" means any form of
electronic, verbal, or written communication sent to the Licensor or its representatives, including but not
limited to communication on electronic mailing lists, source code control systems, and issue tracking systems
that are managed by, or on behalf of, the Licensor for the purpose of discussing and improving the Work, but
excluding communication that is conspicuously marked or otherwise designated in writing by the copyright
owner as "Not a Contribution."
"Contributor" shall mean Licensor and any individual or Legal Entity on behalf of whom a Contribution has been
received by Licensor and subsequently incorporated within the Work.
## 2. Grant of Copyright License.
Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable copyright license to reproduce, prepare
Derivative Works of, publicly display, publicly perform, sublicense, and distribute the Work and such
Derivative Works in Source or Object form.
## 3. Grant of Patent License.
Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable (except as stated in this section) patent
license to make, have made, use, offer to sell, sell, import, and otherwise transfer the Work, where such
license applies only to those patent claims licensable by such Contributor that are necessarily infringed by
their Contribution(s) alone or by combination of their Contribution(s) with the Work to which such
Contribution(s) was submitted. If You institute patent litigation against any entity (including a cross-claim
or counterclaim in a lawsuit) alleging that the Work or a Contribution incorporated within the Work
constitutes direct or contributory patent infringement, then any patent licenses granted to You under this
License for that Work shall terminate as of the date such litigation is filed.
## 4. Redistribution.
You may reproduce and distribute copies of the Work or Derivative Works thereof in any medium, with or without
modifications, and in Source or Object form, provided that You meet the following conditions:
1. You must give any other recipients of the Work or Derivative Works a copy of this License; and
2. You must cause any modified files to carry prominent notices stating that You changed the files; and
3. You must retain, in the Source form of any Derivative Works that You distribute, all copyright, patent,
trademark, and attribution notices from the Source form of the Work, excluding those notices that do
not pertain to any part of the Derivative Works; and
4. If the Work includes a "NOTICE" text file as part of its distribution, then any Derivative Works that
You distribute must include a readable copy of the attribution notices contained within such NOTICE
file, excluding those notices that do not pertain to any part of the Derivative Works, in at least one
of the following places: within a NOTICE text file distributed as part of the Derivative Works; within
the Source form or documentation, if provided along with the Derivative Works; or, within a display
generated by the Derivative Works, if and wherever such third-party notices normally appear. The
contents of the NOTICE file are for informational purposes only and do not modify the License. You may
add Your own attribution notices within Derivative Works that You distribute, alongside or as an
addendum to the NOTICE text from the Work, provided that such additional attribution notices cannot be
construed as modifying the License.
You may add Your own copyright statement to Your modifications and may provide additional or different license
terms and conditions for use, reproduction, or distribution of Your modifications, or for any such Derivative
Works as a whole, provided Your use, reproduction, and distribution of the Work otherwise complies with the
conditions stated in this License.
## 5. Submission of Contributions.
Unless You explicitly state otherwise, any Contribution intentionally submitted for inclusion in the Work by
You to the Licensor shall be under the terms and conditions of this License, without any additional terms or
conditions. Notwithstanding the above, nothing herein shall supersede or modify the terms of any separate
license agreement you may have executed with Licensor regarding such Contributions.
## 6. Trademarks.
This License does not grant permission to use the trade names, trademarks, service marks, or product names of
the Licensor, except as required for reasonable and customary use in describing the origin of the Work and
reproducing the content of the NOTICE file.
## 7. Disclaimer of Warranty.
Unless required by applicable law or agreed to in writing, Licensor provides the Work (and each Contributor
provides its Contributions) on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
or implied, including, without limitation, any warranties or conditions of TITLE, NON-INFRINGEMENT,
MERCHANTABILITY, or FITNESS FOR A PARTICULAR PURPOSE. You are solely responsible for determining the
appropriateness of using or redistributing the Work and assume any risks associated with Your exercise of
permissions under this License.
## 8. Limitation of Liability.
In no event and under no legal theory, whether in tort (including negligence), contract, or otherwise, unless
required by applicable law (such as deliberate and grossly negligent acts) or agreed to in writing, shall any
Contributor be liable to You for damages, including any direct, indirect, special, incidental, or consequential
damages of any character arising as a result of this License or out of the use or inability to use the Work
(including but not limited to damages for loss of goodwill, work stoppage, computer failure or malfunction, or
any and all other commercial damages or losses), even if such Contributor has been advised of the possibility
of such damages.
## 9. Accepting Warranty or Additional Liability.
While redistributing the Work or Derivative Works thereof, You may choose to offer, and charge a fee for,
acceptance of support, warranty, indemnity, or other liability obligations and/or rights consistent with this
License. However, in accepting such obligations, You may act only on Your own behalf and on Your sole
responsibility, not on behalf of any other Contributor, and only if You agree to indemnify, defend, and hold
each Contributor harmless for any liability incurred by, or claims asserted against, such Contributor by reason
of your accepting any such warranty or additional liability.
END OF TERMS AND CONDITIONS
| ziggeo | /ziggeo-2.29.tar.gz/ziggeo-2.29/LICENSE.md | LICENSE.md |
class ZiggeoVideos:
    """Thin wrapper around the Ziggeo ``/v1/videos`` REST endpoints.

    Every method forwards the request to the parent application's HTTP
    connection (``connect`` for the API host, ``cdn_connect`` for raw file
    downloads) and returns the parsed server response unchanged.
    """

    def __init__(self, application):
        # Parent Ziggeo application object providing the HTTP connections.
        self.__application = application

    def index(self, data=None):
        """List videos; ``data`` holds optional filters (limit, skip, tags, ...)."""
        return self.__application.connect.getJSON('/v1/videos/', data)

    def count(self, data=None):
        """Return the number of videos matching the optional filters."""
        return self.__application.connect.getJSON('/v1/videos/count', data)

    def get(self, token_or_key):
        """Fetch a single video by token or key."""
        return self.__application.connect.getJSON(f'/v1/videos/{token_or_key}')

    def get_bulk(self, data=None):
        """Fetch multiple videos at once (``tokens_or_keys`` in ``data``)."""
        return self.__application.connect.postJSON('/v1/videos/get_bulk', data)

    def stats_bulk(self, data=None):
        """Fetch stats for multiple videos at once."""
        return self.__application.connect.postJSON('/v1/videos/stats_bulk', data)

    def download_video(self, token_or_key):
        """Download the raw video file through the CDN connection."""
        return self.__application.cdn_connect.get(f'/v1/videos/{token_or_key}/video')

    def download_image(self, token_or_key):
        """Download the video's snapshot image through the CDN connection."""
        return self.__application.cdn_connect.get(f'/v1/videos/{token_or_key}/image')

    def get_stats(self, token_or_key):
        """Fetch the stats of a single video."""
        return self.__application.connect.getJSON(f'/v1/videos/{token_or_key}/stats')

    def push_to_service(self, token_or_key, data=None):
        """Push the video to a configured push service."""
        return self.__application.connect.postJSON(f'/v1/videos/{token_or_key}/push', data)

    def apply_effect(self, token_or_key, data=None):
        """Apply an effect profile to the video."""
        return self.__application.connect.postJSON(f'/v1/videos/{token_or_key}/effect', data)

    def apply_meta(self, token_or_key, data=None):
        """Apply a meta profile to the video."""
        return self.__application.connect.postJSON(f'/v1/videos/{token_or_key}/metaprofile', data)

    def update(self, token_or_key, data=None):
        """Update a single video's attributes."""
        return self.__application.connect.postJSON(f'/v1/videos/{token_or_key}', data)

    def update_bulk(self, data=None):
        """Update multiple videos at once."""
        return self.__application.connect.postJSON('/v1/videos/update_bulk', data)

    def delete(self, token_or_key):
        """Delete a single video by token or key."""
        return self.__application.connect.delete(f'/v1/videos/{token_or_key}')

    def create(self, data=None, file=None):
        """Create a new video, optionally uploading a local ``file``.

        With a file, the upload goes through a signed upload URL and the
        default stream is confirmed afterwards; without one, a bare video
        object is created.
        """
        if file is None:
            return self.__application.connect.postJSON('/v1/videos/', data, file)
        result = self.__application.connect.postUploadJSON(
            '/v1/videos-upload-url', 'video', data, file, 'video_type')
        confirm = f"/v1/videos/{result['token']}/streams/{result['default_stream']['token']}/confirm-video"
        result['default_stream'] = self.__application.connect.postJSON(confirm)
        return result

    def analytics(self, token_or_key, data=None):
        """Run an analytics query for the video (query name and date range in ``data``)."""
        return self.__application.connect.postJSON(f'/v1/videos/{token_or_key}/analytics', data)
class ZiggeoEffectProfileProcess:
    """Thin wrapper around the ``/v1/effects/<token>/process`` REST endpoints.

    Manages the individual processes (filters, watermarks) attached to an
    effect profile. Every call is delegated to the parent application's
    HTTP connection and its parsed response is returned unchanged.
    """

    def __init__(self, application):
        # Parent Ziggeo application object providing the HTTP connection.
        self.__application = application

    def index(self, effect_token_or_key, data=None):
        """List the processes belonging to an effect profile."""
        return self.__application.connect.getJSON(
            f'/v1/effects/{effect_token_or_key}/process', data)

    def get(self, effect_token_or_key, token_or_key):
        """Fetch a single process of an effect profile."""
        return self.__application.connect.getJSON(
            f'/v1/effects/{effect_token_or_key}/process/{token_or_key}')

    def delete(self, effect_token_or_key, token_or_key):
        """Delete a single process of an effect profile."""
        return self.__application.connect.delete(
            f'/v1/effects/{effect_token_or_key}/process/{token_or_key}')

    def create_filter_process(self, effect_token_or_key, data=None):
        """Create a filter process on the effect profile."""
        return self.__application.connect.postJSON(
            f'/v1/effects/{effect_token_or_key}/process/filter', data)

    def create_watermark_process(self, effect_token_or_key, data=None, file=None):
        """Create a watermark process, optionally uploading the watermark image.

        With a file, the image goes through a signed upload URL and the new
        process is confirmed afterwards; without one, the process is created
        directly.
        """
        if file is None:
            return self.__application.connect.postJSON(
                f'/v1/effects/{effect_token_or_key}/process/watermark', data, file)
        result = self.__application.connect.postUploadJSON(
            f'/v1/effects/{effect_token_or_key}/process/watermark-upload-url',
            'effect_process', data, file)
        return self.__application.connect.postJSON(
            f"/v1/effects/{effect_token_or_key}/process/{result['token']}/confirm-watermark")

    def edit_watermark_process(self, effect_token_or_key, token_or_key, data=None, file=None):
        """Update a watermark process, optionally uploading a replacement image."""
        if file is None:
            return self.__application.connect.postJSON(
                f'/v1/effects/{effect_token_or_key}/process/watermark/{token_or_key}', data, file)
        # Upload the new image first, then confirm it on the existing process.
        self.__application.connect.postUploadJSON(
            f'/v1/effects/{effect_token_or_key}/process/{token_or_key}/watermark-upload-url',
            'effect_process', data, file)
        return self.__application.connect.postJSON(
            f'/v1/effects/{effect_token_or_key}/process/{token_or_key}/confirm-watermark')
# Ziggeo's Python Server SDK
latest version: **2.29**
## Index
1. [Why Ziggeo's Python Server Side SDK?](#why-us)
2. [Prerequisites](#prerequisites)
1. [Download](#download)
2. [How to use](#how-to-use)
3. [Installation](#install)
3. [Client-Side Integration](#codes-client-side)
4. [Server-Side Integration](#codes-server-side)
1. [Init](#codes-init)
2. [Available Methods](#codes-methods)
3. [Methods for Videos](#method-videos)
1. [Videos Index](#method-videos-index)
2. [Videos Count](#method-videos-count)
3. [Videos Get](#method-videos-get)
4. [Videos Get Bulk](#method-videos-get-bulk)
5. [Videos Stats Bulk](#method-videos-stats-bulk)
6. [Videos Download Video](#method-videos-download-video)
7. [Videos Download Image](#method-videos-download-image)
8. [Videos Get Stats](#method-videos-get-stats)
9. [Videos Push To Service](#method-videos-push-to-service)
10. [Videos Apply Effect](#method-videos-apply-effect)
11. [Videos Apply Meta](#method-videos-apply-meta)
12. [Videos Update](#method-videos-update)
13. [Videos Update Bulk](#method-videos-update-bulk)
14. [Videos Delete](#method-videos-delete)
15. [Videos Create](#method-videos-create)
16. [Videos Analytics](#method-videos-analytics)
4. [Methods for Streams](#method-streams)
1. [Streams Index](#method-streams-index)
2. [Streams Get](#method-streams-get)
3. [Streams Download Video](#method-streams-download-video)
4. [Streams Download Image](#method-streams-download-image)
5. [Streams Push To Service](#method-streams-push-to-service)
6. [Streams Delete](#method-streams-delete)
7. [Streams Create](#method-streams-create)
8. [Streams Attach Image](#method-streams-attach-image)
9. [Streams Attach Video](#method-streams-attach-video)
10. [Streams Attach Subtitle](#method-streams-attach-subtitle)
5. [Methods for Audios](#method-audios)
1. [Audios Index](#method-audios-index)
2. [Audios Count](#method-audios-count)
3. [Audios Get](#method-audios-get)
4. [Audios Get Bulk](#method-audios-get-bulk)
5. [Audios Download Audio](#method-audios-download-audio)
6. [Audios Update](#method-audios-update)
7. [Audios Update Bulk](#method-audios-update-bulk)
8. [Audios Delete](#method-audios-delete)
9. [Audios Create](#method-audios-create)
6. [Methods for Audio_streams](#method-audio-streams)
1. [Audio_streams Index](#method-audio-streams-index)
2. [Audio_streams Get](#method-audio-streams-get)
3. [Audio_streams Download Audio](#method-audio-streams-download-audio)
4. [Audio_streams Delete](#method-audio-streams-delete)
5. [Audio_streams Create](#method-audio-streams-create)
7. [Methods for Authtokens](#method-authtokens)
1. [Authtokens Get](#method-authtokens-get)
2. [Authtokens Update](#method-authtokens-update)
3. [Authtokens Delete](#method-authtokens-delete)
4. [Authtokens Create](#method-authtokens-create)
8. [Methods for Application](#method-application)
1. [Application Get](#method-application-get)
2. [Application Update](#method-application-update)
3. [Application Get Stats](#method-application-get-stats)
9. [Methods for Effect Profiles](#method-effect-profiles)
1. [Effect Profiles Create](#method-effect-profiles-create)
2. [Effect Profiles Index](#method-effect-profiles-index)
3. [Effect Profiles Get](#method-effect-profiles-get)
4. [Effect Profiles Delete](#method-effect-profiles-delete)
5. [Effect Profiles Update](#method-effect-profiles-update)
10. [Methods for Effect Profile Process](#method-effect-profile-process)
1. [Effect Profile Process Index](#method-effect-profile-process-index)
2. [Effect Profile Process Get](#method-effect-profile-process-get)
3. [Effect Profile Process Delete](#method-effect-profile-process-delete)
4. [Effect Profile Process Create Filter Process](#method-effect-profile-process-create-filter-process)
5. [Effect Profile Process Create Watermark Process](#method-effect-profile-process-create-watermark-process)
6. [Effect Profile Process Edit Watermark Process](#method-effect-profile-process-edit-watermark-process)
11. [Methods for Meta Profiles](#method-meta-profiles)
1. [Meta Profiles Create](#method-meta-profiles-create)
2. [Meta Profiles Index](#method-meta-profiles-index)
3. [Meta Profiles Get](#method-meta-profiles-get)
4. [Meta Profiles Delete](#method-meta-profiles-delete)
12. [Methods for Meta Profile Process](#method-meta-profile-process)
1. [Meta Profile Process Index](#method-meta-profile-process-index)
2. [Meta Profile Process Get](#method-meta-profile-process-get)
3. [Meta Profile Process Delete](#method-meta-profile-process-delete)
4. [Meta Profile Process Create Video Analysis Process](#method-meta-profile-process-create-video-analysis-process)
5. [Meta Profile Process Create Audio Transcription Process](#method-meta-profile-process-create-audio-transcription-process)
6. [Meta Profile Process Create Nsfw Process](#method-meta-profile-process-create-nsfw-process)
7. [Meta Profile Process Create Profanity Process](#method-meta-profile-process-create-profanity-process)
13. [Methods for Webhooks](#method-webhooks)
1. [Webhooks Create](#method-webhooks-create)
2. [Webhooks Confirm](#method-webhooks-confirm)
3. [Webhooks Delete](#method-webhooks-delete)
14. [Methods for Analytics](#method-analytics)
1. [Analytics Get](#method-analytics-get)
5. [License](#license)
## Why Ziggeo's Python Server Side SDK? <a name="why-us"></a>
[Ziggeo](https://ziggeo.com) is a powerful, whitelabel video SaaS with a goal to help people with their video revolution. And what better way to do it than with an award-winning multimedia API.
This server side SDK is designed to help you ease the communication with Ziggeo API. In that it allows you to privately communicate between your server and our server through requests of what you want to happen.
It offers you pre-built functionality to call and manipulate and there are demos in /demos/ directory for you to check out and use as starting point.
### Who it is for?
1. Do you have a system that requires calls to be made which should not be seen on client side?
2. Want to have an easier time handling the media as it comes to your server?
3. Want something that is simple and easy to use?
4. You need some powerful features high end video services provide?
If any of the above is "Yes" then you are in the right place as this SDK is for you!
## Prerequisites <a name="prerequisites"></a>
### Download <a name="download"></a>
You will want to either download the SDK zip file or to pull it in as git repository into your own project.
To clone it you would go into your project folder and then
```shell
git clone https://github.com/Ziggeo/ZiggeoPythonSdk
```
### How to use <a name="how-to-use"></a>
To start using the Python SDK you would need to initialize the Ziggeo class with application token, private token and possibly encryption token. The token and keys can be found within the Ziggeo application once you log into your account, under Overview page.
### Installation<a name="install"></a>
pip install ziggeo
## Client-Side Integration<a name="codes-client-side"></a>
For the client-side integration, you need to add these assets to your html file:
```html
<link rel="stylesheet" href="//assets-cdn.ziggeo.com/v2-stable/ziggeo.css" />
<script src="//assets-cdn.ziggeo.com/v2-stable/ziggeo.js"></script>
```
Then, you need to specify your api token:
```html
<script>
var ziggeoApplication = new ZiggeoApi.V2.Application({
token: "APPLICATION_TOKEN",
webrtc_streaming_if_necessary: true,
webrtc_on_mobile: true
});
</script>
```
You can specify other global options, [see here](https://ziggeo.com/docs).
To fire up a recorder on your page, add:
```html
<ziggeorecorder></ziggeorecorder>
```
To embed a player for an existing video, add:
```html
<ziggeoplayer ziggeo-video='video-token'></ziggeoplayer>
```
For the full documentation, please visit [ziggeo.com](https://ziggeo.com/docs).
## Server-Side Integration<a name="codes-server-side"></a>
### Initialize Ziggeo class in your code<a name="codes-init"></a>
You can integrate the Server SDK as follows:
```python
ziggeo = Ziggeo("*token*", "*private_key*", "*encryption_key*")
```
Config is optional and if not specified (recommended), the Config file will be used instead.
### Available Methods<a name="codes-methods"></a>
Currently available methods are branched off within different categories:
1. Videos
2. Streams
3. Audios
4. Audio_streams
5. Authtokens
6. Application
7. Effect Profiles
8. Effect Profile Process
9. Meta Profiles
10. Meta Profile Process
11. Webhooks
12. Analytics
Each of these sections has its own actions, which are explained below.
### Videos<a name="method-videos"></a>
The videos resource allows you to access all single videos. Each video may contain more than one stream.
#### Index<a name="method-videos-index"></a>
Query an array of videos (will return at most 50 videos by default). Newest videos come first.
```python
ziggeo.videos().index(arguments = None)
```
Arguments
- limit: *Limit the number of returned videos. Can be set up to 100.*
- skip: *Skip the first [n] entries.*
- reverse: *Reverse the order in which videos are returned.*
- states: *Filter videos by state*
- tags: *Filter the search result to certain tags, encoded as a comma-separated string*
#### Count<a name="method-videos-count"></a>
Get the video count for the application.
```python
ziggeo.videos().count(arguments = None)
```
Arguments
- states: *Filter videos by state*
- tags: *Filter the search result to certain tags, encoded as a comma-separated string*
#### Get<a name="method-videos-get"></a>
Get a single video by token or key.
```python
ziggeo.videos().get(token_or_key)
```
#### Get Bulk<a name="method-videos-get-bulk"></a>
Get multiple videos by tokens or keys.
```python
ziggeo.videos().get_bulk(arguments = None)
```
Arguments
- tokens_or_keys: *Comma-separated list with the desired videos tokens or keys (Limit: 100 tokens or keys).*
#### Stats Bulk<a name="method-videos-stats-bulk"></a>
Get stats for multiple videos by tokens or keys.
```python
ziggeo.videos().stats_bulk(arguments = None)
```
Arguments
- tokens_or_keys: *Comma-separated list with the desired videos tokens or keys (Limit: 100 tokens or keys).*
- summarize: *Boolean. Set it to TRUE to get the stats summarized. Set it to FALSE to get the stats for each video in a separate array. Default: TRUE.*
#### Download Video<a name="method-videos-download-video"></a>
Download the video data file
```python
ziggeo.videos().download_video(token_or_key)
```
#### Download Image<a name="method-videos-download-image"></a>
Download the image data file
```python
ziggeo.videos().download_image(token_or_key)
```
#### Get Stats<a name="method-videos-get-stats"></a>
Get the video's stats
```python
ziggeo.videos().get_stats(token_or_key)
```
#### Push To Service<a name="method-videos-push-to-service"></a>
Push a video to a provided push service.
```python
ziggeo.videos().push_to_service(token_or_key, arguments = None)
```
Arguments
- pushservicetoken: *Push Service's token (from the Push Services configured for the app)*
#### Apply Effect<a name="method-videos-apply-effect"></a>
Apply an effect profile to a video.
```python
ziggeo.videos().apply_effect(token_or_key, arguments = None)
```
Arguments
- effectprofiletoken: *Effect Profile token (from the Effect Profiles configured for the app)*
#### Apply Meta<a name="method-videos-apply-meta"></a>
Apply a meta profile to a video.
```python
ziggeo.videos().apply_meta(token_or_key, arguments = None)
```
Arguments
- metaprofiletoken: *Meta Profile token (from the Meta Profiles configured for the app)*
#### Update<a name="method-videos-update"></a>
Update single video by token or key.
```python
ziggeo.videos().update(token_or_key, arguments = None)
```
Arguments
- min_duration: *Minimal duration of video*
- max_duration: *Maximal duration of video*
- tags: *Video Tags*
- key: *Unique (optional) name of video*
- volatile: *Automatically remove this video if it remains empty*
- expiration_days: *After how many days will this video be deleted*
- expire_on: *On which date will this video be deleted. String in ISO 8601 format: YYYY-MM-DD*
#### Update Bulk<a name="method-videos-update-bulk"></a>
Update multiple videos by token or key.
```python
ziggeo.videos().update_bulk(arguments = None)
```
Arguments
- tokens_or_keys: *Comma-separated list with the desired videos tokens or keys (Limit: 100 tokens or keys).*
- min_duration: *Minimal duration of video*
- max_duration: *Maximal duration of video*
- tags: *Video Tags*
- volatile: *Automatically removed this video if it remains empty*
- expiration_days: *After how many days will this video be deleted*
- expire_on: *On which date will this video be deleted. String in ISO 8601 format: YYYY-MM-DD*
#### Delete<a name="method-videos-delete"></a>
Delete a single video by token or key.
```python
ziggeo.videos().delete(token_or_key)
```
#### Create<a name="method-videos-create"></a>
Create a new video.
```python
ziggeo.videos().create(arguments = None, file = None)
```
Arguments
- file: *Video file to be uploaded*
- min_duration: *Minimal duration of video*
- max_duration: *Maximal duration of video*
- tags: *Video Tags*
- key: *Unique (optional) name of video*
- volatile: *Automatically removed this video if it remains empty*
- effect_profile: *Set the effect profile that you want to have applied to your video.*
- meta_profile: *Set the meta profile that you want to have applied to your video once created.*
- video_profile: *Set the video profile that you want to have applied to your video as you create it.*
#### Analytics<a name="method-videos-analytics"></a>
Get analytics for a specific videos with the given params
```python
ziggeo.videos().analytics(token_or_key, arguments = None)
```
Arguments
- from: *A UNIX timestamp in microseconds used as the start date of the query*
- to: *A UNIX timestamp in microseconds used as the end date of the query*
- date: *A UNIX timestamp in microseconds to retrieve data from a single date. If set, it overwrites the from and to params.*
- query: *The query you want to run. It can be one of the following: device_views_by_os, device_views_by_date, total_plays_by_country, full_plays_by_country, total_plays_by_hour, full_plays_by_hour, total_plays_by_browser, full_plays_by_browser*
### Streams<a name="method-streams"></a>
The streams resource allows you to directly access all streams associated with a single video.
#### Index<a name="method-streams-index"></a>
Return all streams associated with a video
```python
ziggeo.streams().index(video_token_or_key, arguments = None)
```
Arguments
- states: *Filter streams by state*
#### Get<a name="method-streams-get"></a>
Get a single stream
```python
ziggeo.streams().get(video_token_or_key, token_or_key)
```
#### Download Video<a name="method-streams-download-video"></a>
Download the video data associated with the stream
```python
ziggeo.streams().download_video(video_token_or_key, token_or_key)
```
#### Download Image<a name="method-streams-download-image"></a>
Download the image data associated with the stream
```python
ziggeo.streams().download_image(video_token_or_key, token_or_key)
```
#### Push To Service<a name="method-streams-push-to-service"></a>
Push a stream to a provided push service.
```python
ziggeo.streams().push_to_service(video_token_or_key, token_or_key, arguments = None)
```
Arguments
- pushservicetoken: *Push Services's token (from the Push Services configured for the app)*
#### Delete<a name="method-streams-delete"></a>
Delete the stream
```python
ziggeo.streams().delete(video_token_or_key, token_or_key)
```
#### Create<a name="method-streams-create"></a>
Create a new stream
```python
ziggeo.streams().create(video_token_or_key, arguments = None, file = None)
```
Arguments
- file: *Video file to be uploaded*
#### Attach Image<a name="method-streams-attach-image"></a>
Attaches an image to a new stream. Must be attached before video, since video upload triggers the transcoding job and binds the stream
```python
ziggeo.streams().attach_image(video_token_or_key, token_or_key, arguments = None, file = None)
```
Arguments
- file: *Image file to be attached*
#### Attach Video<a name="method-streams-attach-video"></a>
Attaches a video to a new stream
```python
ziggeo.streams().attach_video(video_token_or_key, token_or_key, arguments = None, file = None)
```
Arguments
- file: *Video file to be attached*
#### Attach Subtitle<a name="method-streams-attach-subtitle"></a>
Attaches a subtitle to the stream.
```python
ziggeo.streams().attach_subtitle(video_token_or_key, token_or_key, arguments = None)
```
Arguments
- lang: *Subtitle language*
- label: *Subtitle reference*
- data: *Actual subtitle*
### Audios<a name="method-audios"></a>
The audios resource allows you to access all single audios. Each audio may contain more than one stream.
#### Index<a name="method-audios-index"></a>
Query an array of audios (will return at most 50 audios by default). Newest audios come first.
```python
ziggeo.audios().index(arguments = None)
```
Arguments
- limit: *Limit the number of returned audios. Can be set up to 100.*
- skip: *Skip the first [n] entries.*
- reverse: *Reverse the order in which audios are returned.*
- states: *Filter audios by state*
- tags: *Filter the search result to certain tags, encoded as a comma-separated string*
#### Count<a name="method-audios-count"></a>
Get the audio count for the application.
```python
ziggeo.audios().count(arguments = None)
```
Arguments
- states: *Filter audios by state*
- tags: *Filter the search result to certain tags, encoded as a comma-separated string*
#### Get<a name="method-audios-get"></a>
Get a single audio by token or key.
```python
ziggeo.audios().get(token_or_key)
```
#### Get Bulk<a name="method-audios-get-bulk"></a>
Get multiple audios by tokens or keys.
```python
ziggeo.audios().get_bulk(arguments = None)
```
Arguments
- tokens_or_keys: *Comma-separated list with the desired audios tokens or keys (Limit: 100 tokens or keys).*
#### Download Audio<a name="method-audios-download-audio"></a>
Download the audio data file
```python
ziggeo.audios().download_audio(token_or_key)
```
#### Update<a name="method-audios-update"></a>
Update single audio by token or key.
```python
ziggeo.audios().update(token_or_key, arguments = None)
```
Arguments
- min_duration: *Minimal duration of audio*
- max_duration: *Maximal duration of audio*
- tags: *Audio Tags*
- key: *Unique (optional) name of audio*
- volatile: *Automatically removed this audio if it remains empty*
- expiration_days: *After how many days will this audio be deleted*
- expire_on: *On which date will this audio be deleted. String in ISO 8601 format: YYYY-MM-DD*
#### Update Bulk<a name="method-audios-update-bulk"></a>
Update multiple audios by token or key.
```python
ziggeo.audios().update_bulk(arguments = None)
```
Arguments
- tokens_or_keys: *Comma-separated list with the desired audios tokens or keys (Limit: 100 tokens or keys).*
- min_duration: *Minimal duration of audio*
- max_duration: *Maximal duration of audio*
- tags: *Audio Tags*
- volatile: *Automatically removed this audio if it remains empty*
- expiration_days: *After how many days will this audio be deleted*
- expire_on: *On which date will this audio be deleted. String in ISO 8601 format: YYYY-MM-DD*
#### Delete<a name="method-audios-delete"></a>
Delete a single audio by token or key.
```python
ziggeo.audios().delete(token_or_key)
```
#### Create<a name="method-audios-create"></a>
Create a new audio.
```python
ziggeo.audios().create(arguments = None, file = None)
```
Arguments
- file: *Audio file to be uploaded*
- min_duration: *Minimal duration of audio*
- max_duration: *Maximal duration of audio*
- tags: *Audio Tags*
- key: *Unique (optional) name of audio*
- volatile: *Automatically removed this video if it remains empty*
### Audio_streams<a name="method-audio-streams"></a>
The streams resource allows you to directly access all streams associated with a single audio.
#### Index<a name="method-audio-streams-index"></a>
Return all streams associated with an audio
```python
ziggeo.audio_streams().index(audio_token_or_key, arguments = None)
```
Arguments
- states: *Filter streams by state*
#### Get<a name="method-audio-streams-get"></a>
Get a single stream
```python
ziggeo.audio_streams().get(audio_token_or_key, token_or_key)
```
#### Download Audio<a name="method-audio-streams-download-audio"></a>
Download the audio data associated with the stream
```python
ziggeo.audio_streams().download_audio(audio_token_or_key, token_or_key)
```
#### Delete<a name="method-audio-streams-delete"></a>
Delete the stream
```python
ziggeo.audio_streams().delete(audio_token_or_key, token_or_key)
```
#### Create<a name="method-audio-streams-create"></a>
Create a new stream
```python
ziggeo.audio_streams().create(audio_token_or_key, arguments = None, file = None)
```
Arguments
- file: *Audio file to be uploaded*
### Authtokens<a name="method-authtokens"></a>
The auth token resource allows you to manage authorization settings for video objects.
#### Get<a name="method-authtokens-get"></a>
Get a single auth token by token.
```python
ziggeo.authtokens().get(token)
```
#### Update<a name="method-authtokens-update"></a>
Update single auth token by token.
```python
ziggeo.authtokens().update(token_or_key, arguments = None)
```
Arguments
- volatile: *Will this object automatically be deleted if it remains empty?*
- hidden: *If hidden, the token cannot be used directly.*
- expiration_date: *Expiration date for the auth token (Unix epoch time format)*
- usage_expiration_time: *Expiration time per session (seconds)*
- session_limit: *Maximal number of sessions*
- grants: *Permissions this token grants*
#### Delete<a name="method-authtokens-delete"></a>
Delete a single auth token by token.
```python
ziggeo.authtokens().delete(token_or_key)
```
#### Create<a name="method-authtokens-create"></a>
Create a new auth token.
```python
ziggeo.authtokens().create(arguments = None)
```
Arguments
- volatile: *Will this object automatically be deleted if it remains empty?*
- hidden: *If hidden, the token cannot be used directly.*
- expiration_date: *Expiration date for the auth token (Unix epoch time format)*
- usage_expiration_time: *Expiration time per session (seconds)*
- session_limit: *Maximal number of sessions*
- grants: *Permissions this token grants*
### Application<a name="method-application"></a>
The application token resource allows you to manage your application.
#### Get<a name="method-application-get"></a>
Read application.
```python
ziggeo.application().get()
```
#### Update<a name="method-application-update"></a>
Update application.
```python
ziggeo.application().update(arguments = None)
```
Arguments
- volatile: *Will this object automatically be deleted if it remains empty?*
- name: *Name of the application*
- auth_token_required_for_create: *Require auth token for creating videos*
- auth_token_required_for_update: *Require auth token for updating videos*
- auth_token_required_for_read: *Require auth token for reading videos*
- auth_token_required_for_destroy: *Require auth token for deleting videos*
- client_can_index_videos: *Client is allowed to perform the index operation*
- client_cannot_access_unaccepted_videos: *Client cannot view unaccepted videos*
- enable_video_subpages: *Enable hosted video pages*
#### Get Stats<a name="method-application-get-stats"></a>
Read application stats
```python
ziggeo.application().get_stats(arguments = None)
```
Arguments
- period: *Optional. Can be 'year' or 'month'.*
### Effect Profiles<a name="method-effect-profiles"></a>
The effect profiles resource allows you to access and create effect profiles for your app. Each effect profile may contain one process or more.
#### Create<a name="method-effect-profiles-create"></a>
Create a new effect profile.
```python
ziggeo.effectProfiles().create(arguments = None)
```
Arguments
- key: *Effect profile key.*
- title: *Effect profile title.*
- default_effect: *Boolean. If TRUE, sets an effect profile as default. If FALSE, removes the default status for the given effect*
#### Index<a name="method-effect-profiles-index"></a>
Get list of effect profiles.
```python
ziggeo.effectProfiles().index(arguments = None)
```
Arguments
- limit: *Limit the number of returned effect profiles. Can be set up to 100.*
- skip: *Skip the first [n] entries.*
- reverse: *Reverse the order in which effect profiles are returned.*
#### Get<a name="method-effect-profiles-get"></a>
Get a single effect profile
```python
ziggeo.effectProfiles().get(token_or_key)
```
#### Delete<a name="method-effect-profiles-delete"></a>
Delete the effect profile
```python
ziggeo.effectProfiles().delete(token_or_key)
```
#### Update<a name="method-effect-profiles-update"></a>
Updates an effect profile.
```python
ziggeo.effectProfiles().update(token_or_key, arguments = None)
```
Arguments
- default_effect: *Boolean. If TRUE, sets an effect profile as default. If FALSE, removes the default status for the given effect*
### Effect Profile Process<a name="method-effect-profile-process"></a>
The process resource allows you to directly access all process associated with a single effect profile.
#### Index<a name="method-effect-profile-process-index"></a>
Return all processes associated with an effect profile
```python
ziggeo.effectProfileProcess().index(effect_token_or_key, arguments = None)
```
Arguments
- states: *Filter streams by state*
#### Get<a name="method-effect-profile-process-get"></a>
Get a single process
```python
ziggeo.effectProfileProcess().get(effect_token_or_key, token_or_key)
```
#### Delete<a name="method-effect-profile-process-delete"></a>
Delete the process
```python
ziggeo.effectProfileProcess().delete(effect_token_or_key, token_or_key)
```
#### Create Filter Process<a name="method-effect-profile-process-create-filter-process"></a>
Create a new filter effect process
```python
ziggeo.effectProfileProcess().create_filter_process(effect_token_or_key, arguments = None)
```
Arguments
- effect: *Effect to be applied in the process*
#### Create Watermark Process<a name="method-effect-profile-process-create-watermark-process"></a>
Attaches an image to a new stream
```python
ziggeo.effectProfileProcess().create_watermark_process(effect_token_or_key, arguments = None, file = None)
```
Arguments
- file: *Image file to be attached*
- vertical_position: *Specify the vertical position of your watermark (a value between 0.0 and 1.0)*
- horizontal_position: *Specify the horizontal position of your watermark (a value between 0.0 and 1.0)*
- video_scale: *Specify the image scale of your watermark (a value between 0.0 and 1.0)*
#### Edit Watermark Process<a name="method-effect-profile-process-edit-watermark-process"></a>
Edits an existing watermark process.
```python
ziggeo.effectProfileProcess().edit_watermark_process(effect_token_or_key, token_or_key, arguments = None, file = None)
```
Arguments
- file: *Image file to be attached*
- vertical_position: *Specify the vertical position of your watermark (a value between 0.0 and 1.0)*
- horizontal_position: *Specify the horizontal position of your watermark (a value between 0.0 and 1.0)*
- video_scale: *Specify the image scale of your watermark (a value between 0.0 and 1.0)*
### Meta Profiles<a name="method-meta-profiles"></a>
The meta profiles resource allows you to access and create meta profiles for your app. Each meta profile may contain one process or more.
#### Create<a name="method-meta-profiles-create"></a>
Create a new meta profile.
```python
ziggeo.metaProfiles().create(arguments = None)
```
Arguments
- key: *Meta profile key.*
- title: *Meta profile title.*
#### Index<a name="method-meta-profiles-index"></a>
Get list of meta profiles.
```python
ziggeo.metaProfiles().index(arguments = None)
```
Arguments
- limit: *Limit the number of returned meta profiles. Can be set up to 100.*
- skip: *Skip the first [n] entries.*
- reverse: *Reverse the order in which meta profiles are returned.*
#### Get<a name="method-meta-profiles-get"></a>
Get a single meta profile
```python
ziggeo.metaProfiles().get(token_or_key)
```
#### Delete<a name="method-meta-profiles-delete"></a>
Delete the meta profile
```python
ziggeo.metaProfiles().delete(token_or_key)
```
### Meta Profile Process<a name="method-meta-profile-process"></a>
The process resource allows you to directly access all process associated with a single meta profile.
#### Index<a name="method-meta-profile-process-index"></a>
Return all processes associated with a meta profile
```python
ziggeo.metaProfileProcess().index(meta_token_or_key)
```
#### Get<a name="method-meta-profile-process-get"></a>
Get a single process
```python
ziggeo.metaProfileProcess().get(meta_token_or_key, token_or_key)
```
#### Delete<a name="method-meta-profile-process-delete"></a>
Delete the process
```python
ziggeo.metaProfileProcess().delete(meta_token_or_key, token_or_key)
```
#### Create Video Analysis Process<a name="method-meta-profile-process-create-video-analysis-process"></a>
Create a new video analysis meta process
```python
ziggeo.metaProfileProcess().create_video_analysis_process(meta_token_or_key)
```
#### Create Audio Transcription Process<a name="method-meta-profile-process-create-audio-transcription-process"></a>
Create a new audio transcription meta process
```python
ziggeo.metaProfileProcess().create_audio_transcription_process(meta_token_or_key)
```
#### Create Nsfw Process<a name="method-meta-profile-process-create-nsfw-process"></a>
Create a new nsfw filter meta process
```python
ziggeo.metaProfileProcess().create_nsfw_process(meta_token_or_key, arguments = None)
```
Arguments
- nsfw_action: *One of the following three: approve, reject, nothing.*
#### Create Profanity Process<a name="method-meta-profile-process-create-profanity-process"></a>
Create a new profanity filter meta process
```python
ziggeo.metaProfileProcess().create_profanity_process(meta_token_or_key, arguments = None)
```
Arguments
- profanity_action: *One of the following three: approve, reject, nothing.*
### Webhooks<a name="method-webhooks"></a>
The webhooks resource allows you to create or delete webhooks related to a given application.
#### Create<a name="method-webhooks-create"></a>
Create a new webhook for the given url to catch the given events.
```python
ziggeo.webhooks().create(arguments = None)
```
Arguments
- target_url: *The url that will catch the events*
- encoding: *Data encoding to be used by the webhook to send the events.*
- events: *Comma-separated list of the events the webhook will catch. They must be valid webhook type events.*
#### Confirm<a name="method-webhooks-confirm"></a>
Confirm a webhook using its ID and the corresponding validation code.
```python
ziggeo.webhooks().confirm(arguments = None)
```
Arguments
- webhook_id: *Webhook ID that's returned in the creation call.*
- validation_code: *Validation code that is sent to the webhook when created.*
#### Delete<a name="method-webhooks-delete"></a>
Delete a webhook using its URL.
```python
ziggeo.webhooks().delete(arguments = None)
```
Arguments
- target_url: *The url that will catch the events*
### Analytics<a name="method-analytics"></a>
The analytics resource allows you to access the analytics for the given application
#### Get<a name="method-analytics-get"></a>
Get analytics for the given params
```python
ziggeo.analytics().get(arguments = None)
```
Arguments
- from: *A UNIX timestamp in microseconds used as the start date of the query*
- to: *A UNIX timestamp in microseconds used as the end date of the query*
- date: *A UNIX timestamp in microseconds to retrieve data from a single date. If set, it overwrites the from and to params.*
- query: *The query you want to run. It can be one of the following: device_views_by_os, device_views_by_date, total_plays_by_country, full_plays_by_country, total_plays_by_hour, full_plays_by_hour, total_plays_by_browser, full_plays_by_browser*
## License <a name="license"></a>
Copyright (c) 2013-2022 Ziggeo
Apache 2.0 License
| ziggeo | /ziggeo-2.29.tar.gz/ziggeo-2.29/README.md | README.md |
class ZiggeoStreams:
    """Client for the stream sub-resources of a video (/v1/videos/.../streams)."""

    def __init__(self, application):
        # The owning Ziggeo instance supplies the HTTP connections.
        self.__application = application

    def __base(self, video_token_or_key):
        # Every stream endpoint lives under its owning video's resource path.
        return '/v1/videos/{}/streams'.format(video_token_or_key)

    def index(self, video_token_or_key, data = None):
        """List all streams belonging to the given video."""
        return self.__application.connect.getJSON(self.__base(video_token_or_key), data)

    def get(self, video_token_or_key, token_or_key):
        """Fetch a single stream by token or key."""
        return self.__application.connect.getJSON('{}/{}'.format(self.__base(video_token_or_key), token_or_key))

    def download_video(self, video_token_or_key, token_or_key):
        """Download the video data of the stream (served via the CDN connection)."""
        return self.__application.cdn_connect.get('{}/{}/video'.format(self.__base(video_token_or_key), token_or_key))

    def download_image(self, video_token_or_key, token_or_key):
        """Download the image of the stream (served via the CDN connection)."""
        return self.__application.cdn_connect.get('{}/{}/image'.format(self.__base(video_token_or_key), token_or_key))

    def push_to_service(self, video_token_or_key, token_or_key, data = None):
        """Push the stream to an external service."""
        return self.__application.connect.postJSON('{}/{}/push'.format(self.__base(video_token_or_key), token_or_key), data)

    def delete(self, video_token_or_key, token_or_key):
        """Delete the stream."""
        return self.__application.connect.delete('{}/{}'.format(self.__base(video_token_or_key), token_or_key))

    def create(self, video_token_or_key, data = None, file = None):
        """Create a new stream; a local file goes through the two-step signed upload."""
        if file is None:
            return self.__application.connect.postJSON(self.__base(video_token_or_key), data, file)
        # Step 1: request a signed upload URL and push the file to it.
        uploaded = self.__application.connect.postUploadJSON('{}-upload-url'.format(self.__base(video_token_or_key)), 'stream', data, file, 'video_type')
        # Step 2: confirm the upload so the server starts processing it.
        return self.__application.connect.postJSON('{}/{}/confirm-video'.format(self.__base(video_token_or_key), uploaded['token']))

    def attach_image(self, video_token_or_key, token_or_key, data = None, file = None):
        """Attach an image to the stream, via signed upload when a file is given."""
        prefix = '{}/{}'.format(self.__base(video_token_or_key), token_or_key)
        if file is None:
            return self.__application.connect.postJSON(prefix + '/image', data, file)
        self.__application.connect.postUploadJSON(prefix + '/image-upload-url', 'stream', data, file)
        return self.__application.connect.postJSON(prefix + '/confirm-image')

    def attach_video(self, video_token_or_key, token_or_key, data = None, file = None):
        """Attach video data to the stream, via signed upload when a file is given."""
        prefix = '{}/{}'.format(self.__base(video_token_or_key), token_or_key)
        if file is None:
            return self.__application.connect.postJSON(prefix + '/video', data, file)
        self.__application.connect.postUploadJSON(prefix + '/video-upload-url', 'stream', data, file, 'video_type')
        return self.__application.connect.postJSON(prefix + '/confirm-video')

    def attach_subtitle(self, video_token_or_key, token_or_key, data = None):
        """Attach a subtitle to the stream."""
        return self.__application.connect.postJSON('{}/{}/subtitle'.format(self.__base(video_token_or_key), token_or_key), data)
import codecs
import mimetypes
import sys
import uuid
try:
import io
except ImportError:
pass # io is requiered in python3 but not available in python2
class MultiPartForm(object):
    """Builds a multipart/form-data request body for file uploads."""

    def __init__(self):
        # A random hex boundary makes collisions with payload content unlikely.
        self.boundary = uuid.uuid4().hex
        self.content_type = 'multipart/form-data; boundary={}'.format(self.boundary)

    @classmethod
    def u(cls, s):
        """Coerce *s* to unicode text on both Python 2 and Python 3."""
        if sys.hexversion < 0x03000000 and isinstance(s, str):
            s = s.decode('utf-8')
        if sys.hexversion >= 0x03000000 and isinstance(s, bytes):
            s = s.decode('utf-8')
        return s

    def iter(self, fields, files):
        """
        fields is a sequence of (name, value) elements for regular form fields.
        files is a sequence of (name, filename, file-type) elements for data to be uploaded as files
        Yield body's chunk as bytes
        """
        encode = codecs.getencoder('utf-8')
        part_start = '--{}\r\n'.format(self.boundary)
        for name, value in fields.items():
            name = self.u(name)
            yield encode(part_start)
            yield encode(self.u('Content-Disposition: form-data; name="{}"\r\n').format(name))
            yield encode('\r\n')
            # Numeric values are sent as their decimal string representation.
            if isinstance(value, (int, float)):
                value = str(value)
            yield encode(self.u(value))
            yield encode('\r\n')
        for name, filename, fd in files:
            name = self.u(name)
            filename = self.u(filename)
            mime = mimetypes.guess_type(filename)[0] or 'application/octet-stream'
            yield encode(part_start)
            yield encode(self.u('Content-Disposition: form-data; name="{}"; filename="{}"\r\n').format(name, filename))
            yield encode('Content-Type: {}\r\n'.format(mime))
            yield encode('\r\n')
            # The file object is consumed in one read and closed afterwards.
            with fd:
                payload = fd.read()
                yield (payload, len(payload))
            yield encode('\r\n')
        yield encode('--{}--\r\n'.format(self.boundary))

    def encode(self, fields, files):
        """Return (content_type, body_bytes) for the given fields and files."""
        buffer = io.BytesIO()
        for chunk, _length in self.iter(fields, files):
            buffer.write(chunk)
        return self.content_type, buffer.getvalue()
from ZiggeoConfig import ZiggeoConfig
from ZiggeoConnect import ZiggeoConnect
from ZiggeoAuth import ZiggeoAuth
from ZiggeoVideos import ZiggeoVideos
from ZiggeoStreams import ZiggeoStreams
from ZiggeoAudios import ZiggeoAudios
from ZiggeoAudio_streams import ZiggeoAudio_streams
from ZiggeoAuthtokens import ZiggeoAuthtokens
from ZiggeoApplication import ZiggeoApplication
from ZiggeoEffectProfiles import ZiggeoEffectProfiles
from ZiggeoEffectProfileProcess import ZiggeoEffectProfileProcess
from ZiggeoMetaProfiles import ZiggeoMetaProfiles
from ZiggeoMetaProfileProcess import ZiggeoMetaProfileProcess
from ZiggeoWebhooks import ZiggeoWebhooks
from ZiggeoAnalytics import ZiggeoAnalytics
class Ziggeo:
    """Entry point to the Ziggeo server-side SDK.

    Holds the application credentials, resolves region-specific endpoint
    URLs from the application token's prefix, and exposes lazily
    constructed client objects, one per API resource (videos, streams,
    audios, auth tokens, ...).
    """

    def __init__(self, token, private_key, encryption_key = None):
        """
        :param token: application token; its prefix selects the API region
        :param private_key: private key used for HTTP authentication
        :param encryption_key: optional key for encrypted applications
        """
        self.token = token
        self.private_key = private_key
        self.encryption_key = encryption_key
        self.config = ZiggeoConfig()
        # Each connection gets a region-specific base URL whenever the
        # token prefix matches one of the configured region prefixes.
        self.connect = ZiggeoConnect(self, self.__regional_url(self.config.server_api_url, self.config.regions))
        self.api_connect = ZiggeoConnect(self, self.__regional_url(self.config.api_url, self.config.api_regions))
        self.cdn_connect = ZiggeoConnect(self, self.__regional_url(self.config.cdn_url, self.config.cdn_regions))
        # Resource clients, created on first use by the accessors below.
        self.__auth = None
        self.__videos = None
        self.__streams = None
        self.__audios = None
        self.__audio_streams = None
        self.__authtokens = None
        self.__application = None
        self.__effectProfiles = None
        self.__effectProfileProcess = None
        self.__metaProfiles = None
        self.__metaProfileProcess = None
        self.__webhooks = None
        self.__analytics = None

    def __regional_url(self, default_url, regions):
        # Scan every configured prefix and let the last matching one win
        # (preserves the original selection semantics); fall back to the
        # global default URL when no prefix matches.
        url = default_url
        for prefix, regional_url in regions.items():
            if self.token.startswith(prefix):
                url = regional_url
        return url

    def auth(self):
        """Return the (lazily created) authentication client."""
        if self.__auth is None:
            self.__auth = ZiggeoAuth(self)
        return self.__auth

    def videos(self):
        """Return the (lazily created) videos client."""
        if self.__videos is None:
            self.__videos = ZiggeoVideos(self)
        return self.__videos

    def streams(self):
        """Return the (lazily created) video streams client."""
        if self.__streams is None:
            self.__streams = ZiggeoStreams(self)
        return self.__streams

    def audios(self):
        """Return the (lazily created) audios client."""
        if self.__audios is None:
            self.__audios = ZiggeoAudios(self)
        return self.__audios

    def audio_streams(self):
        """Return the (lazily created) audio streams client."""
        if self.__audio_streams is None:
            self.__audio_streams = ZiggeoAudio_streams(self)
        return self.__audio_streams

    def authtokens(self):
        """Return the (lazily created) auth tokens client."""
        if self.__authtokens is None:
            self.__authtokens = ZiggeoAuthtokens(self)
        return self.__authtokens

    def application(self):
        """Return the (lazily created) application client."""
        if self.__application is None:
            self.__application = ZiggeoApplication(self)
        return self.__application

    def effectProfiles(self):
        """Return the (lazily created) effect profiles client."""
        if self.__effectProfiles is None:
            self.__effectProfiles = ZiggeoEffectProfiles(self)
        return self.__effectProfiles

    def effectProfileProcess(self):
        """Return the (lazily created) effect profile process client."""
        if self.__effectProfileProcess is None:
            self.__effectProfileProcess = ZiggeoEffectProfileProcess(self)
        return self.__effectProfileProcess

    def metaProfiles(self):
        """Return the (lazily created) meta profiles client."""
        if self.__metaProfiles is None:
            self.__metaProfiles = ZiggeoMetaProfiles(self)
        return self.__metaProfiles

    def metaProfileProcess(self):
        """Return the (lazily created) meta profile process client."""
        if self.__metaProfileProcess is None:
            self.__metaProfileProcess = ZiggeoMetaProfileProcess(self)
        return self.__metaProfileProcess

    def webhooks(self):
        """Return the (lazily created) webhooks client."""
        if self.__webhooks is None:
            self.__webhooks = ZiggeoWebhooks(self)
        return self.__webhooks

    def analytics(self):
        """Return the (lazily created) analytics client."""
        if self.__analytics is None:
            self.__analytics = ZiggeoAnalytics(self)
        return self.__analytics
import base64, json, ntpath
try:
#For Python 3.0 and later
from urllib import request as urllib2
from urllib import parse as urllib
basestring = str
except ImportError:
#For Python 2's urllib2
import urllib2, urllib
from MultiPartForm import MultiPartForm
class ZiggeoConnect:
    """Low-level HTTP client for the Ziggeo REST API.

    Wraps urllib/urllib2 with basic authentication, simple retry logic and
    JSON helpers. One instance is bound to a single base URI (server API,
    CDN, ...) owned by a Ziggeo application object.
    """
    def __init__(self, application, baseuri):
        # application: the owning Ziggeo instance (supplies token,
        # private_key and config). baseuri: base URL that all relative
        # request paths are appended to.
        self.__application = application
        self.__baseuri = baseuri
    def request(self, method, path, data = None, file = None, timeout = None):
        """Perform an HTTP request, retrying on 5xx / sub-200 results.

        Returns the response body: raw bytes when the server advertises
        'Accept-Ranges: bytes' (binary downloads), otherwise an
        ASCII-decoded string. Non-2xx final statuses are returned as a
        JSON-formatted error string '{"code": ..., "msg": ...}'.
        """
        if timeout is None:
            timeout = self.__application.config.request_timeout
        # Retry up to config.resilience_factor attempts; any status in
        # [200, 500) is treated as final.
        # NOTE(review): if resilience_factor is 0 the loop never runs and
        # the final return raises NameError on request_result -- presumably
        # the config guarantees a value >= 1; confirm.
        for trying in range(0, self.__application.config.resilience_factor) :
            request_result = self.singleRequest(method, path, data, file, timeout)
            if (request_result.code < 500 and request_result.code >= 200):
                if request_result.code >= 300:
                    # 3xx/4xx: surface the status as a JSON error string.
                    return "{\"code\": \""+str(request_result.code)+"\", \"msg\": \""+request_result.msg+"\"}"
                try:
                    # Binary downloads advertise 'Accept-Ranges: bytes';
                    # return those undecoded.
                    accept_ranges = request_result.getheader('Accept-Ranges')
                    if (accept_ranges == 'bytes'):
                        return request_result.read()
                    else:
                        return request_result.read().decode('ascii')
                except AttributeError as e:
                    # Response object without getheader() (older urllib2
                    # responses): fall back to the raw body.
                    return request_result.read()
                break  # NOTE(review): unreachable -- every branch above returns.
        # All attempts exhausted: report the last status as a JSON error.
        return "{\"code\": \""+str(request_result.code)+"\", \"msg\": \""+request_result.msg+"\"}"
    def singleRequest(self, method, path, data, file, timeout):
        """Issue one HTTP request without retries.

        Returns the urlopen response object, or the HTTPError instance on
        error (HTTPError also exposes .code/.msg/.read(), which the caller
        relies on).
        """
        path = path.encode("ascii", "ignore")
        if (method == "GET" and data != None):
            # GET parameters travel in the query string.
            path = path.decode('ascii', 'ignore') + "?" + urllib.urlencode(data)
        if (method != "GET" and method != "POST"):
            # Tunnel DELETE/PUT/... through POST via the _method parameter.
            path = path.decode('ascii', 'ignore') + "?_method=" + method
        if not isinstance(path, basestring):
            path = path.decode("ascii", "ignore")
        if (path.split(":")[0] == 'https'): # Absolute https URL (S3 based upload): do not prepend baseuri.
            request = urllib2.Request(path)
        else:
            request = urllib2.Request(self.__baseuri + path)
        # HTTP basic auth with token:private_key.
        # NOTE(review): base64.encodebytes is Python 3 only, although the
        # module-level imports keep a Python 2 fallback -- confirm whether
        # Python 2 support is still intended.
        base64string = base64.encodebytes(('%s:%s' % (self.__application.token, self.__application.private_key)).encode()).decode().replace('\n', '')
        request.add_header("Authorization", "Basic %s" % base64string)
        if (method == "GET"):
            try:
                result = urllib2.urlopen(request, None, timeout)
                return result
            except urllib2.HTTPError as e:
                return e
        else:
            if (data == None):
                data = {}
            if (file == None):
                # Plain form-encoded POST body.
                data = urllib.urlencode(data)
                binary_data = data.encode("ascii")
                try:
                    result = urllib2.urlopen(request, binary_data, timeout)
                    return result
                except urllib2.HTTPError as e:
                    return e
            else:
                # File upload: build a multipart/form-data body.
                # NOTE(review): the file handle opened here is closed by
                # MultiPartForm.iter's 'with fd:' block.
                form_file = [('file', ntpath.basename(file), open(file, "rb"))]
                content_type, body = MultiPartForm().encode(data, form_file)
                request.add_header('Content-type', content_type)
                request.add_header('Content-length', len(body))
                try:
                    result = urllib2.urlopen(request, body, timeout)
                    return result
                except urllib2.HTTPError as e:
                    return e
    def requestJSON(self, method, path, data = None, file = None):
        """Perform a request and decode the response body as JSON."""
        return json.loads(self.request(method, path, data, file))
    def get(self, path, data = None, file = None):
        """GET returning the raw response body."""
        return self.request("GET", path, data, file)
    def getJSON(self, path, data = None, file = None):
        """GET returning the JSON-decoded response body."""
        return self.requestJSON("GET", path, data, file)
    def post(self, path, data = None, file = None):
        """POST returning the raw response body."""
        return self.request("POST", path, data, file)
    def postJSON(self, path, data = None, file = None):
        """POST returning the JSON-decoded response body."""
        return self.requestJSON("POST", path, data, file)
    def postUploadJSON(self, path, scope, data = None, file = None, type_key = None):
        """Two-step signed upload.

        Step 1: POST to *path* (with the file's extension under *type_key*)
        to obtain a signed upload URL. Step 2: POST the file to that URL.
        Returns the *scope* entry of the step-1 response.
        NOTE(review): when type_key is None the extension is stored under
        the literal key None -- presumably harmless to the API, but verify.
        """
        if (data == None):
            data={}
        data[type_key] = file.split(".")[-1]
        result = self.requestJSON("POST", path, data, None)
        self.request("POST", result['url_data']['url'], result['url_data']['fields'], file)
        return result[scope]
    def delete(self, path, data = None, file = None):
        """DELETE (tunnelled through POST) returning the raw response body."""
        return self.request("DELETE", path, data, file)
    def deleteJSON(self, path, data = None, file = None):
        """DELETE (tunnelled through POST) returning the JSON-decoded body."""
        return self.requestJSON("DELETE", path, data, file)
# Ziggie macro modelling
Ziggie is a Python package for infectious disease modelling.
The macro module facilitates compartmental modelling using difference equations,
or macro models.
## Quick start
Ziggie requires Python 3.5 or later.
First install it:
```bash
pip install ziggie
```
Here's Python code to create and run a simple SIR model
```Python
from ziggie import macro
# Simple SIR model with susceptible population of 1 million and
# one infection. Effective contact rate per day is 0.6 and the
# infection duration is ten days.
simple = {
'name': 'Simple model',
'compartments': {
'S': 1000000, # Susceptible
'I': 1, # Infectious
'R': 0 # Recovered
},
'transitions': {
# (sometimes called beta in the literature)
'S_I': 0.6, # Effective contact rate
'I_R': 0.1 # Recovery rate per day
},
}
# Make a list of outputs with the results
# which by default are printed out every 50 days and
# at the beginning and end of the simulation.
# Each entry in the results table is an updated model
# For a particular day.
results = macro.simulate([simple])
print("Final day's results")
print(results[-1])
# Flatten the final day's results into a table
table = macro.modelList_to_table(results[-1])
print("Time series table")
print(table)
# Put all the results into a CSV file
macro.series_to_csv(results, "my_csv_file.csv")
# Run model and create CSV in one step
macro.series_to_csv(macro.simulate([simple]), "my_csv_file.csv")
```
The output is:
```Python
Final day's results
[{'name': 'Simple model', 'compartments': {'S': 65525.67886409458, 'I': 1.5383929100326267e-07, 'R': 56934475.32113576}, 'transitions': {'S_I': 0.6, 'I_R': 0.1}, 'parameters': {'from': 0, 'to': 365, 'record_frequency': 50, 'reduce_infectivity': 1.0, 'asymptomatic_infectiousness': 1.0, 'treatment_infectiousness': 1.0, 'noise': 0.0, 'discrete': False, 'record_first': True, 'record_last': True, 'transition_funcs': {'S_I': <function delta_S_I at 0x7fa842cdb8b0>, 'S_E': <function delta_S_I at 0x7fa842cdb8b0>, 'S_I1': <function delta_S_I1 at 0x7fa84651e5e0>, 'S_E1': <function delta_S_I1 at 0x7fa84651e5e0>, 'B_S': <function delta_birth_X at 0x7fa84651e3a0>, 'default': <function delta_X_Y at 0x7fa846519310>}, 'before_funcs': [], 'after_funcs': []}, 'iteration': 365}]
Time series table
[['iter', 'name_0', 'S', 'I', 'R'], [0, 'Simple model', 57000000, 1, 0], [50, 'Simple model', 2200495.449318898, 28345727.9672264, 26453777.583454713], [100, 'Simple model', 66701.56917131442, 167716.11455651774, 56765583.316272154], [150, 'Simple model', 65531.91780545574, 898.2457754223102, 56933570.83641911], [200, 'Simple model', 65525.71227208052, 4.810125157065176, 56934470.477602795], [250, 'Simple model', 65525.67904299378, 0.025758303410947692, 56934475.29519872], [300, 'Simple model', 65525.67886505151, 0.00013793615973905623, 56934475.32099703], [350, 'Simple model', 65525.67886409864, 7.38650518227673e-07, 56934475.32113517], [365, 'Simple model', 65525.67886409458, 1.5383929100326267e-07, 56934475.32113576]]
```
You can also open one of the generated CSV files in your favourite spreadsheet program.
It's also easy to use with numpy. In the above example the table variable
consists of strings and floats. Numpy arrays really only make sense if they're
numbers, so we'll chop off the strings before conversion. Here's a continuation
of the above example:
```Python
import numpy
table_float = [t[2:] for t in table[1:]]
print(numpy.array(table_float))
```
The output should be:
```Python
array([[5.70000000e+07, 1.00000000e+00, 0.00000000e+00],
[2.20049545e+06, 2.83457280e+07, 2.64537776e+07],
[6.67015692e+04, 1.67716115e+05, 5.67655833e+07],
[6.55319178e+04, 8.98245775e+02, 5.69335708e+07],
[6.55257123e+04, 4.81012516e+00, 5.69344705e+07],
[6.55256790e+04, 2.57583034e-02, 5.69344753e+07],
[6.55256789e+04, 1.37936160e-04, 5.69344753e+07],
[6.55256789e+04, 7.38650518e-07, 5.69344753e+07],
[6.55256789e+04, 1.53839291e-07, 5.69344753e+07]])
```
## Transitions
A transition name consists of two compartment names separated by an
underscore. There are default transition functions, depending on the
compartments involved, which you can override by including a 'parameters'
dictionary in your model, and then overriding entries in its 'transition_funcs' sub-dictionary.
This is what the default *transition_funcs* dictionary looks like:
```Python
'transition_funcs': {
'S_I': delta_S_I,
'S_E': delta_S_I,
'S_I1': delta_S_I1,
'S_E1': delta_S_I1,
'B_S': delta_birth_X,
'default': delta_X_Y
}
```
The transition function to execute is looked up in this dictionary. If it's not
found the function specified by 'default' is executed.
For most transitions, a proportion of the "from" compartment is moved
to the "to" compartment. The delta_X_Y function takes care of this.
But for new infections you almost always want a standard SIR-like equation such
as this:
delta = susceptibles * number of contacts per iteration *
risk of infection per contact *
total number of infectious individuals /
total population
susceptibles -= delta
infectious (or exposed) individuals += delta
Two functions are provided to deal with this: the very simple delta_S_I
and the more sophisticated (but slower) delta_S_I1.
The delta_S_I1 function calculates the total number of infectious individuals by adding
all the compartments starting with an *I* as well as all compartments starting
with an *A* (asymptomatic individuals) and *T* (treated individuals). Moreover
the infectiousness of the asymptomatic individuals is multiplied by the
parameter *asymptomatic_infectiousness* and the infectiousness of treated
individuals by *treatment_infectiousness* (both default to 1).
## Compartment names which have meaning
Some of the compartment name prefixes are meaningful, in that the code might
make assumptions about the compartment. A compartment name generally starts with
one of these meaningful prefixes and then a unique identifier. E.g. I1, I2, I3
or I4 for various stages of infectiousness.
- S - Susceptible (See delta_S_I and delta_S_I1)
- E - Exposed (See delta_S_I and delta_S_I1)
- I - Infectious (See delta_S_I and delta_S_I1)
- A - Asymptomatic (See delta_S_I1 and asymptomatic_infectiousness)
- T - On treatment (See delta_S_I1 and treatment_infectiousness)
- N - Population size. Strictly reserved. Do not prefix any compartment N.
- D - Dead (not included in totalling N)
- B - Birth (not included in totalling N)
- R - Recovered
- M - Maternal immunity
- V - Vaccinated
You can also prefix a compartment name with any other letter.
## Noise
You can also add noise to your model so that it is stochastic. We add a
'parameters' key to our model, and within the parameters, we add a 'noise'
key. Like so:
```Python
{
'name': 'Simple model',
'compartments': {
'S': 57000000,
'I': 1,
'R': 0
},
'transitions': {
'S_I': 0.6,
'I_R': 0.1
},
'parameters': {
'noise': 0.05
}
}
```
Now every transition calculation is multipled by a uniform random number in the
range [1-0.05, 1+0.05].
## Parameters
Besides 'noise' there are many other parameters that can be modified
directly. These are the default parameters:
```Python
{
# The model is executed from iteration 'from' to iteration 'to' - 1.
'from': 0,
'to': 365,
# The results are recorded every 'record_frequency' iterations. Set
# to 1 if you want to record the output of every iteration.
'record_frequency': 50, # Write results every X iterations
# Multiply S_E or S_I by X every iteration if reduce_infectivity
# function executed. This is useful for modelling heterogeneity, i.e. the
# fact that usually the most susceptible people get infected earliest in an
# epidemic
'reduce_infectivity': 1.0,
    # If you have multiple infectiousness compartments including
    # asymptomatic and treatment compartments, you can scale their
    # relative infectiousness with the two parameters below.
'asymptomatic_infectiousness': 1.0,
'treatment_infectiousness': 1.0,
# Add stochastic noise to transitions
'noise': 0.0,
# Round all transition calculations to round numbers (not tested yet)
'discrete': False,
# Include the initial state of the model in the results
'record_first': True,
# Include the final state of the model in the results.
# If record_frequency is 1 or divides into the "to" parameter,
# you probably want to set this to False.
'record_last': True,
# The transition functions
'transition_funcs': {
'S_I': delta_S_I,
'S_E': delta_S_I,
'S_I1': delta_S_I1,
'S_E1': delta_S_I1,
'B_S': delta_birth_X,
'default': delta_X_Y
},
# Any functions specified here are executed for each model before
# each iteration
'before_funcs': [],
# Any functions specified here are executed for each model after
# each iteration
'after_funcs': [],
}
```
## More sophisticated example
Let's say we want to model the Covid-19 epidemic in South Africa.
Our model world might like this:
- We have three distinct geographies: (1) urban areas with formal housing (2)
urban areas with informal housing and (3) rural areas.
- Each area has three distinct age groups: (1) 0-24 years old, (2) 25-54, and
(3) 55 and older.
- People start off (S)usceptible. Upon being infected they are (E)xposed. They
transition from (E)xposed to either (A)symptomatic or infectious with mild
symptoms(Im). Asymptomatic people transition to (R)ecovered. Infectious with
mild symptoms transition to infectious with critical symptoms (Ic) and then
either to (R)ecovered or (D)eath.
- After each iteration, there is a bit of migration between the three
geographies.
The source code file ziggie/samples.py contains a class called MacroModels
which contains a method called corona that implements this. Here it is:
```Python
from copy import deepcopy
from ziggie import macro
def corona(self):
parameters = {
'to': 365,
'record_frequency': 1,
'record_last': False,
'noise': 0.1,
'asymptomatic_infectiousness': 0.75,
'reduce_infectivity': 0.999,
'after_funcs': [macro.reduce_infectivity, ],
'transition_funcs': {
'S_E': macro.delta_S_I1,
}
}
parameters_rural = deepcopy(parameters)
parameters_rural['after_funcs'] = [macro.reduce_infectivity,
mix_models, ]
return [
{
'name': 'Urban informal',
'parameters': parameters,
'transitions': {
'S_E': 0.4,
'E_A': 0.125,
'E_Im': 0.125,
'Im_Ic': 0.2,
'Ic_R': 0.2,
'A_R': 0.2
},
'groups': [
{
'name': '0-24',
'transitions': {
'E_A': 0.25,
'E_Im': 0.01,
'Ic_D': 0.002
},
'compartments': {
'S': 2100000,
'E': 10,
'Im': 0,
'Ic': 0,
'A': 0,
'R': 0,
'D': 0
}
},
{
'name': '25-54',
'transitions': {
'Ic_D': 0.0032
},
'compartments': {
'S': 2100000,
'E': 0,
'Im': 0,
'Ic': 0,
'A': 0,
'R': 0,
'D': 0
}
},
{
'name': '55-',
'transitions': {
'Ic_D': 0.032
},
'compartments': {
'S': 600000,
'E': 0,
'Im': 0,
'Ic': 0,
'A': 0,
'R': 0,
'D': 0
}
}
]
},
{
'name': 'Urban formal',
'parameters': parameters,
'transitions': {
'S_E': 0.3,
'E_A': 0.125,
'E_Im': 0.125,
'Im_Ic': 0.2,
'Ic_R': 0.2,
'A_R': 0.2
},
'groups': [
{
'name': '0-24',
'transitions': {
'E_A': 0.25,
'E_Im': 0.01,
'Ic_D': 0.002
},
'compartments': {
'S': 16940000,
'E': 10,
'Im': 0,
'Ic': 0,
'A': 0,
'R': 0,
'D': 0
}
},
{
'name': '25-54',
'transitions': {
'Ic_D': 0.003
},
'compartments': {
'S': 16940000,
'E': 0,
'Im': 0,
'Ic': 0,
'A': 0,
'R': 0,
'D': 0
}
},
{
'name': '55-',
'transitions': {
'Ic_D': 0.03
},
'compartments': {
'S': 4620000,
'E': 0,
'Im': 0,
'Ic': 0,
'A': 0,
'R': 0,
'D': 0
}
}
]
},
{
'name': 'Rural',
'parameters': parameters_rural,
'transitions': {
'S_E': 0.27,
'E_A': 0.125,
'E_Im': 0.125,
'Im_Ic': 0.2,
'Ic_R': 0.2,
'A_R': 0.2
},
'groups': [
{
'name': '0-24',
'transitions': {
'E_A': 0.25,
'E_Im': 0.01,
'Ic_D': 0.002
},
'compartments': {
'S': 7260000,
'E': 10,
'Im': 0,
'Ic': 0,
'A': 0,
'R': 0,
'D': 0
}
},
{
'name': '25-54',
'transitions': {
'Ic_D': 0.0035
},
'compartments': {
'S': 7260000,
'E': 0,
'Im': 0,
'Ic': 0,
'A': 0,
'R': 0,
'D': 0
}
},
{
'name': '55-',
'transitions': {
'Ic_D': 0.035
},
'compartments': {
'S': 2000000,
'E': 0,
'Im': 0,
'Ic': 0,
'A': 0,
'R': 0,
'D': 0
}
}
]
}
]
m = corona()
results = macro.simulate(m)
print("Number of results:", len(results)) # Outputs 366
print("Number of models:", len(results[-1])) # Outputs 3
totals = [macro.calc_totals(results[-1][i])
          for i in range(len(results[-1]))]
print(macro.sum_totals(totals))
```
The output is something like this:
```
Number of results: 366
Number of models: 3
{'N': 59685873.7194506, 'S': 47387513.37539025, 'E': 283059.79850507394, 'Im': 108802.85166144818, 'Ic': 106605.46023480814, 'A': 264806.9195997677, 'R': 11535085.314059254, 'D': 134156.28054939664}
```
| ziggie | /ziggie-0.1.1.tar.gz/ziggie-0.1.1/README.md | README.md |
import socket
import requests
class ZiggoMediaboxXL(object):
    """
    Ziggo Mediabox XL object.

    Library to command the Ziggo Mediabox XL set-top box over the local
    network: one TCP port reports the on/off state and another accepts
    remote-control key presses via an RFB/VNC-style handshake.
    """

    def __init__(self, ip, timeout=5):
        """Store connection settings and fetch the Ziggo channel list.

        :param ip: IP address of the mediabox on the local network.
        :param timeout: socket timeout in seconds for all connections.
        """
        self._ip = ip
        self._timeout = timeout
        self._port = {"state": 62137, "cmd": 5900}
        self._channels_url = 'https://restapi.ziggo.nl/1.0/channels-overview'
        self._fetch_channels()
        self._keys = self._build_key_map()

    @staticmethod
    def _build_key_map():
        """Return the mapping of remote-control key names to hex codes."""
        keys = {
            "POWER": "E0 00", "OK": "E0 01", "BACK": "E0 02",
            "CHAN_UP": "E0 06", "CHAN_DOWN": "E0 07",
            "HELP": "E0 09", "MENU": "E0 0A", "GUIDE": "E0 0B",
            # Fixed: INFO and MENU2 previously used the letter 'O'
            # instead of the digit '0' ("EO .."), which made
            # bytes.fromhex() raise ValueError when those keys were sent.
            "INFO": "E0 0E", "TEXT": "E0 0F", "MENU1": "E0 11",
            "MENU2": "E0 15", "DPAD_UP": "E1 00",
            "DPAD_DOWN": "E1 01", "DPAD_LEFT": "E1 02",
            "DPAD_RIGHT": "E1 03", "PAUSE": "E4 00", "STOP": "E4 02",
            # NOTE(review): RWD and MENU3 share code "E4 07" -- this looks
            # like a copy/paste slip; confirm MENU3's real code against
            # the device before relying on it.
            "RECORD": "E4 04", "FWD": "E4 05", "RWD": "E4 07",
            "MENU3": "E4 07", "ONDEMAND": "EF 28", "DVR": "EF 29",
            "TV": "EF 2A"}
        for i in range(10):
            keys["NUM_{}".format(i)] = "E3 {:02d}".format(i)
        return keys

    def _fetch_channels(self):
        """Retrieve Ziggo channel information (code -> name) over HTTP."""
        json = requests.get(self._channels_url).json()
        self._channels = {c['channel']['code']: c['channel']['name']
                          for c in json['channels']}

    def channels(self):
        """Return the channel mapping fetched at construction time."""
        return self._channels

    def test_connection(self):
        """Return True if the command port on the given IP is reachable."""
        sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        sock.settimeout(self._timeout)
        try:
            return sock.connect_ex((self._ip, self._port['cmd'])) == 0
        finally:
            # Fixed: the socket was previously never closed (leaked on
            # every call).
            sock.close()

    def turned_on(self):
        """Update and return switched on state."""
        self.update_state()
        return self.state

    def update_state(self):
        """Find out whether the media box is turned on/off.

        A successful connect to the state port means the box is on; the
        result is stored in ``self.state``.
        """
        sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        sock.settimeout(self._timeout)
        try:
            self.state = \
                sock.connect_ex((self._ip, self._port['state'])) == 0
        finally:
            # Fixed: close the socket even when connect_ex raises,
            # instead of only on the success path.
            sock.close()

    def send_keys(self, keys):
        """Send a sequence of remote-control keys to the device.

        :param keys: iterable of key names (see _build_key_map);
                     unknown names are silently skipped.
        """
        sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        sock.settimeout(self._timeout)
        try:
            sock.connect((self._ip, self._port['cmd']))
            # Mandatory RFB-style handshake: echo the server's version
            # string, pick security type 01 and drain the remaining
            # server data before sending commands.
            version_info = sock.recv(15)
            sock.send(version_info)
            sock.recv(2)
            sock.send(bytes.fromhex('01'))
            sock.recv(4)
            sock.recv(24)
            # Each key press is a key-down (04 01) followed by a
            # key-up (04 00) event carrying the key's code.
            for key in keys:
                if key in self._keys:
                    sock.send(bytes.fromhex("04 01 00 00 00 00 " +
                                            self._keys[key]))
                    sock.send(bytes.fromhex("04 00 00 00 00 00 " +
                                            self._keys[key]))
        finally:
            # Fixed: close the socket even if the handshake or a send
            # fails, instead of only on the success path.
            sock.close()
=================
Ziggo Mediabox XL
=================
.. image:: https://img.shields.io/pypi/v/ziggo_mediabox_xl.svg
:target: https://pypi.python.org/pypi/ziggo_mediabox_xl
.. image:: https://img.shields.io/travis/b10m/ziggo_mediabox_xl.svg
:target: https://travis-ci.org/b10m/ziggo_mediabox_xl
.. image:: https://readthedocs.org/projects/ziggo-mediabox-xl/badge/?version=latest
:target: https://ziggo-mediabox-xl.readthedocs.io/en/latest/?badge=latest
:alt: Documentation Status
.. image:: https://pyup.io/repos/github/b10m/ziggo_mediabox_xl/shield.svg
:target: https://pyup.io/repos/github/b10m/ziggo_mediabox_xl/
:alt: Updates
Installation
------------
From PyPI
~~~~~~~~~
Assuming you already are inside a virtualenv:
.. code-block:: bash
pip install ziggo_mediabox_xl
From Git
~~~~~~~~
Create a new virtualenv (if you are not already in one) and install the
necessary packages:
.. code-block:: bash
git clone https://github.com/b10m/ziggo_mediabox_xl.git
cd ziggo_mediabox_xl
mkvirtualenv ziggo_mediabox_xl
pip install -r requirements.txt
Usage
-----
This quick example will connect to the IP address listed, verify the box
is turned on and sends NUM_3, NUM_0, and NUM_2 to the device. This will
result in the same action as pressing 302 on your remote control (the
Disney Jr. channel will be selected).
.. code-block:: python
from ziggo_mediabox_xl import ZiggoMediaboxXL
box = ZiggoMediaboxXL('aaa.bbb.ccc.ddd')
if box.turned_on():
box.send_keys(['NUM_3', 'NUM_0', 'NUM_2'])
=======
History
=======
1.1.0 (2018-11-03)
------------------
* Added timeout option.
* Upgraded to
1.0.0 (2017-11-23)
------------------
* First release on PyPI.
| ziggo-mediabox-xl | /ziggo_mediabox_xl-1.1.0-py3-none-any.whl/ziggo_mediabox_xl-1.1.0.dist-info/DESCRIPTION.rst | DESCRIPTION.rst |
## Python API fetching Ziggo F1 broadcasts
Use this package to get Formula 1 broadcasts from Ziggo.

## Usage
```` python
import aiohttp
import asyncio
from datetime import date
from ziggof1broadcasts import get_ziggo_f1_broadcasts
async def main():
session = aiohttp.ClientSession()
# By default, it will return the broadcasts of today, but you can provide a date
startTime = date.fromisoformat('2020-08-16')
broadcasts = await get_ziggo_f1_broadcasts(session, startTime=startTime)
print(f"{broadcasts}")
await session.close()
if __name__ == "__main__":
loop = asyncio.get_event_loop()
loop.run_until_complete(main())
````
| ziggof1broadcasts | /ziggof1broadcasts-0.1.0.tar.gz/ziggof1broadcasts-0.1.0/README.md | README.md |
## Python API fetching Ziggo outages & announcements
Use this package to check with the VodafoneZiggo API for outages & announcements on a VodafoneZiggo address.

## Usage
```` python
import asyncio
import aiohttp
from ziggostatus import get_ziggo_status
postalcode = "1012JS" # Amsterdam - Dam Square
housenumber = "1"
async def main():
session = aiohttp.ClientSession()
status = await get_ziggo_status(postalcode, housenumber, session)
print(f"{status}")
await session.close()
if __name__ == "__main__":
loop = asyncio.get_event_loop()
loop.run_until_complete(main())
````
| ziggostatus | /ziggostatus-0.1.1.tar.gz/ziggostatus-0.1.1/README.md | README.md |
ziggurat_foundations Individual Contributor License Agreement
Thank you for your interest in contributing to ziggurat_foundations ("We" or "Us").
This contributor agreement ("Agreement") documents the rights granted by contributors to Us.
To make this document effective, **please sign it and send it to Us by electronic submission,
following the instructions at AUTHORS.rst file in the repository**.
This is a legally binding document, so please read it carefully before agreeing to it.
The Agreement may cover more than one software project managed by Us.
1. Definitions
"You" means the individual who Submits a Contribution to Us.
"Contribution" means any work of authorship that is Submitted by You to Us in which
You own or assert ownership of the Copyright. If You do not own the Copyright in the entire
work of authorship, please follow the instructions in AUTHORS.rst file in the repository.
"Copyright" means all rights protecting works of authorship owned or controlled by You,
including copyright, moral and neighboring rights, as appropriate, for the full term of
their existence including any extensions by You.
"Material" means the work of authorship which is made available by Us to third parties.
When this Agreement covers more than one software project, the Material means the work of
authorship to which the Contribution was Submitted. After You Submit the Contribution,
it may be included in the Material.
"Submit" means any form of electronic, verbal, or written communication sent to Us or our
representatives, including but not limited to electronic mailing lists, source code
control systems, and issue tracking systems that are managed by, or on behalf of, Us for
the purpose of discussing and improving the Material, but excluding communication that
is conspicuously marked or otherwise designated in writing by You as "Not a Contribution."
"Submission Date" means the date on which You Submit a Contribution to Us.
"Effective Date" means the date You execute this Agreement or
the date You first Submit a Contribution to Us, whichever is earlier.
2. Grant of Rights
2.1 Copyright License
(a) You retain ownership of the Copyright in Your Contribution and have the same rights to use or
license the Contribution which You would have had without entering into the Agreement.
(b) To the maximum extent permitted by the relevant law, You grant to Us a perpetual,
worldwide, non-exclusive, transferable, royalty-free, irrevocable license under the
Copyright covering the Contribution, with the right to sublicense such rights through
multiple tiers of sublicensees, to reproduce, modify, display, perform and distribute
the Contribution as part of the Material; provided that this license is conditioned upon
compliance with Section 2.3.
2.2 Patent License
For patent claims including, without limitation, method, process, and apparatus
claims which You own, control or have the right to grant, now or in the future, You grant to Us
a perpetual, worldwide, non-exclusive, transferable, royalty-free, irrevocable patent license,
with the right to sublicense these rights to multiple tiers of sublicensees, to make, have made,
use, sell, offer for sale, import and otherwise transfer the Contribution and the Contribution
in combination with the Material (and portions of such combination). This license is granted
only to the extent that the exercise of the licensed rights infringes such patent claims;
and provided that this license is conditioned upon compliance with Section 2.3.
2.3 Outbound License
Based on the grant of rights in Sections 2.1 and 2.2, if We include Your Contribution in a
Material, We may license the Contribution under any license, including copyleft, permissive,
commercial, or proprietary licenses. As a condition on the exercise of this right,
We agree to also license the Contribution under the terms of the license or licenses which
We are using for the Material on the Submission Date.
2.4 Moral Rights.
If moral rights apply to the Contribution, to the maximum extent permitted by law,
You waive and agree not to assert such moral rights against Us or our successors in interest,
or any of our licensees, either direct or indirect.
2.5 Our Rights. You acknowledge that We are not obligated to use Your Contribution as part of
the Material and may decide to include any Contribution We consider appropriate.
2.6 Reservation of Rights. Any rights not expressly licensed under this section are
expressly reserved by You.
3. Agreement
You confirm that:
(a) You have the legal authority to enter into this Agreement.
(b) You own the Copyright and patent claims covering the Contribution
which are required to grant the rights under Section 2.
(c) The grant of rights under Section 2 does not violate any grant of rights which
You have made to third parties, including Your employer.
If You are an employee, You have had Your employer approve this Agreement or
sign the Entity version of this document. If You are less than eighteen years old,
please have Your parents or guardian sign the Agreement.
(d) You have followed the instructions in AUTHORS.rst file in the repository,
if You do not own the Copyright in the entire work of authorship Submitted.
4. Disclaimer
EXCEPT FOR THE EXPRESS WARRANTIES IN SECTION 3, THE CONTRIBUTION IS PROVIDED "AS IS".
MORE PARTICULARLY, ALL EXPRESS OR IMPLIED WARRANTIES INCLUDING, WITHOUT LIMITATION,
ANY IMPLIED WARRANTY OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
ARE EXPRESSLY DISCLAIMED BY YOU TO US. TO THE EXTENT THAT ANY SUCH WARRANTIES CANNOT BE DISCLAIMED,
SUCH WARRANTY IS LIMITED IN DURATION TO THE MINIMUM PERIOD PERMITTED BY LAW.
5. Consequential Damage Waiver
TO THE MAXIMUM EXTENT PERMITTED BY APPLICABLE LAW, IN NO EVENT WILL YOU BE LIABLE FOR ANY
LOSS OF PROFITS, LOSS OF ANTICIPATED SAVINGS, LOSS OF DATA, INDIRECT, SPECIAL, INCIDENTAL,
CONSEQUENTIAL AND EXEMPLARY DAMAGES ARISING OUT OF THIS AGREEMENT REGARDLESS OF THE LEGAL OR
EQUITABLE THEORY (CONTRACT, TORT OR OTHERWISE) UPON WHICH THE CLAIM IS BASED.
6. Miscellaneous
6.1 This Agreement will be governed by and construed in accordance with the laws of POLAND
excluding its conflicts of law provisions. Under certain circumstances, the governing law
in this section might be superseded by the United Nations Convention on Contracts for the
International Sale of Goods ("UN Convention") and the parties intend to avoid the application of
the UN Convention to this Agreement and, thus, exclude the application of the UN Convention
in its entirety to this Agreement.
6.2 This Agreement sets out the entire agreement between You and Us for Your Contributions to
Us and overrides all other agreements or understandings.
6.3 If You or We assign the rights or obligations received through this Agreement to a third party,
as a condition of the assignment, that third party must agree in writing to abide by all the rights
and obligations in the Agreement.
6.4 The failure of either party to require performance by the other party of any provision of this
Agreement in one situation shall not affect the right of a party to require such performance
at any time in the future. A waiver of performance under a provision in one situation shall not be
considered a waiver of the performance of the provision in the future or a waiver
of the provision in its entirety.
6.5 If any provision of this Agreement is found void and unenforceable, such provision will
be replaced to the extent possible with a provision that comes closest to the meaning of the
original provision and which is enforceable. The terms and conditions set forth in this
Agreement shall apply notwithstanding any failure of essential purpose of this Agreement or
any limited remedy to the maximum extent possible under law.
You
________________________
Name: __________________
Address: ________________
________________________
Us
Name: Webreactor/Marcin Lulek
Address: Poland,
Pabianice 95-200
20-Stycznia 66/25
| ziggurat-foundations | /ziggurat_foundations-0.9.1.tar.gz/ziggurat_foundations-0.9.1/CLA_INDIVIDIAL.md | CLA_INDIVIDIAL.md |
# Change Log
All notable changes to this project will be documented in this file.
The format is based on [Keep a Changelog](http://keepachangelog.com/)
and this project adheres to [Semantic Versioning](http://semver.org/).
<!--
PRs should document their user-visible changes (if any) in the
Unreleased section, uncommenting the header as necessary.
-->
<!-- ## Unreleased -->
<!-- ### Changed -->
<!-- ### Added -->
<!-- ### Removed -->
<!-- ### Fixed -->
## [0.8.4] - 2021-04-18
**This is the last release that is compatible with Python versions older than 3.6**
### Added
* pyramid extensions can now accept `session_provider_callable` not only as a path to callable but
also as plain callable object
* tests for pyramid extensions
### Changed
* Become compatible with sqlalchemy 1.4.x
* Allow obtaining node resource by model name #70 (thanks Francis Charette Migneault)
* remove deprecated base model code
* reduce warnings generated
* prepare for pyramid versions higher than 2.0
### Fixed
* missing argument in specific permission fetching codepath #69
## [0.8.3] - 2019-02-09
### Added
* pyramid extensions can now accept `session_provider_callable` not only as a path to callable but
also as plain callable object
* tests for pyramid extensions
### Changed
* fixed deprecated usage of `set_request_property` (thanks Francis Charette Migneault)
## [0.8.2] - 2018-11-08
### Fixed
* Fixed migrations under Mysql 8.x
## [0.8.0] [0.8.1] - 2018-09-28
### Changed
* [Breaking] Most of deprecated model methods got removed, please use services instead
* [Breaking] ziggurat_model_init has now explicit order of model classes set
## [0.7.3] - 2018-02-19
### Changed
* Use pbkdf2_sha256 by default (bcypt hashes will work and will be migrated on first login for users)
**Important**
By default `check_password` will migrate your existing bcrypt hashes to pbkdf2_sha256, if you want to keep bcrypt,
you can either pass `passwordmanager_schemes` list to `ziggurat_model_init` or use your own passwordmanager.
## [0.7.2] - 2018-02-17
### Changed
* Fixed some minor issues that resulted in bad migrations when someone used `autogenerate in alembic`
* Added a way to read sqlalchemy.url from environment variable (thanks René Jochum)
## [0.7.1] - 2017-02-19
### Changed
* fixed tree ordering for nested elements
* fixed `perm_by_group_and_perm_name` method
## [0.7.0] - 2016-11-25
### Added
* Introduced ResourceTreeService for nested resource management
(currently only PostgreSQL support is implemented)
* lots of new tests
### Changed
* added deprecation warnings
* [breaking] code reorganization that might break existing code
* [breaking] _ziggurat_services is now a list
2016-07-05
----------
* Release: 0.6.8
* use importlib for imports to avoid issues with unicode passed to __import__()
2016-05-05
----------
* Release: 0.6.6/0.6.7
* increased field sizes for external identity tokens, permission names
and username
* perm name is checked to be lowercase in all databases, not only
for postgresql
2016-04-27
----------
* Release: 0.6.5
* got rid of all unicode warnings generated by sqlalchemy and start to use
unicode for all strings
* user.resources_with_possible_perms() passes resource_types properly
2016-04-19
----------
* Release: 0.6.3/0.6.4
* extended functionality of `populate_obj` function of Base class
* minor bugfix for mysql
**BACKWARDS INCOMPATIBLE CHANGES**
* external_identities.local_user_name column is dropped and replaced with
local_user_id
2015-11-13
----------
* Release: 0.6.2
* replace cryptacular with passlib
2015-09-19
----------
* Release: 0.6.1
* some fixes for ext.pyramid not-working completly with service related changes
2015-08-03
----------
* Release: 0.6.0
* models are decoupled from services that interact with models
(all model querying methods now live in services)
**BACKWARDS INCOMPATIBLE CHANGES**
* import related changes:
* ziggurat_foundations.models doesn't import all the models anymore
every model now lives in separate file
* permission related functions now live in permissions module
2015-06-15
----------
* Release: 0.5.6
* user model gains security date column
* minor bugfixes
2015-06-07
----------
* Release: 0.5.5
* added persist(), delete(), base_query() methods to Base model
2015-04-27
----------
* Release: 0.5.3
* resource.groups_for_perm() returns groups/permissions for single resource
2015-04-24
----------
* Release: 0.5.2
* resource.users_for_perm() now accepts `limit_group_permissions` param that
that makes it return just group with perm name instead tuples including every
user/perm pairs
2015-04-23
----------
* Release: 0.5.1
* Group.resources_with_possible_perms() added
2015-04-17
----------
* Release: 0.5
* Now uses detailed permissions
* BACKWARDS INCOMPATIBLE API CHANGES
* ResourceMixin.users_for_perm() accepts additional parameters group_ids, and user_ids
to limit the amount of results if needed
* User.permissions, Resource.perms_for_user, Resource.direct_perms_for_user,
Resource.group_perms_for_user, Resource.users_for_perm_detailed, Resource.users_for_perm
now return list of detailed PermissionTuple's instead simple [id, perm_name] pairs
this will break your application
You can use ziggurat_foundations.utils.permission_to_04_acls() to convert
the new tuples to pre 0.5 format
2015-02-18
----------
* Release: 0.4.3
* Added a way to filter on resource types in UserMixin.resources_with_perms()
* Made User.resources dynamic relationship
2014-08-25
----------
* Second Alpha Release 0.4
* Move to paginate from webhelpers.paginate
* Users can now log in via username or email address
* Python 3 compatible after moving away from webhelpers.paginate
2012-11-28
----------
* First Alpha Release 0.3 - This release should have a fairly stable API
* Hundreds of small and big changes - based on all great feedback we are now
using surrogate pkeys instead of natural pkeys for most models.
As result of this few methods started accepting id's instead usernames,
so consider yourself warned that this release might be bw. incompatible a bit
with your application
* At this point all tests should pass on mysql, postgresql, sqlite
2012-06-26
----------
* added some indexes to resource owners
* previous revision ID: 3cfc41c4a5f0
2012-06-25
----------
* dropped groups.id column
* previous revision ID: 53927300c277
2012-06-05
----------
* shrinked integer sized on primary keys
* previous revision ID: 54d08f9adc8c
2012-05-27 version 0.2 First public release
-------------------------------------------
* added proper alembic(pre 0.3.3 trunk) support for multiple alembic migrations via separate versioning table
* please do manual stamp for CURRENT revision ID: 54d08f9adc8c
* changes for first public pypi release
* Possible backwards incompatibility: Remove cache keyword cruft
2012-05-25
----------
* Possible backwards incompatibility: Remove invalidate keyword cruft
2012-03-10
----------
* Add registration date to user model, changed last_login_date to no-timezone type (this seem trivial enough to not faciliate schema change)
* previous revision ID: 2d472fe79b95
2012-02-19
----------
* Made external identity fields bigger
* previous revision ID: 264049f80948
2012-02-13
----------
* Bumped alembic machinery to 0.2
* Enabled developers to set their own custom password managers
* added ordering column for resources in tree
* Stubs for tree traversal
* previous revision ID: 46a9c4fb9560
2011-12-20
----------
* Made hash fields bigger
* previous revision ID: 5c84d7260c5
2011-11-15
----------
* Added ExternalIdentityMixin - for storing information about user profiles connected to 3rd party identities like facebook/twitter/google/github etc.
* previous revision ID: 24ab8d11f014
2011-11-03
----------
* added alembic migration support
* previous revision ID: 2bb1ba973f0b
2011-08-14
----------
* resource.users_for_perm(), resource.direct_perms_for_user() and resource.group_perms_for_user() return tuple (user/group_name,perm_name) now
.. hint::
What "previous revision ID" means?
If you are updating the package that never was stamped with
alembic you may need to stamp the database manually with following revision id,
from this point onwards you will be able to update schemas automaticly.
Alembic 0.3.3+ (or current trunk for 2012-05-27) is required for this to function properly
| ziggurat-foundations | /ziggurat_foundations-0.9.1.tar.gz/ziggurat_foundations-0.9.1/CHANGES.md | CHANGES.md |
ziggurat_foundations Entity Contributor License Agreement
Thank you for your interest in contributing to ziggurat_foundations ("We" or "Us").
This contributor agreement ("Agreement") documents the rights granted by contributors to Us.
To make this document effective, please **sign it and send it to Us by electronic submission,
following the instructions at AUTHORS.rst file in the repository**. This is a legally binding document,
so please read it carefully before agreeing to it. The Agreement may cover more than
one software project managed by Us.
1. Definitions
"You" means any Legal Entity on behalf of whom a Contribution has been received by Us.
"Legal Entity" means an entity which is not a natural person. "Affiliates" means other Legal
Entities that control, are controlled by, or under common control with that Legal Entity.
For the purposes of this definition, "control" means (i) the power, direct or indirect, to
cause the direction or management of such Legal Entity, whether by contract or otherwise,
(ii) ownership of fifty percent (50%) or more of the outstanding shares or securities which
vote to elect the management or other persons who direct such Legal Entity or
(iii) beneficial ownership of such entity.
"Contribution" means any work of authorship that is Submitted by You to Us in which You own
or assert ownership of the Copyright. If You do not own the Copyright in the entire work of
authorship, please follow the instructions in AUTHORS.rst file in the repository.
"Copyright" means all rights protecting works of authorship owned or controlled by You or
Your Affiliates, including copyright, moral and neighboring rights, as appropriate,
for the full term of their existence including any extensions by You.
"Material" means the work of authorship which is made available by Us to third parties.
When this Agreement covers more than one software project, the Material means the work of
authorship to which the Contribution was Submitted. After You Submit the Contribution,
it may be included in the Material.
"Submit" means any form of electronic, verbal, or written communication sent to
Us or our representatives, including but not limited to electronic mailing lists,
source code control systems, and issue tracking systems that are managed by, or on behalf of,
Us for the purpose of discussing and improving the Material, but excluding communication that is
conspicuously marked or otherwise designated in writing by You as "Not a Contribution."
"Submission Date" means the date on which You Submit a Contribution to Us.
"Effective Date" means the date You execute this Agreement or the date You first
Submit a Contribution to Us, whichever is earlier.
2. Grant of Rights
2.1 Copyright License
(a) You retain ownership of the Copyright in Your Contribution and have the same rights to use or
license the Contribution which You would have had without entering into the Agreement.
(b) To the maximum extent permitted by the relevant law, You grant to Us a perpetual, worldwide,
non-exclusive, transferable, royalty-free, irrevocable license under the Copyright covering
the Contribution, with the right to sublicense such rights through multiple tiers of sublicensees,
to reproduce, modify, display, perform and distribute the Contribution as part of the Material;
provided that this license is conditioned upon compliance with Section 2.3.
2.2 Patent License
For patent claims including, without limitation, method, process, and apparatus claims which
You or Your Affiliates own, control or have the right to grant, now or in the future,
You grant to Us a perpetual, worldwide, non-exclusive, transferable, royalty-free, irrevocable
patent license, with the right to sublicense these rights to multiple tiers of sublicensees,
to make, have made, use, sell, offer for sale, import and otherwise transfer the Contribution
and the Contribution in combination with the Material (and portions of such combination).
This license is granted only to the extent that the exercise of the licensed rights infringes
such patent claims; and provided that this license is conditioned upon compliance with Section 2.3.
2.3 Outbound License
Based on the grant of rights in Sections 2.1 and 2.2, if We include Your Contribution in a Material,
We may license the Contribution under any license, including copyleft, permissive, commercial,
or proprietary licenses. As a condition on the exercise of this right,
We agree to also license the Contribution under the terms of the license or licenses which
We are using for the Material on the Submission Date.
2.4 Moral Rights. If moral rights apply to the Contribution, to the maximum extent permitted by
law, You waive and agree not to assert such moral rights against Us or our successors in interest,
or any of our licensees, either direct or indirect.
2.5 Our Rights. You acknowledge that We are not obligated to use Your Contribution as part of the
Material and may decide to include any Contribution We consider appropriate.
2.6 Reservation of Rights. Any rights not expressly licensed under this section are expressly reserved by You.
3. Agreement
You confirm that:
(a) You have the legal authority to enter into this Agreement.
(b) You or Your Affiliates own the Copyright and patent claims covering the Contribution which
are required to grant the rights under Section 2.
(c) The grant of rights under Section 2 does not violate any grant of rights which You or Your
Affiliates have made to third parties.
(d) You have followed the instructions in AUTHORS.rst file in the repository,
if You do not own the Copyright in the entire work of authorship Submitted.
4. Disclaimer
EXCEPT FOR THE EXPRESS WARRANTIES IN SECTION 3, THE CONTRIBUTION IS PROVIDED "AS IS".
MORE PARTICULARLY, ALL EXPRESS OR IMPLIED WARRANTIES INCLUDING, WITHOUT LIMITATION,
ANY IMPLIED WARRANTY OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
ARE EXPRESSLY DISCLAIMED BY YOU TO US. TO THE EXTENT THAT ANY SUCH WARRANTIES CANNOT BE DISCLAIMED,
SUCH WARRANTY IS LIMITED IN DURATION TO THE MINIMUM PERIOD PERMITTED BY LAW.
5. Consequential Damage Waiver
TO THE MAXIMUM EXTENT PERMITTED BY APPLICABLE LAW, IN NO EVENT WILL YOU BE LIABLE FOR ANY
LOSS OF PROFITS, LOSS OF ANTICIPATED SAVINGS, LOSS OF DATA, INDIRECT, SPECIAL, INCIDENTAL,
CONSEQUENTIAL AND EXEMPLARY DAMAGES ARISING OUT OF THIS AGREEMENT REGARDLESS OF THE LEGAL
OR EQUITABLE THEORY (CONTRACT, TORT OR OTHERWISE) UPON WHICH THE CLAIM IS BASED.
6. Miscellaneous
6.1 This Agreement will be governed by and construed in accordance with the laws of Poland
excluding its conflicts of law provisions. Under certain circumstances, the governing law in
this section might be superseded by the United Nations Convention on Contracts for the
International Sale of Goods ("UN Convention") and the parties intend to avoid the application
of the UN Convention to this Agreement and, thus, exclude the application of the UN Convention
in its entirety to this Agreement.
6.2 This Agreement sets out the entire agreement between You and Us for Your Contributions to Us
and overrides all other agreements or understandings.
6.3 If You or We assign the rights or obligations received through this Agreement to a third party,
as a condition of the assignment, that third party must agree in writing to abide by all the rights
and obligations in the Agreement.
6.4 The failure of either party to require performance by the other party of any provision of this
Agreement in one situation shall not affect the right of a party to require such performance at
any time in the future. A waiver of performance under a provision in one situation shall
not be considered a waiver of the performance of the provision in the future or a waiver of
the provision in its entirety.
6.5 If any provision of this Agreement is found void and unenforceable, such provision will be
replaced to the extent possible with a provision that comes closest to the meaning of the
original provision and which is enforceable. The terms and conditions set forth in this
Agreement shall apply notwithstanding any failure of essential purpose of this Agreement or
any limited remedy to the maximum extent possible under law.
You
________________________
Name: __________________
Title: ___________________
Address: ________________
________________________
Us
Name: Webreactor/Marcin Lulek
Address: Poland,
Pabianice 95-200
20-Stycznia 66/25
| ziggurat-foundations | /ziggurat_foundations-0.9.1.tar.gz/ziggurat_foundations-0.9.1/CLA_ENTITY.md | CLA_ENTITY.md |
# Ziggurat Foundations
[![Build Status]](https://travis-ci.org/ergo/ziggurat_foundations) [![logo]](https://gitter.im/ergo/ziggurat_foundations)
**DOCUMENTATION**: http://readthedocs.org/docs/ziggurat-foundations/en/latest/
**BUG TRACKER**: https://github.com/ergo/ziggurat_foundations
High level mixins for adding authorization, resource ownership and permission management
fast, simple and easy. In summary, Ziggurat Foundations is a framework-agnostic
set of SQLAlchemy classes, so it can be used with Flask, Pyramid or other popular frameworks.
It is the perfect solution for handling complex login and user
management systems, from e-commerce systems, to private intranets or large CMS systems.
It can easily be extended to support any additional features you may need (explained
further in the documentation)
Zigg has been used (at scale) for very large implementations (millions of real users) and
has been extended for custom applications such as geo-location applications that rely
on pin-point accuracy for a users location. Zigg has been designed to work for
high end environments, where the user(s) are at the main focus of the application
(for example Zigg could become the backbone for a social media style application).
The aim of this project is to supply set of generic models that cover the most
common needs in application development when it comes to authorization - using
flat and tree like data structures. We provide most commonly needed features in a "standard"
application, but provide them as mixins as we understand that every implementation
has its own use case and in doing so, extending the base models is very easy.
Zigg supplies extendable, robust and well tested models that include:
- User - base for user accounts
- Group - container for many users
- Resource - Arbitrary database entity that can represent various object hierarchies -
blogs, forums, cms documents, pages etc.
Zigg provides standard functions that let you:
- Assign arbitrary permissions directly to users (ie. access certain views)
- Assign users to groups
- Assign arbitrary permissions to groups
- Assign arbitrary resource permissions to users (ie. only user X can access private forum)
- Assign arbitrary resource permissions to groups
- Manage nested resources with tree service
- Assign a user to an external identity (such as facebook/twitter)
- Manage the sign in/sign out process
- Change users password and generate security codes
- Example root context factory for assigning permissions per request (framework integration)
Ziggurat Foundations is BSD Licensed
# Local development using docker
docker-compose run --rm app bash
cd ../application;
To run sqlite tests:
tox
To run postgres tests:
DB_STRING="postgresql://test:test@db:5432/test" DB=postgres tox
To run mysql tests:
DB_STRING="mysql+mysqldb://test:test@db_mysql/test" DB=mysql tox
[Build Status]: https://travis-ci.org/ergo/ziggurat_foundations.svg?branch=master
[logo]: https://badges.gitter.im/ergo/ziggurat_foundations.svg
| ziggurat-foundations | /ziggurat_foundations-0.9.1.tar.gz/ziggurat_foundations-0.9.1/README.md | README.md |
Contributor Agreement
=====================
The submitter agrees by adding his or her name within the section below named
"Contributors" and submitting the resulting modified document to the
canonical shared repository location for this software project (whether
directly, as a user with "direct commit access", or via a "pull request"), he
or she is signing a contract electronically. The submitter becomes a
Contributor after a) he or she signs this document by adding their name (and company name)
beneath the "Contributors" section below, and b) the resulting document is
accepted into the canonical version control repository.
(http://www.harmonyagreements.org)
CLA_INDIVIDUAL.md is the text for contributions from individuals.
CLA_ENTITY.md is the text for contributions from legal entities.
List of Contributors
====================
The below-signed are contributors to a code repository that is part of the
project. Each below-signed contributor has read,
understood and agrees to the terms above in the section within this document
entitled "Contributor Agreement" as of the date beside his or her name.
Current or previous core committers:
* Marcin Lulek (Ergo^) 2000-01-01
Contributors:
* Luke Crooks (crooksey) 2014-08-22
* Svintsov Dmitry (uralbash) 2014-12-24
* Marcin Kuzminski (marcinkuzminski) 2011-11-14
* Lukasz Fidosz (virhilo) 2011-01-01
* Arian Maykon de Araújo Diógenes (arianmaykon) 2016-04-19
* Edward Arghiroiu (themightyonyx) 2015-11-13
* Łukasz Bołdys (utek) 2013-11-22
* Christian Benke (peletiah) 2017-02-19
* René Jochum (pcdummy) 2018-02-13
* Francis Charette Migneault (fmigneault) 2019-02-07
| ziggurat-foundations | /ziggurat_foundations-0.9.1.tar.gz/ziggurat_foundations-0.9.1/AUTHORS.rst | AUTHORS.rst |
from __future__ import unicode_literals
import pkg_resources
from ziggurat_foundations.utils import ModelProxy, noop # noqa
__version__ = pkg_resources.get_distribution("ziggurat_foundations").parsed_version
def import_model_service_mappings():
    """Return the mapping of model class names to their service classes.

    The service modules are imported lazily inside this function rather
    than at module import time — presumably to avoid import cycles between
    the models and services packages (confirm before hoisting them).

    :return: dict mapping model name -> list of service classes
    """
    from ziggurat_foundations.models.services.user import UserService
    from ziggurat_foundations.models.services.group import GroupService
    from ziggurat_foundations.models.services.group_permission import (
        GroupPermissionService,
    )
    from ziggurat_foundations.models.services.user_permission import (
        UserPermissionService,
    )
    from ziggurat_foundations.models.services.user_resource_permission import (
        UserResourcePermissionService,
    )
    from ziggurat_foundations.models.services.group_resource_permission import (
        GroupResourcePermissionService,
    )  # noqa
    from ziggurat_foundations.models.services.resource import ResourceService
    from ziggurat_foundations.models.services.resource_tree import ResourceTreeService
    from ziggurat_foundations.models.services.external_identity import (
        ExternalIdentityService,
    )

    # Resource gets two services: CRUD-style access plus tree traversal
    return {
        "User": [UserService],
        "Group": [GroupService],
        "GroupPermission": [GroupPermissionService],
        "UserPermission": [UserPermissionService],
        "UserResourcePermission": [UserResourcePermissionService],
        "GroupResourcePermission": [GroupResourcePermissionService],
        "Resource": [ResourceService, ResourceTreeService],
        "ExternalIdentity": [ExternalIdentityService],
    }
def make_passwordmanager(schemes=None):
    """
    Create a passlib ``CryptContext`` used for hashing and verifying
    user passwords.

    ``schemes`` should contain the list of hash scheme names you wish
    to support. The default sets pbkdf2_sha256 as the preferred scheme,
    with support for legacy bcrypt hashes.

    :param schemes: list of passlib scheme names; defaults to
        ``["pbkdf2_sha256", "bcrypt"]`` when falsy
    :return: CryptContext()
    """
    # imported lazily so merely importing the package does not require passlib
    from passlib.context import CryptContext

    if not schemes:
        schemes = ["pbkdf2_sha256", "bcrypt"]
    # "auto" marks every scheme except the first as deprecated, so old
    # hashes verify but get re-hashed with the preferred scheme
    pwd_context = CryptContext(schemes=schemes, deprecated="auto")
    return pwd_context
def ziggurat_model_init(
    user=None,
    group=None,
    user_group=None,
    group_permission=None,
    user_permission=None,
    user_resource_permission=None,
    group_resource_permission=None,
    resource=None,
    external_identity=None,
    *args,
    **kwargs
):
    """
    Attach each model class to its service classes and give every service a
    shared ``ModelProxy`` holding all model definitions that services might
    use. Also configures the password manager used by the ``User`` model.

    :param user: the application's User model class (the remaining model
        parameters follow the same pattern for their respective models)
    :param args: unused, kept for backward compatibility
    :param kwargs: may contain the options below
    :param passwordmanager: the password manager to override default one
    :param passwordmanager_schemes: list of schemes for default
        passwordmanager to use
    :return: None
    """
    models = ModelProxy()
    models.User = user
    models.Group = group
    models.UserGroup = user_group
    models.GroupPermission = group_permission
    models.UserPermission = user_permission
    models.UserResourcePermission = user_resource_permission
    models.GroupResourcePermission = group_resource_permission
    models.Resource = resource
    models.ExternalIdentity = external_identity
    model_service_mapping = import_model_service_mappings()
    # explicit passwordmanager wins; otherwise build the default one
    # (optionally with caller-supplied schemes)
    if kwargs.get("passwordmanager"):
        user.passwordmanager = kwargs["passwordmanager"]
    else:
        user.passwordmanager = make_passwordmanager(
            kwargs.get("passwordmanager_schemes")
        )
    for name, cls in models.items():
        # attach the model class (and the proxy of all models) to every
        # service registered for that model name
        services = model_service_mapping.get(name, [])
        for service in services:
            setattr(service, "model", cls)
            setattr(service, "models_proxy", models)
from collections import namedtuple
import sqlalchemy as sa
from .models.base import get_db_session
try:
    try:
        from pyramid.authorization import Allow, Deny, ALL_PERMISSIONS
    except ImportError:
        from pyramid.security import Allow, Deny, ALL_PERMISSIONS
except ImportError:
    # pyramid is not installed - provide compatible stand-ins
    Allow = "Allow"
    Deny = "Deny"

    # borrowed directly from pyramid - to avoid dependency on pyramid itself
    # source https://github.com/Pylons/pyramid/blob/master/pyramid/security.py
    class AllPermissionsList(object):
        """ Stand in 'permission list' to represent all permissions """

        def __iter__(self):
            # BUGFIX: must return an iterator, not a bare tuple —
            # ``iter(ALL_PERMISSIONS)`` previously raised TypeError
            return iter(())

        def __contains__(self, other):
            # every permission name is considered contained
            return True

        def __eq__(self, other):
            return isinstance(other, self.__class__)

    ALL_PERMISSIONS = AllPermissionsList()
# Names exported via ``from ziggurat_foundations.permissions import *``
__all__ = [
    "ANY_PERMISSION_CLS",
    "ANY_PERMISSION",
    "resource_permissions_for_users",
    "permission_to_04_acls",
    "permission_to_pyramid_acls",
]
class ANY_PERMISSION_CLS(object):
    """Sentinel type that compares equal to the string '__any_permission__'.

    Instances are used in permission lookups (see
    ``resource_permissions_for_users``) to mean "match any permission name".
    """

    def __eq__(self, other):
        return other == "__any_permission__"

    def __ne__(self, other):
        return not self.__eq__(other)


# Shared sentinel instance passed around instead of a concrete permission name
ANY_PERMISSION = ANY_PERMISSION_CLS()
# Named row describing a single resolved permission; produced by
# resource_permissions_for_users() and consumed by the acl converters below.
PermissionTuple = namedtuple(
    "PermissionTuple",
    "user perm_name type group resource owner allowed",
)
def resource_permissions_for_users(
    models_proxy,
    perm_names,
    resource_ids=None,
    user_ids=None,
    group_ids=None,
    resource_types=None,
    limit_group_permissions=False,
    skip_user_perms=False,
    skip_group_perms=False,
    db_session=None,
):
    """
    Returns permission tuples that match one of passed permission names

    :param models_proxy: ModelProxy holding the mapped model classes
    :param perm_names: list of permissions that can be matched; pass
        ANY_PERMISSION (or [ANY_PERMISSION]) to match any permission name
    :param resource_ids: restrict to specific resources
    :param user_ids: restrict to specific users
    :param group_ids: restrict to specific groups
    :param resource_types: restrict to specific resource types
    :param limit_group_permissions: should be used if we do not want to have
        user objects returned for group permissions, this might cause
        performance issues for big groups
    :param skip_user_perms: omit permissions granted directly to users
    :param skip_group_perms: omit permissions granted via group membership
    :param db_session: explicit session (falls back to the default one)
    :return: list of PermissionTuple; ``owner`` is always False and
        ``allowed`` always True in the tuples built here
    """
    db_session = get_db_session(db_session)
    # fetch groups and their permissions (possibly with users belonging
    # to group if needed)
    query = db_session.query(
        models_proxy.GroupResourcePermission.perm_name,
        models_proxy.User,
        models_proxy.Group,
        sa.literal("group").label("type"),
        models_proxy.Resource,
    )
    query = query.join(
        models_proxy.Group,
        models_proxy.Group.id == models_proxy.GroupResourcePermission.group_id,
    )
    query = query.join(
        models_proxy.Resource,
        models_proxy.Resource.resource_id
        == models_proxy.GroupResourcePermission.resource_id,
    )
    if limit_group_permissions:
        # join a User row that can never match so this SELECT keeps the same
        # column shape as query2 below (required for the UNION)
        query = query.outerjoin(models_proxy.User, models_proxy.User.id == None)  # noqa
    else:
        query = query.join(
            models_proxy.UserGroup,
            models_proxy.UserGroup.group_id
            == models_proxy.GroupResourcePermission.group_id,
        )
        query = query.outerjoin(
            models_proxy.User, models_proxy.User.id == models_proxy.UserGroup.user_id
        )
    if resource_ids:
        query = query.filter(
            models_proxy.GroupResourcePermission.resource_id.in_(resource_ids)
        )
    if resource_types:
        query = query.filter(models_proxy.Resource.resource_type.in_(resource_types))
    # ANY_PERMISSION (bare or as a one-element list) disables name filtering
    if perm_names not in ([ANY_PERMISSION], ANY_PERMISSION) and perm_names:
        query = query.filter(
            models_proxy.GroupResourcePermission.perm_name.in_(perm_names)
        )
    if group_ids:
        query = query.filter(
            models_proxy.GroupResourcePermission.group_id.in_(group_ids)
        )
    if user_ids and not limit_group_permissions:
        query = query.filter(models_proxy.UserGroup.user_id.in_(user_ids))
    # 2nd query that will fetch users with direct resource permissions
    query2 = db_session.query(
        models_proxy.UserResourcePermission.perm_name,
        models_proxy.User,
        models_proxy.Group,
        sa.literal("user").label("type"),
        models_proxy.Resource,
    )
    query2 = query2.join(
        models_proxy.User,
        models_proxy.User.id == models_proxy.UserResourcePermission.user_id,
    )
    query2 = query2.join(
        models_proxy.Resource,
        models_proxy.Resource.resource_id
        == models_proxy.UserResourcePermission.resource_id,
    )
    # group needs to be present to work for union, but never actually matched
    query2 = query2.outerjoin(models_proxy.Group, models_proxy.Group.id == None)  # noqa
    if perm_names not in ([ANY_PERMISSION], ANY_PERMISSION) and perm_names:
        query2 = query2.filter(
            models_proxy.UserResourcePermission.perm_name.in_(perm_names)
        )
    if resource_ids:
        query2 = query2.filter(
            models_proxy.UserResourcePermission.resource_id.in_(resource_ids)
        )
    if resource_types:
        query2 = query2.filter(models_proxy.Resource.resource_type.in_(resource_types))
    if user_ids:
        query2 = query2.filter(
            models_proxy.UserResourcePermission.user_id.in_(user_ids)
        )
    # NOTE(review): when BOTH skip flags are set, this falls into the elif
    # branch and returns direct user permissions despite skip_user_perms —
    # confirm whether that combination is intended
    if not skip_group_perms and not skip_user_perms:
        query = query.union(query2)
    elif skip_group_perms:
        query = query2
    users = [
        PermissionTuple(
            row.User,
            row.perm_name,
            row.type,
            row.Group or None,
            row.Resource,
            False,
            True,
        )
        for row in query
    ]
    return users
def permission_to_04_acls(permissions):
    """
    Legacy acl format kept for bw. compatibility

    Converts permission tuples into ``(principal, perm_name)`` pairs where
    the principal is the user id for user permissions and the string
    ``"group:<id>"`` for group permissions. Tuples of any other type are
    skipped.

    :param permissions:
    :return: list of 2-element tuples
    """
    acls = []
    for perm in permissions:
        if perm.type == "user":
            principal = perm.user.id
        elif perm.type == "group":
            principal = "group:%s" % perm.group.id
        else:
            continue
        acls.append((principal, perm.perm_name))
    return acls
def permission_to_pyramid_acls(permissions):
    """
    Returns a list of permissions in a format understood by pyramid

    Each entry is an ``(Allow, principal, perm_name)`` triple where the
    principal is the user id for user permissions and the string
    ``"group:<id>"`` for group permissions. Tuples of any other type are
    skipped.

    :param permissions:
    :return: list of 3-element tuples
    """
    acls = []
    for perm in permissions:
        if perm.type == "user":
            principal = perm.user.id
        elif perm.type == "group":
            principal = "group:%s" % perm.group.id
        else:
            continue
        acls.append((Allow, principal, perm.perm_name))
    return acls
from __future__ import with_statement, unicode_literals
import os
from alembic import context
from sqlalchemy import create_engine
from sqlalchemy.schema import MetaData
from logging.config import fileConfig
# pylint: disable=no-member
# this is the Alembic Config object, which provides
# access to the values within the .ini file in use.
config = context.config
# Interpret the config file for Python logging.
# This line sets up loggers basically.
if config.config_file_name:
fileConfig(config.config_file_name)
# add your model's MetaData object here
# for 'autogenerate' support
# from myapp import mymodel
# target_metadata = mymodel.Base.metadata
# Naming convention applied to autogenerated constraint/index names so they
# are deterministic across databases (SQLAlchemy constraint naming
# conventions); used by alembic autogenerate as target metadata.
target_metadata = MetaData(
    naming_convention={
        "ix": "ix_%(column_0_label)s",
        "uq": "uq_%(table_name)s_%(column_0_name)s",
        "ck": "ck_%(table_name)s_%(constraint_name)s",
        "fk": "fk_%(table_name)s_%(column_0_name)s_%(referred_table_name)s",
        "pk": "pk_%(table_name)s",
    }
)
def get_url():
    """Return the database URL for migrations.

    Prefers the ``DB_URL`` environment variable; an unset or empty value
    falls back to ``sqlalchemy.url`` from the alembic ini file.
    """
    return os.getenv("DB_URL") or config.get_main_option("sqlalchemy.url")
# target_metadata = None
# other values from the config, defined by the needs of env.py,
# can be acquired:
# my_important_option = config.get_main_option("my_important_option")
# ... etc.
def run_migrations_offline():
    """Run migrations in 'offline' mode.

    This configures the context with just a URL
    and not an Engine, though an Engine is acceptable
    here as well. By skipping the Engine creation
    we don't even need a DBAPI to be available.

    Calls to context.execute() here emit the given string to the
    script output.
    """
    url = get_url()
    context.configure(
        url=url,
        # dedicated version table keeps ziggurat's migration history separate
        # from the host application's own alembic versioning (see CHANGES)
        version_table="alembic_ziggurat_foundations_version",
        transaction_per_migration=True,
    )
    with context.begin_transaction():
        context.run_migrations()
def run_migrations_online():
    """Run migrations in 'online' mode.

    In this scenario we need to create an Engine
    and associate a connection with the context.
    """
    engine = create_engine(get_url())
    # Connection is a context manager: it is closed on exit even if a
    # migration raises, replacing the previous manual try/finally block.
    with engine.connect() as connection:
        context.configure(
            connection=connection,
            target_metadata=target_metadata,
            # dedicated version table keeps ziggurat's migration history
            # separate from the host application's own alembic versioning
            version_table="alembic_ziggurat_foundations_version",
            transaction_per_migration=True,
        )
        with context.begin_transaction():
            context.run_migrations()
# Entry point: alembic executes this module directly and we dispatch on the
# mode it requested.
if context.is_offline_mode():
    run_migrations_offline()
else:
    run_migrations_online()
from __future__ import unicode_literals
import sqlalchemy as sa
from alembic import op
from alembic.context import get_context
from sqlalchemy.dialects.mysql.base import MySQLDialect
# revision identifiers, used by Alembic.
revision = "3cfc41c4a5f0"
down_revision = "53927300c277"
def upgrade():
    """Replace the surrogate ``groups.id`` primary key with ``group_name``,
    widening ``group_name`` from Unicode(50) to Unicode(128)."""
    c = get_context()
    # MySQL-only: drop every foreign key that references groups.group_name
    # before touching the primary key. NOTE(review): presumably MySQL refuses
    # the PK change while dependent FKs exist — other dialects skip this step;
    # confirm against supported MySQL versions.
    if isinstance(c.connection.engine.dialect, MySQLDialect):
        insp = sa.inspect(c.connection.engine)
        for t in [
            "groups_permissions",
            "groups_resources_permissions",
            "users_groups",
            "resources",
        ]:
            for constraint in insp.get_foreign_keys(t):
                if constraint["referred_columns"] == ["group_name"]:
                    op.drop_constraint(constraint["name"], t, type_="foreignkey")
    op.drop_column("groups", "id")
    op.alter_column(
        "groups", "group_name", type_=sa.Unicode(128), existing_type=sa.Unicode(50)
    )
    op.create_primary_key("groups_pkey", "groups", columns=["group_name"])
    # MySQL-only: recreate the foreign keys dropped above, now pointing at
    # the new group_name primary key
    if isinstance(c.connection.engine.dialect, MySQLDialect):
        op.create_foreign_key(
            None,
            "groups_permissions",
            "groups",
            remote_cols=["group_name"],
            local_cols=["group_name"],
            onupdate="CASCADE",
            ondelete="CASCADE",
        )
        op.create_foreign_key(
            None,
            "groups_resources_permissions",
            "groups",
            remote_cols=["group_name"],
            local_cols=["group_name"],
            onupdate="CASCADE",
            ondelete="CASCADE",
        )
        op.create_foreign_key(
            None,
            "users_groups",
            "groups",
            remote_cols=["group_name"],
            local_cols=["group_name"],
            onupdate="CASCADE",
            ondelete="CASCADE",
        )
        op.create_foreign_key(
            None,
            "resources",
            "groups",
            remote_cols=["group_name"],
            local_cols=["owner_group_name"],
            onupdate="CASCADE",
            ondelete="SET NULL",
        )
def downgrade():
    """No downgrade path is provided for this migration (no-op)."""
    pass
from __future__ import unicode_literals
import sqlalchemy as sa
from alembic import op
# downgrade revision identifier, used by Alembic.
revision = "2bb1ba973f0b"
down_revision = None
def upgrade():
    """Create the initial ziggurat_foundations schema: groups, users,
    resources and the permission/membership link tables."""
    # groups of users
    op.create_table(
        "groups",
        sa.Column("id", sa.Integer, primary_key=True, autoincrement=True),
        sa.Column("group_name", sa.Unicode(50), unique=True),
        sa.Column("description", sa.Text()),
        sa.Column("member_count", sa.Integer, nullable=False, default=0),
    )
    # global (non-resource) permissions granted to a group
    op.create_table(
        "groups_permissions",
        sa.Column(
            "group_name",
            sa.Unicode(50),
            sa.ForeignKey("groups.group_name", onupdate="CASCADE", ondelete="CASCADE"),
            primary_key=True,
        ),
        sa.Column("perm_name", sa.Unicode(30), primary_key=True),
    )
    # user accounts
    op.create_table(
        "users",
        sa.Column("id", sa.Integer, primary_key=True, autoincrement=True),
        sa.Column("user_name", sa.Unicode(30), unique=True),
        sa.Column("user_password", sa.Unicode(40)),
        sa.Column("email", sa.Unicode(100), nullable=False, unique=True),
        sa.Column("status", sa.SmallInteger(), nullable=False),
        sa.Column("security_code", sa.Unicode(40), default="default"),
        sa.Column(
            "last_login_date",
            sa.TIMESTAMP(timezone=False),
            default=sa.sql.func.now(),
            server_default=sa.func.now(),
        ),
    )
    # global (non-resource) permissions granted directly to a user
    op.create_table(
        "users_permissions",
        sa.Column(
            "user_name",
            sa.Unicode(50),
            sa.ForeignKey("users.user_name", onupdate="CASCADE", ondelete="CASCADE"),
            primary_key=True,
        ),
        sa.Column("perm_name", sa.Unicode(30), primary_key=True),
    )
    # many-to-many user <-> group membership
    op.create_table(
        "users_groups",
        sa.Column(
            "group_name",
            sa.Unicode(50),
            sa.ForeignKey("groups.group_name", onupdate="CASCADE", ondelete="CASCADE"),
            primary_key=True,
        ),
        sa.Column(
            "user_name",
            sa.Unicode(30),
            sa.ForeignKey("users.user_name", onupdate="CASCADE", ondelete="CASCADE"),
            primary_key=True,
        ),
    )
    # arbitrary protected entities; rows survive owner deletion (SET NULL)
    op.create_table(
        "resources",
        sa.Column(
            "resource_id",
            sa.BigInteger(),
            primary_key=True,
            nullable=False,
            autoincrement=True,
        ),
        sa.Column("resource_name", sa.Unicode(100), nullable=False),
        sa.Column("resource_type", sa.Unicode(30), nullable=False),
        sa.Column(
            "owner_group_name",
            sa.Unicode(50),
            sa.ForeignKey("groups.group_name", onupdate="CASCADE", ondelete="SET NULL"),
        ),
        sa.Column(
            "owner_user_name",
            sa.Unicode(30),
            sa.ForeignKey("users.user_name", onupdate="CASCADE", ondelete="SET NULL"),
        ),
    )
    # per-resource permissions granted to a group
    op.create_table(
        "groups_resources_permissions",
        sa.Column(
            "group_name",
            sa.Unicode(50),
            sa.ForeignKey("groups.group_name", onupdate="CASCADE", ondelete="CASCADE"),
            primary_key=True,
        ),
        sa.Column(
            "resource_id",
            sa.BigInteger(),
            sa.ForeignKey(
                "resources.resource_id", onupdate="CASCADE", ondelete="CASCADE"
            ),
            primary_key=True,
            autoincrement=False,
        ),
        sa.Column("perm_name", sa.Unicode(50), primary_key=True),
    )
    # per-resource permissions granted directly to a user
    op.create_table(
        "users_resources_permissions",
        sa.Column(
            "user_name",
            sa.Unicode(50),
            sa.ForeignKey("users.user_name", onupdate="CASCADE", ondelete="CASCADE"),
            primary_key=True,
        ),
        sa.Column(
            "resource_id",
            sa.BigInteger(),
            sa.ForeignKey(
                "resources.resource_id", onupdate="CASCADE", ondelete="CASCADE"
            ),
            primary_key=True,
            autoincrement=False,
        ),
        sa.Column("perm_name", sa.Unicode(50), primary_key=True),
    )
def downgrade():
    """No downgrade path is provided for this migration (no-op)."""
    pass
from __future__ import unicode_literals
from alembic import op
from alembic.context import get_context
from sqlalchemy.dialects.postgresql.base import PGDialect
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = "438c27ec1c9"
down_revision = "439766f6104d"
# correct keys for pre 0.5.6 naming convention
def _rename_pkey_index(insp, table):
    """Rename the primary key index from the old PostgreSQL default
    ``<table>_pkey`` to the naming-convention ``pk_<table>``, but only if
    the old default name is still in place."""
    old_name = table + "_pkey"
    if insp.get_pk_constraint(table)["name"] == old_name:
        op.execute("ALTER INDEX {} RENAME to pk_{}".format(old_name, table))


def _rename_unique_index(insp, table, old_name, new_name):
    """Rename a unique constraint's backing index when it still exists
    under its legacy name."""
    if old_name in [uc["name"] for uc in insp.get_unique_constraints(table)]:
        op.execute("ALTER INDEX {} RENAME to {}".format(old_name, new_name))


def _replace_perm_name_check(table, old_name, new_name):
    """Drop the legacy-named CHECK constraint enforcing lowercase
    ``perm_name`` values and recreate it under the naming-convention name."""
    op.drop_constraint(old_name, table)
    op.execute(
        """
    ALTER TABLE {table}
    ADD CONSTRAINT {name} CHECK (perm_name::text = lower(perm_name::text));
    """.format(
            table=table, name=new_name
        )
    )


def _replace_fkey(insp, table, old_name, referent, remote_cols, local_cols, ondelete):
    """If the legacy-named foreign key still exists on *table*, drop it and
    recreate it unnamed so Alembic applies the naming convention."""
    if old_name in [fk["name"] for fk in insp.get_foreign_keys(table)]:
        op.drop_constraint(old_name, table, type_="foreignkey")
        op.create_foreign_key(
            None,
            table,
            referent,
            remote_cols=remote_cols,
            local_cols=local_cols,
            onupdate="CASCADE",
            ondelete=ondelete,
        )


def upgrade():
    """Rename pre-0.5.6 constraints and indexes to the project's SQLAlchemy
    naming convention.

    PostgreSQL only - other dialects are left untouched.  Every rename is
    guarded by an inspector check so the migration is safe to run against
    schemas that were already created with the new names.
    """
    c = get_context()
    insp = sa.inspect(c.connection.engine)

    if not isinstance(c.connection.engine.dialect, PGDialect):
        return

    op.execute(
        "ALTER INDEX groups_unique_group_name_key RENAME to ix_groups_uq_group_name_key"
    )  # noqa

    # CHECK constraints enforcing lowercase permission names
    _replace_perm_name_check(
        "groups_permissions",
        "groups_permissions_perm_name_check",
        "ck_groups_permissions_perm_name",
    )
    _replace_perm_name_check(
        "groups_resources_permissions",
        "groups_resources_permissions_perm_name_check",
        "ck_groups_resources_permissions_perm_name",
    )
    # note: legacy name really is "user_permissions..." (singular user)
    _replace_perm_name_check(
        "users_permissions",
        "user_permissions_perm_name_check",
        "ck_user_permissions_perm_name",
    )
    _replace_perm_name_check(
        "users_resources_permissions",
        "users_resources_permissions_perm_name_check",
        "ck_users_resources_permissions_perm_name",
    )

    # functional unique indexes on users
    op.execute("ALTER INDEX users_email_key2 RENAME to ix_users_uq_lower_email")
    op.execute(
        "ALTER INDEX users_username_uq2 RENAME to ix_users_ux_lower_username"
    )  # noqa

    # primary keys: <table>_pkey -> pk_<table>
    for table in (
        "groups_permissions",
        "groups",
        "groups_resources_permissions",
        "users_groups",
        "users_permissions",
        "users_resources_permissions",
        "external_identities",
    ):
        _rename_pkey_index(insp, table)

    # foreign keys: drop the legacy name, recreate under naming convention
    _replace_fkey(
        insp,
        "external_identities",
        "external_identities_local_user_name_fkey",
        "users",
        remote_cols=["user_name"],
        local_cols=["local_user_name"],
        ondelete="CASCADE",
    )
    _replace_fkey(
        insp,
        "groups_permissions",
        "groups_permissions_group_id_fkey",
        "groups",
        remote_cols=["id"],
        local_cols=["group_id"],
        ondelete="CASCADE",
    )
    _rename_unique_index(insp, "groups", "groups_group_name_key", "uq_groups_group_name")
    _replace_fkey(
        insp,
        "groups_resources_permissions",
        "groups_resources_permissions_group_id_fkey",
        "groups",
        remote_cols=["id"],
        local_cols=["group_id"],
        ondelete="CASCADE",
    )
    _replace_fkey(
        insp,
        "groups_resources_permissions",
        "groups_resources_permissions_resource_id_fkey",
        "resources",
        remote_cols=["resource_id"],
        local_cols=["resource_id"],
        ondelete="CASCADE",
    )
    _rename_pkey_index(insp, "resources")
    # ownership FKs keep rows alive when the owner disappears -> SET NULL
    _replace_fkey(
        insp,
        "resources",
        "resources_owner_group_id_fkey",
        "groups",
        remote_cols=["id"],
        local_cols=["owner_group_id"],
        ondelete="SET NULL",
    )
    _replace_fkey(
        insp,
        "resources",
        "resources_owner_user_id_fkey",
        "users",
        remote_cols=["id"],
        local_cols=["owner_user_id"],
        ondelete="SET NULL",
    )
    _replace_fkey(
        insp,
        "resources",
        "resources_parent_id_fkey",
        "resources",
        remote_cols=["resource_id"],
        local_cols=["parent_id"],
        ondelete="SET NULL",
    )
    _rename_pkey_index(insp, "users")
    _rename_unique_index(insp, "users", "users_email_key", "uq_users_email")
    _rename_unique_index(insp, "users", "users_user_name_key", "uq_users_user_name")
    _replace_fkey(
        insp,
        "users_groups",
        "users_groups_group_id_fkey",
        "groups",
        remote_cols=["id"],
        local_cols=["group_id"],
        ondelete="CASCADE",
    )
    _replace_fkey(
        insp,
        "users_groups",
        "users_groups_user_id_fkey",
        "users",
        remote_cols=["id"],
        local_cols=["user_id"],
        ondelete="CASCADE",
    )
    _replace_fkey(
        insp,
        "users_permissions",
        "users_permissions_user_id_fkey",
        "users",
        remote_cols=["id"],
        local_cols=["user_id"],
        ondelete="CASCADE",
    )
    _replace_fkey(
        insp,
        "users_resources_permissions",
        "users_resources_permissions_resource_id_fkey",
        "resources",
        remote_cols=["resource_id"],
        local_cols=["resource_id"],
        ondelete="CASCADE",
    )
    _replace_fkey(
        insp,
        "users_resources_permissions",
        "users_resources_permissions_user_id_fkey",
        "users",
        remote_cols=["id"],
        local_cols=["user_id"],
        ondelete="CASCADE",
    )
def downgrade():
    """No-op: the constraint renames are not reverted."""
    pass
from __future__ import unicode_literals
import sqlalchemy as sa
from alembic import op
from alembic.context import get_context
from sqlalchemy.dialects.mysql.base import MySQLDialect
# revision identifiers, used by Alembic.
revision = "20671b28c538"
down_revision = "4c10d97c509"
def _reflect_table(connection, name):
    """Reflect table *name* against the live migration connection."""
    return sa.Table(name, sa.MetaData(), autoload=True, autoload_with=connection)


def _backfill_id(target, ref, id_col, target_name_col, ref_name_col):
    """Correlated UPDATE copying ``ref.id`` into ``target.<id_col>`` where
    the legacy name columns match."""
    stmt = (
        target.update()
        .values(**{id_col: ref.c.id})
        .where(ref.c[ref_name_col] == target.c[target_name_col])
    )
    op.execute(stmt)


def _drop_fkeys_referring(insp, tables, referred_column_sets):
    """Drop every foreign key on *tables* whose referred columns match one
    of *referred_column_sets* (each a list of column names)."""
    for table in tables:
        for constraint in insp.get_foreign_keys(table):
            if constraint["referred_columns"] in referred_column_sets:
                op.drop_constraint(constraint["name"], table, type_="foreignkey")


def _add_id_column(is_mysql, table, column, reftable):
    """Add an integer column referencing ``<reftable>.id``.

    MySQL cannot add the column with an inline FK before the new composite
    primary key exists, so on MySQL only the bare column is added here and
    the FK is created later via :func:`_mysql_create_fkey`.
    """
    if is_mysql:
        op.add_column(table, sa.Column(column, sa.Integer()))
    else:
        op.add_column(
            table,
            sa.Column(
                column,
                sa.Integer(),
                sa.ForeignKey(
                    reftable + ".id", onupdate="CASCADE", ondelete="CASCADE"
                ),
            ),
        )


def _mysql_create_fkey(is_mysql, table, reftable, column):
    """MySQL only: create the foreign key deferred by :func:`_add_id_column`."""
    if is_mysql:
        op.create_foreign_key(
            None,
            table,
            reftable,
            remote_cols=["id"],
            local_cols=[column],
            onupdate="CASCADE",
            ondelete="CASCADE",
        )


def upgrade():
    """Replace name based links (``user_name``/``group_name``) with integer
    id foreign keys throughout the permission and ownership tables.

    Steps, in order:

    1. give ``groups`` a surrogate integer ``id`` primary key
    2. (MySQL) drop FKs that still point at the name columns
    3. add the new ``*_id`` columns and backfill them from the name columns
    4. rebuild the composite primary keys around the id columns
    5. drop the obsolete name columns
    """
    c = get_context()
    insp = sa.inspect(c.connection.engine)
    is_mysql = isinstance(c.connection.engine.dialect, MySQLDialect)

    # current primary key names (schemas created before the naming
    # convention was introduced may differ from the defaults)
    groups_permissions_pkey = insp.get_pk_constraint("groups_permissions")["name"]
    groups_pkey = insp.get_pk_constraint("groups")["name"]
    groups_resources_permissions_pkey = insp.get_pk_constraint(
        "groups_resources_permissions"
    )["name"]
    users_groups_pkey = insp.get_pk_constraint("users_groups")["name"]
    users_permissions_pkey = insp.get_pk_constraint("users_permissions")["name"]
    users_resources_permissions_pkey = insp.get_pk_constraint(
        "users_resources_permissions"
    )["name"]

    # 1. surrogate integer primary key on groups
    op.drop_constraint("groups_pkey", "groups", type_="primary")
    if is_mysql:
        # MySQL only allows autoincrement on a column that already is the
        # primary key, hence the add -> pkey -> alter dance
        op.add_column(
            "groups", sa.Column("id", sa.Integer, primary_key=True, autoincrement=False)
        )
        op.create_primary_key(groups_pkey, "groups", columns=["id"])
        op.alter_column(
            "groups",
            "id",
            type_=sa.Integer,
            existing_type=sa.Integer,
            autoincrement=True,
            existing_autoincrement=False,
            nullable=False,
        )
    else:
        op.add_column(
            "groups", sa.Column("id", sa.Integer, primary_key=True, autoincrement=True)
        )
        op.create_primary_key(groups_pkey, "groups", columns=["id"])

    # 2. (MySQL) name columns cannot be altered/dropped while FKs reference them
    if is_mysql:
        _drop_fkeys_referring(
            insp,
            ["groups_permissions", "groups_resources_permissions", "users_groups"],
            [["group_name"]],
        )
        _drop_fkeys_referring(
            insp,
            ["users_resources_permissions", "users_permissions", "users_groups"],
            [["user_name"]],
        )
        _drop_fkeys_referring(insp, ["resources"], [["user_name"], ["group_name"]])

    # 3. ownership columns on resources (inline FK works on every dialect
    #    here because no primary key change is involved)
    op.add_column(
        "resources",
        sa.Column(
            "owner_user_id",
            sa.Integer(),
            sa.ForeignKey("users.id", onupdate="CASCADE", ondelete="SET NULL"),
        ),
    )
    op.add_column(
        "resources",
        sa.Column(
            "owner_group_id",
            sa.Integer(),
            sa.ForeignKey("groups.id", onupdate="CASCADE", ondelete="SET NULL"),
        ),
    )
    resources_table = _reflect_table(c.connection, "resources")
    users_table = _reflect_table(c.connection, "users")
    groups_table = _reflect_table(c.connection, "groups")
    _backfill_id(
        resources_table, users_table, "owner_user_id", "owner_user_name", "user_name"
    )
    _backfill_id(
        resources_table, groups_table, "owner_group_id", "owner_group_name", "group_name"
    )

    # groups_permissions: group_name -> group_id
    _add_id_column(is_mysql, "groups_permissions", "group_id", "groups")
    groups_permissions_table = _reflect_table(c.connection, "groups_permissions")
    _backfill_id(
        groups_permissions_table, groups_table, "group_id", "group_name", "group_name"
    )
    op.drop_constraint(groups_permissions_pkey, "groups_permissions", type_="primary")
    op.create_primary_key(
        groups_permissions_pkey, "groups_permissions", columns=["group_id", "perm_name"]
    )
    _mysql_create_fkey(is_mysql, "groups_permissions", "groups", "group_id")

    # groups_resources_permissions: group_name -> group_id
    _add_id_column(is_mysql, "groups_resources_permissions", "group_id", "groups")
    groups_resources_permissions_table = _reflect_table(
        c.connection, "groups_resources_permissions"
    )
    _backfill_id(
        groups_resources_permissions_table,
        groups_table,
        "group_id",
        "group_name",
        "group_name",
    )
    op.drop_constraint(
        groups_resources_permissions_pkey,
        "groups_resources_permissions",
        type_="primary",
    )
    op.create_primary_key(
        groups_resources_permissions_pkey,
        "groups_resources_permissions",
        columns=["group_id", "resource_id", "perm_name"],
    )
    _mysql_create_fkey(is_mysql, "groups_resources_permissions", "groups", "group_id")

    # users_groups: group_name/user_name -> group_id/user_id
    _add_id_column(is_mysql, "users_groups", "group_id", "groups")
    users_groups_table = _reflect_table(c.connection, "users_groups")
    _backfill_id(
        users_groups_table, groups_table, "group_id", "group_name", "group_name"
    )
    _add_id_column(is_mysql, "users_groups", "user_id", "users")
    # re-reflect so the freshly added user_id column is visible
    users_groups_table = _reflect_table(c.connection, "users_groups")
    _backfill_id(users_groups_table, users_table, "user_id", "user_name", "user_name")
    op.drop_constraint(users_groups_pkey, "users_groups", type_="primary")
    op.create_primary_key(
        users_groups_pkey, "users_groups", columns=["user_id", "group_id"]
    )
    _mysql_create_fkey(is_mysql, "users_groups", "groups", "group_id")
    _mysql_create_fkey(is_mysql, "users_groups", "users", "user_id")

    # users_permissions: user_name -> user_id
    _add_id_column(is_mysql, "users_permissions", "user_id", "users")
    users_permissions_table = _reflect_table(c.connection, "users_permissions")
    _backfill_id(
        users_permissions_table, users_table, "user_id", "user_name", "user_name"
    )
    op.drop_constraint(users_permissions_pkey, "users_permissions", type_="primary")
    op.create_primary_key(
        users_permissions_pkey, "users_permissions", columns=["user_id", "perm_name"]
    )
    _mysql_create_fkey(is_mysql, "users_permissions", "users", "user_id")

    # users_resources_permissions: user_name -> user_id
    _add_id_column(is_mysql, "users_resources_permissions", "user_id", "users")
    users_resources_permissions_table = _reflect_table(
        c.connection, "users_resources_permissions"
    )
    _backfill_id(
        users_resources_permissions_table,
        users_table,
        "user_id",
        "user_name",
        "user_name",
    )
    op.drop_constraint(
        users_resources_permissions_pkey, "users_resources_permissions", type_="primary"
    )
    op.create_primary_key(
        users_resources_permissions_pkey,
        "users_resources_permissions",
        columns=["user_id", "resource_id", "perm_name"],
    )
    _mysql_create_fkey(is_mysql, "users_resources_permissions", "users", "user_id")

    # 5. drop the legacy name columns
    op.drop_column("resources", "owner_user_name")
    op.drop_column("resources", "owner_group_name")
    op.drop_column("groups_permissions", "group_name")
    op.drop_column("groups_resources_permissions", "group_name")
    op.drop_column("users_resources_permissions", "user_name")
    op.drop_column("users_groups", "group_name")
    op.drop_column("users_groups", "user_name")
    op.drop_column("users_permissions", "user_name")
def downgrade():
    """No-op: the name-column to id-column conversion is not reverted."""
    pass
from __future__ import unicode_literals
import sqlalchemy as sa
from alembic import op
from alembic.context import get_context
from sqlalchemy.dialects.mysql.base import MySQLDialect
# revision identifiers, used by Alembic.
revision = "53927300c277"
down_revision = "54d08f9adc8c"
def upgrade():
    """Shrink the resource id columns from BigInteger to Integer.

    Foreign keys referencing ``resources.resource_id`` are dropped up
    front (required at least on MySQL before the referenced column can be
    altered) and recreated afterwards on MySQL only.
    """
    c = get_context()
    # on MySQL the ALTER must re-state autoincrement for the PK column
    kwargs = {}
    if isinstance(c.connection.engine.dialect, MySQLDialect):
        kwargs["autoincrement"] = True
    insp = sa.inspect(c.connection.engine)
    # drop every FK that points at resources.resource_id
    for t in [
        "groups_resources_permissions",
        "users_resources_permissions",
        "resources",
    ]:
        for constraint in insp.get_foreign_keys(t):
            if constraint["referred_columns"] == ["resource_id"]:
                op.drop_constraint(constraint["name"], t, type_="foreignkey")
    # narrow the primary key column first, then every column referencing it
    op.alter_column(
        "resources",
        "resource_id",
        type_=sa.Integer(),
        existing_type=sa.BigInteger(),
        nullable=False,
        **kwargs
    )
    op.alter_column(
        "resources", "parent_id", type_=sa.Integer(), existing_type=sa.BigInteger()
    )
    op.alter_column(
        "users_resources_permissions",
        "resource_id",
        type_=sa.Integer(),
        existing_type=sa.BigInteger(),
        nullable=False,
    )
    op.alter_column(
        "groups_resources_permissions",
        "resource_id",
        type_=sa.Integer(),
        existing_type=sa.BigInteger(),
        nullable=False,
    )
    # recreate foreign keys for mysql
    if isinstance(c.connection.engine.dialect, MySQLDialect):
        op.create_foreign_key(
            "groups_resources_permissions_resource_fk",
            "groups_resources_permissions",
            "resources",
            ["resource_id"],
            ["resource_id"],
            onupdate="CASCADE",
            ondelete="CASCADE",
        )
        op.create_foreign_key(
            "users_resources_permissions_fk",
            "users_resources_permissions",
            "resources",
            ["resource_id"],
            ["resource_id"],
            onupdate="CASCADE",
            ondelete="CASCADE",
        )
def downgrade():
    """No-op: the column type narrowing is not reverted."""
    pass
from __future__ import unicode_literals
import sqlalchemy as sa
from sqlalchemy.ext.declarative import declared_attr
from ziggurat_foundations.exc import ZigguratException
from ziggurat_foundations.models.base import BaseModel
__all__ = ["ResourceMixin"]
class ResourceMixin(BaseModel):
    """
    Mixin for Resource model

    Declares the ``resources`` table: a tree of typed, ordered resources
    that may be owned by a user or a group and that carry per-user and
    per-group permission rows.  Concrete subclasses provide the
    polymorphic identity stored in ``resource_type`` and may restrict the
    assignable permission names via ``__possible_permissions__``.
    """

    # permission names that may be assigned to this resource type;
    # enforced by validate_permission() below (empty tuple = nothing allowed)
    __possible_permissions__ = ()

    @declared_attr
    def __tablename__(self):
        """Name of the database table."""
        return "resources"

    @declared_attr
    def resource_id(self):
        """Autoincrementing integer primary key of the resource."""
        return sa.Column(
            sa.Integer(), primary_key=True, nullable=False, autoincrement=True
        )

    @declared_attr
    def parent_id(self):
        """Optional self-referential FK to the parent resource (tree
        structure); nulled out when the parent is deleted."""
        return sa.Column(
            sa.Integer(),
            sa.ForeignKey(
                "resources.resource_id", onupdate="CASCADE", ondelete="SET NULL"
            ),
        )

    @declared_attr
    def ordering(self):
        """Position of the resource among its siblings (defaults to 0)."""
        return sa.Column(sa.Integer(), default=0, nullable=False)

    @declared_attr
    def resource_name(self):
        """Human readable name of the resource (required)."""
        return sa.Column(sa.Unicode(100), nullable=False)

    @declared_attr
    def resource_type(self):
        """Polymorphic discriminator column (see ``__mapper_args__``)."""
        return sa.Column(sa.Unicode(30), nullable=False)

    @declared_attr
    def owner_group_id(self):
        """FK to the owning group; nulled out when the group is removed."""
        return sa.Column(
            sa.Integer,
            sa.ForeignKey("groups.id", onupdate="CASCADE", ondelete="SET NULL"),
            index=True,
        )

    @declared_attr
    def owner_user_id(self):
        """FK to the owning user; nulled out when the user is removed."""
        return sa.Column(
            sa.Integer,
            sa.ForeignKey("users.id", onupdate="CASCADE", ondelete="SET NULL"),
            index=True,
        )

    @declared_attr
    def group_permissions(self):
        """ returns all group permissions for this resource"""
        return sa.orm.relationship(
            "GroupResourcePermission",
            cascade="all, delete-orphan",
            passive_deletes=True,
            passive_updates=True,
        )

    @declared_attr
    def user_permissions(self):
        """ returns all user permissions for this resource"""
        return sa.orm.relationship(
            "UserResourcePermission",
            cascade="all, delete-orphan",
            passive_deletes=True,
            passive_updates=True,
        )

    @declared_attr
    def groups(self):
        """ returns all groups that have permissions for this resource"""
        return sa.orm.relationship(
            "Group",
            secondary="groups_resources_permissions",
            passive_deletes=True,
            passive_updates=True,
            overlaps="groups,resource_permissions,group_permissions",
        )

    @declared_attr
    def users(self):
        """ returns all users that have permissions for this resource"""
        return sa.orm.relationship(
            "User",
            secondary="users_resources_permissions",
            passive_deletes=True,
            passive_updates=True,
            overlaps="user_permissions"
        )

    # dispatch ORM polymorphism on the resource_type column
    __mapper_args__ = {"polymorphic_on": resource_type}
    __table_args__ = {"mysql_engine": "InnoDB", "mysql_charset": "utf8"}

    def __repr__(self):
        """Debug-friendly representation: type, name, id and position."""
        return "<Resource: %s, %s, id: %s position: %s>" % (
            self.resource_type,
            self.resource_name,
            self.resource_id,
            self.ordering,
        )

    @property
    def __acl__(self):
        """Access control list - concrete resource classes must override."""
        raise ZigguratException("Model should implement __acl__")

    @sa.orm.validates("user_permissions", "group_permissions")
    def validate_permission(self, key, permission):
        """ validate if resource can have specific permission """
        if permission.perm_name not in self.__possible_permissions__:
            raise AssertionError(
                "perm_name is not one of {}".format(self.__possible_permissions__)
            )
        return permission
from __future__ import unicode_literals
import sqlalchemy as sa
from ziggurat_foundations.exc import ZigguratSessionException
class BaseModel(object):
    """Common helper base class for all ziggurat models.

    Supplies column introspection, dict/appstruct conversion and
    population helpers useful for form libraries such as deform,
    colander or wtforms, plus thin session convenience wrappers.
    """

    @classmethod
    def _get_keys(cls):
        """Return the mapped column names of this model."""
        return sa.orm.class_mapper(cls).c.keys()

    @classmethod
    def get_primary_key(cls):
        """Return the primary key columns of the mapped table."""
        return sa.orm.class_mapper(cls).primary_key

    def get_dict(self, exclude_keys=None, include_keys=None):
        """
        Return a dictionary of column name -> value for this instance.

        If ``include_keys`` is falsy, all columns are returned.

        :param exclude_keys: (optional) column names to leave out
        :param include_keys: (optional) the only column names to include
        :return: dict
        """
        skipped = exclude_keys or []
        wanted = include_keys or []
        return {
            key: getattr(self, key)
            for key in self._get_keys()
            if key not in skipped and (key in wanted or not include_keys)
        }

    def get_appstruct(self):
        """Return ``(column name, value)`` tuples for this instance."""
        return [(key, getattr(self, key)) for key in self._get_keys()]

    def populate_obj(self, appstruct, exclude_keys=None, include_keys=None):
        """
        Update instance attributes *for column names that exist* on this
        model and are present as keys in ``appstruct``.

        :param appstruct: (dictionary) source of new values
        :param exclude_keys: (optional) column names never to update
        :param include_keys: (optional) the only column names to update
        :return:
        """
        skipped = exclude_keys or []
        wanted = include_keys or []
        for key in self._get_keys():
            if key not in appstruct or key in skipped:
                continue
            if include_keys and key not in wanted:
                continue
            setattr(self, key, appstruct[key])

    def populate_obj_from_obj(self, instance, exclude_keys=None, include_keys=None):
        """
        Update instance attributes *for column names that exist* on this
        model and are attributes of the passed ``instance``.

        :param instance: source object to copy values from
        :param exclude_keys: (optional) column names never to update
        :param include_keys: (optional) the only column names to update
        :return:
        """
        skipped = exclude_keys or []
        wanted = include_keys or []
        for key in self._get_keys():
            if not hasattr(instance, key) or key in skipped:
                continue
            if include_keys and key not in wanted:
                continue
            setattr(self, key, getattr(instance, key))

    def get_db_session(self, session=None):
        """
        Locate a session via the module level
        :meth:`~ziggurat_foundations.models.get_db_session` helper.

        :param session:
        :return:
        """
        return get_db_session(session, self)

    def persist(self, flush=False, db_session=None):
        """
        Add this object to the session; a freshly created object will be
        persisted to storage on commit.

        Note: the session is resolved without consulting the (possibly
        still unattached) instance itself.

        :param flush: boolean - flush the session immediately when true
        :param db_session:
        :return:
        """
        db_session = get_db_session(db_session)
        db_session.add(self)
        if flush:
            db_session.flush()

    def delete(self, db_session=None):
        """
        Delete this object via the session; the row is permanently
        removed from storage on commit.

        :param db_session:
        :return:
        """
        get_db_session(db_session, self).delete(self)
def get_db_session(session=None, obj=None):
    """
    Utility function that attempts to locate a sqlalchemy session created
    or passed in one of a few ways, tried in this order:

    * the session attached to *obj*, when an object argument is given
    * the session passed in as *session*
    * the pylons-like threadlocal called ``DBSession``
    * otherwise an exception is raised

    :param session:
    :param obj:
    :return:
    """
    from ziggurat_foundations import models

    # 1. session bound to the instance itself
    if obj:
        return sa.orm.session.object_session(obj)
    # 2. session explicitly supplied by the caller
    if session:
        return session
    # 3. package-level threadlocal session
    if models.DBSession:
        return models.DBSession
    raise ZigguratSessionException("No Session found")
from __future__ import unicode_literals
import sqlalchemy as sa
from sqlalchemy.ext.declarative import declared_attr
from ziggurat_foundations.models.base import BaseModel
__all__ = ["GroupMixin"]
class GroupMixin(BaseModel):
    """ Mixin for Group model

    Declares the ``groups`` table plus the relationships tying a group to
    its users, its application-wide permissions, its per-resource
    permissions and the resources it owns.
    """

    __table_args__ = {"mysql_engine": "InnoDB", "mysql_charset": "utf8"}

    @declared_attr
    def __tablename__(self):
        """Name of the database table."""
        return "groups"

    # lists app wide permissions we might want to assign to groups;
    # enforced by validate_permission() below (empty tuple = nothing allowed)
    __possible_permissions__ = ()

    @declared_attr
    def id(self):
        """Integer primary key of the group."""
        return sa.Column(sa.Integer(), primary_key=True)

    @declared_attr
    def group_name(self):
        """Unique, required name of the group."""
        return sa.Column(sa.Unicode(128), nullable=False, unique=True)

    @declared_attr
    def description(self):
        """Optional free-form description of the group."""
        return sa.Column(sa.Text())

    @declared_attr
    def member_count(self):
        """Member counter (defaults to 0; not recomputed by the ORM -
        presumably kept in sync by application code, verify at call sites)."""
        return sa.Column(sa.Integer, nullable=False, default=0)

    @declared_attr
    def users(self):
        """ relationship for users belonging to this group"""
        return sa.orm.relationship(
            "User",
            secondary="users_groups",
            order_by="User.user_name",
            passive_deletes=True,
            passive_updates=True,
            backref="groups",
        )

    # dynamic property - useful
    @declared_attr
    def users_dynamic(self):
        """ dynamic relationship for users belonging to this group
        one can use filter """
        return sa.orm.relationship(
            "User", secondary="users_groups", order_by="User.user_name", lazy="dynamic", overlaps="groups,users"
        )

    @declared_attr
    def permissions(self):
        """ non-resource permissions assigned to this group"""
        return sa.orm.relationship(
            "GroupPermission",
            backref="groups",
            cascade="all, delete-orphan",
            passive_deletes=True,
            passive_updates=True,
        )

    @declared_attr
    def resource_permissions(self):
        """ permissions to specific resources this group has"""
        return sa.orm.relationship(
            "GroupResourcePermission",
            backref="groups",
            cascade="all, delete-orphan",
            passive_deletes=True,
            passive_updates=True,
        )

    @declared_attr
    def resources(self):
        """ Returns all resources directly owned by group, can be used to assign
        ownership of new resources::

            user.resources.append(resource) """
        return sa.orm.relationship(
            "Resource",
            cascade="all",
            passive_deletes=True,
            passive_updates=True,
            backref="owner_group",
        )

    @declared_attr
    def resources_dynamic(self):
        """ Same as ``resources`` but as a dynamic relationship, so the
        result can be filtered before loading """
        return sa.orm.relationship(
            "Resource",
            cascade="all",
            passive_deletes=True,
            passive_updates=True,
            lazy="dynamic",
            overlaps="owner_group,resources"
        )

    @sa.orm.validates("permissions")
    def validate_permission(self, key, permission):
        """ validates if group can get assigned with permission"""
        if permission.perm_name not in self.__possible_permissions__:
            raise AssertionError(
                "perm_name is not one of {}".format(self.__possible_permissions__)
            )
        return permission

    def __repr__(self):
        """Debug-friendly representation: group name and id."""
        return "<Group: %s, %s>" % (self.group_name, self.id)
from __future__ import unicode_literals
from datetime import datetime
import sqlalchemy as sa
from sqlalchemy.ext.declarative import declared_attr
from ziggurat_foundations.models.base import BaseModel
__all__ = ["UserMixin"]
class UserMixin(BaseModel):
""" Base mixin for User object representation.
It supplies all the basic functionality from password hash generation
and matching to utility methods used for querying database for users
and their permissions or resources they have access to. It is meant
to be extended with other application specific properties"""
__mapper_args__ = {}
__table_args__ = {"mysql_engine": "InnoDB", "mysql_charset": "utf8"}
    @declared_attr
    def __tablename__(self):
        """Name of the database table backing the user model."""
        return "users"
    @declared_attr
    def id(self):
        """ Unique identifier of user object (autoincrementing primary key)"""
        return sa.Column(sa.Integer, primary_key=True, autoincrement=True)
    @declared_attr
    def user_name(self):
        """ Unique user name of the user object"""
        return sa.Column(sa.Unicode(128), unique=True)
    @declared_attr
    def user_password(self):
        """ Password hash for user object (never the plaintext password) """
        return sa.Column(sa.Unicode(256))
    @declared_attr
    def email(self):
        """ Email for user object (unique and required) """
        return sa.Column(sa.Unicode(100), nullable=False, unique=True)
    @declared_attr
    def status(self):
        """ Status of user object (small integer, defaults to 1) """
        return sa.Column(sa.SmallInteger(), nullable=False, default=1)
@declared_attr
def security_code(self):
""" Security code user object (can be used for password reset etc. """
return sa.Column(sa.Unicode(256), default="default")
@declared_attr
def last_login_date(self):
""" Date of user's last login """
return sa.Column(
sa.TIMESTAMP(timezone=False),
default=lambda x: datetime.utcnow(),
server_default=sa.func.now(),
)
@declared_attr
def registered_date(self):
""" Date of user's registration """
return sa.Column(
sa.TIMESTAMP(timezone=False),
default=lambda x: datetime.utcnow(),
server_default=sa.func.now(),
)
@declared_attr
def security_code_date(self):
""" Date of user's security code update """
return sa.Column(
sa.TIMESTAMP(timezone=False),
default=datetime(2000, 1, 1),
server_default="2000-01-01 01:01",
)
def __repr__(self):
return "<User: %s>" % self.user_name
@declared_attr
def groups_dynamic(self):
""" returns dynamic relationship for groups - allowing for
filtering of data """
return sa.orm.relationship(
"Group",
secondary="users_groups",
lazy="dynamic",
passive_deletes=True,
passive_updates=True,
overlaps="groups,users,users_dynamic"
)
@declared_attr
def user_permissions(self):
"""
returns all direct non-resource permissions for this user,
allows to assign new permissions to user::
user.user_permissions.append(resource)
"""
return sa.orm.relationship(
"UserPermission",
cascade="all, delete-orphan",
passive_deletes=True,
passive_updates=True,
)
@declared_attr
def resource_permissions(self):
""" returns all direct resource permissions for this user """
return sa.orm.relationship(
"UserResourcePermission",
cascade="all, delete-orphan",
passive_deletes=True,
passive_updates=True,
overlaps="users"
)
@declared_attr
def resources(self):
""" Returns all resources directly owned by user, can be used to assign
ownership of new resources::
user.resources.append(resource) """
return sa.orm.relationship(
"Resource",
cascade="all",
passive_deletes=True,
passive_updates=True,
backref="owner",
lazy="dynamic",
)
@declared_attr
def external_identities(self):
""" dynamic relation for external identities for this user -
allowing for filtering of data """
return sa.orm.relationship(
"ExternalIdentity",
lazy="dynamic",
cascade="all, delete-orphan",
passive_deletes=True,
passive_updates=True,
backref="owner",
) | ziggurat-foundations | /ziggurat_foundations-0.9.1.tar.gz/ziggurat_foundations-0.9.1/ziggurat_foundations/models/user.py | user.py |
from __future__ import unicode_literals
from collections import OrderedDict
import sqlalchemy as sa
from ziggurat_foundations.utils import noop
from ziggurat_foundations.exc import (
ZigguratResourceTreeMissingException,
ZigguratResourceTreePathException,
ZigguratResourceOutOfBoundaryException,
)
from ziggurat_foundations.models.base import get_db_session
from ziggurat_foundations.models.services.resource import ResourceService
__all__ = ["ResourceTreeServicePostgreSQL"]
class ResourceTreeServicePostgreSQL(object):
    """Resource-tree operations implemented with PostgreSQL recursive CTEs.

    ``model`` is injected with the concrete resource model by
    :class:`ResourceTreeService`; all tree queries use
    ``WITH RECURSIVE`` and therefore require PostgreSQL.
    """

    model = None

    @classmethod
    def from_resource_deeper(
        cls, resource_id=None, limit_depth=1000000, db_session=None, *args, **kwargs
    ):
        """
        This returns you subtree of ordered objects relative
        to the start resource_id (currently only implemented in postgresql)

        :param resource_id: root node of the subtree
        :param limit_depth: maximum depth returned (root has depth 1)
        :param db_session:
        :return: query yielding (model, depth, sorting, path) rows
        """
        tablename = cls.model.__table__.name
        # "sorting" is a '/'-joined chain of zero-padded ordering values so a
        # plain ORDER BY yields depth-first, sibling-ordered traversal;
        # "path" is the '/'-joined chain of resource ids from the root.
        raw_q = """
        WITH RECURSIVE subtree AS (
                SELECT res.*, 1 AS depth, LPAD(res.ordering::CHARACTER VARYING, 7, '0') AS sorting,
                res.resource_id::CHARACTER VARYING AS path
                FROM {tablename} AS res WHERE res.resource_id = :resource_id
            UNION ALL
                SELECT res_u.*, depth+1 AS depth,
                (st.sorting::CHARACTER VARYING || '/' || LPAD(res_u.ordering::CHARACTER VARYING, 7, '0') ) AS sorting,
                (st.path::CHARACTER VARYING || '/' || res_u.resource_id::CHARACTER VARYING ) AS path
                FROM {tablename} res_u, subtree st
                WHERE res_u.parent_id = st.resource_id
        )
        SELECT * FROM subtree WHERE depth<=:depth ORDER BY sorting;
        """.format(
            tablename=tablename
        )  # noqa
        db_session = get_db_session(db_session)
        text_obj = sa.text(raw_q)
        query = db_session.query(
            cls.model, sa.column("depth"), sa.column("sorting"), sa.column("path")
        )
        query = query.from_statement(text_obj)
        query = query.params(resource_id=resource_id, depth=limit_depth)
        return query

    @classmethod
    def delete_branch(cls, resource_id=None, db_session=None, *args, **kwargs):
        """
        This deletes whole branch with children starting from resource_id

        :param resource_id:
        :param db_session:
        :return:
        """
        tablename = cls.model.__table__.name
        # lets lock rows to prevent bad tree states
        resource = ResourceService.lock_resource_for_update(
            resource_id=resource_id, db_session=db_session
        )
        parent_id = resource.parent_id
        ordering = resource.ordering
        # BUGFIX: delete from the configured table instead of the hard-coded
        # "resources" name, so models with a custom __tablename__ work.
        raw_q = """
        WITH RECURSIVE subtree AS (
                SELECT res.resource_id
                FROM {tablename} AS res WHERE res.resource_id = :resource_id
            UNION ALL
                SELECT res_u.resource_id
                FROM {tablename} res_u, subtree st
                WHERE res_u.parent_id = st.resource_id
        )
        DELETE FROM {tablename} where resource_id in (select * from subtree);
        """.format(
            tablename=tablename
        )  # noqa
        db_session = get_db_session(db_session)
        text_obj = sa.text(raw_q)
        db_session.execute(text_obj, params={"resource_id": resource_id})
        # close the ordering gap the deleted branch leaves among its siblings
        cls.shift_ordering_down(parent_id, ordering, db_session=db_session)
        return True

    @classmethod
    def from_parent_deeper(
        cls, parent_id=None, limit_depth=1000000, db_session=None, *args, **kwargs
    ):
        """
        This returns you subtree of ordered objects relative
        to the start parent_id (currently only implemented in postgresql)

        :param parent_id: parent whose children form the subtree roots;
            ``None`` selects all top-level nodes
        :param limit_depth:
        :param db_session:
        :return: query yielding (model, depth, sorting, path) rows
        """
        if parent_id:
            limiting_clause = "res.parent_id = :parent_id"
        else:
            limiting_clause = "res.parent_id is null"
        tablename = cls.model.__table__.name
        raw_q = """
        WITH RECURSIVE subtree AS (
                SELECT res.*, 1 AS depth, LPAD(res.ordering::CHARACTER VARYING, 7, '0') AS sorting,
                res.resource_id::CHARACTER VARYING AS path
                FROM {tablename} AS res WHERE {limiting_clause}
            UNION ALL
                SELECT res_u.*, depth+1 AS depth,
                (st.sorting::CHARACTER VARYING || '/' || LPAD(res_u.ordering::CHARACTER VARYING, 7, '0') ) AS sorting,
                (st.path::CHARACTER VARYING || '/' || res_u.resource_id::CHARACTER VARYING ) AS path
                FROM {tablename} res_u, subtree st
                WHERE res_u.parent_id = st.resource_id
        )
        SELECT * FROM subtree WHERE depth<=:depth ORDER BY sorting;
        """.format(
            tablename=tablename, limiting_clause=limiting_clause
        )  # noqa
        db_session = get_db_session(db_session)
        text_obj = sa.text(raw_q)
        query = db_session.query(
            cls.model, sa.column("depth"), sa.column("sorting"), sa.column("path")
        )
        query = query.from_statement(text_obj)
        query = query.params(parent_id=parent_id, depth=limit_depth)
        return query

    @classmethod
    def build_subtree_strut(cls, result, *args, **kwargs):
        """
        Returns a dictionary in form of
        {node:Resource, children:{node_id: Resource}}

        :param result: iterable of rows carrying the model plus a "path"
            column (as produced by from_resource_deeper/from_parent_deeper)
        :return:
        """
        items = list(result)
        root_elem = {"node": None, "children": OrderedDict()}
        if len(items) == 0:
            return root_elem
        for _, node in enumerate(items):
            node_res = getattr(node, cls.model.__name__)
            new_elem = {"node": node_res, "children": OrderedDict()}
            path = list(map(int, node.path.split("/")))
            parent_node = root_elem
            # walk down the already-built tree using all path segments but
            # the last (the node itself); rows arrive parents-first
            normalized_path = path[:-1]
            if normalized_path:
                for path_part in normalized_path:
                    parent_node = parent_node["children"][path_part]
            parent_node["children"][new_elem["node"].resource_id] = new_elem
        return root_elem

    @classmethod
    def path_upper(
        cls, object_id, limit_depth=1000000, db_session=None, *args, **kwargs
    ):
        """
        This returns you path to root node starting from object_id
        currently only for postgresql

        :param object_id:
        :param limit_depth:
        :param db_session:
        :return:
        """
        tablename = cls.model.__table__.name
        raw_q = """
        WITH RECURSIVE subtree AS (
                SELECT res.*, 1 as depth FROM {tablename} res
                WHERE res.resource_id = :resource_id
            UNION ALL
                SELECT res_u.*, depth+1 as depth
                FROM {tablename} res_u, subtree st
                WHERE res_u.resource_id = st.parent_id
        )
        SELECT * FROM subtree WHERE depth<=:depth;
        """.format(
            tablename=tablename
        )
        db_session = get_db_session(db_session)
        q = (
            db_session.query(cls.model)
            .from_statement(sa.text(raw_q))
            .params(resource_id=object_id, depth=limit_depth)
        )
        return q

    @classmethod
    def move_to_position(
        cls,
        resource_id,
        to_position,
        new_parent_id=noop,
        db_session=None,
        *args,
        **kwargs
    ):
        """
        Moves node to new location in the tree

        :param resource_id: resource to move
        :param to_position: new position
        :param new_parent_id: new parent id (``noop`` keeps current parent)
        :param db_session:
        :return:
        """
        db_session = get_db_session(db_session)
        # lets lock rows to prevent bad tree states
        resource = ResourceService.lock_resource_for_update(
            resource_id=resource_id, db_session=db_session
        )
        ResourceService.lock_resource_for_update(
            resource_id=resource.parent_id, db_session=db_session
        )
        same_branch = False
        # reset if parent is same as old
        if new_parent_id == resource.parent_id:
            new_parent_id = noop
        if new_parent_id is not noop:
            cls.check_node_parent(resource_id, new_parent_id, db_session=db_session)
        else:
            same_branch = True
        if new_parent_id is noop:
            # it is not guaranteed that parent exists
            parent_id = resource.parent_id if resource else None
        else:
            parent_id = new_parent_id
        cls.check_node_position(
            parent_id, to_position, on_same_branch=same_branch, db_session=db_session
        )
        # move on same branch
        if new_parent_id is noop:
            order_range = list(sorted((resource.ordering, to_position)))
            move_down = resource.ordering > to_position
            # shift every sibling between the old and new slots one step
            # towards the vacated position, then drop the node in place
            query = db_session.query(cls.model)
            query = query.filter(cls.model.parent_id == parent_id)
            query = query.filter(cls.model.ordering.between(*order_range))
            if move_down:
                query.update(
                    {cls.model.ordering: cls.model.ordering + 1},
                    synchronize_session=False,
                )
            else:
                query.update(
                    {cls.model.ordering: cls.model.ordering - 1},
                    synchronize_session=False,
                )
            db_session.flush()
            db_session.expire(resource)
            resource.ordering = to_position
        # move between branches
        else:
            cls.shift_ordering_down(
                resource.parent_id, resource.ordering, db_session=db_session
            )
            cls.shift_ordering_up(new_parent_id, to_position, db_session=db_session)
            db_session.expire(resource)
            resource.parent_id = new_parent_id
            resource.ordering = to_position
            db_session.flush()
        return True

    @classmethod
    def shift_ordering_down(cls, parent_id, position, db_session=None, *args, **kwargs):
        """
        Shifts ordering to "close gaps" after node deletion or being moved
        to another branch, begins the shift from given position

        :param parent_id:
        :param position:
        :param db_session:
        :return:
        """
        db_session = get_db_session(db_session)
        query = db_session.query(cls.model)
        query = query.filter(cls.model.parent_id == parent_id)
        query = query.filter(cls.model.ordering >= position)
        query.update(
            {cls.model.ordering: cls.model.ordering - 1}, synchronize_session=False
        )
        db_session.flush()

    @classmethod
    def shift_ordering_up(cls, parent_id, position, db_session=None, *args, **kwargs):
        """
        Shifts ordering to "open a gap" for node insertion,
        begins the shift from given position

        :param parent_id:
        :param position:
        :param db_session:
        :return:
        """
        db_session = get_db_session(db_session)
        query = db_session.query(cls.model)
        query = query.filter(cls.model.parent_id == parent_id)
        query = query.filter(cls.model.ordering >= position)
        query.update(
            {cls.model.ordering: cls.model.ordering + 1}, synchronize_session=False
        )
        db_session.flush()

    @classmethod
    def set_position(cls, resource_id, to_position, db_session=None, *args, **kwargs):
        """
        Sets node position for new node in the tree

        :param resource_id: resource to move
        :param to_position: new position
        :param db_session:
        :return:
        """
        db_session = get_db_session(db_session)
        # lets lock rows to prevent bad tree states
        resource = ResourceService.lock_resource_for_update(
            resource_id=resource_id, db_session=db_session
        )
        cls.check_node_position(
            resource.parent_id, to_position, on_same_branch=True, db_session=db_session
        )
        cls.shift_ordering_up(resource.parent_id, to_position, db_session=db_session)
        db_session.flush()
        db_session.expire(resource)
        resource.ordering = to_position
        return True

    @classmethod
    def check_node_parent(
        cls, resource_id, new_parent_id, db_session=None, *args, **kwargs
    ):
        """
        Checks if parent destination is valid for node

        :param resource_id:
        :param new_parent_id:
        :param db_session:
        :return:
        """
        db_session = get_db_session(db_session)
        new_parent = ResourceService.lock_resource_for_update(
            resource_id=new_parent_id, db_session=db_session
        )
        # we are not moving to "root" so parent should be found
        if not new_parent and new_parent_id is not None:
            raise ZigguratResourceTreeMissingException("New parent node not found")
        else:
            # reject moves that would place a node inside its own subtree
            result = cls.path_upper(new_parent_id, db_session=db_session)
            path_ids = [r.resource_id for r in result]
            if resource_id in path_ids:
                raise ZigguratResourceTreePathException(
                    "Trying to insert node into itself"
                )

    @classmethod
    def count_children(cls, resource_id, db_session=None, *args, **kwargs):
        """
        Counts children of resource node

        :param resource_id:
        :param db_session:
        :return:
        """
        # BUGFIX: resolve the session like every other method so passing
        # db_session=None does not raise AttributeError.
        db_session = get_db_session(db_session)
        query = db_session.query(cls.model.resource_id)
        query = query.filter(cls.model.parent_id == resource_id)
        return query.count()

    @classmethod
    def check_node_position(
        cls, parent_id, position, on_same_branch, db_session=None, *args, **kwargs
    ):
        """
        Checks if node position for given parent is valid, raises exception if
        this is not the case

        :param parent_id:
        :param position:
        :param on_same_branch: indicates that we are checking same branch
        :param db_session:
        :return:
        """
        db_session = get_db_session(db_session)
        # positions are 1-based; 0/None are out of bounds
        if not position or position < 1:
            raise ZigguratResourceOutOfBoundaryException(
                "Position is lower than {}", value=1
            )
        item_count = cls.count_children(parent_id, db_session=db_session)
        # when inserting into a different branch one extra slot opens up
        max_value = item_count if on_same_branch else item_count + 1
        if position > max_value:
            raise ZigguratResourceOutOfBoundaryException(
                "Maximum resource ordering is {}", value=max_value
            )
from __future__ import unicode_literals
import sqlalchemy as sa
from ziggurat_foundations.models.base import get_db_session
from ziggurat_foundations.models.services import BaseService
from ziggurat_foundations.permissions import (
ANY_PERMISSION,
ALL_PERMISSIONS,
PermissionTuple,
resource_permissions_for_users,
)
__all__ = ["ResourceService"]
class ResourceService(BaseService):
    """Query helpers for resources and the permissions attached to them."""

    @classmethod
    def get(cls, resource_id, db_session=None):
        """
        Fetch row using primary key -
        will use existing object in session if already present

        :param resource_id:
        :param db_session:
        :return:
        """
        db_session = get_db_session(db_session)
        return db_session.query(cls.model).get(resource_id)

    @classmethod
    def perms_for_user(cls, instance, user, db_session=None):
        """
        returns all permissions that given user has for this resource
        from groups and directly set ones too

        :param instance:
        :param user:
        :param db_session:
        :return:
        """
        db_session = get_db_session(db_session, instance)
        # group-sourced permissions for any of the user's groups
        query = db_session.query(
            cls.models_proxy.GroupResourcePermission.group_id.label("owner_id"),
            cls.models_proxy.GroupResourcePermission.perm_name,
            sa.literal("group").label("type"),
        )
        query = query.filter(
            cls.models_proxy.GroupResourcePermission.group_id.in_(
                [gr.id for gr in user.groups]
            )
        )
        query = query.filter(
            cls.models_proxy.GroupResourcePermission.resource_id == instance.resource_id
        )
        # directly assigned user permissions
        query2 = db_session.query(
            cls.models_proxy.UserResourcePermission.user_id.label("owner_id"),
            cls.models_proxy.UserResourcePermission.perm_name,
            sa.literal("user").label("type"),
        )
        query2 = query2.filter(
            cls.models_proxy.UserResourcePermission.user_id == user.id
        )
        query2 = query2.filter(
            cls.models_proxy.UserResourcePermission.resource_id == instance.resource_id
        )
        query = query.union(query2)
        # built once and reused below (was redundantly rebuilt before the
        # owner-group check)
        groups_dict = {g.id: g for g in user.groups}
        perms = [
            PermissionTuple(
                user,
                row.perm_name,
                row.type,
                groups_dict.get(row.owner_id) if row.type == "group" else None,
                instance,
                False,
                True,
            )
            for row in query
        ]
        # include all perms if user is the owner of this resource
        if instance.owner_user_id == user.id:
            perms.append(
                PermissionTuple(
                    user, ALL_PERMISSIONS, "user", None, instance, True, True
                )
            )
        if instance.owner_group_id in groups_dict:
            perms.append(
                PermissionTuple(
                    user,
                    ALL_PERMISSIONS,
                    "group",
                    groups_dict.get(instance.owner_group_id),
                    instance,
                    True,
                    True,
                )
            )
        return perms

    @classmethod
    def direct_perms_for_user(cls, instance, user, db_session=None):
        """
        returns permissions that given user has for this resource
        without ones inherited from groups that user belongs to

        :param instance:
        :param user:
        :param db_session:
        :return:
        """
        db_session = get_db_session(db_session, instance)
        query = db_session.query(
            cls.models_proxy.UserResourcePermission.user_id,
            cls.models_proxy.UserResourcePermission.perm_name,
        )
        query = query.filter(cls.models_proxy.UserResourcePermission.user_id == user.id)
        query = query.filter(
            cls.models_proxy.UserResourcePermission.resource_id == instance.resource_id
        )
        perms = [
            PermissionTuple(user, row.perm_name, "user", None, instance, False, True)
            for row in query
        ]
        # include all perms if user is the owner of this resource
        if instance.owner_user_id == user.id:
            perms.append(
                PermissionTuple(user, ALL_PERMISSIONS, "user", None, instance, True, True)
            )
        return perms

    @classmethod
    def group_perms_for_user(cls, instance, user, db_session=None):
        """
        returns permissions that given user has for this resource
        that are inherited from groups

        :param instance:
        :param user:
        :param db_session:
        :return:
        """
        db_session = get_db_session(db_session, instance)
        perms = resource_permissions_for_users(
            cls.models_proxy,
            ANY_PERMISSION,
            resource_ids=[instance.resource_id],
            user_ids=[user.id],
            db_session=db_session,
        )
        perms = [p for p in perms if p.type == "group"]
        # include all perms if user's group is the owner of this resource
        groups_dict = {g.id: g for g in user.groups}
        if instance.owner_group_id in groups_dict:
            perms.append(
                PermissionTuple(
                    user,
                    ALL_PERMISSIONS,
                    "group",
                    groups_dict.get(instance.owner_group_id),
                    instance,
                    True,
                    True,
                )
            )
        return perms

    @classmethod
    def users_for_perm(
        cls,
        instance,
        perm_name,
        user_ids=None,
        group_ids=None,
        limit_group_permissions=False,
        skip_group_perms=False,
        db_session=None,
    ):
        """
        return PermissionTuples for users AND groups that have given
        permission for the resource, perm_name is __any_permission__ then
        users with any permission will be listed

        :param instance:
        :param perm_name:
        :param user_ids: limits the permissions to specific user ids
        :param group_ids: limits the permissions to specific group ids
        :param limit_group_permissions: should be used if we do not want to have
        user objects returned for group permissions, this might cause performance
        issues for big groups
        :param skip_group_perms: do not attach group permissions to the resultset
        :param db_session:
        :return:
        """  # noqa
        db_session = get_db_session(db_session, instance)
        users_perms = resource_permissions_for_users(
            cls.models_proxy,
            [perm_name],
            [instance.resource_id],
            user_ids=user_ids,
            group_ids=group_ids,
            limit_group_permissions=limit_group_permissions,
            skip_group_perms=skip_group_perms,
            db_session=db_session,
        )
        # owners implicitly hold every permission
        if instance.owner_user_id:
            users_perms.append(
                PermissionTuple(
                    instance.owner, ALL_PERMISSIONS, "user", None, instance, True, True
                )
            )
        if instance.owner_group_id and not skip_group_perms:
            for user in instance.owner_group.users:
                users_perms.append(
                    PermissionTuple(
                        user,
                        ALL_PERMISSIONS,
                        "group",
                        instance.owner_group,
                        instance,
                        True,
                        True,
                    )
                )
        return users_perms

    @classmethod
    def by_resource_id(cls, resource_id, db_session=None):
        """
        fetch the resource by id

        :param resource_id:
        :param db_session:
        :return:
        """
        db_session = get_db_session(db_session)
        query = db_session.query(cls.model).filter(
            cls.model.resource_id == int(resource_id)
        )
        return query.first()

    @classmethod
    def perm_by_group_and_perm_name(
        cls, resource_id, group_id, perm_name, db_session=None
    ):
        """
        fetch permissions by group and permission name

        :param resource_id:
        :param group_id:
        :param perm_name:
        :param db_session:
        :return:
        """
        db_session = get_db_session(db_session)
        query = db_session.query(cls.models_proxy.GroupResourcePermission)
        query = query.filter(
            cls.models_proxy.GroupResourcePermission.group_id == group_id
        )
        query = query.filter(
            cls.models_proxy.GroupResourcePermission.perm_name == perm_name
        )
        query = query.filter(
            cls.models_proxy.GroupResourcePermission.resource_id == resource_id
        )
        return query.first()

    @classmethod
    def groups_for_perm(
        cls,
        instance,
        perm_name,
        group_ids=None,
        limit_group_permissions=False,
        db_session=None,
    ):
        """
        return PermissionTuples for groups that have given
        permission for the resource, perm_name is __any_permission__ then
        users with any permission will be listed

        :param instance:
        :param perm_name:
        :param group_ids: limits the permissions to specific group ids
        :param limit_group_permissions: should be used if we do not want to have
        user objects returned for group permissions, this might cause performance
        issues for big groups
        :param db_session:
        :return:
        """  # noqa
        db_session = get_db_session(db_session, instance)
        group_perms = resource_permissions_for_users(
            cls.models_proxy,
            [perm_name],
            [instance.resource_id],
            group_ids=group_ids,
            limit_group_permissions=limit_group_permissions,
            skip_user_perms=True,
            db_session=db_session,
        )
        if instance.owner_group_id:
            for user in instance.owner_group.users:
                group_perms.append(
                    PermissionTuple(
                        user,
                        ALL_PERMISSIONS,
                        "group",
                        instance.owner_group,
                        instance,
                        True,
                        True,
                    )
                )
        return group_perms

    @classmethod
    def lock_resource_for_update(cls, resource_id, db_session):
        """
        Selects resource for update - locking access for other transactions

        :param resource_id:
        :param db_session:
        :return:
        """
        db_session = get_db_session(db_session)
        query = db_session.query(cls.model)
        query = query.filter(cls.model.resource_id == resource_id)
        # SELECT ... FOR UPDATE: blocks concurrent writers until commit
        query = query.with_for_update()
        return query.first()
from __future__ import unicode_literals
from ziggurat_foundations.utils import noop
__all__ = ["ResourceTreeService"]
class ResourceTreeService(object):
    """Facade binding a backend tree-service implementation to a model.

    Every method simply delegates to the injected ``service_cls`` (e.g.
    :class:`ResourceTreeServicePostgreSQL`) after that class has been bound
    to ``model`` in ``__init__``.
    """

    # concrete resource model; assigned onto the backend service in __init__
    model = None

    def __init__(self, service_cls):
        # bind the model to the backend implementation and keep a reference
        service_cls.model = self.model
        self.service = service_cls

    def from_resource_deeper(
        self, resource_id=None, limit_depth=1000000, db_session=None, *args, **kwargs
    ):
        """
        This returns you subtree of ordered objects relative
        to the start resource_id (currently only implemented in postgresql)

        :param resource_id:
        :param limit_depth:
        :param db_session:
        :return:
        """
        return self.service.from_resource_deeper(
            resource_id=resource_id,
            limit_depth=limit_depth,
            db_session=db_session,
            *args,
            **kwargs
        )

    def delete_branch(self, resource_id=None, db_session=None, *args, **kwargs):
        """
        This deletes whole branch with children starting from resource_id

        :param resource_id:
        :param db_session:
        :return:
        """
        return self.service.delete_branch(
            resource_id=resource_id, db_session=db_session, *args, **kwargs
        )

    def from_parent_deeper(
        self, parent_id=None, limit_depth=1000000, db_session=None, *args, **kwargs
    ):
        """
        This returns you subtree of ordered objects relative
        to the start parent_id (currently only implemented in postgresql)

        :param parent_id:
        :param limit_depth:
        :param db_session:
        :return:
        """
        return self.service.from_parent_deeper(
            parent_id=parent_id,
            limit_depth=limit_depth,
            db_session=db_session,
            *args,
            **kwargs
        )

    def build_subtree_strut(self, result, *args, **kwargs):
        """
        Returns a dictionary in form of
        {node:Resource, children:{node_id: Resource}}

        :param result:
        :return:
        """
        return self.service.build_subtree_strut(result=result, *args, **kwargs)

    def path_upper(
        self, object_id, limit_depth=1000000, db_session=None, *args, **kwargs
    ):
        """
        This returns you path to root node starting from object_id
        currently only for postgresql

        :param object_id:
        :param limit_depth:
        :param db_session:
        :return:
        """
        return self.service.path_upper(
            object_id=object_id,
            limit_depth=limit_depth,
            db_session=db_session,
            *args,
            **kwargs
        )

    def move_to_position(
        self,
        resource_id,
        to_position,
        new_parent_id=noop,
        db_session=None,
        *args,
        **kwargs
    ):
        """
        Moves node to new location in the tree

        :param resource_id: resource to move
        :param to_position: new position
        :param new_parent_id: new parent id
        :param db_session:
        :return:
        """
        return self.service.move_to_position(
            resource_id=resource_id,
            to_position=to_position,
            new_parent_id=new_parent_id,
            db_session=db_session,
            *args,
            **kwargs
        )

    def shift_ordering_down(
        self, parent_id, position, db_session=None, *args, **kwargs
    ):
        """
        Shifts ordering to "close gaps" after node deletion or being moved
        to another branch, begins the shift from given position

        :param parent_id:
        :param position:
        :param db_session:
        :return:
        """
        return self.service.shift_ordering_down(
            parent_id=parent_id,
            position=position,
            db_session=db_session,
            *args,
            **kwargs
        )

    def shift_ordering_up(self, parent_id, position, db_session=None, *args, **kwargs):
        """
        Shifts ordering to "open a gap" for node insertion,
        begins the shift from given position

        :param parent_id:
        :param position:
        :param db_session:
        :return:
        """
        return self.service.shift_ordering_up(
            parent_id=parent_id,
            position=position,
            db_session=db_session,
            *args,
            **kwargs
        )

    def set_position(self, resource_id, to_position, db_session=None, *args, **kwargs):
        """
        Sets node position for new node in the tree

        :param resource_id: resource to move
        :param to_position: new position
        :param db_session:
        :return:
        """
        return self.service.set_position(
            resource_id=resource_id,
            to_position=to_position,
            db_session=db_session,
            *args,
            **kwargs
        )

    def check_node_parent(
        self, resource_id, new_parent_id, db_session=None, *args, **kwargs
    ):
        """
        Checks if parent destination is valid for node

        :param resource_id:
        :param new_parent_id:
        :param db_session:
        :return:
        """
        return self.service.check_node_parent(
            resource_id=resource_id,
            new_parent_id=new_parent_id,
            db_session=db_session,
            *args,
            **kwargs
        )

    def count_children(self, resource_id, db_session=None, *args, **kwargs):
        """
        Counts children of resource node

        :param resource_id:
        :param db_session:
        :return:
        """
        return self.service.count_children(
            resource_id=resource_id, db_session=db_session, *args, **kwargs
        )

    def check_node_position(
        self, parent_id, position, on_same_branch, db_session=None, *args, **kwargs
    ):
        """
        Checks if node position for given parent is valid, raises exception if
        this is not the case

        :param parent_id:
        :param position:
        :param on_same_branch: indicates that we are checking same branch
        :param db_session:
        :return:
        """
        return self.service.check_node_position(
            parent_id=parent_id,
            position=position,
            on_same_branch=on_same_branch,
            db_session=db_session,
            *args,
            **kwargs
        )
from __future__ import unicode_literals
from paginate_sqlalchemy import SqlalchemyOrmPage
from ziggurat_foundations.models.base import get_db_session
from ziggurat_foundations.models.services import BaseService
from ziggurat_foundations.permissions import (
ANY_PERMISSION,
ALL_PERMISSIONS,
PermissionTuple,
)
__all__ = ["GroupService"]
class GroupService(BaseService):
    """Query helpers for group objects and their permissions."""

    @classmethod
    def get(cls, group_id, db_session=None):
        """
        Fetch row using primary key -
        will use existing object in session if already present

        :param group_id:
        :param db_session:
        :return:
        """
        db_session = get_db_session(db_session)
        return db_session.query(cls.model).get(group_id)

    @classmethod
    def by_group_name(cls, group_name, db_session=None):
        """
        fetch group by name

        :param group_name:
        :param db_session:
        :return:
        """
        db_session = get_db_session(db_session)
        query = db_session.query(cls.model).filter(cls.model.group_name == group_name)
        return query.first()

    @classmethod
    def get_user_paginator(
        cls,
        instance,
        page=1,
        item_count=None,
        items_per_page=50,
        user_ids=None,
        GET_params=None,
    ):
        """
        returns paginator over users belonging to the group

        :param instance:
        :param page:
        :param item_count:
        :param items_per_page:
        :param user_ids:
        :param GET_params: extra keyword arguments forwarded to the paginator;
            the caller's dict is left untouched
        :return:
        """
        if not GET_params:
            GET_params = {}
        else:
            # BUGFIX: copy before popping so the caller's dict is not mutated
            GET_params = dict(GET_params)
        GET_params.pop("page", None)
        query = instance.users_dynamic
        if user_ids:
            query = query.filter(cls.models_proxy.UserGroup.user_id.in_(user_ids))
        return SqlalchemyOrmPage(
            query,
            page=page,
            item_count=item_count,
            items_per_page=items_per_page,
            **GET_params
        )

    @classmethod
    def resources_with_possible_perms(
        cls,
        instance,
        perm_names=None,
        resource_ids=None,
        resource_types=None,
        db_session=None,
    ):
        """
        returns list of permissions and resources for this group,
        resource_ids restricts the search to specific resources

        :param instance:
        :param perm_names:
        :param resource_ids:
        :param resource_types:
        :param db_session:
        :return:
        """
        db_session = get_db_session(db_session, instance)
        query = db_session.query(
            cls.models_proxy.GroupResourcePermission.perm_name,
            cls.models_proxy.Group,
            cls.models_proxy.Resource,
        )
        query = query.filter(
            cls.models_proxy.Resource.resource_id
            == cls.models_proxy.GroupResourcePermission.resource_id
        )
        query = query.filter(
            cls.models_proxy.Group.id
            == cls.models_proxy.GroupResourcePermission.group_id
        )
        if resource_ids:
            query = query.filter(
                cls.models_proxy.GroupResourcePermission.resource_id.in_(resource_ids)
            )
        if resource_types:
            query = query.filter(
                cls.models_proxy.Resource.resource_type.in_(resource_types)
            )
        # ANY_PERMISSION sentinel means: do not filter on permission name
        if perm_names not in ([ANY_PERMISSION], ANY_PERMISSION) and perm_names:
            query = query.filter(
                cls.models_proxy.GroupResourcePermission.perm_name.in_(perm_names)
            )
        query = query.filter(
            cls.models_proxy.GroupResourcePermission.group_id == instance.id
        )
        perms = [
            PermissionTuple(
                None, row.perm_name, "group", instance, row.Resource, False, True
            )
            for row in query
        ]
        # resources the group owns grant it every permission implicitly
        for resource in instance.resources:
            perms.append(
                PermissionTuple(
                    None, ALL_PERMISSIONS, "group", instance, resource, True, True
                )
            )
        return perms
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.