docstring
stringlengths 52
499
| function
stringlengths 67
35.2k
| __index_level_0__
int64 52.6k
1.16M
|
---|---|---|
Encapsulates the render -> parse -> validate -> load process.
Args:
raw_config (str): the raw stacker configuration string.
environment (dict, optional): any environment values that should be
passed to the config
validate (bool): if provided, the config is validated before being
loaded.
Returns:
def render_parse_load(raw_config, environment=None, validate=True):
    """Encapsulates the render -> parse -> validate -> load process.

    Args:
        raw_config (str): the raw stacker configuration string.
        environment (dict, optional): any environment values that should be
            passed to the config.
        validate (bool): if provided, the config is validated before being
            loaded.

    Returns:
        :class:`Config`: the parsed stacker config.
    """
    pre_rendered = render(raw_config, environment)
    rendered = process_remote_sources(pre_rendered, environment)
    config = parse(rendered)

    # For backwards compatibility, if the config doesn't specify a namespace,
    # we fall back to fetching it from the environment, if provided.
    if config.namespace is None:
        # Guard against environment being None (it is optional); the previous
        # code raised AttributeError here in that case.
        namespace = (environment or {}).get("namespace")
        if namespace:
            # logger.warn is a deprecated alias of logger.warning.
            logger.warning("DEPRECATION WARNING: specifying namespace in the "
                           "environment is deprecated. See "
                           "https://stacker.readthedocs.io/en/latest/config.html"
                           "#namespace "
                           "for more info.")
            config.namespace = namespace

    if validate:
        config.validate()

    return load(config)
Renders a config, using it as a template with the environment.
Args:
raw_config (str): the raw stacker configuration string.
environment (dict, optional): any environment values that should be
passed to the config
Returns:
str: the stacker configuration populated with any values passed from
def render(raw_config, environment=None):
    """Renders a config, using it as a template with the environment.

    Args:
        raw_config (str): the raw stacker configuration string.
        environment (dict, optional): any environment values that should be
            passed to the config.

    Returns:
        str: the stacker configuration populated with any values passed from
            the environment.
    """
    template = Template(raw_config)
    if not environment:
        environment = {}
    try:
        rendered = template.substitute(environment)
    except KeyError as e:
        # A placeholder had no corresponding environment key.
        raise exceptions.MissingEnvironment(e.args[0])
    except ValueError:
        # Support "invalid" placeholders for lookup placeholders.
        rendered = template.safe_substitute(environment)
    if not isinstance(rendered, str):
        # Python 2 compatibility: coerce bytes to unicode.
        rendered = rendered.decode('utf-8')
    # Round-trip through a buffer, mirroring the original implementation.
    buffer_ = StringIO()
    buffer_.write(rendered)
    buffer_.seek(0)
    return buffer_.read()
Parse a raw yaml formatted stacker config.
Args:
raw_config (str): the raw stacker configuration string in yaml format.
Returns:
def parse(raw_config):
    """Parse a raw yaml formatted stacker config.

    Args:
        raw_config (str): the raw stacker configuration string in yaml format.

    Returns:
        :class:`Config`: the parsed stacker config.
    """
    config_dict = yaml_to_ordered_dict(raw_config)

    # Historically these top level values were lists; they may now be either
    # lists or OrderedDicts. Normalize any dicts back into lists so downstream
    # code only deals with one shape.
    # Eventually we should probably just make them OrderedDicts only.
    if config_dict:
        list_keys = ('stacks', 'pre_build', 'post_build',
                     'pre_destroy', 'post_destroy')
        for key in list_keys:
            value = config_dict.get(key)
            if not isinstance(value, dict):
                continue
            entries = []
            for name, entry in value.items():
                entry_copy = copy.deepcopy(entry)
                if key == 'stacks':
                    # Stack entries take their mapping key as their name.
                    entry_copy['name'] = name
                entries.append(entry_copy)
            config_dict[key] = entries

    # Top-level excess keys are removed by Config._convert, so enabling strict
    # mode is fine here.
    try:
        return Config(config_dict, strict=True)
    except SchematicsError as e:
        raise exceptions.InvalidConfig(e.errors)
Loads a stacker configuration by modifying sys paths, loading lookups,
etc.
Args:
config (:class:`Config`): the stacker config to load.
Returns:
def load(config):
    """Loads a stacker configuration by modifying sys paths, loading lookups,
    etc.

    Args:
        config (:class:`Config`): the stacker config to load.

    Returns:
        :class:`Config`: the stacker config provided above.
    """
    sys_path = config.sys_path
    if sys_path:
        logger.debug("Appending %s to sys.path.", sys_path)
        sys.path.append(sys_path)
        logger.debug("sys.path is now %s", sys.path)

    lookups = config.lookups
    if lookups:
        # Register every custom lookup handler declared in the config.
        for name, handler in lookups.items():
            register_lookup_handler(name, handler)

    return config
Dumps a stacker Config object as yaml.
Args:
config (:class:`Config`): the stacker Config object.
stream (stream): an optional stream object to write to.
Returns:
def dump(config):
    """Dumps a stacker Config object as yaml.

    Args:
        config (:class:`Config`): the stacker Config object.

    Returns:
        The yaml formatted config. NOTE(review): because ``encoding='utf-8'``
        is passed, ``yaml.safe_dump`` returns a UTF-8 encoded byte string
        rather than ``str`` -- confirm callers expect bytes.
    """
    # default_flow_style=False forces block style output; allow_unicode
    # avoids escaping non-ASCII characters.
    return yaml.safe_dump(
        config.to_primitive(),
        default_flow_style=False,
        encoding='utf-8',
        allow_unicode=True)
Stage remote package sources and merge in remote configs.
Args:
raw_config (str): the raw stacker configuration string.
environment (dict, optional): any environment values that should be
passed to the config
Returns:
def process_remote_sources(raw_config, environment=None):
    """Stage remote package sources and merge in remote configs.

    Args:
        raw_config (str): the raw stacker configuration string.
        environment (dict, optional): any environment values that should be
            passed to the config.

    Returns:
        str: the raw stacker configuration string.
    """
    config = yaml.safe_load(raw_config)
    if config and config.get('package_sources'):
        processor = SourceProcessor(
            sources=config['package_sources'],
            stacker_cache_dir=config.get('stacker_cache_dir')
        )
        processor.get_package_sources()
        if processor.configs_to_merge:
            for i in processor.configs_to_merge:
                logger.debug("Merging in remote config \"%s\"", i)
                # Use a context manager so the file handle is always closed;
                # the previous code leaked the handle returned by open().
                with open(i) as remote_file:
                    remote_config = yaml.safe_load(remote_file)
                config = merge_map(remote_config, config)
        # Call the render again as the package_sources may have merged in
        # additional environment lookups
        if not environment:
            environment = {}
        return render(str(config), environment)
    return raw_config
Create the content of DIDL desc element from a uri.
Args:
uri (str): A uri, eg:
``'x-sonos-http:track%3a3402413.mp3?sid=2&flags=32&sn=4'``
Returns:
str: The content of a desc element for that uri, eg
``'[email protected]'`` | def desc_from_uri(uri):
#
# If there is an sn parameter (which is the serial number of an account),
# we can obtain all the information we need from that, because we can find
# the relevant service_id in the account database (it is the same as the
# service_type). Consequently, the sid parameter is unneeded. But if sn is
# missing, we need the sid (service_type) parameter to find a relevant
# account
# urlparse does not work consistently with custom URI schemes such as
# those used by Sonos. This is especially broken in Python 2.6 and
# early versions of 2.7: http://bugs.python.org/issue9374
# As a workaround, we split off the scheme manually, and then parse
# the uri as if it were http
if ":" in uri:
_, uri = uri.split(":", 1)
query_string = parse_qs(urlparse(uri, 'http').query)
# Is there an account serial number?
if query_string.get('sn'):
account_serial_number = query_string['sn'][0]
try:
account = Account.get_accounts()[account_serial_number]
desc = "SA_RINCON{}_{}".format(
account.service_type, account.username)
return desc
except KeyError:
# There is no account matching this serial number. Fall back to
# using the service id to find an account
pass
if query_string.get('sid'):
service_id = query_string['sid'][0]
for service in MusicService._get_music_services_data().values():
if service_id == service["ServiceID"]:
service_type = service["ServiceType"]
account = Account.get_accounts_for_service(service_type)
if not account:
break
# Use the first account we find
account = account[0]
desc = "SA_RINCON{}_{}".format(
account.service_type, account.username)
return desc
# Nothing found. Default to the standard desc value. Is this the right
# thing to do?
desc = 'RINCON_AssociatedZPUDN'
return desc | 171,760 |
Fetch the music services data xml from a Sonos device.
Args:
soco (SoCo): a SoCo instance to query. If none is specified, a
random device will be used. Defaults to `None`.
Returns:
def _get_music_services_data_xml(soco=None):
    """Fetch the music services data xml from a Sonos device.

    Args:
        soco (SoCo): a SoCo instance to query. If none is specified, a
            random device will be used. Defaults to `None`.

    Returns:
        str: a string containing the music services data xml.
    """
    device = soco or discovery.any_soco()
    log.debug("Fetching music services data from %s", device)
    response = device.musicServices.ListAvailableServices()
    descriptor_list_xml = response['AvailableServiceDescriptorList']
    log.debug("Services descriptor list: %s", descriptor_list_xml)
    return descriptor_list_xml
Get the data relating to a named music service.
Args:
service_name (str): The name of the music service for which data
is required.
Returns:
dict: Data relating to the music service.
Raises:
def get_data_for_name(cls, service_name):
    """Get the data relating to a named music service.

    Args:
        service_name (str): The name of the music service for which data
            is required.

    Returns:
        dict: Data relating to the music service.

    Raises:
        `MusicServiceException`: if the music service cannot be found.
    """
    services = cls._get_music_services_data().values()
    # Return the first (and only) service whose name matches.
    match = next(
        (service for service in services if service["Name"] == service_name),
        None)
    if match is None:
        raise MusicServiceException(
            "Unknown music service: '%s'" % service_name)
    return match
Get metadata for a media item.
Args:
item_id (str): The item for which metadata is required.
Returns:
~collections.OrderedDict: The item's metadata, or `None`
See also:
The Sonos `getMediaMetadata API
def get_media_metadata(self, item_id):
    """Get metadata for a media item.

    Args:
        item_id (str): The item for which metadata is required.

    Returns:
        ~collections.OrderedDict: The item's metadata, or `None`

    See also:
        The Sonos `getMediaMetadata API
        <http://musicpartners.sonos.com/node/83>`_
    """
    # Delegate to the SOAP client; the result key mirrors the call name.
    result = self.soap_client.call('getMediaMetadata', [('id', item_id)])
    return result.get('getMediaMetadataResult', None)
Get extended metadata for a media item, such as related items.
Args:
item_id (str): The item for which metadata is required.
Returns:
~collections.OrderedDict: The item's extended metadata or None.
See also:
The Sonos `getExtendedMetadata API
def get_extended_metadata(self, item_id):
    """Get extended metadata for a media item, such as related items.

    Args:
        item_id (str): The item for which metadata is required.

    Returns:
        ~collections.OrderedDict: The item's extended metadata or None.

    See also:
        The Sonos `getExtendedMetadata API
        <http://musicpartners.sonos.com/node/128>`_
    """
    # Delegate to the SOAP client; the result key mirrors the call name.
    result = self.soap_client.call('getExtendedMetadata', [('id', item_id)])
    return result.get('getExtendedMetadataResult', None)
Form a music service data structure class from the class key
Args:
class_key (str): A concatenation of the base class (e.g. MediaMetadata)
and the class name
Returns:
def get_class(class_key):
    """Form a music service data structure class from the class key

    Args:
        class_key (str): A concatenation of the base class (e.g.
            MediaMetadata) and the class name

    Returns:
        class: Subclass of MusicServiceItem
    """
    if class_key not in CLASSES:
        # Lazily create the class and cache it in the CLASSES registry.
        for base in (MediaMetadata, MediaCollection):
            prefix = base.__name__
            if class_key.startswith(prefix):
                # So MediaMetadataTrack turns into MSTrack
                new_name = 'MS' + class_key.replace(prefix, '')
                if sys.version_info[0] == 2:
                    # Python 2 requires class names to be byte strings.
                    new_name = new_name.encode('ascii')
                CLASSES[class_key] = type(new_name, (base,), {})
                _LOG.info('Class %s created', CLASSES[class_key])
    return CLASSES[class_key]
Parse the response to a music service query and return a SearchResult
Args:
service (MusicService): The music service that produced the response
response (OrderedDict): The response from the soap client call
search_type (str): A string that indicates the search type that the
response is from
Returns:
def parse_response(service, response, search_type):
    """Parse the response to a music service query and return a SearchResult

    Args:
        service (MusicService): The music service that produced the response
        response (OrderedDict): The response from the soap client call
        search_type (str): A string that indicates the search type that the
            response is from

    Returns:
        SearchResult: A SearchResult object
    """
    _LOG.debug('Parse response "%s" from service "%s" of type "%s"', response,
               service, search_type)
    # The result to be parsed is in either searchResult or getMetadataResult
    if 'searchResult' in response:
        payload = response['searchResult']
    elif 'getMetadataResult' in response:
        payload = response['getMetadataResult']
    else:
        raise ValueError('"response" should contain either the key '
                         '"searchResult" or "getMetadataResult"')

    # Form the search metadata
    search_metadata = {
        'number_returned': payload['count'],
        'total_matches': None,
        'search_type': search_type,
        'update_id': None,
    }

    items = []
    for result_type in ('mediaCollection', 'mediaMetadata'):
        # Upper case the first letter (used for the class_key)
        result_type_proper = result_type[0].upper() + result_type[1:]
        raw_items = payload.get(result_type, [])
        # If there is only 1 result, it is not put in an array
        if isinstance(raw_items, OrderedDict):
            raw_items = [raw_items]
        for raw_item in raw_items:
            # The class_key is a unique string for this type, formed by
            # concatenating the result type with the item type. Turns into
            # e.g: MediaMetadataTrack
            item_cls = get_class(
                result_type_proper + raw_item['itemType'].title())
            items.append(item_cls.from_music_service(service, raw_item))
    return SearchResult(items, **search_metadata)
Form and return a music service item uri
Args:
item_id (str): The item id
service (MusicService): The music service that the item originates from
is_track (bool): Whether the item_id is from a track or not
Returns:
def form_uri(item_id, service, is_track):
    """Form and return a music service item uri

    Args:
        item_id (str): The item id
        service (MusicService): The music service that the item originates
            from
        is_track (bool): Whether the item_id is from a track or not

    Returns:
        str: The music service item uri
    """
    # Tracks get a service-specific Sonos uri; anything else is treated as
    # a container.
    if not is_track:
        return 'x-rincon-cpcontainer:' + item_id
    return service.sonos_uri_from_id(item_id)
Init music service item
Args:
item_id (str): This is the Didl compatible id NOT the music item id
desc (str): A DIDL descriptor, default ``'RINCON_AssociatedZPUDN'``
resources (list): List of DidlResource
uri (str): The uri for the location of the item
metadata_dict (dict): Mapping of metadata
music_service (MusicService): The MusicService instance the item
def __init__(self, item_id, desc, # pylint: disable=too-many-arguments
             resources, uri, metadata_dict, music_service=None):
    """Init music service item.

    Args:
        item_id (str): This is the Didl compatible id NOT the music item id
        desc (str): A DIDL descriptor, default ``'RINCON_AssociatedZPUDN'``
        resources (list): List of DidlResource
        uri (str): The uri for the location of the item
        metadata_dict (dict): Mapping of metadata
        music_service (MusicService): The MusicService instance the item
            originates from
    """
    # metadata_dict is deliberately logged as "..." to keep the log short.
    _LOG.debug('%s.__init__ with item_id=%s, desc=%s, resources=%s, '
               'uri=%s, metadata_dict=..., music_service=%s',
               self.__class__.__name__, item_id, desc, resources, uri,
               music_service)
    # The base class consumes the metadata mapping.
    super(MusicServiceItem, self).__init__(metadata_dict)
    self.item_id = item_id
    self.desc = desc
    self.resources = resources
    self.uri = uri
    self.music_service = music_service
Return an element instantiated from the information that a music
service has (alternative constructor)
Args:
music_service (MusicService): The music service that content_dict
originated from
content_dict (OrderedDict): The data to instantiate the music
service item from
Returns:
def from_music_service(cls, music_service, content_dict):
    """Return an element instantiated from the information that a music
    service has (alternative constructor)

    Args:
        music_service (MusicService): The music service that content_dict
            originated from
        content_dict (OrderedDict): The data to instantiate the music
            service item from

    Returns:
        MusicServiceItem: A MusicServiceItem instance
    """
    # Form the item_id by url-quoting the music service id
    quoted_id = quote_url(content_dict['id'].encode('utf-8'))
    # The hex prefix remains a mystery for now
    item_id = '0fffffff{}'.format(quoted_id)
    # Form the uri; tracks use a different scheme than containers
    is_track = cls == get_class('MediaMetadataTrack')
    uri = form_uri(item_id, music_service, is_track)
    # Form resources and get desc
    resources = [DidlResource(uri=uri, protocol_info="DUMMY")]
    desc = music_service.desc
    return cls(item_id, desc, resources, uri, content_dict,
               music_service=music_service)
Return an ElementTree Element representing this instance.
Args:
include_namespaces (bool, optional): If True, include xml
namespace attributes on the root element
Return:
~xml.etree.ElementTree.Element: The (XML) Element representation of
def to_element(self, include_namespaces=False):
    """Return an ElementTree Element representing this instance.

    Args:
        include_namespaces (bool, optional): If True, include xml
            namespace attributes on the root element

    Return:
        ~xml.etree.ElementTree.Element: The (XML) Element representation of
            this object
    """
    # Piggy back on the implementation in DidlItem. The title and parent_id
    # placeholders are ignored - Sonos derives the title from the item_id.
    proxy = DidlItem(
        title="DUMMY",
        parent_id="DUMMY",
        item_id=self.item_id,
        desc=self.desc,
        resources=self.resources
    )
    return proxy.to_element(include_namespaces=include_namespaces)
Fetch the account data from a Sonos device.
Args:
soco (SoCo): a SoCo instance to query. If soco is `None`, a
random device will be used.
Returns:
def _get_account_xml(soco):
    """Fetch the account data from a Sonos device.

    Args:
        soco (SoCo): a SoCo instance to query. If soco is `None`, a
            random device will be used.

    Returns:
        str: a byte string containing the account data xml.
    """
    # It is likely that the same information is available over UPnP as well
    # via a call to
    # systemProperties.GetStringX([('VariableName','R_SvcAccounts')]))
    # This returns an encrypted string, and, so far, we cannot decrypt it
    device = soco or discovery.any_soco()
    log.debug("Fetching account data from %s", device)
    settings_url = "http://{}:1400/status/accounts".format(
        device.ip_address)
    # A timeout prevents this call from hanging indefinitely if the device
    # disappears between discovery and this request.
    result = requests.get(settings_url, timeout=10).content
    log.debug("Account data: %s", result)
    return result
Get a list of accounts for a given music service.
Args:
service_type (str): The service_type to use.
Returns:
def get_accounts_for_service(cls, service_type):
    """Get a list of accounts for a given music service.

    Args:
        service_type (str): The service_type to use.

    Returns:
        list: A list of `Account` instances.
    """
    all_accounts = cls.get_accounts().values()
    return [account for account in all_accounts
            if account.service_type == service_type]
Demo function using soco.snapshot across multiple Sonos players.
Args:
zones (set): a set of SoCo objects
alert_uri (str): uri that Sonos can play as an alert
alert_volume (int): volume level for playing alert (0 to 100)
alert_duration (int): length of alert (if zero then length of track)
def play_alert(zones, alert_uri, alert_volume=20, alert_duration=0, fade_back=False):
    """Demo function using soco.snapshot across multiple Sonos players.

    Args:
        zones (set): a set of SoCo objects
        alert_uri (str): uri that Sonos can play as an alert
        alert_volume (int): volume level for playing alert (0 to 100)
        alert_duration (int): length of alert (if zero then length of track)
        fade_back (bool): on reinstating the zones fade up the sound?

    NOTE(review): with ``alert_duration=0`` this sleeps for zero seconds and
    restores the zones immediately, which does not appear to match "length
    of track" above -- confirm the intended behaviour.
    """
    # Use soco.snapshot to capture current state of each zone to allow restore
    for zone in zones:
        zone.snap = Snapshot(zone)
        zone.snap.snapshot()
        print('snapshot of zone: {}'.format(zone.player_name))
    # prepare all zones for playing the alert
    for zone in zones:
        # Each Sonos group has one coordinator only these can play, pause, etc.
        if zone.is_coordinator:
            if not zone.is_playing_tv:  # can't pause TV - so don't try!
                # pause music for each coordinators if playing
                trans_state = zone.get_current_transport_info()
                if trans_state['current_transport_state'] == 'PLAYING':
                    zone.pause()
        # For every Sonos player set volume and mute for every zone
        zone.volume = alert_volume
        zone.mute = False
    # play the sound (uri) on each sonos coordinator
    print('will play: {} on all coordinators'.format(alert_uri))
    for zone in zones:
        if zone.is_coordinator:
            zone.play_uri(uri=alert_uri, title='Sonos Alert')
    # wait for alert_duration
    time.sleep(alert_duration)
    # restore each zone to previous state
    for zone in zones:
        print('restoring {}'.format(zone.player_name))
        zone.snap.restore(fade=fade_back)
Get an item from the cache for this combination of args and kwargs.
Args:
*args: any arguments.
**kwargs: any keyword arguments.
Returns:
object: The object which has been found in the cache, or `None` if
no unexpired item is found. This means that there is no point
def get(self, *args, **kwargs):
    """Get an item from the cache for this combination of args and kwargs.

    Args:
        *args: any arguments.
        **kwargs: any keyword arguments.

    Returns:
        object: The object which has been found in the cache, or `None` if
            no unexpired item is found. This means that there is no point
            storing an item in the cache if it is `None`.
    """
    if not self.enabled:
        return None
    cache_key = self.make_key(args, kwargs)
    # Lock and load: look for an unexpired entry under this key.
    with self._cache_lock:
        entry = self._cache.get(cache_key)
        if entry is not None:
            expirytime, item = entry
            if expirytime >= time():
                return item
            # The entry has expired - evict it.
            del self._cache[cache_key]
    # Nothing found
    return None
Ensure an Album Art URI is an absolute URI.
Args:
url (str): the album art URI.
Returns:
def build_album_art_full_uri(self, url):
    """Ensure an Album Art URI is an absolute URI.

    Args:
        url (str): the album art URI.

    Returns:
        str: An absolute URI.
    """
    # Relative URIs from the device lack scheme and host, so prefix them
    # with the player's address; absolute URIs pass through untouched.
    if url.startswith(('http:', 'https:')):
        return url
    return 'http://' + self.soco.ip_address + ':1400' + url
Update an item's Album Art URI to be an absolute URI.
Args:
item: The item to update the URI for | def _update_album_art_to_full_uri(self, item):
if getattr(item, 'album_art_uri', False):
item.album_art_uri = self.build_album_art_full_uri(
item.album_art_uri) | 171,810 |
Search for an artist, an artist's albums, or specific track.
Args:
artist (str): an artist's name.
album (str, optional): an album name. Default `None`.
track (str, optional): a track name. Default `None`.
full_album_art_uri (bool): whether the album art URI should be
absolute (i.e. including the IP address). Default `False`.
Returns:
def search_track(self, artist, album=None, track=None,
                 full_album_art_uri=False):
    """Search for an artist, an artist's albums, or specific track.

    Args:
        artist (str): an artist's name.
        album (str, optional): an album name. Default `None`.
        track (str, optional): a track name. Default `None`.
        full_album_art_uri (bool): whether the album art URI should be
            absolute (i.e. including the IP address). Default `False`.

    Returns:
        A `SearchResult` instance.
    """
    # Browse by [artist, album]; an empty album string means "any album".
    result = self.get_album_artists(
        full_album_art_uri=full_album_art_uri,
        subcategories=[artist, album or ''],
        search_term=track,
        complete_result=True)
    result._metadata['search_type'] = 'search_track'
    return result
Get an artist's albums.
Args:
artist (str): an artist's name.
full_album_art_uri: whether the album art URI should be
absolute (i.e. including the IP address). Default `False`.
Returns:
def get_albums_for_artist(self, artist, full_album_art_uri=False):
    """Get an artist's albums.

    Args:
        artist (str): an artist's name.
        full_album_art_uri: whether the album art URI should be
            absolute (i.e. including the IP address). Default `False`.

    Returns:
        A `SearchResult` instance.
    """
    result = self.get_album_artists(
        full_album_art_uri=full_album_art_uri,
        subcategories=[artist],
        complete_result=True)
    # Keep only the album items from the result.
    albums = [item for item in result if item.__class__ == DidlMusicAlbum]
    # It is necessary to update the list of items in two places, due to
    # a bug in SearchResult
    result[:] = albums
    result._metadata.update({
        'item_list': albums,
        'search_type': 'albums_for_artist',
        'number_returned': len(albums),
        'total_matches': len(albums)
    })
    return result
Get the tracks of an artist's album.
Args:
artist (str): an artist's name.
album (str): an album name.
full_album_art_uri: whether the album art URI should be
absolute (i.e. including the IP address). Default `False`.
Returns:
def get_tracks_for_album(self, artist, album, full_album_art_uri=False):
    """Get the tracks of an artist's album.

    Args:
        artist (str): an artist's name.
        album (str): an album name.
        full_album_art_uri: whether the album art URI should be
            absolute (i.e. including the IP address). Default `False`.

    Returns:
        A `SearchResult` instance.
    """
    # Browse by the [artist, album] subcategory path.
    result = self.get_album_artists(
        full_album_art_uri=full_album_art_uri,
        subcategories=[artist, album],
        complete_result=True)
    result._metadata['search_type'] = 'tracks_for_album'
    return result
Called when a method on the instance cannot be found.
Causes an action to be sent to UPnP server. See also
`object.__getattr__`.
Args:
action (str): The name of the unknown method.
Returns:
def __getattr__(self, action):
    """Called when a method on the instance cannot be found.

    Causes an action to be sent to UPnP server. See also
    `object.__getattr__`.

    Args:
        action (str): The name of the unknown method.

    Returns:
        callable: The callable to be invoked.
    """
    # Build a proxy function which forwards any call to send_command.
    def _dispatcher(self, *args, **kwargs):
        return self.send_command(action, *args, **kwargs)

    # Rename the function so it appears to be the called method. We
    # probably don't need this, but it doesn't harm.
    _dispatcher.__name__ = action
    # Turn the plain function into a bound method of this instance via the
    # descriptor protocol (i.e. one that receives self automatically).
    # pylint: disable=no-member
    bound = _dispatcher.__get__(self, self.__class__)
    # Cache the bound method on the instance so subsequent lookups bypass
    # __getattr__ entirely.
    setattr(self, action, bound)
    log.debug("Dispatching method %s", action)
    return bound
Extract arguments and their values from a SOAP response.
Args:
xml_response (str): SOAP/xml response text (unicode,
not utf-8).
Returns:
def unwrap_arguments(xml_response):
    """Extract arguments and their values from a SOAP response.

    Args:
        xml_response (str): SOAP/xml response text (unicode,
            not utf-8).

    Returns:
        dict: a dict of ``{argument_name: value}`` items.
    """
    # A UPnP SOAP response (including headers) looks like this:
    #
    # HTTP/1.1 200 OK
    # CONTENT-LENGTH: bytes in body
    # CONTENT-TYPE: text/xml; charset="utf-8" DATE: when response was
    # generated
    # EXT:
    # SERVER: OS/version UPnP/1.0 product/version
    #
    # <?xml version="1.0"?>
    # <s:Envelope
    #   xmlns:s="http://schemas.xmlsoap.org/soap/envelope/"
    #   s:encodingStyle="http://schemas.xmlsoap.org/soap/encoding/">
    #   <s:Body>
    #     <u:actionNameResponse
    #       xmlns:u="urn:schemas-upnp-org:service:serviceType:v">
    #       <argumentName>out arg value</argumentName>
    #       ... other out args and their values go here, if any
    #     </u:actionNameResponse>
    #   </s:Body>
    # </s:Envelope>

    # Elementree (in python 2.x) seems to prefer to be fed bytes rather
    # than unicode.
    encoded = xml_response.encode('utf-8')
    try:
        tree = XML.fromstring(encoded)
    except XML.ParseError:
        # Perhaps illegal xml chars caused the failure; filter them out
        # (as unicode) and try once more.
        cleaned = illegal_xml_re.sub('', encoded.decode('utf-8'))
        tree = XML.fromstring(cleaned.encode('utf-8'))
    # The first child of <Body> is <{actionName}Response>; its children are
    # the out arguments. XML unescaping is carried out for us by elementree.
    body = tree.find("{http://schemas.xmlsoap.org/soap/envelope/}Body")
    action_response = body[0]
    return {arg.tag: arg.text or "" for arg in action_response}
Dissect a UPnP error, and raise an appropriate exception.
Args:
xml_error (str): a unicode string containing the body of the
UPnP/SOAP Fault response. Raises an exception containing the
error code. | def handle_upnp_error(self, xml_error):
# An error code looks something like this:
# HTTP/1.1 500 Internal Server Error
# CONTENT-LENGTH: bytes in body
# CONTENT-TYPE: text/xml; charset="utf-8"
# DATE: when response was generated
# EXT:
# SERVER: OS/version UPnP/1.0 product/version
# <?xml version="1.0"?>
# <s:Envelope
# xmlns:s="http://schemas.xmlsoap.org/soap/envelope/"
# s:encodingStyle="http://schemas.xmlsoap.org/soap/encoding/">
# <s:Body>
# <s:Fault>
# <faultcode>s:Client</faultcode>
# <faultstring>UPnPError</faultstring>
# <detail>
# <UPnPError xmlns="urn:schemas-upnp-org:control-1-0">
# <errorCode>error code</errorCode>
# <errorDescription>error string</errorDescription>
# </UPnPError>
# </detail>
# </s:Fault>
# </s:Body>
# </s:Envelope>
#
# All that matters for our purposes is the errorCode.
# errorDescription is not required, and Sonos does not seem to use it.
# NB need to encode unicode strings before passing to ElementTree
xml_error = xml_error.encode('utf-8')
error = XML.fromstring(xml_error)
log.debug("Error %s", xml_error)
error_code = error.findtext(
'.//{urn:schemas-upnp-org:control-1-0}errorCode')
if error_code is not None:
description = self.UPNP_ERRORS.get(int(error_code), '')
raise SoCoUPnPException(
message='UPnP Error {} received: {} from {}'.format(
error_code, description, self.soco.ip_address),
error_code=error_code,
error_description=description,
error_xml=xml_error
)
else:
# Unknown error, so just return the entire response
log.error("Unknown error received from %s", self.soco.ip_address)
raise UnknownSoCoException(xml_error) | 171,840 |
Convert any number of `DidlObjects <DidlObject>` to a unicode xml
string.
Args:
*args (DidlObject): One or more `DidlObject` (or subclass) instances.
Returns:
str: A unicode string representation of DIDL-Lite XML in the form
def to_didl_string(*args):
    """Convert any number of `DidlObjects <DidlObject>` to a unicode xml
    string.

    Args:
        *args (DidlObject): One or more `DidlObject` (or subclass) instances.

    Returns:
        str: A unicode string representation of DIDL-Lite XML in the form
        ``'<DIDL-Lite ...>...</DIDL-Lite>'``.
    """
    root = XML.Element(
        'DIDL-Lite',
        {
            'xmlns': "urn:schemas-upnp-org:metadata-1-0/DIDL-Lite/",
            'xmlns:dc': "http://purl.org/dc/elements/1.1/",
            'xmlns:upnp': "urn:schemas-upnp-org:metadata-1-0/upnp/",
            'xmlns:r': "urn:schemas-rinconnetworks-com:metadata-1-0/"
        })
    for didl_object in args:
        root.append(didl_object.to_element())
    if sys.version_info[0] == 2:
        # Python 2's tostring already produces a (byte) str.
        return XML.tostring(root)
    return XML.tostring(root, encoding='unicode')
Set the resource properties from a ``<res>`` element.
Args:
element (~xml.etree.ElementTree.Element): The ``<res>``
def from_element(cls, element):
    """Set the resource properties from a ``<res>`` element.

    Args:
        element (~xml.etree.ElementTree.Element): The ``<res>``
            element
    """
    def _int_helper(name):
        # Parse an optional integer attribute, raising a metadata error
        # for values that are present but not numeric.
        result = element.get(name)
        if result is None:
            return None
        try:
            return int(result)
        except ValueError:
            raise DIDLMetadataError(
                'Could not convert {0} to an integer'.format(name))

    # protocolInfo is the only required attribute.
    protocol_info = element.get('protocolInfo')
    if protocol_info is None:
        raise DIDLMetadataError('Could not create Resource from Element: '
                                'protocolInfo not found (required).')
    content = {
        'protocol_info': protocol_info,
        # Optional attributes
        'import_uri': element.get('importUri'),
        'size': _int_helper('size'),
        'duration': element.get('duration'),
        'bitrate': _int_helper('bitrate'),
        'sample_frequency': _int_helper('sampleFrequency'),
        'bits_per_sample': _int_helper('bitsPerSample'),
        'nr_audio_channels': _int_helper('nrAudioChannels'),
        'resolution': element.get('resolution'),
        'color_depth': _int_helper('colorDepth'),
        'protection': element.get('protection'),
        # The uri itself is the element's text content.
        'uri': element.text,
    }
    return cls(**content)
Return a dict representation of the `DidlResource`.
Args:
remove_nones (bool, optional): Optionally remove dictionary
elements when their value is `None`.
Returns:
def to_dict(self, remove_nones=False):
    """Return a dict representation of the `DidlResource`.

    Args:
        remove_nones (bool, optional): Optionally remove dictionary
            elements when their value is `None`.

    Returns:
        dict: a dict representing the `DidlResource`
    """
    fields = ('uri', 'protocol_info', 'import_uri', 'size', 'duration',
              'bitrate', 'sample_frequency', 'bits_per_sample',
              'nr_audio_channels', 'resolution', 'color_depth',
              'protection')
    content = {name: getattr(self, name) for name in fields}
    if remove_nones:
        # Drop None-valued entries to optimize the size of the returned
        # structure.
        content = {k: v for k, v in content.items() if v is not None}
    return content
Create a new instance.
Args:
name (str): Name of the class.
bases (tuple): Base classes.
def __new__(cls, name, bases, attrs):
    """Create a new instance.

    Args:
        name (str): Name of the class.
        bases (tuple): Base classes.
        attrs (dict): attributes defined for the class.
    """
    new_cls = super(DidlMetaClass, cls).__new__(cls, name, bases, attrs)
    # Register all subclasses with the global _DIDL_CLASS_TO_CLASS mapping,
    # keyed on the class's 'item_class' attribute (if it defines one), so a
    # class can later be looked up from its DIDL upnp class string.
    item_class = attrs.get('item_class', None)
    if item_class is not None:
        _DIDL_CLASS_TO_CLASS[item_class] = new_cls
    return new_cls
Create an instance of this class from an ElementTree xml Element.
An alternative constructor. The element must be a DIDL-Lite <item> or
<container> element, and must be properly namespaced.
Args:
xml (~xml.etree.ElementTree.Element): An
:class:`~xml.etree.ElementTree.Element` object. | def from_element(cls, element): # pylint: disable=R0914
# We used to check here that we have the right sort of element,
# ie a container or an item. But Sonos seems to use both
# indiscriminately, eg a playlistContainer can be an item or a
# container. So we now just check that it is one or the other.
tag = element.tag
if not (tag.endswith('item') or tag.endswith('container')):
raise DIDLMetadataError(
"Wrong element. Expected <item> or <container>,"
" got <{0}> for class {1}'".format(
tag, cls.item_class))
# and that the upnp matches what we are expecting
item_class = element.find(ns_tag('upnp', 'class')).text
# In case this class has an # specified unofficial
# subclass, ignore it by stripping it from item_class
if '.#' in item_class:
item_class = item_class[:item_class.find('.#')]
if item_class != cls.item_class:
raise DIDLMetadataError(
"UPnP class is incorrect. Expected '{0}',"
" got '{1}'".format(cls.item_class, item_class))
# parent_id, item_id and restricted are stored as attributes on the
# element
item_id = element.get('id', None)
if item_id is None:
raise DIDLMetadataError("Missing id attribute")
item_id = really_unicode(item_id)
parent_id = element.get('parentID', None)
if parent_id is None:
raise DIDLMetadataError("Missing parentID attribute")
parent_id = really_unicode(parent_id)
# CAUTION: This implementation deviates from the spec.
# Elements are normally required to have a `restricted` tag, but
# Spotify Direct violates this. To make it work, a missing restricted
# tag is interpreted as `restricted = True`.
restricted = element.get('restricted', None)
restricted = False if restricted in [0, 'false', 'False'] else True
# Similarily, all elements should have a title tag, but Spotify Direct
# does not comply
title_elt = element.find(ns_tag('dc', 'title'))
if title_elt is None or not title_elt.text:
title = ''
else:
title = really_unicode(title_elt.text)
# Deal with any resource elements
resources = []
for res_elt in element.findall(ns_tag('', 'res')):
resources.append(
DidlResource.from_element(res_elt))
# and the desc element (There is only one in Sonos)
desc = element.findtext(ns_tag('', 'desc'))
# Get values of the elements listed in _translation and add them to
# the content dict
content = {}
for key, value in cls._translation.items():
result = element.findtext(ns_tag(*value))
if result is not None:
# We store info as unicode internally.
content[key] = really_unicode(result)
# Convert type for original track number
if content.get('original_track_number') is not None:
content['original_track_number'] = \
int(content['original_track_number'])
# Now pass the content dict we have just built to the main
# constructor, as kwargs, to create the object
return cls(title=title, parent_id=parent_id, item_id=item_id,
restricted=restricted, resources=resources, desc=desc,
**content) | 171,865 |
Create an instance from a dict.
An alternative constructor. Equivalent to ``DidlObject(**content)``.
Args:
content (dict): a dict containing metadata information. Required.
def from_dict(cls, content):
    """Create an instance from a dict.

    An alternative constructor. Equivalent to ``DidlObject(**content)``.

    Args:
        content (dict): a dict containing metadata information. Required.
            Valid keys are the same as the parameters for `DidlObject`.
    """
    # Rehydrate any resource dicts into DidlResource instances first.
    if 'resources' in content:
        content['resources'] = [
            DidlResource.from_dict(resource)
            for resource in content['resources']
        ]
    return cls(**content)
Return the dict representation of the instance.
Args:
remove_nones (bool, optional): Optionally remove dictionary
elements when their value is `None`.
Returns:
def to_dict(self, remove_nones=False):
    """Return the dict representation of the instance.

    Args:
        remove_nones (bool, optional): Optionally remove dictionary
            elements when their value is `None`.

    Returns:
        dict: a dict representation of the `DidlObject`.
    """
    content = {}
    # Every attribute listed in _translation that is actually set on this
    # instance goes into the dict.
    for key in self._translation:
        if hasattr(self, key):
            content[key] = getattr(self, key)
    # parent_id, item_id, restricted, title and resources are not listed in
    # _translation, so add them explicitly.
    content['parent_id'] = self.parent_id
    content['item_id'] = self.item_id
    content['restricted'] = self.restricted
    content['title'] = self.title
    if self.resources != []:
        content['resources'] = [
            resource.to_dict(remove_nones=remove_nones)
            for resource in self.resources
        ]
    content['desc'] = self.desc
    return content
def to_element(self, include_namespaces=False):
    """Return an ElementTree Element representing this instance.

    Args:
        include_namespaces (bool, optional): If True, include xml
            namespace attributes on the root element

    Return:
        ~xml.etree.ElementTree.Element: an Element.
    """
    attributes = {}
    if include_namespaces:
        attributes['xmlns'] = "urn:schemas-upnp-org:metadata-1-0/DIDL-Lite/"
        attributes['xmlns:dc'] = "http://purl.org/dc/elements/1.1/"
        attributes['xmlns:upnp'] = "urn:schemas-upnp-org:metadata-1-0/upnp/"
    attributes['parentID'] = self.parent_id
    attributes['restricted'] = 'true' if self.restricted else 'false'
    attributes['id'] = self.item_id
    elt = XML.Element(self.tag, attributes)
    # The title must always come first, according to the DIDL-Lite spec.
    XML.SubElement(elt, 'dc:title').text = self.title
    for resource in self.resources:
        elt.append(resource.to_element())
    # Every attribute listed in _translation becomes a sub-element,
    # using the namespace prefix recorded for it. An empty prefix means
    # the default namespace, so the tag carries no prefix at all.
    for key, value in self._translation.items():
        if hasattr(self, key):
            tag = "%s:%s" % value if value[0] else "%s" % value[1]
            XML.SubElement(elt, tag).text = ("%s" % getattr(self, key))
    XML.SubElement(elt, 'upnp:class').text = self.item_class
    # Finally, the <desc> element carrying the resource metadata id.
    desc_elt = XML.SubElement(elt, 'desc', {
        'id': 'cdudn',
        'nameSpace': 'urn:schemas-rinconnetworks-com:metadata-1-0/'})
    desc_elt.text = self.desc
    return elt
def renew(self, requested_timeout=None):
    """Renew the event subscription.

    You should not try to renew a subscription which has been
    unsubscribed, or once it has expired.

    Args:
        requested_timeout (int, optional): The period for which a renewal
            request should be made. If None (the default), use the timeout
            requested on subscription.

    Raises:
        SoCoException: if the subscription has been unsubscribed, was
            never established, or has already expired.
    """
    # NB This code is sometimes called from a separate thread (when
    # subscriptions are auto-renewed). Be careful to ensure thread-safety
    if self._has_been_unsubscribed:
        raise SoCoException(
            'Cannot renew subscription once unsubscribed')
    if not self.is_subscribed:
        raise SoCoException(
            'Cannot renew subscription before subscribing')
    if self.time_left == 0:
        raise SoCoException(
            'Cannot renew subscription after expiry')
    # SUBSCRIBE publisher path HTTP/1.1
    # HOST: publisher host:publisher port
    # SID: uuid:subscription UUID
    # TIMEOUT: Second-requested subscription duration (optional)
    headers = {
        'SID': self.sid
    }
    if requested_timeout is None:
        requested_timeout = self.requested_timeout
    if requested_timeout is not None:
        headers["TIMEOUT"] = "Second-{}".format(requested_timeout)
    response = requests.request(
        'SUBSCRIBE',
        self.service.base_url + self.service.event_subscription_url,
        headers=headers)
    response.raise_for_status()
    timeout = response.headers['timeout']
    # According to the spec, timeout can be "infinite" or "second-123"
    # where 123 is a number of seconds. Sonos uses "Second-123" (with a
    # capital letter)
    if timeout.lower() == 'infinite':
        self.timeout = None
    else:
        # BUG FIX: the original used timeout.lstrip('Second-'), but
        # str.lstrip() strips any leading characters from the given
        # *set*, not the literal prefix, so it could over-strip. Remove
        # the exact "Second-" prefix instead.
        if timeout.lower().startswith('second-'):
            timeout = timeout[len('Second-'):]
        self.timeout = int(timeout)
    self._timestamp = time.time()
    self.is_subscribed = True
    log.info(
        "Renewed subscription to %s, sid: %s",
        self.service.base_url + self.service.event_subscription_url,
        self.sid)
def play_from_queue(self, index, start=True):
    """Play a track from the queue by index.

    The index number is required as an argument, where the first index
    is 0.

    Args:
        index (int): 0-based index of the track to play
        start (bool): If the item that has been set should start playing
    """
    # The speaker's own details are needed below, so fetch them now if
    # we do not have them cached yet.
    if not self.speaker_info:
        self.get_speaker_info()
    # Step 1: make the queue itself the AV transport source.
    queue_uri = 'x-rincon-queue:{0}#0'.format(self.uid)
    self.avTransport.SetAVTransportURI([
        ('InstanceID', 0),
        ('CurrentURI', queue_uri),
        ('CurrentURIMetaData', '')
    ])
    # Step 2: seek to the requested track (Sonos counts tracks from 1).
    self.avTransport.Seek([
        ('InstanceID', 0),
        ('Unit', 'TRACK_NR'),
        ('Target', index + 1)
    ])
    # Step 3: optionally start playback of whatever is now selected.
    if start:
        self.play()
def switch_to_line_in(self, source=None):
    """Switch the speaker's input to line-in.

    Args:
        source (SoCo): The speaker whose line-in should be played.
            Default is line-in from the speaker itself.
    """
    # Use our own line-in unless another speaker was supplied.
    uid = source.uid if source else self.uid
    self.avTransport.SetAVTransportURI([
        ('InstanceID', 0),
        ('CurrentURI', 'x-rincon-stream:{0}'.format(uid)),
        ('CurrentURIMetaData', '')
    ])
def get_speaker_info(self, refresh=False, timeout=None):
    """Get information about the Sonos speaker.

    Arguments:
        refresh(bool): Refresh the speaker info cache.
        timeout: How long to wait for the server to send
            data before giving up, as a float, or a
            `(connect timeout, read timeout)` tuple
            e.g. (3, 5). Default is no timeout.

    Returns:
        dict: Information about the Sonos speaker, such as the UID,
        MAC Address, and Zone Name.
    """
    # Serve the cached copy unless a refresh was explicitly requested.
    if self.speaker_info and refresh is False:
        return self.speaker_info
    response = requests.get('http://' + self.ip_address +
                            ':1400/xml/device_description.xml',
                            timeout=timeout)
    dom = XML.fromstring(response.content)
    device = dom.find('{urn:schemas-upnp-org:device-1-0}device')
    if device is None:
        # No <device> element found; nothing to extract.
        return None
    udn = '{urn:schemas-upnp-org:device-1-0}'
    self.speaker_info['zone_name'] = device.findtext(udn + 'roomName')
    # There is no zone icon in device_description.xml, so the player
    # icon is used instead.
    self.speaker_info['player_icon'] = device.findtext(
        udn + 'iconList/' + udn + 'icon/' + udn + 'url')
    self.speaker_info['uid'] = self.uid
    # The remaining simple fields map straight from XML tag to key.
    for info_key, tag in (
            ('serial_number', 'serialNum'),
            ('software_version', 'softwareVersion'),
            ('hardware_version', 'hardwareVersion'),
            ('model_number', 'modelNumber'),
            ('model_name', 'modelName'),
            ('display_version', 'displayVersion')):
        self.speaker_info[info_key] = device.findtext(udn + tag)
    # There is no MAC address field in the XML; extract it from the
    # serial number instead.
    self.speaker_info['mac_address'] = \
        self.speaker_info['serial_number'].split(':')[0]
    return self.speaker_info
def add_multiple_to_queue(self, items, container=None):
    """Add a sequence of items to the queue.

    Args:
        items (list): A sequence of items to the be added to the queue
        container (DidlObject, optional): A container object which
            includes the items.
    """
    if container is None:
        # Sonos accepts empty container details as well.
        container_uri = ''
        container_metadata = ''
    else:
        container_uri = container.resources[0].uri
        container_metadata = to_didl_string(container)
    # Each AddMultipleURIsToQueue request can carry at most 16 items,
    # so send the items in chunks of that size.
    chunk_size = 16
    item_list = list(items)  # materialise so we can slice
    for offset in range(0, len(item_list), chunk_size):
        chunk = item_list[offset:offset + chunk_size]
        uris = ' '.join(entry.resources[0].uri for entry in chunk)
        uri_metadata = ' '.join(to_didl_string(entry) for entry in chunk)
        self.avTransport.AddMultipleURIsToQueue([
            ('InstanceID', 0),
            ('UpdateID', 0),
            ('NumberOfURIs', len(chunk)),
            ('EnqueuedURIs', uris),
            ('EnqueuedURIsMetaData', uri_metadata),
            ('ContainerURI', container_uri),
            ('ContainerMetaData', container_metadata),
            ('DesiredFirstTrackNumberEnqueued', 0),
            ('EnqueueAsNext', 0)
        ])
def remove_from_queue(self, index):
    """Remove a track from the queue by index. The index number is
    required as an argument, where the first index is 0.

    Args:
        index (int): The (0-based) index of the track to remove
    """
    # Queue object IDs count from 1, hence the +1.
    # TODO: what do the ObjectID/UpdateID parameters actually do?
    self.avTransport.RemoveTrackFromQueue([
        ('InstanceID', 0),
        ('ObjectID', 'Q:0/' + str(index + 1)),
        ('UpdateID', '0'),
    ])
def __get_favorites(self, favorite_type, start=0, max_items=100):
    """Helper method for `get_favorite_radio_*` methods.

    Args:
        favorite_type (str): Specify either `RADIO_STATIONS` or
            `RADIO_SHOWS`.
        start (int): Which number to start the retrieval from. Used for
            paging.
        max_items (int): The total number of results to return.

    Returns:
        dict: with keys 'total' (match count reported by the device),
        'returned' (number of entries parsed here) and 'favorites'
        (a list of dicts with 'title', 'uri' and, for Sonos favorites,
        'meta').
    """
    # Anything other than the two radio types is treated as a request
    # for the Sonos favorites list ('FV:2').
    if favorite_type not in (RADIO_SHOWS, RADIO_STATIONS):
        favorite_type = SONOS_FAVORITES
    response = self.contentDirectory.Browse([
        ('ObjectID',
         'FV:2' if favorite_type is SONOS_FAVORITES
         else 'R:0/{0}'.format(favorite_type)),
        ('BrowseFlag', 'BrowseDirectChildren'),
        ('Filter', '*'),
        ('StartingIndex', start),
        ('RequestedCount', max_items),
        ('SortCriteria', '')
    ])
    result = {}
    favorites = []
    results_xml = response['Result']
    if results_xml != '':
        # Favorites are returned in DIDL-Lite format
        metadata = XML.fromstring(really_utf8(results_xml))
        # Radio shows come back as <container> elements, everything
        # else as <item> elements.
        for item in metadata.findall(
                '{urn:schemas-upnp-org:metadata-1-0/DIDL-Lite/}container'
                if favorite_type == RADIO_SHOWS else
                '{urn:schemas-upnp-org:metadata-1-0/DIDL-Lite/}item'):
            favorite = {}
            favorite['title'] = item.findtext(
                '{http://purl.org/dc/elements/1.1/}title')
            favorite['uri'] = item.findtext(
                '{urn:schemas-upnp-org:metadata-1-0/DIDL-Lite/}res')
            if favorite_type == SONOS_FAVORITES:
                # Sonos favorites additionally carry resource metadata
                # needed to play them back.
                favorite['meta'] = item.findtext(
                    '{urn:schemas-rinconnetworks-com:metadata-1-0/}resMD')
            favorites.append(favorite)
    result['total'] = response['TotalMatches']
    result['returned'] = len(favorites)
    result['favorites'] = favorites
    return result
def create_sonos_playlist(self, title):
    """Create a new empty Sonos playlist.

    Args:
        title: Name of the playlist

    :rtype: :py:class:`~.soco.data_structures.DidlPlaylistContainer`
    """
    reply = self.avTransport.CreateSavedQueue([
        ('InstanceID', 0),
        ('Title', title),
        ('EnqueuedURI', ''),
        ('EnqueuedURIMetaData', ''),
    ])
    assigned_id = reply['AssignedObjectID']
    # The numeric part of the assigned id (after the 'SQ:' prefix)
    # identifies the saved queue file on the device.
    number = assigned_id.split(':', 2)[1]
    playlist_uri = (
        "file:///jffs/settings/savedqueues.rsq#{0}".format(number))
    resources = [DidlResource(
        uri=playlist_uri, protocol_info="x-rincon-playlist:*:*:*")]
    return DidlPlaylistContainer(
        resources=resources, title=title, parent_id='SQ:',
        item_id=assigned_id)
def create_sonos_playlist_from_queue(self, title):
    """Create a new Sonos playlist from the current queue.

    Args:
        title: Name of the playlist

    :rtype: :py:class:`~.soco.data_structures.DidlPlaylistContainer`
    """
    # Note: probably same as Queue service method SaveAsSonosPlaylist
    # but this has not been tested. This method is what the
    # controller uses.
    reply = self.avTransport.SaveQueue([
        ('InstanceID', 0),
        ('Title', title),
        ('ObjectID', '')
    ])
    assigned_id = reply['AssignedObjectID']
    # The numeric part of the assigned id (after the 'SQ:' prefix)
    # identifies the saved queue file on the device.
    number = assigned_id.split(':', 2)[1]
    playlist_uri = (
        "file:///jffs/settings/savedqueues.rsq#{0}".format(number))
    resources = [DidlResource(
        uri=playlist_uri, protocol_info="x-rincon-playlist:*:*:*")]
    return DidlPlaylistContainer(
        resources=resources, title=title, parent_id='SQ:',
        item_id=assigned_id)
def remove_sonos_playlist(self, sonos_playlist):
    """Remove a Sonos playlist.

    Args:
        sonos_playlist (DidlPlaylistContainer): Sonos playlist to remove
            or the item_id (str).

    Returns:
        bool: True if succesful, False otherwise

    Raises:
        SoCoUPnPException: If sonos_playlist does not point to a valid
            object.
    """
    # Accept either a DidlPlaylistContainer or a raw item_id string.
    try:
        object_id = sonos_playlist.item_id
    except AttributeError:
        object_id = sonos_playlist
    return self.contentDirectory.DestroyObject([('ObjectID', object_id)])
def add_item_to_sonos_playlist(self, queueable_item, sonos_playlist):
    """Adds a queueable item to a Sonos' playlist.

    Args:
        queueable_item (DidlObject): the item to add to the Sonos' playlist
        sonos_playlist (DidlPlaylistContainer): the Sonos' playlist to
            which the item should be added
    """
    # The saved queue's current update id must accompany the request.
    # A one-item search against the playlist is enough to obtain it.
    response, _ = self.music_library._music_lib_search(
        sonos_playlist.item_id, 0, 1)
    update_id = response['UpdateID']
    self.avTransport.AddURIToSavedQueue([
        ('InstanceID', 0),
        ('UpdateID', update_id),
        ('ObjectID', sonos_playlist.item_id),
        ('EnqueuedURI', queueable_item.resources[0].uri),
        ('EnqueuedURIMetaData', to_didl_string(queueable_item)),
        # 2 ** 32 - 1 = 4294967295, this field has always this value. Most
        # likely, playlist positions are represented as a 32 bit uint and
        # this is therefore the largest index possible. Asking to add at
        # this index therefore probably amounts to adding it "at the end"
        ('AddAtIndex', 4294967295)
    ])
def really_unicode(in_string):
    """Make a string unicode. Really.

    Ensure ``in_string`` is returned as unicode through a series of
    progressively relaxed decodings.

    Args:
        in_string (str): The string to convert.

    Returns:
        str: Unicode.

    Raises:
        ValueError
    """
    if isinstance(in_string, StringType):
        # Try strict utf-8 first, then latin-1, and finally fall back
        # to ascii with replacement characters (which cannot fail).
        for codec_args in (('utf-8',), ('latin-1',), ('ascii', 'replace')):
            try:
                # pylint: disable=star-args
                in_string = in_string.decode(*codec_args)
            except UnicodeDecodeError:
                continue
            else:
                break
    if not isinstance(in_string, UnicodeType):
        raise ValueError('%s is not a string at all.' % in_string)
    return in_string
def camel_to_underscore(string):
    """Convert camelcase to lowercase and underscore.

    Recipe from http://stackoverflow.com/a/1176023

    Args:
        string (str): The string to convert.

    Returns:
        str: The converted string.
    """
    # Two regex passes: first split an initial capital off a preceding
    # word, then split remaining lower/upper boundaries, and lowercase.
    partially_converted = FIRST_CAP_RE.sub(r'\1_\2', string)
    return ALL_CAP_RE.sub(r'\1_\2', partially_converted).lower()
def prettify(unicode_text):
    """Return a pretty-printed version of a unicode XML string.

    Useful for debugging.

    Args:
        unicode_text (str): A text representation of XML (unicode,
            *not* utf-8).

    Returns:
        str: A pretty-printed version of the input.
    """
    # Imported locally because this is a debugging helper only.
    import xml.dom.minidom
    document = xml.dom.minidom.parseString(unicode_text.encode('utf-8'))
    return document.toprettyxml(indent="  ", newl="\n")
def prepare_headers(self, http_headers, soap_action):
    """Prepare the http headers for sending.

    Add the SOAPACTION header to the others.

    Args:
        http_headers (dict): A dict in the form {'Header': 'Value,..}
            containing http headers to use for the http request.
        soap_action (str): The value of the SOAPACTION header.

    Returns:
        dict: headers including the SOAPACTION header.
    """
    # Start from the mandatory content type, layer on the SOAP action,
    # then any caller-supplied headers (which take precedence).
    headers = {'Content-Type': 'text/xml; charset="utf-8"'}
    if soap_action is not None:
        headers['SOAPACTION'] = '"{}"'.format(soap_action)
    if http_headers is not None:
        headers.update(http_headers)
    return headers
def prepare_soap_body(self, method, parameters, namespace):
    """Prepare the SOAP message body for sending.

    Args:
        method (str): The name of the method to call.
        parameters (list): A list of (name, value) tuples containing
            the parameters to pass to the method.
        namespace (str): The XML namespace to use for the method.

    Returns:
        str: A properly formatted SOAP Body.
    """
    # Render each parameter as an XML element, escaping the value.
    # "%s" % value coerces to unicode on both python 2 and 3 without
    # using the py2-only 'unicode' builtin.
    wrapped_params = "".join(
        "<{name}>{value}</{name}>".format(
            name=name, value=escape("%s" % value, {'"': "&quot;"}))
        for name, value in parameters)
    # Wrap the parameters in the method element, namespaced if needed.
    if namespace is None:
        return '<{method}>{params}</{method}>'.format(
            method=method, params=wrapped_params)
    return '<{method} xmlns="{namespace}">{params}</{method}>'.format(
        method=method, params=wrapped_params, namespace=namespace)
def prepare_soap_envelope(self, prepared_soap_header, prepared_soap_body):
    """Prepare the SOAP Envelope for sending.

    Args:
        prepared_soap_header (str): A SOAP Header prepared by
            `prepare_soap_header`
        prepared_soap_body (str): A SOAP Body prepared by
            `prepare_soap_body`

    Returns:
        str: A prepared SOAP Envelope
    """
    # Wrap the header and body in the standard SOAP 1.1 envelope.
    return (
        '<?xml version="1.0"?>'
        '<s:Envelope xmlns:s="http://schemas.xmlsoap.org/soap/envelope/"'
        ' s:encodingStyle="http://schemas.xmlsoap.org/soap/encoding/">'
        + prepared_soap_header +
        '<s:Body>'
        + prepared_soap_body +
        '</s:Body>'
        '</s:Envelope>')
def from_didl_string(string):
    """Convert a unicode xml string to a list of `DIDLObjects <DidlObject>`.

    Args:
        string (str): A unicode string containing an XML representation of one
            or more DIDL-Lite items (in the form ``'<DIDL-Lite ...>
            ...</DIDL-Lite>'``)

    Returns:
        list: A list of one or more instances of `DidlObject` or a subclass

    Raises:
        DIDLMetadataError: if an unknown UPnP class is encountered, or
            the DIDL element has a child other than <item> or
            <container>.
    """
    items = []
    root = XML.fromstring(string.encode('utf-8'))
    for elt in root:
        if elt.tag.endswith('item') or elt.tag.endswith('container'):
            item_class = elt.findtext(ns_tag('upnp', 'class'))
            # In case this class has an # specified unofficial
            # subclass, ignore it by stripping it from item_class
            if '.#' in item_class:
                item_class = item_class[:item_class.find('.#')]
            # Map the UPnP class string to the corresponding Python
            # class and build the object from the element.
            try:
                cls = _DIDL_CLASS_TO_CLASS[item_class]
            except KeyError:
                raise DIDLMetadataError("Unknown UPnP class: %s" % item_class)
            item = cls.from_element(elt)
            item = attempt_datastructure_upgrade(item)
            items.append(item)
        else:
            # <desc> elements are allowed as an immediate child of <DIDL-Lite>
            # according to the spec, but I have not seen one there in Sonos, so
            # we treat them as illegal. May need to fix this if this
            # causes problems.
            raise DIDLMetadataError("Illegal child of DIDL element: <%s>"
                                    % elt.tag)
    _LOG.debug(
        'Created data structures: %.20s (CUT) from Didl string "%.20s" (CUT)',
        items, string,
    )
    return items
def get_alarms(zone=None):
    """Get a set of all alarms known to the Sonos system.

    Args:
        zone (`SoCo`, optional): a SoCo instance to query. If None, a random
            instance is used. Defaults to `None`.

    Returns:
        set: A set of `Alarm` instances

    Note:
        Any existing `Alarm` instance will have its attributes updated to those
        currently stored on the Sonos system.
    """
    # Get a soco instance to query. It doesn't matter which.
    if zone is None:
        zone = discovery.any_soco()
    response = zone.alarmClock.ListAlarms()
    alarm_list = response['CurrentAlarmList']
    tree = XML.fromstring(alarm_list.encode('utf-8'))

    # An alarm list looks like this:
    # <Alarms>
    #     <Alarm ID="14" StartTime="07:00:00"
    #         Duration="02:00:00" Recurrence="DAILY" Enabled="1"
    #         RoomUUID="RINCON_000ZZZZZZ1400"
    #         ProgramURI="x-rincon-buzzer:0" ProgramMetaData=""
    #         PlayMode="SHUFFLE_NOREPEAT" Volume="25"
    #         IncludeLinkedZones="0"/>
    #     <Alarm ID="15" StartTime="07:00:00"
    #         Duration="02:00:00" Recurrence="DAILY" Enabled="1"
    #         RoomUUID="RINCON_000ZZZZZZ01400"
    #         ProgramURI="x-rincon-buzzer:0" ProgramMetaData=""
    #         PlayMode="SHUFFLE_NOREPEAT" Volume="25"
    #         IncludeLinkedZones="0"/>
    # </Alarms>

    # pylint: disable=protected-access
    alarms = tree.findall('Alarm')
    result = set()
    for alarm in alarms:
        values = alarm.attrib
        alarm_id = values['ID']
        # If an instance already exists for this ID, update and return it.
        # Otherwise, create a new one and populate its values
        if Alarm._all_alarms.get(alarm_id):
            instance = Alarm._all_alarms.get(alarm_id)
        else:
            instance = Alarm(None)
            instance._alarm_id = alarm_id
            Alarm._all_alarms[instance._alarm_id] = instance
        instance.start_time = datetime.strptime(
            values['StartTime'], "%H:%M:%S").time()  # NB StartTime, not
        # StartLocalTime, which is used by CreateAlarm
        # An empty Duration attribute means "no duration set".
        instance.duration = None if values['Duration'] == '' else\
            datetime.strptime(values['Duration'], "%H:%M:%S").time()
        instance.recurrence = values['Recurrence']
        instance.enabled = values['Enabled'] == '1'
        # Resolve the alarm's room UUID to a known zone, if any.
        instance.zone = next((z for z in zone.all_zones
                              if z.uid == values['RoomUUID']), None)
        # some alarms are not associated to zones -> filter these out
        if instance.zone is None:
            continue
        # The buzzer URI is represented as "no program URI" (None).
        instance.program_uri = None if values['ProgramURI'] ==\
            "x-rincon-buzzer:0" else values['ProgramURI']
        instance.program_metadata = values['ProgramMetaData']
        instance.play_mode = values['PlayMode']
        instance.volume = values['Volume']
        instance.include_linked_zones = values['IncludeLinkedZones'] == '1'
        result.add(instance)
    return result
def restore(self, fade=False):
    """Restore the state of a device to that which was previously saved.

    For coordinator devices restore everything. For slave devices
    only restore volume etc., not transport info (transport info
    comes from the slave's coordinator).

    Args:
        fade (bool): Whether volume should be faded up on restore.
    """
    if self.is_coordinator:
        # Start by ensuring that the speaker is paused as we don't want
        # things all rolling back when we are changing them, as this could
        # include things like audio
        transport_info = self.device.get_current_transport_info()
        if transport_info is not None:
            if transport_info['current_transport_state'] == 'PLAYING':
                self.device.pause()

        # Check if the queue should be restored
        self._restore_queue()

        # Reinstate what was playing
        if self.is_playing_queue and self.playlist_position > 0:
            # was playing from playlist
            if self.playlist_position is not None:
                # The position in the playlist returned by
                # get_current_track_info starts at 1, but when
                # playing from playlist, the index starts at 0
                # if position > 0:
                self.playlist_position -= 1

            self.device.play_from_queue(self.playlist_position, False)

            if self.track_position is not None:
                if self.track_position != "":
                    self.device.seek(self.track_position)

            # reinstate track, position, play mode, cross fade
            # Need to make sure there is a proper track selected first
            self.device.play_mode = self.play_mode
            self.device.cross_fade = self.cross_fade
        elif self.is_playing_cloud_queue:
            # was playing a cloud queue started by Alexa
            # No way yet to re-start this so prevent it throwing an error!
            pass
        else:
            # was playing a stream (radio station, file, or nothing)
            # reinstate uri and meta data
            if self.media_uri != "":
                self.device.play_uri(
                    self.media_uri, self.media_metadata, start=False)

    # For all devices:
    # Reinstate all the properties that are pretty easy to do
    self.device.mute = self.mute
    self.device.bass = self.bass
    self.device.treble = self.treble
    self.device.loudness = self.loudness

    # Reinstate volume
    # Can only change volume on device with fixed volume set to False
    # otherwise get uPnP error, so check first. Before issuing a network
    # command to check, fixed volume always has volume set to 100.
    # So only checked fixed volume if volume is 100.
    if self.volume == 100:
        fixed_vol = self.device.renderingControl.GetOutputFixed(
            [('InstanceID', 0)])['CurrentFixed']
    else:
        fixed_vol = False

    # now set volume if not fixed
    if not fixed_vol:
        if fade:
            # if fade requested in restore
            # set volume to 0 then fade up to saved volume (non blocking)
            self.device.volume = 0
            self.device.ramp_to_volume(self.volume)
        else:
            # set volume
            self.device.volume = self.volume

    # Now everything is set, see if we need to be playing, stopped
    # or paused ( only for coordinators)
    if self.is_coordinator:
        if self.transport_state == 'PLAYING':
            self.device.play()
        elif self.transport_state == 'STOPPED':
            self.device.stop()
def vals2colors(vals, cmap='GnBu_d', res=100):
    """Maps values to colors

    Args:
        vals (list or list of lists): list of values to map to colors
        cmap (str): color map (default is 'GnBu_d')
        res (int): resolution of the color map (default: 100)

    Returns:
        list of rgb tuples
    """
    # Flatten a list of lists into a single list first.
    if any(isinstance(entry, list) for entry in vals):
        vals = list(itertools.chain.from_iterable(vals))
    # Sample `res` colors from the seaborn palette, then bucket each
    # value into one of them by rank.
    palette = np.array(sns.color_palette(cmap, res))
    bin_edges = np.linspace(np.min(vals), np.max(vals) + 1, res + 1)
    ranks = np.digitize(vals, bin_edges) - 1
    return [tuple(row) for row in palette[ranks, :]]
def vals2bins(vals, res=100):
    """Maps values to bins

    Args:
        vals (list or list of lists): list of values to map to bins
        res (int): number of bins (default: 100)

    Returns:
        list of numbers representing bins
    """
    # Flatten a list of lists into a single list first.
    if any(isinstance(entry, list) for entry in vals):
        vals = list(itertools.chain.from_iterable(vals))
    # Bin edges span [min, max + 1] so every value lands in a bin.
    edges = np.linspace(np.min(vals), np.max(vals) + 1, res + 1)
    return list(np.digitize(vals, edges) - 1)
def build_grab_exception(ex, curl):
    """Build Grab exception from the pycurl exception

    Args:
        ex - the original pycurl exception
        curl - the Curl instance raised the exception

    Returns:
        A Grab exception instance, or None when the error should be
        ignored (interrupted write callback).
    """
    code = ex.args[0]
    if code == 23:
        # CURLE_WRITE_ERROR (23)
        # An error occurred when writing received data to a local file, or
        # an error was returned to libcurl from a write callback.
        # This exception should be ignored if grab_callback_interrupted
        # flag # is enabled (this happens when nohead or nobody options
        # enabled)
        #
        # Also this error is raised when curl receives KeyboardInterrupt
        # while it is processing some callback function
        # (WRITEFUNCTION, HEADERFUNCTIO, etc)
        # If you think WTF then see details here:
        # https://github.com/pycurl/pycurl/issues/413
        if getattr(curl, 'grab_callback_interrupted', None) is True:
            # If the execution of body_process callback is
            # interrupted (body_maxsize, nobody and other options)
            # then the pycurl raised exception with code 23
            # We should ignore it
            return None
        return error.GrabNetworkError(ex.args[1], ex)
    # Map the remaining curl error codes to specific Grab exceptions,
    # falling back to a generic network error.
    code_to_exception = {
        28: error.GrabTimeoutError,
        7: error.GrabConnectionError,
        67: error.GrabAuthError,
        47: error.GrabTooManyRedirectsError,
        6: error.GrabCouldNotResolveHostError,
        3: error.GrabInvalidUrl,
    }
    exc_cls = code_to_exception.get(code, error.GrabNetworkError)
    return exc_cls(ex.args[1], ex)
def _blockedPairs(self, blocks):
    """Generate tuples of pairs of records from a block of records

    Arguments:

    blocks -- an iterable sequence of blocked records
    """
    # Peek at the first block so its shape can be validated without
    # consuming the (possibly lazy) sequence of blocks.
    first_block, blocks = core.peek(blocks)
    self._checkBlock(first_block)
    # Within each block, every unordered pair of records is a candidate.
    return (itertools.combinations(sorted(block), 2) for block in blocks)
def _blockedPairs(self, blocks):
    """Generate tuples of pairs of records from a block of records

    Arguments:

    blocks -- an iterable sequence of blocked records
    """
    # Peek at the first block so its shape can be validated without
    # consuming the (possibly lazy) sequence of blocks.
    first_block, blocks = core.peek(blocks)
    self._checkBlock(first_block)
    # Each block is a (base, target) pair of record sets; candidates
    # are all cross-set combinations.
    return (itertools.product(base, target) for base, target in blocks)
def readTraining(self, training_file):
    """Read training from previously built training data file object

    Arguments:

    training_file -- file object containing the training data
    """
    logger.info('reading training from file')
    # Training pairs are stored as JSON, decoded with dedupe's custom
    # decoder so records round-trip to their original types.
    training_pairs = json.load(training_file,
                               cls=serializer.dedupe_decoder)
    self.markPairs(training_pairs)
def from_dict(result_dict):
    """Create a new ``MultipleResults`` object from a dictionary.

    Keys of the dictionary are unpacked into result names.

    Args:
        result_dict (dict) - The dictionary to unpack.

    Returns:
        (:py:class:`MultipleResults <dagster.MultipleResults>`) A new
        ``MultipleResults`` object
    """
    check.dict_param(result_dict, 'result_dict', key_type=str)
    # Each (name, value) pair becomes a named Result.
    return MultipleResults(
        *[Result(value, name) for name, value in result_dict.items()])
def replace_parameters(context, nb, parameters):
    """Assigned parameters into the appropiate place in the input notebook

    Args:
        context: Dagster execution context (used only for logging here).
        nb (NotebookNode): Executable notebook object
        parameters (dict): Arbitrary keyword arguments to pass to the
            notebook parameters.

    Returns:
        NotebookNode: a deep copy of ``nb`` with the parameters cell
        replaced by an 'injected-parameters' cell.
    """
    # Uma: This is a copy-paste from papermill papermill/execute.py:104
    # (execute_parameters). Typically, papermill injects the
    # injected-parameters cell *below* the parameters cell but we want
    # to *replace* the parameters cell, which is what this function does.

    # Copy the nb object to avoid polluting the input
    nb = copy.deepcopy(nb)

    # Generate parameter content based on the kernel_name
    param_content = DagsterTranslator.codify(parameters)
    # papermill method choosed translator based on kernel_name and
    # language, but we just call the DagsterTranslator
    # translate_parameters(kernel_name, language, parameters)

    newcell = nbformat.v4.new_code_cell(source=param_content)
    newcell.metadata['tags'] = ['injected-parameters']

    param_cell_index = _find_first_tagged_cell_index(nb, 'parameters')
    injected_cell_index = _find_first_tagged_cell_index(nb, 'injected-parameters')
    if injected_cell_index >= 0:
        # Replace the injected cell with a new version
        before = nb.cells[:injected_cell_index]
        after = nb.cells[injected_cell_index + 1 :]
        check.int_value_param(param_cell_index, -1, 'param_cell_index')
        # We should have blown away the parameters cell if there is an
        # injected-parameters cell
    elif param_cell_index >= 0:
        # Replace the parameter cell with the injected-parameters cell
        before = nb.cells[:param_cell_index]
        after = nb.cells[param_cell_index + 1 :]
    else:
        # Inject to the top of the notebook, presumably first cell
        # includes dagstermill import
        context.log.debug(
            (
                'Warning notebook has no parameters cell, '
                'so first cell must import dagstermill and call dm.register_repo()'
            )
        )
        before = nb.cells[:1]
        after = nb.cells[1:]

    nb.cells = before + [newcell] + after
    nb.metadata.papermill['parameters'] = parameters

    return nb
def get_pipeline(self, name):
    """Get a pipeline by name. Only constructs that pipeline and caches it.

    Args:
        name (str): Name of the pipeline to retriever

    Returns:
        PipelineDefinition: Instance of PipelineDefinition with that name.

    Raises:
        DagsterInvariantViolationError: if no pipeline with that name is
            registered, if the constructed pipeline's name does not
            match ``name``, or if the constructor does not return a
            PipelineDefinition.
    """
    check.str_param(name, 'name')
    # Serve from the cache when this pipeline was already constructed.
    if name in self._pipeline_cache:
        return self._pipeline_cache[name]

    # pipeline_dict maps names to zero-argument constructor functions.
    try:
        pipeline = self.pipeline_dict[name]()
    except KeyError:
        raise DagsterInvariantViolationError(
            'Could not find pipeline "{name}". Found: {pipeline_names}.'.format(
                name=name,
                pipeline_names=', '.join(
                    [
                        '"{pipeline_name}"'.format(pipeline_name=pipeline_name)
                        for pipeline_name in self.pipeline_dict.keys()
                    ]
                ),
            )
        )

    # The constructed pipeline must be named the same as the key it was
    # registered under.
    check.invariant(
        pipeline.name == name,
        'Name does not match. Name in dict {name}. Name in pipeline {pipeline.name}'.format(
            name=name, pipeline=pipeline
        ),
    )

    self._pipeline_cache[name] = check.inst(
        pipeline,
        PipelineDefinition,
        (
            'Function passed into pipeline_dict with key {key} must return a '
            'PipelineDefinition'
        ).format(key=name),
    )

    return pipeline
def Field(
    dagster_type,
    default_value=FIELD_NO_DEFAULT_PROVIDED,
    is_optional=INFER_OPTIONAL_COMPOSITE_FIELD,
    is_secret=False,
    description=None,
):
    """The schema for configuration data that describes the type,
    optionality, defaults, and description.

    Args:
        dagster_type (DagsterType):
            A ``DagsterType`` describing the schema of this field, ie
            `Dict({'example': Field(String)})`
        default_value (Any):
            A default value to use that respects the schema provided via
            dagster_type
        is_optional (bool): Whether the presence of this field is optional
        description (str):
    """
    # Fail fast when the supplied type cannot be used in config schemas.
    if not resolve_to_config_type(dagster_type):
        raise DagsterInvalidDefinitionError(
            (
                'Attempted to pass {value_repr} to a Field that expects a valid '
                'dagster type usable in config (e.g. Dict, NamedDict, Int, String et al).'
            ).format(value_repr=repr(dagster_type))
        )
    return FieldImpl(
        config_type=resolve_to_config_type(dagster_type),
        default_value=default_value,
        is_optional=is_optional,
        is_secret=is_secret,
        description=description,
    )
def solid_named(self, name):
    """Return the solid named "name". Throws if it does not exist.

    Args:
        name (str): Name of solid

    Returns:
        SolidDefinition: SolidDefinition with correct name.
    """
    check.str_param(name, 'name')
    # Look up the solid, raising a descriptive error when unknown.
    if name in self._solid_dict:
        return self._solid_dict[name]
    raise DagsterInvariantViolationError(
        'Pipeline {pipeline_name} has no solid named {name}.'.format(
            pipeline_name=self.name, name=name
        )
    )
def passthrough_context_definition(context_params):
    """Create a context definition from a pre-existing context. This can be
    useful in testing contexts where you may want to create a context
    manually and then pass it into a one-off PipelineDefinition

    Args:
        context (ExecutionContext): The context that will provided to the
            pipeline.
    Returns:
        PipelineContextDefinition: The passthrough context definition.
    """
    check.inst_param(context_params, 'context', ExecutionContext)
    # The context_fn ignores its arguments and hands back the pre-built
    # context unchanged.
    return {
        DEFAULT_CONTEXT_NAME: PipelineContextDefinition(
            context_fn=lambda *_args: context_params
        )
    }
def input_selector_schema(config_cls):
    """A decorator for annotating a function that can take the selected
    properties from a ``config_value`` in to an instance of a custom type.

    Args:
        config_cls (Selector)
    """
    config_type = resolve_config_cls_arg(config_cls)
    check.param_invariant(config_type.is_selector, 'config_cls')

    def _wrap(func):
        # Unpack the single {key: value} selector pair before delegating
        # to the decorated function.
        def _apply_selector(context, config_value):
            key, value = single_item(config_value)
            return func(context, key, value)

        return _create_input_schema(config_type, _apply_selector)

    return _wrap
def output_selector_schema(config_cls):
    '''Decorator for building an output schema from a selector config class.

    The wrapped function receives the selected key and value of a selector
    ``config_value`` plus the runtime value, and materializes it.

    Args:
        config_cls (Selector): The selector config class.
    '''
    config_type = resolve_config_cls_arg(config_cls)
    check.param_invariant(config_type.is_selector, 'config_cls')

    def _wrap(func):
        def _selector(context, config_value, runtime_value):
            key, value = single_item(config_value)
            return func(context, key, value, runtime_value)

        return _create_output_schema(config_type, _selector)

    return _wrap
def download_from_s3(context):
    '''Download an object from S3 via the configured download manager.

    Args:
        context: Must expose ``solid_config['target_file']`` and a
            ``download_manager`` resource.

    Returns:
        str: The contents of the downloaded object.
    '''
    target = context.solid_config['target_file']
    manager = context.resources.download_manager
    return manager.download_file_contents(context, target)
def upload_to_s3(context, file_obj):
    '''Upload a file object to S3.

    Args:
        context: Must expose a boto3 S3 client as its ``s3`` resource, and
            ``bucket``/``key`` (plus optional ``kwargs``) in solid config.
        file_obj: Readable file-like object whose contents are uploaded.

    Yields:
        Result: The bucket, then the key, the file was uploaded to.
    '''
    config = context.solid_config
    bucket = config['bucket']
    key = config['key']
    extra_args = config.get('kwargs') or {}
    context.resources.s3.put_object(
        Bucket=bucket, Body=file_obj.read(), Key=key, **extra_args
    )
    yield Result(bucket, 'bucket')
    yield Result(key, 'key')
def execute_pipeline(pipeline, environment_dict=None, run_config=None):
    '''Synchronous version of :py:func:`execute_pipeline_iterator`.

    Note: raise_on_error is very useful in testing contexts when not
    testing for error conditions.

    Args:
        pipeline (PipelineDefinition): Pipeline to run.
        environment_dict (dict): The environment configuration that
            parameterizes this run.
        run_config (RunConfig): Configuration for how this pipeline will
            be executed.

    Returns:
        :py:class:`PipelineExecutionResult`
    '''
    check.inst_param(pipeline, 'pipeline', PipelineDefinition)
    environment_dict = check.opt_dict_param(environment_dict, 'environment_dict')
    run_config = check_run_config_param(run_config)

    environment_config = create_environment_config(pipeline, environment_dict)
    intermediates_manager = construct_intermediates_manager(
        run_config, environment_config, pipeline
    )

    # Factory so the result can re-open an identical execution context later.
    def _make_context():
        return _pipeline_execution_context_manager(
            pipeline, environment_config, run_config, intermediates_manager
        )

    with _make_context() as pipeline_context:
        events = list(_execute_pipeline_iterator(pipeline_context))
        return PipelineExecutionResult(
            pipeline, run_config.run_id, events, _make_context
        )
def Dict(fields):
    '''Schema for configuration data with string keys and typed values
    via :py:class:`Field`.

    Args:
        fields (Dict[str, Field]): The typed fields of the dictionary.
    '''
    check_user_facing_fields_dict(fields, 'Dict')

    class _Dict(_ConfigComposite):
        def __init__(self):
            super(_Dict, self).__init__(
                name=None,
                # Each anonymous Dict gets a unique key.
                key='Dict.' + str(DictCounter.get_next_count()),
                fields=fields,
                description='A configuration dictionary with typed fields',
                type_attributes=ConfigTypeAttributes(is_builtin=True),
            )

    return _Dict
def Selector(fields):
    '''Present several config options but force the user to select one.

    For example, it would not make much sense to allow a single input to be
    sourced from both a csv and a parquet file: the user must choose. In
    other type systems this might be called an "input union."

    Args:
        fields (Dict[str, Field]): The possible options.
    '''
    check_user_facing_fields_dict(fields, 'Selector')

    class _Selector(_ConfigSelector):
        def __init__(self):
            super(_Selector, self).__init__(
                # Each anonymous Selector gets a unique key.
                key='Selector.' + str(DictCounter.get_next_count()),
                name=None,
                fields=fields,
                type_attributes=ConfigTypeAttributes(is_builtin=True),
            )

    return _Selector
def NamedSelector(name, fields, description=None, type_attributes=DEFAULT_TYPE_ATTRIBUTES):
    '''A :py:class:`Selector` with a name, allowing it to be referenced by
    that name.

    Args:
        name (str): Name of the selector type.
        fields (Dict[str, Field]): The possible options.
        description (str): Optional human-readable description.
        type_attributes (ConfigTypeAttributes): Attributes of the type.
    '''
    check.str_param(name, 'name')
    check_user_facing_fields_dict(fields, 'NamedSelector named "{}"'.format(name))

    class _NamedSelector(_ConfigSelector):
        def __init__(self):
            init_kwargs = dict(
                key=name,
                name=name,
                fields=fields,
                description=description,
                type_attributes=type_attributes,
            )
            super(_NamedSelector, self).__init__(**init_kwargs)

    return _NamedSelector
def _register_bounds_validator_if_needed(parser, name, flag_values):
  """Enforces lower and upper bounds for numeric flags.

  Args:
    parser: NumericParser (either FloatParser or IntegerParser), provides
        lower and upper bounds, and help text to display.
    name: str, name of the flag.
    flag_values: FlagValues instance to register the validator with.
  """
  has_bounds = parser.lower_bound is not None or parser.upper_bound is not None
  if not has_bounds:
    return

  def checker(value):
    # None (flag unset) is always acceptable; only concrete values are
    # checked against the parser's bounds.
    if value is not None and parser.is_outside_bounds(value):
      raise _exceptions.ValidationError(
          '%s is not %s' % (value, parser.syntactic_help))
    return True

  _validators.register_validator(name, checker, flag_values=flag_values)
def adopt_module_key_flags(module, flag_values=_flagvalues.FLAGS):
  """Declares that all flags key to a module are key to the current module.

  Args:
    module: module, the module object from which all key flags will be
        declared as key flags to the current module.
    flag_values: FlagValues, the FlagValues instance in which the flags will
        be declared as key flags. This should almost never need to be
        overridden.

  Raises:
    Error: Raised when given an argument that is a module name (a string),
        instead of a module object.
  """
  if not isinstance(module, types.ModuleType):
    raise _exceptions.Error('Expected a module object, not %r.' % (module,))
  _internal_declare_key_flags(
      [f.name for f in flag_values.get_key_flags_for_module(module.__name__)],
      flag_values=flag_values)
  # If module is this flag module, take _helpers.SPECIAL_FLAGS into account.
  if module == _helpers.FLAGS_MODULE:
    _internal_declare_key_flags(
        # As we associate flags with get_calling_module_object_and_name(), the
        # special flags defined in this module are incorrectly registered with
        # a different module. So, we can't use get_key_flags_for_module.
        # Instead, we take all flags from _helpers.SPECIAL_FLAGS (a private
        # FlagValues, where no other module should register flags).
        [_helpers.SPECIAL_FLAGS[name].name for name in _helpers.SPECIAL_FLAGS],
        flag_values=_helpers.SPECIAL_FLAGS,
        key_flag_values=flag_values)
def set_kernel_process_name(name):
  """Changes the Kernel's /proc/self/status process name on Linux.

  The kernel name is NOT what will be shown by the ps or top command.
  It is a 15 character string stored in the kernel's process table that
  is included in the kernel log when a process is OOM killed.
  The first 15 bytes of name are used. Non-ASCII unicode is replaced
  with '?'.

  Does nothing if /proc/self/comm cannot be written or prctl() fails.

  Args:
    name: bytes|unicode, the Linux kernel's command name to set.
  """
  if not isinstance(name, bytes):
    name = name.encode('ascii', 'replace')
  try:
    # Preferred over calling prctl() through ctypes when possible.
    with open('/proc/self/comm', 'wb') as proc_comm:
      proc_comm.write(name[:15])
    return
  except EnvironmentError:
    pass  # Fall back to prctl(PR_SET_NAME) via libc.
  try:
    import ctypes
  except ImportError:
    return  # No ctypes.
  try:
    libc = ctypes.CDLL('libc.so.6')
  except EnvironmentError:
    return  # No libc.so.6.
  pr_set_name = ctypes.c_ulong(15)  # linux/prctl.h PR_SET_NAME value.
  zero = ctypes.c_ulong(0)
  try:
    # Ignore the prctl return value. Nothing we can do if it errored.
    libc.prctl(pr_set_name, name, zero, zero, zero)
  except AttributeError:
    return
def parse(self, argument):
  """Parses the string argument and returns the native value.

  By default it returns its argument unmodified.

  Args:
    argument: string argument passed in the commandline.

  Raises:
    ValueError: Raised when it fails to parse the argument.
    TypeError: Raised when the argument has the wrong type.

  Returns:
    The parsed value in native type.
  """
  if isinstance(argument, six.string_types):
    return argument
  raise TypeError('flag value must be a string, found "{}"'.format(
      type(argument)))
def __init__(self, enum_values, case_sensitive=True):
  """Initializes EnumParser.

  Args:
    enum_values: [str], a non-empty list of string values in the enum.
    case_sensitive: bool, whether or not the enum is to be case-sensitive.

  Raises:
    ValueError: When enum_values is empty.
  """
  if enum_values:
    super(EnumParser, self).__init__()
    self.enum_values = enum_values
    self.case_sensitive = case_sensitive
  else:
    raise ValueError(
        'enum_values cannot be empty, found "{}"'.format(enum_values))
def __init__(self, enum_class):
  """Initializes EnumClassParser.

  Args:
    enum_class: class, the Enum class with all possible flag values.

  Raises:
    TypeError: When enum_class is not a subclass of Enum.
    ValueError: When enum_class is empty.
  """
  # Users must have an Enum class defined before using EnumClass flag.
  # Therefore this dependency is guaranteed.
  import enum
  if not issubclass(enum_class, enum.Enum):
    raise TypeError('{} is not a subclass of Enum.'.format(enum_class))
  if not enum_class.__members__:
    raise ValueError('enum_class cannot be empty, but "{}" is empty.'
                     .format(enum_class))
  super(EnumClassParser, self).__init__()
  self.enum_class = enum_class
def parse(self, argument):
  """Determines validity of argument and returns the correct enum member.

  Args:
    argument: str or Enum class member, the supplied flag value.

  Returns:
    The first matching Enum class member in Enum class.

  Raises:
    ValueError: Raised when argument didn't match anything in enum.
  """
  if isinstance(argument, self.enum_class):
    return argument
  members = self.enum_class.__members__
  if argument in members:
    return members[argument]
  raise ValueError('value should be one of <%s>' % '|'.join(members.keys()))
def __init__(self, comma_compat=False):
  """Initializer.

  Args:
    comma_compat: bool, whether to support comma as an additional
        separator. If False then only whitespace is supported. This is
        intended only for backwards compatibility with flags that used to
        be comma-separated.
  """
  self._comma_compat = comma_compat
  separators = 'whitespace or comma' if comma_compat else 'whitespace'
  super(WhitespaceSeparatedListParser, self).__init__(None, separators)
def parse(self, argument):
  """Parses argument as a whitespace-separated list of strings.

  Commas are also treated as separators when comma compatibility was
  requested at construction time.

  Args:
    argument: string argument passed in the commandline.

  Returns:
    [str], the parsed flag value.
  """
  if isinstance(argument, list):
    return argument
  if not argument:
    return []
  text = argument.replace(',', ' ') if self._comma_compat else argument
  return text.split()
def set_gnu_getopt(self, gnu_getopt=True):
  """Sets whether or not to use GNU style scanning.

  GNU style allows mixing of flag and non-flag arguments. See
  http://docs.python.org/library/getopt.html#getopt.gnu_getopt

  Args:
    gnu_getopt: bool, whether or not to use GNU style scanning.
  """
  # Stored directly in __dict__ to bypass any attribute interception.
  state = self.__dict__
  state['__use_gnu_getopt'] = gnu_getopt
  state['__use_gnu_getopt_explicitly_set'] = True
def register_flag_by_module(self, module_name, flag):
  """Records the module that defines a specific flag.

  We keep track of which flag is defined by which module so that we
  can later sort the flags by module.

  Args:
    module_name: str, the name of a Python module.
    flag: Flag, the Flag instance that is key to the module.
  """
  self.flags_by_module_dict().setdefault(module_name, []).append(flag)
def register_flag_by_module_id(self, module_id, flag):
  """Records the module (by ID) that defines a specific flag.

  Args:
    module_id: int, the ID of the Python module.
    flag: Flag, the Flag instance that is key to the module.
  """
  self.flags_by_module_id_dict().setdefault(module_id, []).append(flag)
def register_key_flag_for_module(self, module_name, flag):
  """Specifies that a flag is a key flag for a module.

  Args:
    module_name: str, the name of a Python module.
    flag: Flag, the Flag instance that is key to the module.
  """
  # The list of key flags for the module named module_name.
  key_flags = self.key_flags_by_module_dict().setdefault(module_name, [])
  if flag not in key_flags:  # Add flag, but avoid duplicates.
    key_flags.append(flag)
Checks whether a Flag object is registered under long name or short name.
Args:
flag_obj: Flag, the Flag instance to check for.
Returns:
bool, True iff flag_obj is registered under long name or short name. | def _flag_is_registered(self, flag_obj):
flag_dict = self._flags()
# Check whether flag_obj is registered under its long name.
name = flag_obj.name
if flag_dict.get(name, None) == flag_obj:
return True
# Check whether flag_obj is registered under its short name.
short_name = flag_obj.short_name
if (short_name is not None and
flag_dict.get(short_name, None) == flag_obj):
return True
return False | 175,761 |
def _cleanup_unregistered_flag_from_module_dicts(self, flag_obj):
  """Cleans up unregistered flags from all module -> [flags] dictionaries.

  If flag_obj is registered under either its long name or short name, it
  won't be removed from the dictionaries.

  Args:
    flag_obj: Flag, the Flag instance to clean up for.
  """
  if self._flag_is_registered(flag_obj):
    return
  all_module_dicts = (self.flags_by_module_dict(),
                      self.flags_by_module_id_dict(),
                      self.key_flags_by_module_dict())
  for module_dict in all_module_dicts:
    for flags_in_module in six.itervalues(module_dict):
      # 'while' (as opposed to 'if') takes care of multiple occurrences
      # of the flag in the list for the same module.
      while flag_obj in flags_in_module:
        flags_in_module.remove(flag_obj)
Returns the list of flags defined by a module.
Args:
module: module|str, the module to get flags from.
Returns:
[Flag], a new list of Flag instances. Caller may update this list as
desired: none of those changes will affect the internals of this
FlagValue instance. | def _get_flags_defined_by_module(self, module):
if not isinstance(module, str):
module = module.__name__
return list(self.flags_by_module_dict().get(module, [])) | 175,763 |
def get_key_flags_for_module(self, module):
  """Returns the list of key flags for a module.

  Args:
    module: module|str, the module to get key flags from.

  Returns:
    [Flag], a new list of Flag instances. Caller may update this list as
    desired: none of those changes will affect the internals of this
    FlagValue instance.
  """
  if not isinstance(module, str):
    module = module.__name__
  # Any flag is a key flag for the module that defined it. NOTE: the
  # returned list is fresh, so extending it below does not affect the
  # internals of this FlagValues object.
  key_flags = self._get_flags_defined_by_module(module)
  # Also take into account flags explicitly declared as key for the
  # module, skipping ones already present.
  declared = self.key_flags_by_module_dict().get(module, [])
  for declared_flag in declared:
    if declared_flag not in key_flags:
      key_flags.append(declared_flag)
  return key_flags
def find_module_defining_flag(self, flagname, default=None):
  """Return the name of the module defining this flag, or default.

  Args:
    flagname: str, name of the flag to lookup.
    default: Value to return if flagname is not defined. Defaults to None.

  Returns:
    The name of the module which registered the flag with this name.
    If no such module exists (i.e. no flag with this name exists),
    we return default.
  """
  registered_flag = self._flags().get(flagname)
  if registered_flag is None:
    return default
  # Compare by (name, short_name) rather than identity: a flag might be
  # overridden only for its long name (or short name), so only its short
  # name (or long name) is considered registered.
  target = (registered_flag.name, registered_flag.short_name)
  for module, flags in six.iteritems(self.flags_by_module_dict()):
    if any((flag.name, flag.short_name) == target for flag in flags):
      return module
  return default
def find_module_id_defining_flag(self, flagname, default=None):
  """Return the ID of the module defining this flag, or default.

  Args:
    flagname: str, name of the flag to lookup.
    default: Value to return if flagname is not defined. Defaults to None.

  Returns:
    The ID of the module which registered the flag with this name.
    If no such module exists (i.e. no flag with this name exists),
    we return default.
  """
  registered_flag = self._flags().get(flagname)
  if registered_flag is None:
    return default
  # Compare by (name, short_name) rather than identity: a flag might be
  # overridden only for its long name (or short name), so only its short
  # name (or long name) is considered registered.
  target = (registered_flag.name, registered_flag.short_name)
  for module_id, flags in six.iteritems(self.flags_by_module_id_dict()):
    if any((flag.name, flag.short_name) == target for flag in flags):
      return module_id
  return default
def _set_unknown_flag(self, name, value):
  """Returns value if setting flag |name| to |value| returned True.

  Args:
    name: str, name of the flag to set.
    value: Value to set.

  Returns:
    Flag value on successful call.

  Raises:
    UnrecognizedFlagError: If no handler accepts the flag name.
    IllegalFlagValueError: If the handler rejects the value.
  """
  setter = self.__dict__['__set_unknown']
  if setter:
    try:
      setter(name, value)
    except (TypeError, ValueError):  # Flag value is not valid.
      raise _exceptions.IllegalFlagValueError(
          '"{1}" is not valid for --{0}' .format(name, value))
    except NameError:  # Flag name is not valid: fall through to raise.
      pass
    else:
      return value
  raise _exceptions.UnrecognizedFlagError(name, value)
def append_flag_values(self, flag_values):
  """Appends flags registered in another FlagValues instance.

  Args:
    flag_values: FlagValues, the FlagValues instance from which to copy flags.

  Raises:
    DuplicateFlagError: Raised when a flag with the same name is already
        registered in this instance.
  """
  for flag_name, flag in six.iteritems(flag_values._flags()):  # pylint: disable=protected-access
    # Each flag with a short_name appears here twice (once under its
    # normal name, and again with its short name). To prevent
    # problems (DuplicateFlagError) with double flag registration, we
    # perform a check to make sure that the entry we're looking at is
    # for its normal name.
    if flag_name == flag.name:
      try:
        self[flag_name] = flag
      except _exceptions.DuplicateFlagError:
        raise _exceptions.DuplicateFlagError.from_flag(
            flag_name, self, other_flag_values=flag_values)
def _assert_validators(self, validators):
  """Asserts if all validators in the list are satisfied.

  Validators are verified in the order they were created.

  Args:
    validators: Iterable(validators.Validator), validators to be verified.

  Raises:
    AttributeError: Raised if validators work with a non-existing flag.
    IllegalFlagValueError: Raised if validation fails for at least one
        validator.
  """
  by_creation_order = sorted(validators, key=lambda v: v.insertion_index)
  for validator in by_creation_order:
    try:
      validator.verify(self)
    except _exceptions.ValidationError as e:
      summary = validator.print_flags_with_values(self)
      raise _exceptions.IllegalFlagValueError('%s: %s' % (summary, str(e)))
def set_default(self, name, value):
  """Changes the default value of the named flag object.

  The flag's current value is also updated if the flag is currently using
  the default value, i.e. not specified in the command line, and not set
  by FLAGS.name = value.

  Args:
    name: str, the name of the flag to modify.
    value: The new default value.

  Raises:
    UnrecognizedFlagError: Raised when there is no registered flag named
        name.
    IllegalFlagValueError: Raised when value is not valid.
  """
  flags = self._flags()
  if name not in flags:
    self._set_unknown_flag(name, value)
    return
  flag = flags[name]
  flag._set_default(value)  # pylint: disable=protected-access
  self._assert_validators(flag.validators)
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.