_id
stringlengths
2
7
title
stringlengths
1
88
partition
stringclasses
3 values
text
stringlengths
31
13.1k
language
stringclasses
1 value
meta_information
dict
q278900
last_name
test
def last_name(languages=None): """ return a random last name >>> from mock import patch >>> with patch('%s._get_lastnames' % __name__, lambda *args: ['aaa']): ... last_name() 'Aaa' >>> with patch('%s.get_lastnames' % __name__, lambda lang: ['%s_lastname'% lang]): ... last_name(['it']) 'It_Lastname' """ choices = []
python
{ "resource": "" }
q278901
Axes.render
test
def render(self): """Render the axes data into the dict data""" for opt,values in self.data.items(): if opt == 'ticks': self['chxtc'] = '|'.join(values)
python
{ "resource": "" }
q278902
GChart.dataset
test
def dataset(self, data, series=''): """ Update the chart's dataset, can be two dimensional or contain string data """
python
{ "resource": "" }
q278903
GChart.render
test
def render(self): """ Renders the chart context and axes into the dict data """ self.update(self.axes.render()) encoder = Encoder(self._encoding, None, self._series) if not 'chs' in self: self['chs'] = '300x150' else: size = self['chs'].split('x') assert len(size) == 2, 'Invalid size, must be in the format WxH' self.check_size(*map(int,size)) assert 'cht' in self, 'No chart type defined, use type method' self['cht'] = self.check_type(self['cht']) if ('any' in dir(self._dataset) and self._dataset.any()) or self._dataset: self['chd'] = encoder.encode(self._dataset) elif not 'choe' in self: assert 'chd' in self, 'You must have a dataset, or use chd'
python
{ "resource": "" }
q278904
GChart.check_type
test
def check_type(self, type): """Check to see if the type is either in TYPES or fits type name Returns proper type """ if type in TYPES: return type tdict = dict(zip(TYPES,TYPES)) tdict.update({ 'line': 'lc', 'bar': 'bvs', 'pie': 'p', 'venn': 'v',
python
{ "resource": "" }
q278905
GChart.url
test
def url(self): """ Returns the rendered URL of the chart
python
{ "resource": "" }
q278906
GChart.show
test
def show(self, *args, **kwargs): """ Shows the chart URL in a webbrowser Other arguments passed to webbrowser.open """
python
{ "resource": "" }
q278907
GChart.save
test
def save(self, fname=None): """ Download the chart from the URL into a filename as a PNG The filename defaults to the chart title (chtt) if any """ if not fname: fname = self.getname() assert fname != None, 'You must specify a filename to save to' if not fname.endswith('.png'):
python
{ "resource": "" }
q278908
GChart.urlopen
test
def urlopen(self): """ Grabs readable PNG file pointer """ req = Request(str(self)) try: return urlopen(req) except HTTPError: _print('The server
python
{ "resource": "" }
q278909
GChart.image
test
def image(self): """ Returns a PngImageFile instance of the chart You must have PIL installed for this to work """ try: try: import Image except ImportError: from PIL import Image except ImportError:
python
{ "resource": "" }
q278910
GChart.write
test
def write(self, fp): """ Writes out PNG image data in chunks to file pointer fp fp must support w or wb """ urlfp = self.urlopen().fp while 1:
python
{ "resource": "" }
q278911
GChart.checksum
test
def checksum(self): """ Returns the unique SHA1 hexdigest of the chart URL param parts good for unittesting...
python
{ "resource": "" }
q278912
amount
test
def amount(min=1, max=sys.maxsize, decimal_places=2): """ return a random floating number :param min: minimum value :param max: maximum value :param decimal_places: decimal places :return:
python
{ "resource": "" }
q278913
entity_name_decorator
test
def entity_name_decorator(top_cls): """ Assign an entity name based on the class immediately inhering from Base. This is needed because we don't want entity names to come from any class that simply inherits our classes, just the ones in our module. For example, if you create a class Project2 that exists outside of kalibro_client and inherits from Project, it's entity name should still
python
{ "resource": "" }
q278914
LessOrEqual.unprotected_and_protected_claims
test
def unprotected_and_protected_claims(self): """ This is both verified and self asserted information. As expected verified information beats self-asserted so if there is both self-asserted and verified values for a claim then only the verified will be returned. """ if self.sup: res = {} for k, v in self.le.items():
python
{ "resource": "" }
q278915
Operator.signing_keys_as_jwks
test
def signing_keys_as_jwks(self): """ Build a JWKS from the signing keys belonging to the self signer :return: Dictionary """ _l = [x.serialize() for x in self.self_signer.keyjar.get_signing_key()] if not _l: _l
python
{ "resource": "" }
q278916
Operator.unpack_metadata_statement
test
def unpack_metadata_statement(self, ms_dict=None, jwt_ms='', keyjar=None, cls=ClientMetadataStatement, liss=None): """ Starting with a signed JWT or a JSON document unpack and verify all the separate metadata statements. :param ms_dict: Metadata statement as a dictionary :param jwt_ms: Metadata statement as JWT :param keyjar: Keys that should be used to verify the signature of the document :param cls: What type (Class) of metadata statement this is :param liss: list of FO identifiers that matters. The rest will be ignored :return: A ParseInfo instance """ if not keyjar: if self.jwks_bundle: keyjar = self.jwks_bundle.as_keyjar() else: keyjar = KeyJar()
python
{ "resource": "" }
q278917
Operator.pack_metadata_statement
test
def pack_metadata_statement(self, metadata, receiver='', iss='', lifetime=0, sign_alg=''): """ Given a MetadataStatement instance create a signed JWT. :param metadata: Original metadata statement as a MetadataStatement
python
{ "resource": "" }
q278918
Operator.evaluate_metadata_statement
test
def evaluate_metadata_statement(self, metadata, keyjar=None): """ Computes the resulting metadata statement from a compounded metadata statement. If something goes wrong during the evaluation an exception is raised :param metadata: The compounded metadata statement as a dictionary :return: A list of :py:class:`fedoidc.operator.LessOrEqual` instances, one per FO. """ # start from the innermost metadata statement and work outwards res = dict([(k, v) for k, v in metadata.items() if k not in IgnoreKeys]) les = [] if 'metadata_statements' in metadata: for fo, ms in metadata['metadata_statements'].items():
python
{ "resource": "" }
q278919
Operator.correct_usage
test
def correct_usage(self, metadata, federation_usage): """ Remove MS paths that are marked to be used for another usage :param metadata: Metadata statement as dictionary :param federation_usage: In which context this is expected to used. :return: Filtered Metadata statement. """ if 'metadata_statements' in metadata: _msl = {} for fo, ms in metadata['metadata_statements'].items():
python
{ "resource": "" }
q278920
Operator.extend_with_ms
test
def extend_with_ms(self, req, sms_dict): """ Add signed metadata statements to a request :param req: The request :param sms_dict: A dictionary with FO IDs as keys and signed metadata statements (sms) or uris pointing to sms as values. :return: The updated request """ _ms_uri = {} _ms = {} for fo, sms in sms_dict.items(): if sms.startswith('http://') or sms.startswith('https://'):
python
{ "resource": "" }
q278921
parse_args
test
def parse_args(): """ Parses command line args using argparse library """ usage = "Usage: create_concordance <infile> [<outfile>]" description = "Simple Concordance Generator" argparser = argparse.ArgumentParser( usage=usage, description=description) argparser.add_argument( 'infile', type=argparse.FileType('r'), help="File read in to create concordance") argparser.add_argument(
python
{ "resource": "" }
q278922
addCommandLineArgs
test
def addCommandLineArgs(arg_parser): """Add logging option to an ArgumentParser.""" arg_parser.register("action", "log_levels", LogLevelAction) arg_parser.register("action", "log_files", LogFileAction) arg_parser.register("action", "log_help", LogHelpAction) group = arg_parser.add_argument_group("Logging options") group.add_argument( "-l", "--log-level", dest="log_levels",
python
{ "resource": "" }
q278923
applyLoggingOpts
test
def applyLoggingOpts(log_levels, log_files): """Apply logging options produced by LogLevelAction and LogFileAction. More often then not this function is not needed, the actions have already been taken during the parse, but it can be used in the case they need to be applied again (e.g. when command line opts take precedence but were overridded by a fileConfig, etc.).
python
{ "resource": "" }
q278924
Logger.verbose
test
def verbose(self, msg, *args, **kwargs): """Log msg at 'verbose' level, debug < verbose < info"""
python
{ "resource": "" }
q278925
_letter_map
test
def _letter_map(word): """Creates a map of letter use in a word. Args: word: a string to create a letter map from Returns: a dictionary of {letter: integer count of letter in word} """ lmap = {} for letter in word:
python
{ "resource": "" }
q278926
anagrams_in_word
test
def anagrams_in_word(word, sowpods=False, start="", end=""): """Finds anagrams in word. Args: word: the string to base our search off of sowpods: boolean to declare TWL or SOWPODS words file start: a string of starting characters to find anagrams based on end: a string of ending characters to find anagrams based on Yields: a tuple of (word, score) that can be made with the input_word """ input_letters, blanks, questions = blank_tiles(word) for tile in start + end: input_letters.append(tile) for word in word_list(sowpods, start, end): lmap = _letter_map(input_letters) used_blanks = 0 for letter in word:
python
{ "resource": "" }
q278927
Error.asAMP
test
def asAMP(cls): """ Returns the exception's name in an AMP Command friendly format. For example, given a class named ``ExampleExceptionClass``, returns ``"EXAMPLE_EXCEPTION_CLASS"``.
python
{ "resource": "" }
q278928
transform_timeseries_data
test
def transform_timeseries_data(timeseries, start, end=None): """Transforms a Go Metrics API metric result into a list of values for a given window period. start and end are expected to be Unix timestamps in microseconds.
python
{ "resource": "" }
q278929
get_last_value_from_timeseries
test
def get_last_value_from_timeseries(timeseries): """Gets the most recent non-zero value for a .last metric or zero for empty data.""" if not timeseries:
python
{ "resource": "" }
q278930
validate_page_number
test
def validate_page_number(number): """Validate the given 1-based page number.""" try: number = int(number) except (TypeError,
python
{ "resource": "" }
q278931
get_page_of_iterator
test
def get_page_of_iterator(iterator, page_size, page_number): """ Get a page from an interator, handling invalid input from the page number by defaulting to the first page. """ try: page_number = validate_page_number(page_number) except (PageNotAnInteger, EmptyPage): page_number = 1 start = (page_number - 1) * page_size # End 1 more than we need, so that we can see if there's another page
python
{ "resource": "" }
q278932
chmod
test
def chmod(path, mode, recursive=True): """ alternative to os. """ if recursive: cmd = 'chmod -R %s %s' % (mode, path)
python
{ "resource": "" }
q278933
make_internal_signing_service
test
def make_internal_signing_service(config, entity_id): """ Given configuration initiate an InternalSigningService instance :param config: The signing service configuration :param entity_id: The entity identifier :return: A InternalSigningService instance
python
{ "resource": "" }
q278934
make_signing_service
test
def make_signing_service(config, entity_id): """ Given configuration initiate a SigningService instance :param config: The signing service configuration :param entity_id: The entity identifier :return: A SigningService instance """ _args = dict([(k, v) for k, v in config.items() if k in KJ_SPECS]) _kj = init_key_jar(**_args) if config['type'] == 'internal': signer = InternalSigningService(entity_id, _kj) elif config['type'] == 'web':
python
{ "resource": "" }
q278935
InternalSigningService.sign
test
def sign(self, req, receiver='', iss='', lifetime=0, sign_alg='', aud=None): """ Creates a signed JWT :param req: Original metadata statement as a :py:class:`MetadataStatement` instance :param receiver: The intended audience for the JWS :param iss: Issuer or the JWT :param lifetime: Lifetime of the signature :param sign_alg: Which signature algorithm to use :param aud: The audience, a list of receivers. :return: A signed JWT """ if not sign_alg: for key_type, s_alg in [('RSA', 'RS256'), ('EC', 'ES256')]:
python
{ "resource": "" }
q278936
WebSigningServiceClient.create
test
def create(self, req, **kwargs): """ Uses POST to send a first metadata statement signing request to a signing service. :param req: The metadata statement that the entity wants signed :return: returns a
python
{ "resource": "" }
q278937
WebSigningServiceClient.update_metadata_statement
test
def update_metadata_statement(self, location, req): """ Uses PUT to update an earlier accepted and signed metadata statement. :param location: A URL to which the update request is sent :param req: The diff between what is registereed with the signing service and what it should
python
{ "resource": "" }
q278938
WebSigningServiceClient.update_signature
test
def update_signature(self, location): """ Uses GET to get a newly signed metadata statement. :param location: A URL to which the
python
{ "resource": "" }
q278939
Package._yield_bundle_contents
test
def _yield_bundle_contents(self, data): """Yield bundle contents from the given dict. Each item yielded will be either a string representing a file path or a bundle.""" if isinstance(data, list):
python
{ "resource": "" }
q278940
Package._create_bundle
test
def _create_bundle(self, data): """Return a bundle initialised by the given dict.""" kwargs = {} filters = None if isinstance(data, dict): kwargs.update( filters=data.get('filters', None),
python
{ "resource": "" }
q278941
Package.urls_for
test
def urls_for(self, asset_type, *args, **kwargs): """Returns urls needed to include all assets of asset_type """
python
{ "resource": "" }
q278942
Package.html_tags_for
test
def html_tags_for(self, asset_type, *args, **kwargs): """Return html tags for urls of asset_type """ html = [] for ref in self.depends: html.append(self._ref(ref).html_tags_for(asset_type, *args, **kwargs)) if asset_type
python
{ "resource": "" }
q278943
Package.html_tags
test
def html_tags(self, *args, **kwargs): """Return all html tags for all asset_type """ html = [] for asset_type in list_asset_types():
python
{ "resource": "" }
q278944
protocolise
test
def protocolise(url): """ Given a URL, check to see if there is an assocaited protocol. If not, set the protocol to HTTP and return the protocolised URL """ # Use the regex to match http//localhost/something protore
python
{ "resource": "" }
q278945
find_links
test
def find_links(url): """ Find the href destinations of all links at URL Arguments: - `url`: Return: list[str] Exceptions: None """ url = protocolise(url) content = requests.get(url).content
python
{ "resource": "" }
q278946
_connected
test
def _connected(client): """ Connected to AMP server, start listening locally, and give the AMP client a reference to the local listening factory. """ log.msg("Connected to AMP server, starting to listen locally...")
python
{ "resource": "" }
q278947
ServiceModules.get_modules
test
def get_modules(self): """Get modules by project_abspath and packages_scan. Traverse all files under folder packages_scan which set by customer. And get all modules name. """ if not self.project_abspath: raise TypeError("project_abspath can not be
python
{ "resource": "" }
q278948
ServiceModules.import_modules
test
def import_modules(self): """Import customer's service module.""" modules = self.get_modules() log.info("import service modules: " + str(modules)) try: for module in modules:
python
{ "resource": "" }
q278949
to_dates
test
def to_dates(param): """ This function takes a date string in various formats and converts it to a normalized and validated date range. A list with two elements is returned, lower and upper date boundary. Valid inputs are, for example: 2012 => Jan 1 20012 - Dec 31 2012 (whole year) 201201 => Jan 1 2012 - Jan 31 2012 (whole month) 2012101 => Jan 1 2012 - Jan 1 2012 (whole day) 2011-2011 => same as "2011", which means whole year 2012 2011-2012 => Jan 1 2011 - Dec 31 2012 (two years) 201104-2012 => Apr 1 2011 - Dec 31 2012 201104-201203 => Apr 1 2011 - March 31 2012 20110408-2011 => Apr 8 2011 - Dec 31 2011 20110408-201105 => Apr 8 2011 - May 31 2011 20110408-20110507 => Apr 8 2011 - May 07 2011 2011- => Jan 1 2012 - Dec 31 9999 (unlimited) 201104- => Apr 1 2011 - Dec 31 9999 (unlimited) 20110408- => Apr 8 2011
python
{ "resource": "" }
q278950
Doc_Formatter.select_fields
test
def select_fields(doc, field_list): ''' Take 'doc' and create a new doc using only keys from the 'fields' list. Supports referencing fields using dotted notation "a.b.c" so we can parse nested fields the way MongoDB does. The nested field class is a hack. It should be a sub-class of dict. ''' if field_list is None or len(field_list) == 0: return doc newDoc = Nested_Dict({})
python
{ "resource": "" }
q278951
Doc_Formatter.date_map
test
def date_map(doc, datemap_list, time_format=None): ''' For all the datetime fields in "datemap" find that key in doc and map the datetime object to a strftime string. This pprint and others will print out readable datetimes. ''' if datemap_list:
python
{ "resource": "" }
q278952
CursorFormatter.printCursor
test
def printCursor(self, fieldnames=None, datemap=None, time_format=None): ''' Output a cursor to a filename or stdout if filename is "-". fmt defines whether we output CSV or JSON. ''' if self._format == 'csv':
python
{ "resource": "" }
q278953
CursorFormatter.output
test
def output(self, fieldNames=None, datemap=None, time_format=None): ''' Output all fields using the fieldNames list. for fields in the list datemap indicates the field must
python
{ "resource": "" }
q278954
get_tasks
test
def get_tasks(do_tasks, dep_graph): """Given a list of tasks to perform and a dependency graph, return the tasks that must be performed, in the correct order""" #XXX: Is it important that if a task has "foo" before "bar" as a dep, # that foo executes before bar? Why? ATM this may not happen. #Each task that the user has specified gets its own execution graph task_graphs = []
python
{ "resource": "" }
q278955
add_default_deps
test
def add_default_deps(project): """Add or create the default departments for the given project :param project: the project that needs default departments :type project: :class:`muke.models.Project` :returns: None :rtype: None :raises: None """ # create deps for project for name, short, order, af
python
{ "resource": "" }
q278956
add_default_atypes
test
def add_default_atypes(project): """Add or create the default assettypes for the given project :param project: the project that needs default assettypes :type project: :class:`muke.models.Project` :returns: None :rtype: None :raises: None """ # create assettypes for project
python
{ "resource": "" }
q278957
add_default_sequences
test
def add_default_sequences(project): """Add or create the default sequences for the given project :param project: the project that needs default sequences :type project: :class:`muke.models.Project` :returns: None :rtype: None :raises: None """ # create sequences for project seqs = [(GLOBAL_NAME, 'global sequence for project %s' % project.name),
python
{ "resource": "" }
q278958
add_userrnd_shot
test
def add_userrnd_shot(project): """Add a rnd shot for every user in the project :param project: the project that needs its rnd shots updated :type project: :class:`muke.models.Project` :returns: None :rtype: None :raises: None """ rndseq = project.sequence_set.get(name=RNDSEQ_NAME) users = [u for u in project.users.all()]
python
{ "resource": "" }
q278959
prj_post_save_handler
test
def prj_post_save_handler(sender, **kwargs): """ Post save receiver for when a Project is saved. Creates a rnd shot for every user. On creations does: 1. create all default departments 2. create all default assettypes 3. create all default sequences :param sender: the project class :type sender: :class:`muke.models.Project` :returns: None
python
{ "resource": "" }
q278960
seq_post_save_handler
test
def seq_post_save_handler(sender, **kwargs): """ Post save receiver for when a sequence is saved. creates a global shot. :param sender: the sequence class :type sender: :class:`muke.models.Sequence` :returns: None :raises: None """ if not kwargs['created']: return seq =
python
{ "resource": "" }
q278961
create_all_tasks
test
def create_all_tasks(element): """Create all tasks for the element :param element: The shot or asset that needs tasks :type element: :class:`muke.models.Shot` | :class:`muke.models.Asset` :returns: None :rtype: None :raises: None """ prj = element.project if isinstance(element, Asset):
python
{ "resource": "" }
q278962
ConnectionPool.pre_connect
test
def pre_connect(self, peer): """ Ensures that we have an open connection to the given peer. Returns the peer id. This should be equal to the given one, but it might not if the given peer was, say, the IP and the peer actually identifies itself with a host name. The returned peer is the real one that should be used. This can be handy if we aren't 100% sure of the peer's identity. """ if peer in self._connections:
python
{ "resource": "" }
q278963
ConnectionPool.send
test
def send(self, peer, typename, data): """ Sends a packet to a peer. """ def attempt_to_send(_): if peer not in self._connections: d = self._connect(peer) d.addCallback(attempt_to_send) return d else: conn = self._connections[peer][0] conn.send_packet(typename, data) return defer.succeed(None)
python
{ "resource": "" }
q278964
Config.get_config_value
test
def get_config_value(self, section, key, return_type: type): """Read customer's config value by section and key. :param section: config file's section. i.e [default] :param key: config file's key under section. i.e packages_scan :param return_type: return value type, str | int | bool. """
python
{ "resource": "" }
q278965
nova
test
def nova(*arg): """ Nova annotation for adding function to process nova notification. if event_type include wildcard, will put {pattern: function} into process_wildcard dict else will put {event_type: function} into process dict :param arg: event_type of notification """ check_event_type(Openstack.Nova, *arg) event_type = arg[0]
python
{ "resource": "" }
q278966
cinder
test
def cinder(*arg): """ Cinder annotation for adding function to process cinder notification. if event_type include wildcard, will put {pattern: function} into process_wildcard dict else will put {event_type: function} into process dict :param arg: event_type of notification """ check_event_type(Openstack.Cinder, *arg) event_type = arg[0] def decorator(func): if event_type.find("*") != -1: event_type_pattern = pre_compile(event_type) cinder_customer_process_wildcard[event_type_pattern] = func else:
python
{ "resource": "" }
q278967
neutron
test
def neutron(*arg): """ Neutron annotation for adding function to process neutron notification. if event_type include wildcard, will put {pattern: function} into process_wildcard dict else will put {event_type: function} into process dict :param arg: event_type of notification """ check_event_type(Openstack.Neutron, *arg) event_type = arg[0] def decorator(func): if event_type.find("*") != -1: event_type_pattern = pre_compile(event_type) neutron_customer_process_wildcard[event_type_pattern] = func else:
python
{ "resource": "" }
q278968
glance
test
def glance(*arg): """ Glance annotation for adding function to process glance notification. if event_type include wildcard, will put {pattern: function} into process_wildcard dict else will put {event_type: function} into process dict :param arg: event_type of notification """ check_event_type(Openstack.Glance, *arg) event_type = arg[0] def decorator(func): if event_type.find("*") != -1: event_type_pattern = pre_compile(event_type) glance_customer_process_wildcard[event_type_pattern] = func else:
python
{ "resource": "" }
q278969
swift
test
def swift(*arg): """ Swift annotation for adding function to process swift notification. if event_type include wildcard, will put {pattern: function} into process_wildcard dict else will put {event_type: function} into process dict :param arg: event_type of notification """ check_event_type(Openstack.Swift, *arg) event_type = arg[0]
python
{ "resource": "" }
q278970
keystone
test
def keystone(*arg): """ Swift annotation for adding function to process keystone notification. if event_type include wildcard, will put {pattern: function} into process_wildcard dict else will put {event_type: function} into process dict :param arg: event_type of notification """ check_event_type(Openstack.Keystone, *arg) event_type = arg[0] def decorator(func): if event_type.find("*") != -1: event_type_pattern = pre_compile(event_type) keystone_customer_process_wildcard[event_type_pattern] = func else:
python
{ "resource": "" }
q278971
heat
test
def heat(*arg): """ Heat annotation for adding function to process heat notification. if event_type include wildcard, will put {pattern: function} into process_wildcard dict else will put {event_type: function} into process dict :param arg: event_type of notification """ check_event_type(Openstack.Heat, *arg) event_type = arg[0]
python
{ "resource": "" }
q278972
MultiplexingCommandLocator.addFactory
test
def addFactory(self, identifier, factory): """Adds a factory. After calling this method, remote clients will be able to connect to it.
python
{ "resource": "" }
q278973
MultiplexingCommandLocator.removeFactory
test
def removeFactory(self, identifier): """Removes a factory. After calling this method, remote clients will no longer be able to connect to it. This will call the factory's ``doStop`` method.
python
{ "resource": "" }
q278974
MultiplexingCommandLocator.connect
test
def connect(self, factory): """Attempts to connect using a given factory. This will find the requested factory and use it to build a protocol as if the AMP protocol's peer was making the connection. It will create a transport for the protocol and connect it immediately. It will then store the protocol under a unique identifier, and return that identifier. """ try: factory = self._factories[factory] except KeyError: raise NoSuchFactory() remote =
python
{ "resource": "" }
q278975
MultiplexingCommandLocator.receiveData
test
def receiveData(self, connection, data): """ Receives some data for the given protocol. """
python
{ "resource": "" }
q278976
MultiplexingCommandLocator.disconnect
test
def disconnect(self, connection): """ Disconnects the given protocol. """ proto
python
{ "resource": "" }
q278977
ProxyingProtocol._callRemote
test
def _callRemote(self, command, **kwargs): """Shorthand for ``callRemote``. This uses the factory's connection to the AMP peer.
python
{ "resource": "" }
q278978
ProxyingProtocol.connectionMade
test
def connectionMade(self): """Create a multiplexed stream connection. Connect to the AMP server's multiplexed factory using the identifier (defined by this class' factory). When done, stores the connection reference and causes buffered data to be sent. """ log.msg("Creating multiplexed AMP connection...")
python
{ "resource": "" }
q278979
ProxyingProtocol._multiplexedConnectionMade
test
def _multiplexedConnectionMade(self, response): """Stores a reference to the connection, registers this protocol on the factory as one related to a multiplexed AMP connection, and sends currently buffered data. Gets rid of the buffer afterwards. """ self.connection = conn = response["connection"] self.factory.protocols[conn] = self log.msg("Multiplexed
python
{ "resource": "" }
q278980
ProxyingProtocol.dataReceived
test
def dataReceived(self, data): """Received some data from the local side. If we have set up the multiplexed connection, sends the data over the multiplexed connection. Otherwise, buffers. """ log.msg("{} bytes of data received locally".format(len(data))) if self.connection is None:
python
{ "resource": "" }
q278981
ProxyingProtocol._sendData
test
def _sendData(self, data): """Actually sends data over the wire.
python
{ "resource": "" }
q278982
ProxyingProtocol.connectionLost
test
def connectionLost(self, reason): """If we already have an AMP connection registered on the factory,
python
{ "resource": "" }
q278983
ProxyingAMPLocator.getLocalProtocol
test
def getLocalProtocol(self, connectionIdentifier): """Attempts to get a local protocol by connection identifier. """ for factory in self.localFactories: try:
python
{ "resource": "" }
q278984
ProxyingAMPLocator.remoteDataReceived
test
def remoteDataReceived(self, connection, data): """Some data was received from the remote end. Find the matching
python
{ "resource": "" }
q278985
ProxyingAMPLocator.disconnect
test
def disconnect(self, connection): """The other side has asked us to disconnect. """
python
{ "resource": "" }
q278986
centered
test
def centered(mystring, linewidth=None, fill=" "): '''Takes a string, centres it, and pads it on both sides''' if linewidth is None: linewidth = get_terminal_size().columns - 1
python
{ "resource": "" }
q278987
clock_on_right
test
def clock_on_right(mystring): '''Takes a string, and prints it with the time right aligned''' taken = length_no_ansi(mystring)
python
{ "resource": "" }
q278988
version_number_str
test
def version_number_str(major, minor=0, patch=0, prerelease=None, build=None): """ Takes the parts of a semantic version number, and returns a nicely formatted string. """ version = str(major) + '.' + str(minor) + '.' + str(patch) if prerelease: if prerelease.startswith('-'): version = version + prerelease
python
{ "resource": "" }
q278989
identify_unit_framework
test
def identify_unit_framework(target_unit): """ Identify whether the user is requesting unit validation against astropy.units, pint, or quantities. """ if HAS_ASTROPY: from astropy.units import UnitBase if isinstance(target_unit, UnitBase): return ASTROPY if HAS_PINT: from pint.unit import UnitsContainer if hasattr(target_unit, 'dimensionality') and isinstance(target_unit.dimensionality, UnitsContainer): return PINT
python
{ "resource": "" }
q278990
assert_unit_convertability
test
def assert_unit_convertability(name, value, target_unit, unit_framework): """ Check that a value has physical type consistent with user-specified units Note that this does not convert the value, only check that the units have the right physical dimensionality. Parameters ---------- name : str The name of the value to check (used for error messages). value : `numpy.ndarray` or instance of `numpy.ndarray` subclass The value to check. target_unit : unit The unit that the value should be convertible to. unit_framework : str The unit framework to use """ if unit_framework == ASTROPY: from astropy.units import Quantity if not isinstance(value, Quantity): raise TraitError("{0} should be given as an Astropy Quantity instance".format(name)) if not target_unit.is_equivalent(value.unit): raise TraitError("{0} should be in units convertible to {1}".format(name, target_unit)) elif unit_framework == PINT:
python
{ "resource": "" }
q278991
pad
test
def pad(data_to_pad, block_size, style='pkcs7'): """Apply standard padding. :Parameters: data_to_pad : byte string The data that needs to be padded. block_size : integer The block boundary to use for padding. The output length is guaranteed to be a multiple of ``block_size``. style : string Padding algorithm. It can be *'pkcs7'* (default), *'iso7816'* or *'x923'*. :Return: The original data with the appropriate padding added at the end. """
python
{ "resource": "" }
q278992
unpad
test
def unpad(padded_data, block_size, style='pkcs7'):
    """Remove standard padding.

    :Parameters:
      padded_data : byte string
        A piece of data with padding that needs to be stripped.
      block_size : integer
        The block boundary to use for padding. The input length
        must be a multiple of ``block_size``.
      style : string
        Padding algorithm. It can be *'pkcs7'* (default), *'iso7816'* or
        *'x923'*.

    :Return:
      Data without padding.

    :Raises ValueError:
      if the padding is incorrect.
    """
    pdata_len = len(padded_data)
    if pdata_len % block_size:
        raise ValueError("Input data is not padded")
    if style in ('pkcs7', 'x923'):
        if pdata_len == 0:
            raise ValueError("Zero-length input cannot be unpadded")
        # Both styles store the padding length in the final byte.
        padding_len = padded_data[-1]
        if padding_len < 1 or padding_len > min(block_size, pdata_len):
            raise ValueError("Padding is incorrect.")
        if style == 'pkcs7':
            # Every padding byte must equal the padding length.
            if padded_data[-padding_len:] != bytes([padding_len]) * padding_len:
                raise ValueError("PKCS#7 padding is incorrect.")
        else:
            # All padding bytes before the length byte must be zero.
            if padded_data[-padding_len:-1] != bytes([0]) * (padding_len - 1):
                raise ValueError("ANSI X.923 padding is incorrect.")
    elif style == 'iso7816':
        # Padding starts at the last 0x80 marker byte.
        padding_len = pdata_len - padded_data.rfind(bytes([128]))
        if padding_len < 1 or padding_len > min(block_size, pdata_len):
            raise ValueError("Padding is incorrect.")
        # Everything after the marker must be zero bytes.
        if padding_len > 1 and padded_data[1 - padding_len:] != bytes([0]) * (padding_len - 1):
            raise ValueError("ISO 7816-4 padding is incorrect.")
    else:
        raise ValueError("Unknown padding style")
    return padded_data[:-padding_len]
python
{ "resource": "" }
q278993
FederationEntity.self_sign
test
def self_sign(self, req, receiver='', aud=None):
    """
    Sign the extended request.

    :param req: Request, a :py:class:`fedoidcmsg.MetadataStatement` instance
    :param receiver: The intended user of this metadata statement
    :param aud: The audience, a list of receivers.
    :return: An augmented set of request arguments
    """
    # Prefer the configured entity_id as issuer; fall back to self.iss.
    if self.entity_id:
        _iss = self.entity_id
    else:
        _iss = self.iss
    # Work on a copy so the caller's request object is not mutated.
    creq = req.copy()
    if not 'metadata_statement_uris' in creq and not 'metadata_statements' in creq:
        # No embedded statements yet: sign the request itself and wrap the
        # resulting JWS as this entity's own metadata statement.
        _copy = creq.copy()
        _jws = self.self_signer.sign(_copy, receiver=receiver, iss=_iss, aud=aud)
        sms_spec = {'metadata_statements': {self.iss: _jws}}
    else:
        # Statements are present: strip them from the copy before signing.
        for ref in ['metadata_statement_uris', 'metadata_statements']:
            try:
                del creq[ref]
            except KeyError:
                # NOTE(review): snippet is truncated here; the remainder of
                # this branch and the final return are missing from this view.
python
{ "resource": "" }
q278994
FederationEntityOOB.gather_metadata_statements
test
def gather_metadata_statements(self, fos=None, context=''):
    """
    Only gathers metadata statements and returns them.

    :param fos: Signed metadata statements from these Federation Operators
        should be added.
    :param context: context of the metadata exchange
    :return: Dictionary with signed Metadata Statements as values
    """
    # Fall back to the entity's default context when none is given.
    if not context:
        context = self.context
    _res = {}
    if self.metadata_statements:
        try:
            cms = self.metadata_statements[context]
        except KeyError:
            if self.metadata_statements == {
                    'register': {}, 'discovery': {}, 'response': {}}:
                # No superior so an FO then. Nothing to add ..
                pass
            else:
                logger.error(
                    'No metadata statements for this context: {}'.format(
                        context))
                raise ValueError('Wrong context "{}"'.format(context))
        else:
            if cms != {}:
                # Default to every FO we hold a statement from.
                if fos is None:
                    fos = list(cms.keys())
                # NOTE(review): snippet is truncated here; the loop that
                # copies the selected FOs' statements into _res and the
                # final return are missing from this view.
python
{ "resource": "" }
q278995
pretty_print
test
def pretty_print(input_word, anagrams, by_length=False):
    """Prints the anagram results sorted by score to stdout.

    Args:
        input_word: the base word we searched on
        anagrams: generator of (word, score) from anagrams_in_word
        by_length: a boolean to declare printing by length instead of score
    """
    # Buckets of formatted "word (score)" strings, keyed by length or score.
    scores = {}
    if by_length:
        noun = "tiles"
        for word, score in anagrams:
            try:
                scores[len(word)].append("{0} ({1:d})".format(word, score))
                # NOTE(review): snippet is truncated here; presumably a
                # KeyError handler creates the missing bucket, a by-score
                # branch follows, and the sorted printing loop closes the
                # function -- confirm against the full source.
python
{ "resource": "" }
q278996
argument_parser
test
def argument_parser(args):
    """Argparse logic, command line options.

    Args:
        args: sys.argv[1:], everything passed to the program after its name

    Returns:
        A tuple of:
            a list of words/letters to search
            a boolean to declare if we want to use the sowpods words file
            a boolean to declare if we want to output anagrams by length
            a string of starting characters to find anagrams based on
            a string of ending characters to find anagrams based on

    Raises:
        SystemExit if the user passes invalid arguments, --version or --help
    """
    # add_help=False because -h/--help is registered manually below.
    # NOTE(review): "Scabble" in the user-facing description looks like a
    # typo for "Scrabble" -- left unchanged here as it is runtime text.
    parser = argparse.ArgumentParser(
        prog="nagaram",
        description="Finds Scabble anagrams.",
        formatter_class=argparse.RawDescriptionHelpFormatter,
        add_help=False,
    )
    parser.add_argument(
        "-h",
        "--help",
        dest="help",
        action="store_true",
        default=False,
    )
    parser.add_argument(
        "--sowpods",
        dest="sowpods",
        # NOTE(review): snippet is truncated here; the remaining argument
        # definitions and the parse/return logic are missing from this view.
python
{ "resource": "" }
q278997
main
test
def main(arguments=None):
    """Main command line entry point.

    Args:
        arguments: list of command line arguments; defaults to sys.argv[1:]
            when falsy.
    """
    if not arguments:
        arguments = sys.argv[1:]
    wordlist, sowpods, by_length, start, end = argument_parser(arguments)
    for word in wordlist:
        # NOTE(review): snippet is truncated here; the per-word anagram
        # lookup and printing body is missing from this view.
python
{ "resource": "" }
q278998
PacketProtocol.dataReceived
test
def dataReceived(self, data):
    """
    Do not overwrite this method. Instead implement `on_...` methods for
    the registered typenames to handle incoming packets.
    """
    # Buffer the raw bytes; packets may arrive fragmented or coalesced.
    self._unprocessed_data.enqueue(data)
    while True:
        if len(self._unprocessed_data) < self._header.size:
            return  # not yet enough data
        # Peek (do not consume) the header until the full packet is here.
        hdr_data = self._unprocessed_data.peek(self._header.size)
        packet_length, typekey = self._header.unpack(hdr_data)
        total_length = self._header.size + packet_length
        if len(self._unprocessed_data) < total_length:
            # NOTE(review): snippet is truncated here; presumably this waits
            # for more data, otherwise consumes the packet and dispatches it
            # to the registered `on_...` handler -- confirm against the
            # full source.
python
{ "resource": "" }
q278999
PacketProtocol.on_unregistered_type
test
def on_unregistered_type(self, typekey, packet):
    """
    Invoked if a packet with an unregistered type was received.

    Default behaviour is to log and close the connection.
    """
    # NOTE(review): snippet is truncated here; the body implementing the
    # log-and-close behaviour is missing from this view.
python
{ "resource": "" }