Dataset columns:
  _id               string (lengths 2 to 7)
  title             string (lengths 1 to 88)
  partition         string (3 classes)
  text              string (lengths 75 to 19.8k)
  language          string (1 class)
  meta_information  dict
q278900
last_name
test
def last_name(languages=None):
    """
    return a random last name

    >>> from mock import patch
    >>> with patch('%s._get_lastnames' % __name__, lambda *args: ['aaa']):
    ...     last_name()
    'Aaa'

    >>> with patch('%s._get_lastnames' % __name__, lambda lang: ['%s_lastname' % lang]):
    ...     last_name(['it'])
    'It_Lastname'
    """
    choices = []
    languages = languages or ['en']
    for lang in languages:
        samples = _get_lastnames(lang)
        choices.extend(samples)
    return random.choice(choices).title()
python
{ "resource": "" }
q278901
Axes.render
test
def render(self):
    """Render the axes data into the dict data"""
    for opt, values in self.data.items():
        if opt == 'ticks':
            self['chxtc'] = '|'.join(values)
        else:
            self['chx%s' % opt[0]] = '|'.join(values)
    return self
python
{ "resource": "" }
q278902
GChart.dataset
test
def dataset(self, data, series=''):
    """
    Update the chart's dataset, can be two dimensional or contain string data
    """
    self._dataset = data
    self._series = series
    return self
python
{ "resource": "" }
q278903
GChart.render
test
def render(self):
    """
    Renders the chart context and axes into the dict data
    """
    self.update(self.axes.render())
    encoder = Encoder(self._encoding, None, self._series)
    if not 'chs' in self:
        self['chs'] = '300x150'
    else:
        size = self['chs'].split('x')
        assert len(size) == 2, 'Invalid size, must be in the format WxH'
        self.check_size(*map(int, size))
    assert 'cht' in self, 'No chart type defined, use type method'
    self['cht'] = self.check_type(self['cht'])
    if ('any' in dir(self._dataset) and self._dataset.any()) or self._dataset:
        self['chd'] = encoder.encode(self._dataset)
    elif not 'choe' in self:
        assert 'chd' in self, 'You must have a dataset, or use chd'
    if self._scale:
        assert self['chd'].startswith('t'), \
            'You must use text encoding with chds'
        self['chds'] = ','.join(self._scale)
    if self._geo and self._ld:
        self['chtm'] = self._geo
        self['chld'] = self._ld
    if self.lines:
        self['chls'] = '|'.join(self.lines)
    if self.markers:
        self['chm'] = '|'.join(self.markers)
    if self.fills:
        self['chf'] = '|'.join(self.fills)
python
{ "resource": "" }
q278904
GChart.check_type
test
def check_type(self, type):
    """Check to see if the type is either in TYPES or fits type name

    Returns proper type
    """
    if type in TYPES:
        return type
    tdict = dict(zip(TYPES, TYPES))
    tdict.update({
        'line': 'lc',
        'bar': 'bvs',
        'pie': 'p',
        'venn': 'v',
        'scater': 's',
        'radar': 'r',
        'meter': 'gom',
    })
    assert type in tdict, 'Invalid chart type: %s' % type
    return tdict[type]
python
{ "resource": "" }
q278905
GChart.url
test
def url(self):
    """
    Returns the rendered URL of the chart
    """
    self.render()
    return self._apiurl + '&'.join(self._parts()).replace(' ', '+')
python
{ "resource": "" }
q278906
GChart.show
test
def show(self, *args, **kwargs):
    """
    Shows the chart URL in a webbrowser

    Other arguments passed to webbrowser.open
    """
    from webbrowser import open as webopen
    return webopen(str(self), *args, **kwargs)
python
{ "resource": "" }
q278907
GChart.save
test
def save(self, fname=None):
    """
    Download the chart from the URL into a filename as a PNG

    The filename defaults to the chart title (chtt) if any
    """
    if not fname:
        fname = self.getname()
    assert fname != None, 'You must specify a filename to save to'
    if not fname.endswith('.png'):
        fname += '.png'
    try:
        urlretrieve(self.url, fname)
    except Exception:
        raise IOError('Problem saving %s to file' % fname)
    return fname
python
{ "resource": "" }
q278908
GChart.urlopen
test
def urlopen(self):
    """
    Grabs readable PNG file pointer
    """
    req = Request(str(self))
    try:
        return urlopen(req)
    except HTTPError:
        _print('The server couldn\'t fulfill the request.')
    except URLError:
        _print('We failed to reach a server.')
python
{ "resource": "" }
q278909
GChart.image
test
def image(self):
    """
    Returns a PngImageFile instance of the chart

    You must have PIL installed for this to work
    """
    try:
        try:
            import Image
        except ImportError:
            from PIL import Image
    except ImportError:
        raise ImportError('You must install PIL to fetch image objects')
    try:
        from cStringIO import StringIO
    except ImportError:
        from StringIO import StringIO
    return Image.open(StringIO(self.urlopen().read()))
python
{ "resource": "" }
q278910
GChart.write
test
def write(self, fp):
    """
    Writes out PNG image data in chunks to file pointer fp

    fp must support w or wb
    """
    urlfp = self.urlopen().fp
    while 1:
        try:
            fp.write(urlfp.next())
        except StopIteration:
            return
python
{ "resource": "" }
q278911
GChart.checksum
test
def checksum(self):
    """
    Returns the unique SHA1 hexdigest of the chart URL param parts

    good for unittesting...
    """
    self.render()
    return new_sha(''.join(sorted(self._parts()))).hexdigest()
python
{ "resource": "" }
q278912
amount
test
def amount(min=1, max=sys.maxsize, decimal_places=2):
    """
    return a random floating number
    :param min: minimum value
    :param max: maximum value
    :param decimal_places: decimal places
    :return:
    """
    # parenthesise the repetition: '%' and '*' share precedence, so the
    # unparenthesised original produced '.01.01' for decimal_places=3
    q = '.%s1' % ('0' * (decimal_places - 1))
    return decimal.Decimal(uniform(min, max)).quantize(decimal.Decimal(q))
python
{ "resource": "" }
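Illustrative usage of amount() above, with the quantize pattern spelled out; the seed only makes the run reproducible, so the value in the comment holds for this seed alone:

import decimal
from random import seed, uniform

seed(42)  # reproducible example
decimal_places = 3
q = '.%s1' % ('0' * (decimal_places - 1))  # -> '.001'
value = decimal.Decimal(uniform(1, 100)).quantize(decimal.Decimal(q))
print(value)  # Decimal('64.303') for this seed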
q278913
entity_name_decorator
test
def entity_name_decorator(top_cls):
    """
    Assign an entity name based on the class immediately inheriting from
    Base. This is needed because we don't want entity names to come from
    any class that simply inherits our classes, just the ones in our
    module.

    For example, if you create a class Project2 that exists outside of
    kalibro_client and inherits from Project, its entity name should
    still be Project.
    """
    class_name = inflection.underscore(top_cls.__name__).lower()

    def entity_name(cls):
        return class_name

    top_cls.entity_name = classmethod(entity_name)

    return top_cls
python
{ "resource": "" }
q278914
LessOrEqual.unprotected_and_protected_claims
test
def unprotected_and_protected_claims(self):
    """
    This is both verified and self asserted information. As expected
    verified information beats self-asserted so if there is both
    self-asserted and verified values for a claim then only the verified
    will be returned.
    """
    if self.sup:
        res = {}
        for k, v in self.le.items():
            if k not in self.sup.le:
                res[k] = v
            else:
                res[k] = self.sup.le[k]
        return res
    else:
        return self.le
python
{ "resource": "" }
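The merge rule in unprotected_and_protected_claims() boils down to: verified (superior) values win for keys present in both. A standalone dict sketch of that rule, with made-up claim values:

self_asserted = {'name': 'Alice', 'email': 'alice@example.com'}
verified = {'email': 'a.smith@example.org'}
merged = {k: verified.get(k, v) for k, v in self_asserted.items()}
print(merged)  # {'name': 'Alice', 'email': 'a.smith@example.org'}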
q278915
Operator.signing_keys_as_jwks
test
def signing_keys_as_jwks(self):
    """
    Build a JWKS from the signing keys belonging to the self signer

    :return: Dictionary
    """
    _l = [x.serialize() for x in self.self_signer.keyjar.get_signing_key()]
    if not _l:
        _l = [x.serialize() for x in
              self.self_signer.keyjar.get_signing_key(owner=self.iss)]
    return {'keys': _l}
python
{ "resource": "" }
q278916
Operator.unpack_metadata_statement
test
def unpack_metadata_statement(self, ms_dict=None, jwt_ms='', keyjar=None,
                              cls=ClientMetadataStatement, liss=None):
    """
    Starting with a signed JWT or a JSON document unpack and verify all
    the separate metadata statements.

    :param ms_dict: Metadata statement as a dictionary
    :param jwt_ms: Metadata statement as JWT
    :param keyjar: Keys that should be used to verify the signature of
        the document
    :param cls: What type (Class) of metadata statement this is
    :param liss: list of FO identifiers that matter. The rest will be
        ignored
    :return: A ParseInfo instance
    """
    if not keyjar:
        if self.jwks_bundle:
            keyjar = self.jwks_bundle.as_keyjar()
        else:
            keyjar = KeyJar()

    if jwt_ms:
        try:
            ms_dict = unfurl(jwt_ms)
        except JWSException as err:
            logger.error('Could not unfurl jwt_ms due to {}'.format(err))
            raise

    if ms_dict:
        return self._unpack(ms_dict, keyjar, cls, jwt_ms, liss)
    else:
        raise AttributeError('Need one of ms_dict or jwt_ms')
python
{ "resource": "" }
q278917
Operator.pack_metadata_statement
test
def pack_metadata_statement(self, metadata, receiver='', iss='', lifetime=0,
                            sign_alg=''):
    """
    Given a MetadataStatement instance create a signed JWT.

    :param metadata: Original metadata statement as a MetadataStatement
        instance
    :param receiver: Receiver (audience) of the JWT
    :param iss: Issuer ID if different from default
    :param lifetime: JWT signature life time
    :param sign_alg: JWT signature algorithm
    :return: A JWT
    """
    return self.self_signer.sign(metadata, receiver=receiver, iss=iss,
                                 lifetime=lifetime, sign_alg=sign_alg)
python
{ "resource": "" }
q278918
Operator.evaluate_metadata_statement
test
def evaluate_metadata_statement(self, metadata, keyjar=None):
    """
    Computes the resulting metadata statement from a compounded metadata
    statement. If something goes wrong during the evaluation an exception
    is raised

    :param metadata: The compounded metadata statement as a dictionary
    :return: A list of :py:class:`fedoidc.operator.LessOrEqual`
        instances, one per FO.
    """
    # start from the innermost metadata statement and work outwards
    res = dict([(k, v) for k, v in metadata.items() if k not in IgnoreKeys])

    les = []

    if 'metadata_statements' in metadata:
        for fo, ms in metadata['metadata_statements'].items():
            if isinstance(ms, str):
                ms = json.loads(ms)

            for _le in self.evaluate_metadata_statement(ms):
                if isinstance(ms, Message):
                    le = LessOrEqual(sup=_le, **ms.to_dict())
                else:  # Must be a dict
                    le = LessOrEqual(sup=_le, **ms)

                if le.is_expired():
                    logger.error(
                        'This metadata statement has expired: {}'.format(ms))
                    logger.info(
                        'My time: {}'.format(utc_time_sans_frac()))
                    continue

                le.eval(res)
                les.append(le)
        return les
    else:  # this is the innermost
        try:
            _iss = metadata['iss']
        except KeyError:  # narrowed from a bare except; only the lookup can fail
            le = LessOrEqual()
            le.eval(res)
        else:
            le = LessOrEqual(iss=_iss, exp=metadata['exp'])
            le.eval(res)
        les.append(le)
        return les
python
{ "resource": "" }
q278919
Operator.correct_usage
test
def correct_usage(self, metadata, federation_usage):
    """
    Remove MS paths that are marked to be used for another usage

    :param metadata: Metadata statement as dictionary
    :param federation_usage: In which context this is expected to be used.
    :return: Filtered Metadata statement.
    """
    if 'metadata_statements' in metadata:
        _msl = {}
        for fo, ms in metadata['metadata_statements'].items():
            if not isinstance(ms, Message):
                ms = json.loads(ms)
            if self.correct_usage(ms, federation_usage=federation_usage):
                _msl[fo] = ms
        if _msl:
            metadata['metadata_statements'] = Message(**_msl)
            return metadata
        else:
            return None
    else:  # this is the innermost
        try:
            assert federation_usage == metadata['federation_usage']
        except KeyError:
            pass
        except AssertionError:
            return None
        return metadata
python
{ "resource": "" }
q278920
Operator.extend_with_ms
test
def extend_with_ms(self, req, sms_dict):
    """
    Add signed metadata statements to a request

    :param req: The request
    :param sms_dict: A dictionary with FO IDs as keys and signed metadata
        statements (sms) or uris pointing to sms as values.
    :return: The updated request
    """
    _ms_uri = {}
    _ms = {}
    for fo, sms in sms_dict.items():
        if sms.startswith('http://') or sms.startswith('https://'):
            _ms_uri[fo] = sms
        else:
            _ms[fo] = sms

    if _ms:
        req['metadata_statements'] = Message(**_ms)
    if _ms_uri:
        req['metadata_statement_uris'] = Message(**_ms_uri)
    return req
python
{ "resource": "" }
q278921
parse_args
test
def parse_args():
    """
    Parses command line args using argparse library
    """
    usage = "Usage: create_concordance <infile> [<outfile>]"
    description = "Simple Concordance Generator"
    argparser = argparse.ArgumentParser(usage=usage, description=description)
    argparser.add_argument(
        'infile',
        type=argparse.FileType('r'),
        help="File read in to create concordance")
    argparser.add_argument(
        'outfile',
        nargs='?',
        type=argparse.FileType('w'),
        default=sys.stdout,
        help="File to write concordance to. Default is stdout")
    argparser.add_argument(
        '--word',
        nargs="?",
        const=str,
        help="Display a word in concordance")
    args = argparser.parse_args()
    return args
python
{ "resource": "" }
q278922
addCommandLineArgs
test
def addCommandLineArgs(arg_parser):
    """Add logging options to an ArgumentParser."""
    arg_parser.register("action", "log_levels", LogLevelAction)
    arg_parser.register("action", "log_files", LogFileAction)
    arg_parser.register("action", "log_help", LogHelpAction)

    group = arg_parser.add_argument_group("Logging options")
    group.add_argument(
        "-l", "--log-level", dest="log_levels", action="log_levels",
        metavar="LOGGER:LEVEL", default=[],
        help="Set log levels for individual loggers. See --help-logging "
             "for complete details.")
    group.add_argument(
        "-L", "--log-file", dest="log_files", action="log_files",
        metavar="LOGGER:FILE", default=[],
        help="Set the log output file for individual loggers. "
             "See --help-logging for complete details.")
    group.add_argument("--help-logging", action="log_help",
                       help=argparse.SUPPRESS)
python
{ "resource": "" }
q278923
applyLoggingOpts
test
def applyLoggingOpts(log_levels, log_files):
    """Apply logging options produced by LogLevelAction and LogFileAction.

    More often than not this function is not needed, the actions have
    already been taken during the parse, but it can be used in the case
    they need to be applied again (e.g. when command line opts take
    precedence but were overridden by a fileConfig, etc.).
    """
    for l, lvl in log_levels:
        l.setLevel(lvl)
    for l, hdl in log_files:
        for h in l.handlers:
            l.removeHandler(h)
        l.addHandler(hdl)
python
{ "resource": "" }
q278924
Logger.verbose
test
def verbose(self, msg, *args, **kwargs):
    """Log msg at 'verbose' level, debug < verbose < info"""
    self.log(logging.VERBOSE, msg, *args, **kwargs)
python
{ "resource": "" }
q278925
_letter_map
test
def _letter_map(word):
    """Creates a map of letter use in a word.

    Args:
        word: a string to create a letter map from

    Returns:
        a dictionary of {letter: integer count of letter in word}
    """
    lmap = {}
    for letter in word:
        try:
            lmap[letter] += 1
        except KeyError:
            lmap[letter] = 1
    return lmap
python
{ "resource": "" }
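For plain strings, _letter_map() is equivalent to collections.Counter from the standard library; a quick check of that equivalence:

from collections import Counter

word = 'scrabble'
lmap = {}
for letter in word:
    lmap[letter] = lmap.get(letter, 0) + 1
assert lmap == dict(Counter(word))
print(lmap['b'])  # 2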
q278926
anagrams_in_word
test
def anagrams_in_word(word, sowpods=False, start="", end=""):
    """Finds anagrams in word.

    Args:
        word: the string to base our search off of
        sowpods: boolean to declare TWL or SOWPODS words file
        start: a string of starting characters to find anagrams based on
        end: a string of ending characters to find anagrams based on

    Yields:
        a tuple of (word, score) that can be made with the input_word
    """
    input_letters, blanks, questions = blank_tiles(word)

    for tile in start + end:
        input_letters.append(tile)

    for word in word_list(sowpods, start, end):
        lmap = _letter_map(input_letters)
        used_blanks = 0
        for letter in word:
            if letter in lmap:
                lmap[letter] -= 1
                if lmap[letter] < 0:
                    used_blanks += 1
                    if used_blanks > (blanks + questions):
                        break
            else:
                used_blanks += 1
                if used_blanks > (blanks + questions):
                    break
        else:
            yield (word, word_score(word, input_letters, questions))
python
{ "resource": "" }
q278927
Error.asAMP
test
def asAMP(cls):
    """
    Returns the exception's name in an AMP Command friendly format.

    For example, given a class named ``ExampleExceptionClass``, returns
    ``"EXAMPLE_EXCEPTION_CLASS"``.
    """
    parts = groupByUpperCase(cls.__name__)
    return cls, "_".join(part.upper() for part in parts)
python
{ "resource": "" }
q278928
transform_timeseries_data
test
def transform_timeseries_data(timeseries, start, end=None):
    """Transforms a Go Metrics API metric result into a list of values
    for a given window period.

    start and end are expected to be Unix timestamps in microseconds.
    """
    data = []
    include = False
    for metric, points in timeseries.items():
        for point in points:
            if point['x'] == start:
                include = True
            if include:
                data.append(point['y'])
                if end is not None and point['x'] == end:
                    return data
    return data
python
{ "resource": "" }
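A small worked example for transform_timeseries_data(), assuming the function above is in scope; the metric name and timestamps are made up:

timeseries = {
    'stores.a.last': [
        {'x': 1000, 'y': 1.0},
        {'x': 2000, 'y': 2.0},
        {'x': 3000, 'y': 3.0},
        {'x': 4000, 'y': 4.0},
    ],
}
print(transform_timeseries_data(timeseries, 2000, end=3000))  # [2.0, 3.0]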
q278929
get_last_value_from_timeseries
test
def get_last_value_from_timeseries(timeseries):
    """Gets the most recent non-zero value for a .last metric or zero
    for empty data."""
    if not timeseries:
        return 0
    for metric, points in timeseries.items():
        return next((p['y'] for p in reversed(points) if p['y'] > 0), 0)
python
{ "resource": "" }
q278930
validate_page_number
test
def validate_page_number(number):
    """Validate the given 1-based page number."""
    try:
        number = int(number)
    except (TypeError, ValueError):
        raise PageNotAnInteger('That page number is not an integer')
    if number < 1:
        raise EmptyPage('That page number is less than 1')
    return number
python
{ "resource": "" }
q278931
get_page_of_iterator
test
def get_page_of_iterator(iterator, page_size, page_number):
    """
    Get a page from an iterator, handling invalid input from the page
    number by defaulting to the first page.
    """
    try:
        page_number = validate_page_number(page_number)
    except (PageNotAnInteger, EmptyPage):
        page_number = 1

    start = (page_number - 1) * page_size
    # End 1 more than we need, so that we can see if there's another page
    end = (page_number * page_size) + 1

    skipped_items = list(islice(iterator, start))
    items = list(islice(iterator, end))
    if len(items) == 0 and page_number != 1:
        items = skipped_items
        page_number = 1

    has_next = len(items) > page_size
    items = items[:page_size]

    return NoCountPage(items, page_number, page_size, has_next)
python
{ "resource": "" }
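The core trick in get_page_of_iterator() shown standalone with only itertools: skip ahead to the page, then take one item more than a page so a further page can be detected without counting the whole iterable:

from itertools import islice

iterator = iter(range(25))
page_size, page_number = 10, 2
list(islice(iterator, (page_number - 1) * page_size))  # consume up to the page
items = list(islice(iterator, page_size + 1))          # one extra as a probe
has_next = len(items) > page_size
print(items[:page_size], has_next)  # [10, 11, ..., 19] True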
q278932
chmod
test
def chmod(path, mode, recursive=True):
    """
    alternative to os.chmod that shells out to chmod
    """
    if recursive:
        cmd = 'chmod -R %s %s' % (mode, path)
    else:
        cmd = 'chmod %s %s' % (mode, path)
    return sh(cmd)
python
{ "resource": "" }
q278933
make_internal_signing_service
test
def make_internal_signing_service(config, entity_id):
    """
    Given configuration initiate an InternalSigningService instance

    :param config: The signing service configuration
    :param entity_id: The entity identifier
    :return: An InternalSigningService instance
    """
    _args = dict([(k, v) for k, v in config.items() if k in KJ_SPECS])
    _kj = init_key_jar(**_args)
    return InternalSigningService(entity_id, _kj)
python
{ "resource": "" }
q278934
make_signing_service
test
def make_signing_service(config, entity_id):
    """
    Given configuration initiate a SigningService instance

    :param config: The signing service configuration
    :param entity_id: The entity identifier
    :return: A SigningService instance
    """
    _args = dict([(k, v) for k, v in config.items() if k in KJ_SPECS])
    _kj = init_key_jar(**_args)

    if config['type'] == 'internal':
        signer = InternalSigningService(entity_id, _kj)
    elif config['type'] == 'web':
        _kj.issuer_keys[config['iss']] = _kj.issuer_keys['']
        del _kj.issuer_keys['']
        signer = WebSigningServiceClient(config['iss'], config['url'],
                                         entity_id, _kj)
    else:
        raise ValueError('Unknown signer type: {}'.format(config['type']))

    return signer
python
{ "resource": "" }
q278935
InternalSigningService.sign
test
def sign(self, req, receiver='', iss='', lifetime=0, sign_alg='', aud=None):
    """
    Creates a signed JWT

    :param req: Original metadata statement as a
        :py:class:`MetadataStatement` instance
    :param receiver: The intended audience for the JWS
    :param iss: Issuer of the JWT
    :param lifetime: Lifetime of the signature
    :param sign_alg: Which signature algorithm to use
    :param aud: The audience, a list of receivers.
    :return: A signed JWT
    """
    if not sign_alg:
        for key_type, s_alg in [('RSA', 'RS256'), ('EC', 'ES256')]:
            if self.keyjar.get_signing_key(key_type=key_type):
                sign_alg = s_alg
                break

    if not sign_alg:
        raise NoSigningKeys('Could not find any signing keys')

    return self.pack(req=req, receiver=receiver, iss=iss, lifetime=lifetime,
                     sign=True, encrypt=False, sign_alg=sign_alg)
python
{ "resource": "" }
q278936
WebSigningServiceClient.create
test
def create(self, req, **kwargs):
    """
    Uses POST to send a first metadata statement signing request to
    a signing service.

    :param req: The metadata statement that the entity wants signed
    :return: returns a dictionary with 'sms' and 'loc' as keys.
    """
    response = requests.post(self.url, json=req, **self.req_args())
    return self.parse_response(response)
python
{ "resource": "" }
q278937
WebSigningServiceClient.update_metadata_statement
test
def update_metadata_statement(self, location, req):
    """
    Uses PUT to update an earlier accepted and signed metadata statement.

    :param location: A URL to which the update request is sent
    :param req: The diff between what is registered with the signing
        service and what it should be.
    :return: returns a dictionary with 'sms' and 'loc' as keys.
    """
    response = requests.put(location, json=req, **self.req_args())
    return self.parse_response(response)
python
{ "resource": "" }
q278938
WebSigningServiceClient.update_signature
test
def update_signature(self, location):
    """
    Uses GET to get a newly signed metadata statement.

    :param location: A URL to which the request is sent
    :return: returns a dictionary with 'sms' and 'loc' as keys.
    """
    response = requests.get(location, **self.req_args())
    return self.parse_response(response)
python
{ "resource": "" }
q278939
Package._yield_bundle_contents
test
def _yield_bundle_contents(self, data):
    """Yield bundle contents from the given dict.

    Each item yielded will be either a string representing a file path
    or a bundle."""
    if isinstance(data, list):
        contents = data
    else:
        contents = data.get('contents', [])
    if isinstance(contents, six.string_types):
        contents = contents,
    for content in contents:
        if isinstance(content, dict):
            content = self._create_bundle(content)
        yield content
python
{ "resource": "" }
q278940
Package._create_bundle
test
def _create_bundle(self, data):
    """Return a bundle initialised by the given dict."""
    kwargs = {}
    if isinstance(data, dict):
        kwargs.update(
            filters=data.get('filters', None),
            output=data.get('output', None),
            debug=data.get('debug', None),
            extra=data.get('extra', {}),
            config=data.get('config', {}),
            depends=data.get('depends', None))
    bundle = Bundle(*list(self._yield_bundle_contents(data)), **kwargs)
    return self._auto_filter_bundle(bundle)
python
{ "resource": "" }
q278941
Package.urls_for
test
def urls_for(self, asset_type, *args, **kwargs):
    """Returns urls needed to include all assets of asset_type"""
    return self.urls_for_depends(asset_type, *args, **kwargs) + \
        self.urls_for_self(asset_type, *args, **kwargs)
python
{ "resource": "" }
q278942
Package.html_tags_for
test
def html_tags_for(self, asset_type, *args, **kwargs):
    """Return html tags for urls of asset_type"""
    html = []
    for ref in self.depends:
        html.append(self._ref(ref).html_tags_for(asset_type, *args,
                                                 **kwargs))
    if asset_type in self.typed_bundles:
        html.append(render_asset_html_tags(
            asset_type, self.urls_for_self(asset_type, *args, **kwargs)))
    return "\n".join(html)
python
{ "resource": "" }
q278943
Package.html_tags
test
def html_tags(self, *args, **kwargs):
    """Return all html tags for all asset types"""
    html = []
    for asset_type in list_asset_types():
        html.append(self.html_tags_for(asset_type.name, *args, **kwargs))
    return "\n".join(html)
python
{ "resource": "" }
q278944
protocolise
test
def protocolise(url):
    """
    Given a URL, check to see if there is an associated protocol.
    If not, set the protocol to HTTP and return the protocolised URL
    """
    # Use the regex to match http//localhost/something
    protore = re.compile(r'https?:{0,1}/{1,2}')
    parsed = urlparse.urlparse(url)
    if not parsed.scheme and not protore.search(url):
        url = 'http://{0}'.format(url)
    return url
python
{ "resource": "" }
q278945
find_links
test
def find_links(url):
    """
    Find the href destinations of all links at URL

    Arguments:
    - `url`:

    Return: list[str]
    Exceptions: None
    """
    url = protocolise(url)
    content = requests.get(url).content
    flike = StringIO(content)
    root = html.parse(flike).getroot()
    atags = root.cssselect('a')
    hrefs = [a.attrib['href'] for a in atags]
    # !!! This does the wrong thing for bbc.co.uk/index.html
    hrefs = [h if h.startswith('http') else '/'.join([url, h]) for h in hrefs]
    return hrefs
python
{ "resource": "" }
q278946
_connected
test
def _connected(client):
    """
    Connected to AMP server, start listening locally, and give the AMP
    client a reference to the local listening factory.
    """
    log.msg("Connected to AMP server, starting to listen locally...")
    localFactory = multiplexing.ProxyingFactory(client, "hello")
    return listeningEndpoint.listen(localFactory)
python
{ "resource": "" }
q278947
ServiceModules.get_modules
test
def get_modules(self):
    """Get modules by project_abspath and packages_scan.

    Traverse all files under the packages_scan folder, which is set by
    the customer, and collect all module names.
    """
    if not self.project_abspath:
        raise TypeError("project_abspath can not be empty.")
    packages_abspath = self.get_package_abspath()
    for package_abspath in packages_abspath:
        self.get_module_name(package_abspath)
    return self._modules
python
{ "resource": "" }
q278948
ServiceModules.import_modules
test
def import_modules(self):
    """Import customer's service module."""
    modules = self.get_modules()
    log.info("import service modules: " + str(modules))
    try:
        for module in modules:
            __import__(module)
    except ImportError as error:
        raise ImportModulesError(error.msg)
python
{ "resource": "" }
q278949
to_dates
test
def to_dates(param):
    """
    This function takes a date string in various formats and converts it
    to a normalized and validated date range. A list with two elements is
    returned, lower and upper date boundary.

    Valid inputs are, for example:

    2012              => Jan 1 2012 - Dec 31 2012 (whole year)
    201201            => Jan 1 2012 - Jan 31 2012 (whole month)
    20120101          => Jan 1 2012 - Jan 1 2012 (whole day)
    2011-2011         => same as "2011", which means whole year 2011
    2011-2012         => Jan 1 2011 - Dec 31 2012 (two years)
    201104-2012       => Apr 1 2011 - Dec 31 2012
    201104-201203     => Apr 1 2011 - Mar 31 2012
    20110408-2011     => Apr 8 2011 - Dec 31 2011
    20110408-201105   => Apr 8 2011 - May 31 2011
    20110408-20110507 => Apr 8 2011 - May 7 2011
    2011-             => Jan 1 2011 - Dec 31 9999 (unlimited)
    201104-           => Apr 1 2011 - Dec 31 9999 (unlimited)
    20110408-         => Apr 8 2011 - Dec 31 9999 (unlimited)
    -2011             => Jan 1 0000 - Dec 31 2011
    -201104           => Jan 1 0000 - Apr 30 2011
    -20110408         => Jan 1 0000 - Apr 8 2011
    """
    pos = param.find('-')
    lower, upper = (None, None)
    if pos == -1:
        # no separator given
        lower, upper = (param, param)
    else:
        lower, upper = param.split('-')
    ret = (expand_date_param(lower, 'lower'),
           expand_date_param(upper, 'upper'))
    return ret
python
{ "resource": "" }
q278950
Doc_Formatter.select_fields
test
def select_fields(doc, field_list):
    '''
    Take 'doc' and create a new doc using only keys from the 'fields'
    list. Supports referencing fields using dotted notation "a.b.c" so
    we can parse nested fields the way MongoDB does. The nested field
    class is a hack. It should be a sub-class of dict.
    '''
    if field_list is None or len(field_list) == 0:
        return doc

    newDoc = Nested_Dict({})
    oldDoc = Nested_Dict(doc)

    for i in field_list:
        if oldDoc.has_key(i):
            # print("doc: %s" % doc)
            # print("i: %s" % i)
            newDoc.set_value(i, oldDoc.get_value(i))
    return newDoc.dict_value()
python
{ "resource": "" }
q278951
Doc_Formatter.date_map
test
def date_map(doc, datemap_list, time_format=None):
    '''
    For all the datetime fields in "datemap" find that key in doc and
    map the datetime object to a strftime string. This ensures pprint
    and others will print out readable datetimes.
    '''
    if datemap_list:
        for i in datemap_list:
            if isinstance(i, datetime):
                doc = CursorFormatter.date_map_field(
                    doc, i, time_format=time_format)
    return doc
python
{ "resource": "" }
q278952
CursorFormatter.printCursor
test
def printCursor(self, fieldnames=None, datemap=None, time_format=None):
    '''
    Output a cursor to a filename or stdout if filename is "-".
    fmt defines whether we output CSV or JSON.
    '''
    if self._format == 'csv':
        count = self.printCSVCursor(fieldnames, datemap, time_format)
    else:
        count = self.printJSONCursor(fieldnames, datemap, time_format)

    return count
python
{ "resource": "" }
q278953
CursorFormatter.output
test
def output(self, fieldNames=None, datemap=None, time_format=None):
    '''
    Output all fields using the fieldNames list. For fields in the list
    datemap indicates the field must be a date.
    '''
    # printCursor takes no cursor argument (it reads self._cursor via the
    # print*Cursor helpers); the original call passed self._cursor as an
    # extra positional argument, which would raise a TypeError
    count = self.printCursor(fieldNames, datemap, time_format)
python
{ "resource": "" }
q278954
get_tasks
test
def get_tasks(do_tasks, dep_graph):
    """Given a list of tasks to perform and a dependency graph, return
    the tasks that must be performed, in the correct order"""
    # XXX: Is it important that if a task has "foo" before "bar" as a dep,
    # that foo executes before bar? Why? ATM this may not happen.

    # Each task that the user has specified gets its own execution graph
    task_graphs = []
    for task in do_tasks:
        exgraph = DiGraph()
        exgraph.add_node(task)
        _get_deps(task, exgraph, dep_graph)
        task_graphs.append(exgraph)

    return flatten(reversed(topological_sort(g)) for g in task_graphs)
python
{ "resource": "" }
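A dependency-free sketch of what get_tasks() delegates to DiGraph and topological_sort: emit each task's dependencies before the task itself. The function and data names here are illustrative, not from the library:

def topo(task, deps, seen, order):
    for dep in deps.get(task, []):
        if dep not in seen:
            seen.add(dep)
            topo(dep, deps, seen, order)
    order.append(task)

deps = {'deploy': ['build', 'test'], 'test': ['build']}
order, seen = [], {'deploy'}
topo('deploy', deps, seen, order)
print(order)  # ['build', 'test', 'deploy']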
q278955
add_default_deps
test
def add_default_deps(project):
    """Add or create the default departments for the given project

    :param project: the project that needs default departments
    :type project: :class:`muke.models.Project`
    :returns: None
    :rtype: None
    :raises: None
    """
    # create deps for project
    for name, short, order, af in DEFAULT_DEPARTMENTS:
        dep, created = Department.objects.get_or_create(
            name=name, short=short, ordervalue=order, assetflag=af)
        dep.projects.add(project)
        dep.full_clean()
        dep.save()
python
{ "resource": "" }
q278956
add_default_atypes
test
def add_default_atypes(project):
    """Add or create the default assettypes for the given project

    :param project: the project that needs default assettypes
    :type project: :class:`muke.models.Project`
    :returns: None
    :rtype: None
    :raises: None
    """
    # create assettypes for project
    for name, desc in DEFAULT_ASSETTYPES:
        at, created = Atype.objects.get_or_create(
            name=name, defaults={'description': desc})
        at.projects.add(project)
        at.full_clean()
        at.save()
python
{ "resource": "" }
q278957
add_default_sequences
test
def add_default_sequences(project):
    """Add or create the default sequences for the given project

    :param project: the project that needs default sequences
    :type project: :class:`muke.models.Project`
    :returns: None
    :rtype: None
    :raises: None
    """
    # create sequences for project
    seqs = [(GLOBAL_NAME,
             'global sequence for project %s' % project.name),
            (RNDSEQ_NAME,
             'research and development sequence for project %s' % project.name)]
    for name, desc in seqs:
        seq, created = Sequence.objects.get_or_create(
            name=name, project=project, defaults={'description': desc})
python
{ "resource": "" }
q278958
add_userrnd_shot
test
def add_userrnd_shot(project):
    """Add a rnd shot for every user in the project

    :param project: the project that needs its rnd shots updated
    :type project: :class:`muke.models.Project`
    :returns: None
    :rtype: None
    :raises: None
    """
    rndseq = project.sequence_set.get(name=RNDSEQ_NAME)
    users = [u for u in project.users.all()]
    for user in users:
        shot, created = Shot.objects.get_or_create(
            name=user.username, project=project, sequence=rndseq,
            defaults={'description': 'rnd shot for user %s' % user.username})
        for t in shot.tasks.all():
            t.users.add(user)
            t.full_clean()
            t.save()
python
{ "resource": "" }
q278959
prj_post_save_handler
test
def prj_post_save_handler(sender, **kwargs):
    """Post save receiver for when a Project is saved.

    Creates a rnd shot for every user.

    On creation does:

      1. create all default departments
      2. create all default assettypes
      3. create all default sequences

    :param sender: the project class
    :type sender: :class:`muke.models.Project`
    :returns: None
    :raises: None
    """
    prj = kwargs['instance']
    if not kwargs['created']:
        add_userrnd_shot(prj)
        return
    add_default_deps(prj)
    add_default_atypes(prj)
    add_default_sequences(prj)
python
{ "resource": "" }
q278960
seq_post_save_handler
test
def seq_post_save_handler(sender, **kwargs):
    """Post save receiver for when a sequence is saved. Creates a global
    shot.

    :param sender: the sequence class
    :type sender: :class:`muke.models.Sequence`
    :returns: None
    :raises: None
    """
    if not kwargs['created']:
        return
    seq = kwargs['instance']
    if seq.name == RNDSEQ_NAME:
        return
    prj = seq.project
    name = GLOBAL_NAME
    desc = "Global shot for sequence %s" % seq.name
    Shot.objects.create(name=name, project=prj, sequence=seq,
                        description=desc)
python
{ "resource": "" }
q278961
create_all_tasks
test
def create_all_tasks(element):
    """Create all tasks for the element

    :param element: The shot or asset that needs tasks
    :type element: :class:`muke.models.Shot` | :class:`muke.models.Asset`
    :returns: None
    :rtype: None
    :raises: None
    """
    prj = element.project
    if isinstance(element, Asset):
        flag = True
    else:
        flag = False
    deps = prj.department_set.filter(assetflag=flag)
    for d in deps:
        t = Task(project=prj, department=d, element=element)
        t.full_clean()
        t.save()
python
{ "resource": "" }
q278962
ConnectionPool.pre_connect
test
def pre_connect(self, peer):
    """
    Ensures that we have an open connection to the given peer.

    Returns the peer id. This should be equal to the given one, but it
    might not be if the given peer was, say, the IP and the peer actually
    identifies itself with a host name. The returned peer is the real one
    that should be used. This can be handy if we aren't 100% sure of the
    peer's identity.
    """
    if peer in self._connections:
        return defer.succeed(peer)
    else:
        d = self._connect(peer, exact_peer=False)

        def connected(p):
            return p.peer
        d.addCallback(connected)
        return d
python
{ "resource": "" }
q278963
ConnectionPool.send
test
def send(self, peer, typename, data):
    """
    Sends a packet to a peer.
    """
    def attempt_to_send(_):
        if peer not in self._connections:
            d = self._connect(peer)
            d.addCallback(attempt_to_send)
            return d
        else:
            conn = self._connections[peer][0]
            conn.send_packet(typename, data)
            return defer.succeed(None)

    d = attempt_to_send(None)
    self._ongoing_sends.add(d)

    def send_completed(result):
        if d in self._ongoing_sends:
            self._ongoing_sends.remove(d)
        return result
    d.addBoth(send_completed)
    return d
python
{ "resource": "" }
q278964
Config.get_config_value
test
def get_config_value(self, section, key, return_type: type):
    """Read customer's config value by section and key.

    :param section: config file's section. i.e [default]
    :param key: config file's key under section. i.e packages_scan
    :param return_type: return value type, str | int | bool.
    """
    try:
        value = self.method_mapping[return_type](section, key)
    except NoSectionError as e:
        raise ConfigError(e.message)
    except NoOptionError as e:
        raise ConfigError(e.message)
    return value
python
{ "resource": "" }
q278965
nova
test
def nova(*arg):
    """
    Nova annotation for adding a function to process nova notifications.
    If event_type includes a wildcard, {pattern: function} is put into the
    process_wildcard dict, otherwise {event_type: function} is put into
    the process dict.

    :param arg: event_type of notification
    """
    check_event_type(Openstack.Nova, *arg)
    event_type = arg[0]

    def decorator(func):
        if event_type.find("*") != -1:
            event_type_pattern = pre_compile(event_type)
            nova_customer_process_wildcard[event_type_pattern] = func
        else:
            nova_customer_process[event_type] = func
        log.info("add function {0} to process event_type:{1}".format(
            func.__name__, event_type))

        @functools.wraps(func)
        def wrapper(*args, **kwargs):
            func(*args, **kwargs)
        return wrapper
    return decorator
python
{ "resource": "" }
q278966
cinder
test
def cinder(*arg):
    """
    Cinder annotation for adding a function to process cinder
    notifications. If event_type includes a wildcard, {pattern: function}
    is put into the process_wildcard dict, otherwise
    {event_type: function} is put into the process dict.

    :param arg: event_type of notification
    """
    check_event_type(Openstack.Cinder, *arg)
    event_type = arg[0]

    def decorator(func):
        if event_type.find("*") != -1:
            event_type_pattern = pre_compile(event_type)
            cinder_customer_process_wildcard[event_type_pattern] = func
        else:
            cinder_customer_process[event_type] = func
        log.info("add function {0} to process event_type:{1}".format(
            func.__name__, event_type))

        @functools.wraps(func)
        def wrapper(*args, **kwargs):
            func(*args, **kwargs)
        return wrapper
    return decorator
python
{ "resource": "" }
q278967
neutron
test
def neutron(*arg):
    """
    Neutron annotation for adding a function to process neutron
    notifications. If event_type includes a wildcard, {pattern: function}
    is put into the process_wildcard dict, otherwise
    {event_type: function} is put into the process dict.

    :param arg: event_type of notification
    """
    check_event_type(Openstack.Neutron, *arg)
    event_type = arg[0]

    def decorator(func):
        if event_type.find("*") != -1:
            event_type_pattern = pre_compile(event_type)
            neutron_customer_process_wildcard[event_type_pattern] = func
        else:
            neutron_customer_process[event_type] = func
        log.info("add function {0} to process event_type:{1}".format(
            func.__name__, event_type))

        @functools.wraps(func)
        def wrapper(*args, **kwargs):
            func(*args, **kwargs)
        return wrapper
    return decorator
python
{ "resource": "" }
q278968
glance
test
def glance(*arg):
    """
    Glance annotation for adding a function to process glance
    notifications. If event_type includes a wildcard, {pattern: function}
    is put into the process_wildcard dict, otherwise
    {event_type: function} is put into the process dict.

    :param arg: event_type of notification
    """
    check_event_type(Openstack.Glance, *arg)
    event_type = arg[0]

    def decorator(func):
        if event_type.find("*") != -1:
            event_type_pattern = pre_compile(event_type)
            glance_customer_process_wildcard[event_type_pattern] = func
        else:
            glance_customer_process[event_type] = func
        log.info("add function {0} to process event_type:{1}".format(
            func.__name__, event_type))

        @functools.wraps(func)
        def wrapper(*args, **kwargs):
            func(*args, **kwargs)
        return wrapper
    return decorator
python
{ "resource": "" }
q278969
swift
test
def swift(*arg):
    """
    Swift annotation for adding a function to process swift notifications.
    If event_type includes a wildcard, {pattern: function} is put into the
    process_wildcard dict, otherwise {event_type: function} is put into
    the process dict.

    :param arg: event_type of notification
    """
    check_event_type(Openstack.Swift, *arg)
    event_type = arg[0]

    def decorator(func):
        if event_type.find("*") != -1:
            event_type_pattern = pre_compile(event_type)
            swift_customer_process_wildcard[event_type_pattern] = func
        else:
            swift_customer_process[event_type] = func
        log.info("add function {0} to process event_type:{1}".format(
            func.__name__, event_type))

        @functools.wraps(func)
        def wrapper(*args, **kwargs):
            func(*args, **kwargs)
        return wrapper
    return decorator
python
{ "resource": "" }
q278970
keystone
test
def keystone(*arg):
    """
    Keystone annotation for adding a function to process keystone
    notifications. If event_type includes a wildcard, {pattern: function}
    is put into the process_wildcard dict, otherwise
    {event_type: function} is put into the process dict.

    :param arg: event_type of notification
    """
    check_event_type(Openstack.Keystone, *arg)
    event_type = arg[0]

    def decorator(func):
        if event_type.find("*") != -1:
            event_type_pattern = pre_compile(event_type)
            keystone_customer_process_wildcard[event_type_pattern] = func
        else:
            keystone_customer_process[event_type] = func
        log.info("add function {0} to process event_type:{1}".format(
            func.__name__, event_type))

        @functools.wraps(func)
        def wrapper(*args, **kwargs):
            func(*args, **kwargs)
        return wrapper
    return decorator
python
{ "resource": "" }
q278971
heat
test
def heat(*arg):
    """
    Heat annotation for adding a function to process heat notifications.
    If event_type includes a wildcard, {pattern: function} is put into the
    process_wildcard dict, otherwise {event_type: function} is put into
    the process dict.

    :param arg: event_type of notification
    """
    check_event_type(Openstack.Heat, *arg)
    event_type = arg[0]

    def decorator(func):
        if event_type.find("*") != -1:
            event_type_pattern = pre_compile(event_type)
            heat_customer_process_wildcard[event_type_pattern] = func
        else:
            heat_customer_process[event_type] = func
        log.info("add function {0} to process event_type:{1}".format(
            func.__name__, event_type))

        @functools.wraps(func)
        def wrapper(*args, **kwargs):
            func(*args, **kwargs)
        return wrapper
    return decorator
python
{ "resource": "" }
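All seven decorators above share one registry pattern. A minimal standalone sketch of it (the names here are illustrative; the real code uses per-service dicts and a pre_compile helper):

import fnmatch
import re

process = {}
process_wildcard = {}

def on_event(event_type):
    def decorator(func):
        if '*' in event_type:
            process_wildcard[re.compile(fnmatch.translate(event_type))] = func
        else:
            process[event_type] = func
        return func
    return decorator

@on_event('compute.instance.*')
def handle_instance(payload):
    print('instance event:', payload)

def dispatch(event_type, payload):
    if event_type in process:
        process[event_type](payload)
        return
    for pattern, func in process_wildcard.items():
        if pattern.match(event_type):
            func(payload)

dispatch('compute.instance.create.end', {'id': 1})  # instance event: {'id': 1}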
q278972
MultiplexingCommandLocator.addFactory
test
def addFactory(self, identifier, factory):
    """Adds a factory.

    After calling this method, remote clients will be able to connect
    to it. This will call ``factory.doStart``.
    """
    factory.doStart()
    self._factories[identifier] = factory
python
{ "resource": "" }
q278973
MultiplexingCommandLocator.removeFactory
test
def removeFactory(self, identifier):
    """Removes a factory.

    After calling this method, remote clients will no longer be able to
    connect to it. This will call the factory's ``doStop`` method.
    """
    factory = self._factories.pop(identifier)
    factory.doStop()
    return factory
python
{ "resource": "" }
q278974
MultiplexingCommandLocator.connect
test
def connect(self, factory):
    """Attempts to connect using a given factory.

    This will find the requested factory and use it to build a protocol
    as if the AMP protocol's peer was making the connection. It will
    create a transport for the protocol and connect it immediately. It
    will then store the protocol under a unique identifier, and return
    that identifier.
    """
    try:
        factory = self._factories[factory]
    except KeyError:
        raise NoSuchFactory()

    remote = self.getProtocol()
    addr = remote.transport.getPeer()
    proto = factory.buildProtocol(addr)
    if proto is None:
        raise ConnectionRefused()

    identifier = uuid4().hex
    transport = MultiplexedTransport(identifier, remote)
    proto.makeConnection(transport)

    self._protocols[identifier] = proto
    return {"connection": identifier}
python
{ "resource": "" }
q278975
MultiplexingCommandLocator.receiveData
test
def receiveData(self, connection, data):
    """
    Receives some data for the given protocol.
    """
    try:
        protocol = self._protocols[connection]
    except KeyError:
        raise NoSuchConnection()
    protocol.dataReceived(data)
    return {}
python
{ "resource": "" }
q278976
MultiplexingCommandLocator.disconnect
test
def disconnect(self, connection):
    """
    Disconnects the given protocol.
    """
    proto = self._protocols.pop(connection)
    proto.transport = None
    return {}
python
{ "resource": "" }
q278977
ProxyingProtocol._callRemote
test
def _callRemote(self, command, **kwargs):
    """Shorthand for ``callRemote``.

    This uses the factory's connection to the AMP peer.
    """
    return self.factory.remote.callRemote(command, **kwargs)
python
{ "resource": "" }
q278978
ProxyingProtocol.connectionMade
test
def connectionMade(self):
    """Create a multiplexed stream connection.

    Connect to the AMP server's multiplexed factory using the identifier
    (defined by this class' factory). When done, stores the connection
    reference and causes buffered data to be sent.
    """
    log.msg("Creating multiplexed AMP connection...")
    remoteFactoryIdentifier = self.factory.remoteFactoryIdentifier
    d = self._callRemote(Connect, factory=remoteFactoryIdentifier)
    d.addCallback(self._multiplexedConnectionMade)
python
{ "resource": "" }
q278979
ProxyingProtocol._multiplexedConnectionMade
test
def _multiplexedConnectionMade(self, response):
    """Stores a reference to the connection, registers this protocol on
    the factory as one related to a multiplexed AMP connection, and
    sends currently buffered data. Gets rid of the buffer afterwards.
    """
    self.connection = conn = response["connection"]
    self.factory.protocols[conn] = self
    log.msg("Multiplexed AMP connection ({!r}) made!".format(conn))

    data, self._buffer = self._buffer.getvalue(), None
    if data:
        log.msg("Sending {} bytes of buffered data...".format(len(data)))
        self._sendData(data)
    else:
        log.msg("No buffered data to send!")
python
{ "resource": "" }
q278980
ProxyingProtocol.dataReceived
test
def dataReceived(self, data):
    """Received some data from the local side.

    If we have set up the multiplexed connection, sends the data over
    the multiplexed connection. Otherwise, buffers.
    """
    log.msg("{} bytes of data received locally".format(len(data)))
    if self.connection is None:
        # we haven't finished connecting yet
        log.msg("Connection not made yet, buffering...")
        self._buffer.write(data)
    else:
        log.msg("Sending data...")
        self._sendData(data)
python
{ "resource": "" }
q278981
ProxyingProtocol._sendData
test
def _sendData(self, data):
    """Actually sends data over the wire.
    """
    d = self._callRemote(Transmit, connection=self.connection, data=data)
    d.addErrback(log.err)
python
{ "resource": "" }
q278982
ProxyingProtocol.connectionLost
test
def connectionLost(self, reason):
    """If we already have an AMP connection registered on the factory,
    get rid of it.
    """
    if self.connection is not None:
        del self.factory.protocols[self.connection]
python
{ "resource": "" }
q278983
ProxyingAMPLocator.getLocalProtocol
test
def getLocalProtocol(self, connectionIdentifier):
    """Attempts to get a local protocol by connection identifier.
    """
    for factory in self.localFactories:
        try:
            return factory.protocols[connectionIdentifier]
        except KeyError:
            continue

    raise NoSuchConnection()
python
{ "resource": "" }
q278984
ProxyingAMPLocator.remoteDataReceived
test
def remoteDataReceived(self, connection, data):
    """Some data was received from the remote end. Find the matching
    protocol and relay it.
    """
    proto = self.getLocalProtocol(connection)
    proto.transport.write(data)
    return {}
python
{ "resource": "" }
q278985
ProxyingAMPLocator.disconnect
test
def disconnect(self, connection):
    """The other side has asked us to disconnect.
    """
    proto = self.getLocalProtocol(connection)
    proto.transport.loseConnection()
    return {}
python
{ "resource": "" }
q278986
centered
test
def centered(mystring, linewidth=None, fill=" "):
    '''Takes a string, centres it, and pads it on both sides'''
    if linewidth is None:
        linewidth = get_terminal_size().columns - 1
    sides = (linewidth - length_no_ansi(mystring)) // 2
    extra = (linewidth - length_no_ansi(mystring)) % 2
    fill = fill[:1]
    sidestring = fill * sides
    extrastring = fill * extra
    newstring = sidestring + mystring + sidestring + extrastring
    return newstring
python
{ "resource": "" }
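The centring arithmetic of centered() for a fixed width, minus the terminal lookup and the ANSI-aware length (plain len() stands in for length_no_ansi here):

mystring, linewidth, fill = 'hello', 12, '-'
sides = (linewidth - len(mystring)) // 2
extra = (linewidth - len(mystring)) % 2
print(fill * sides + mystring + fill * sides + fill * extra)  # ---hello----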
q278987
clock_on_right
test
def clock_on_right(mystring):
    '''Takes a string, and prints it with the time right aligned'''
    taken = length_no_ansi(mystring)
    padding = (get_terminal_size().columns - 1) - taken - 5
    clock = time.strftime("%I:%M", time.localtime())
    print(mystring + " " * padding + clock)
python
{ "resource": "" }
q278988
version_number_str
test
def version_number_str(major, minor=0, patch=0, prerelease=None, build=None):
    """
    Takes the parts of a semantic version number, and returns a nicely
    formatted string.
    """
    version = str(major) + '.' + str(minor) + '.' + str(patch)
    if prerelease:
        if prerelease.startswith('-'):
            version = version + prerelease
        else:
            version = version + "-" + str(prerelease)
    if build:
        if build.startswith('+'):
            version = version + build
        else:
            version = version + "+" + str(build)
    return version
python
{ "resource": "" }
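Example calls, assuming version_number_str() above is importable:

print(version_number_str(1))                       # 1.0.0
print(version_number_str(2, 1, 3, 'rc.1'))         # 2.1.3-rc.1
print(version_number_str(2, 1, 3, 'rc.1', 'b42'))  # 2.1.3-rc.1+b42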
q278989
identify_unit_framework
test
def identify_unit_framework(target_unit):
    """
    Identify whether the user is requesting unit validation against
    astropy.units, pint, or quantities.
    """
    if HAS_ASTROPY:
        from astropy.units import UnitBase
        if isinstance(target_unit, UnitBase):
            return ASTROPY

    if HAS_PINT:
        from pint.unit import UnitsContainer
        if hasattr(target_unit, 'dimensionality') and \
                isinstance(target_unit.dimensionality, UnitsContainer):
            return PINT

    if HAS_QUANTITIES:
        from quantities.unitquantity import IrreducibleUnit
        from quantities import Quantity
        if isinstance(target_unit, IrreducibleUnit) or \
                isinstance(target_unit, Quantity):
            return QUANTITIES

    raise TraitError("Could not identify unit framework for target unit "
                     "of type {0}".format(type(target_unit).__name__))
python
{ "resource": "" }
q278990
assert_unit_convertability
test
def assert_unit_convertability(name, value, target_unit, unit_framework):
    """
    Check that a value has physical type consistent with user-specified
    units

    Note that this does not convert the value, only check that the units
    have the right physical dimensionality.

    Parameters
    ----------
    name : str
        The name of the value to check (used for error messages).
    value : `numpy.ndarray` or instance of `numpy.ndarray` subclass
        The value to check.
    target_unit : unit
        The unit that the value should be convertible to.
    unit_framework : str
        The unit framework to use
    """
    if unit_framework == ASTROPY:
        from astropy.units import Quantity
        if not isinstance(value, Quantity):
            raise TraitError("{0} should be given as an Astropy Quantity "
                             "instance".format(name))
        if not target_unit.is_equivalent(value.unit):
            raise TraitError("{0} should be in units convertible to "
                             "{1}".format(name, target_unit))
    elif unit_framework == PINT:
        from pint.unit import UnitsContainer
        if not (hasattr(value, 'dimensionality') and
                isinstance(value.dimensionality, UnitsContainer)):
            raise TraitError("{0} should be given as a Pint Quantity "
                             "instance".format(name))
        if value.dimensionality != target_unit.dimensionality:
            raise TraitError("{0} should be in units convertible to "
                             "{1}".format(name, target_unit))
    elif unit_framework == QUANTITIES:
        from quantities import Quantity
        if not isinstance(value, Quantity):
            raise TraitError("{0} should be given as a quantities Quantity "
                             "instance".format(name))
        if value.dimensionality.simplified != \
                target_unit.dimensionality.simplified:
            raise TraitError("{0} should be in units convertible to "
                             "{1}".format(
                                 name, target_unit.dimensionality.string))
python
{ "resource": "" }
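A quick check of the ASTROPY branch above, assuming astropy is installed: dimensional equivalence is tested without converting the value.

from astropy import units as u

value = 3 * u.km
print(u.m.is_equivalent(value.unit))  # True: lengths are convertible
print(u.s.is_equivalent(value.unit))  # False: time is not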
q278991
pad
test
def pad(data_to_pad, block_size, style='pkcs7'):
    """Apply standard padding.

    :Parameters:
      data_to_pad : byte string
        The data that needs to be padded.
      block_size : integer
        The block boundary to use for padding. The output length is
        guaranteed to be a multiple of ``block_size``.
      style : string
        Padding algorithm. It can be *'pkcs7'* (default), *'iso7816'*
        or *'x923'*.

    :Return:
      The original data with the appropriate padding added at the end.
    """
    padding_len = block_size - len(data_to_pad) % block_size
    if style == 'pkcs7':
        padding = bchr(padding_len) * padding_len
    elif style == 'x923':
        padding = bchr(0) * (padding_len - 1) + bchr(padding_len)
    elif style == 'iso7816':
        padding = bchr(128) + bchr(0) * (padding_len - 1)
    else:
        raise ValueError("Unknown padding style")
    return data_to_pad + padding
python
{ "resource": "" }
q278992
unpad
test
def unpad(padded_data, block_size, style='pkcs7'):
    """Remove standard padding.

    :Parameters:
      padded_data : byte string
        A piece of data with padding that needs to be stripped.
      block_size : integer
        The block boundary to use for padding. The input length must be
        a multiple of ``block_size``.
      style : string
        Padding algorithm. It can be *'pkcs7'* (default), *'iso7816'*
        or *'x923'*.

    :Return:
        Data without padding.
    :Raises ValueError:
        if the padding is incorrect.
    """
    pdata_len = len(padded_data)
    if pdata_len % block_size:
        raise ValueError("Input data is not padded")
    if style in ('pkcs7', 'x923'):
        padding_len = bord(padded_data[-1])
        if padding_len < 1 or padding_len > min(block_size, pdata_len):
            raise ValueError("Padding is incorrect.")
        if style == 'pkcs7':
            if padded_data[-padding_len:] != bchr(padding_len) * padding_len:
                raise ValueError("PKCS#7 padding is incorrect.")
        else:
            if padded_data[-padding_len:-1] != bchr(0) * (padding_len - 1):
                raise ValueError("ANSI X.923 padding is incorrect.")
    elif style == 'iso7816':
        padding_len = pdata_len - padded_data.rfind(bchr(128))
        if padding_len < 1 or padding_len > min(block_size, pdata_len):
            raise ValueError("Padding is incorrect.")
        if padding_len > 1 and \
                padded_data[1 - padding_len:] != bchr(0) * (padding_len - 1):
            raise ValueError("ISO 7816-4 padding is incorrect.")
    else:
        raise ValueError("Unknown padding style")
    return padded_data[:-padding_len]
python
{ "resource": "" }
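A PKCS#7 round trip using plain Python 3 bytes, with bytes([n]) standing in for the bchr/bord compatibility helpers used above:

block_size = 16
data = b'attack at dawn'
padding_len = block_size - len(data) % block_size
padded = data + bytes([padding_len]) * padding_len
assert len(padded) % block_size == 0
assert padded[-padded[-1]:] == bytes([padding_len]) * padding_len
print(padded[:-padded[-1]])  # b'attack at dawn'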
q278993
FederationEntity.self_sign
test
def self_sign(self, req, receiver='', aud=None):
    """
    Sign the extended request.

    :param req: Request, a :py:class:`fedoidcmsg.MetadataStatement`
        instance
    :param receiver: The intended user of this metadata statement
    :param aud: The audience, a list of receivers.
    :return: An augmented set of request arguments
    """
    if self.entity_id:
        _iss = self.entity_id
    else:
        _iss = self.iss

    creq = req.copy()

    if not 'metadata_statement_uris' in creq and not \
            'metadata_statements' in creq:
        _copy = creq.copy()
        _jws = self.self_signer.sign(_copy, receiver=receiver, iss=_iss,
                                     aud=aud)
        sms_spec = {'metadata_statements': {self.iss: _jws}}
    else:
        for ref in ['metadata_statement_uris', 'metadata_statements']:
            try:
                del creq[ref]
            except KeyError:
                pass

        sms_spec = {'metadata_statements': Message()}
        for ref in ['metadata_statement_uris', 'metadata_statements']:
            if ref not in req:
                continue
            for foid, value in req[ref].items():
                _copy = creq.copy()
                _copy[ref] = Message()
                _copy[ref][foid] = value
                _jws = self.self_signer.sign(_copy, receiver=receiver,
                                             iss=_iss, aud=aud)
                sms_spec['metadata_statements'][foid] = _jws

    creq.update(sms_spec)
    return creq
python
{ "resource": "" }
q278994
FederationEntityOOB.gather_metadata_statements
test
def gather_metadata_statements(self, fos=None, context=''):
    """
    Only gathers metadata statements and returns them.

    :param fos: Signed metadata statements from these Federation
        Operators should be added.
    :param context: context of the metadata exchange
    :return: Dictionary with signed Metadata Statements as values
    """
    if not context:
        context = self.context

    _res = {}
    if self.metadata_statements:
        try:
            cms = self.metadata_statements[context]
        except KeyError:
            if self.metadata_statements == {
                    'register': {}, 'discovery': {}, 'response': {}}:
                # No superior so an FO then. Nothing to add ..
                pass
            else:
                logger.error(
                    'No metadata statements for this context: {}'.format(
                        context))
                raise ValueError('Wrong context "{}"'.format(context))
        else:
            if cms != {}:
                if fos is None:
                    fos = list(cms.keys())

                for f in fos:
                    try:
                        val = cms[f]
                    except KeyError:
                        continue

                    if val.startswith('http'):
                        value_type = 'metadata_statement_uris'
                    else:
                        value_type = 'metadata_statements'

                    try:
                        _res[value_type][f] = val
                    except KeyError:
                        _res[value_type] = Message()
                        _res[value_type][f] = val

    return _res
python
{ "resource": "" }
q278995
pretty_print
test
def pretty_print(input_word, anagrams, by_length=False):
    """Prints the anagram results sorted by score to stdout.

    Args:
        input_word: the base word we searched on
        anagrams: generator of (word, score) from anagrams_in_word
        by_length: a boolean to declare printing by length instead of score
    """
    scores = {}
    if by_length:
        noun = "tiles"
        for word, score in anagrams:
            try:
                scores[len(word)].append("{0} ({1:d})".format(word, score))
            except KeyError:
                scores[len(word)] = ["{0} ({1:d})".format(word, score)]
    else:
        noun = "points"
        for word, score in anagrams:
            try:
                scores[score].append(word)
            except KeyError:
                scores[score] = [word]

    print("Anagrams for {0}{1}:".format(input_word, " (score)" * by_length))
    if not valid_scrabble_word(input_word):
        print("{0} is not possible in Scrabble.".format(input_word))
    for key, value in sorted(scores.items(), reverse=True):
        print("{0:d} {1}: {2}".format(key, noun, ", ".join(value)))
python
{ "resource": "" }
q278996
argument_parser
test
def argument_parser(args):
    """Argparse logic, command line options.

    Args:
        args: sys.argv[1:], everything passed to the program after its name

    Returns:
        A tuple of:
            a list of words/letters to search
            a boolean to declare if we want to use the sowpods words file
            a boolean to declare if we want to output anagrams by length
            a string of starting characters to find anagrams based on
            a string of ending characters to find anagrams based on

    Raises:
        SystemExit if the user passes invalid arguments, --version or --help
    """
    parser = argparse.ArgumentParser(
        prog="nagaram",
        description="Finds Scrabble anagrams.",
        formatter_class=argparse.RawDescriptionHelpFormatter,
        add_help=False,
    )
    parser.add_argument(
        "-h", "--help",
        dest="help",
        action="store_true",
        default=False,
    )
    parser.add_argument(
        "--sowpods",
        dest="sowpods",
        action="store_true",
        default=False,
    )
    parser.add_argument(
        "--length", "-l",
        dest="length",
        action="store_true",
        default=False,
    )
    parser.add_argument(
        "--starts-with", "-s",
        dest="starts_with",
        metavar="chars",
        default="",
        nargs=1,
        type=str,
    )
    parser.add_argument(
        "--ends-with", "-e",
        dest="ends_with",
        metavar="chars",
        default="",
        nargs=1,
        type=str,
    )
    parser.add_argument(
        "--version", "-v",
        action="version",
        version="Nagaram {0} (Released: {1})".format(
            nagaram.__version__,
            nagaram.__release_date__,
        )
    )
    parser.add_argument(
        dest="wordlist",
        metavar="letters to find anagrams with (? for anything, _ for blanks)",
        nargs=argparse.REMAINDER,
    )

    settings = parser.parse_args(args)

    if settings.help:
        raise SystemExit(nagaram.__doc__.strip())

    if not settings.wordlist:
        raise SystemExit(parser.print_usage())

    if settings.starts_with:
        settings.starts_with = settings.starts_with[0]
    if settings.ends_with:
        settings.ends_with = settings.ends_with[0]

    return (settings.wordlist, settings.sowpods, settings.length,
            settings.starts_with, settings.ends_with)
python
{ "resource": "" }
q278997
main
test
def main(arguments=None):
    """Main command line entry point."""
    if not arguments:
        arguments = sys.argv[1:]

    wordlist, sowpods, by_length, start, end = argument_parser(arguments)

    for word in wordlist:
        pretty_print(
            word,
            anagrams_in_word(word, sowpods, start, end),
            by_length,
        )
python
{ "resource": "" }
q278998
PacketProtocol.dataReceived
test
def dataReceived(self, data):
    """
    Do not overwrite this method. Instead implement `on_...` methods for
    the registered typenames to handle incoming packets.
    """
    self._unprocessed_data.enqueue(data)
    while True:
        if len(self._unprocessed_data) < self._header.size:
            return  # not yet enough data
        hdr_data = self._unprocessed_data.peek(self._header.size)
        packet_length, typekey = self._header.unpack(hdr_data)
        total_length = self._header.size + packet_length
        if len(self._unprocessed_data) < total_length:
            return  # not yet enough data
        self._unprocessed_data.drop(self._header.size)
        packet = self._unprocessed_data.dequeue(packet_length)
        self._start_receive = None
        typename = self._type_register.get(typekey, None)
        if typename is None:
            self.on_unregistered_type(typekey, packet)
        else:
            self.packet_received(typename, packet)
python
{ "resource": "" }
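The framing dataReceived() implies, sketched with a hypothetical header layout; the real layout lives in self._header, so the struct format below is an assumption, not the library's wire format:

import struct

header = struct.Struct('!IH')  # 4-byte payload length, 2-byte type key

def frame(typekey, payload):
    return header.pack(len(payload), typekey) + payload

buf = frame(7, b'ping') + frame(7, b'pong')
offset = 0
while offset + header.size <= len(buf):
    length, typekey = header.unpack_from(buf, offset)
    start = offset + header.size
    print(typekey, buf[start:start + length])  # 7 b'ping' then 7 b'pong'
    offset = start + length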
q278999
PacketProtocol.on_unregistered_type
test
def on_unregistered_type(self, typekey, packet):
    """
    Invoked if a packet with an unregistered type was received.
    Default behaviour is to log and close the connection.
    """
    log.msg("Missing handler for typekey %s in %s. Closing connection."
            % (typekey, type(self).__name__))
    self.transport.loseConnection()
python
{ "resource": "" }