_id: stringlengths 2 to 7
title: stringlengths 1 to 88
partition: stringclasses (3 values)
text: stringlengths 75 to 19.8k
language: stringclasses (1 value)
meta_information: dict
q278000
unparse_headers
test
def unparse_headers(hdrs):
    """Unparse (serialize) a dictionary of headers to a string.

    Args:
        hdrs: A dictionary of headers.

    Returns:
        The headers as a string that can be used in an NNTP POST.
    """
    return "".join([unparse_header(n, v) for n, v in hdrs.items()]) + "\r\n"
python
{ "resource": "" }
q278001
WebHookHandler.do_POST
test
def do_POST(self):
    """Handles the POST request sent by Boundary Url Action"""
    self.send_response(urllib2.httplib.OK)
    self.end_headers()

    content_length = int(self.headers['Content-Length'])
    body = self.rfile.read(content_length)

    print("Client: {0}".format(str(self.client_address)))
    print("headers: {0}".format(self.headers))
    print("path: {0}".format(self.path))
    print("body: {0}".format(body))
python
{ "resource": "" }
q278002
run
test
def run(tests=(), reporter=None, stop_after=None): """ Run the tests that are loaded by each of the strings provided. Arguments: tests (iterable): the collection of tests (specified as `str` s) to run reporter (Reporter): a `Reporter` to use for the run. If unprovided, the default is to return a `virtue.reporters.Counter` (which produces no output). stop_after (int): a number of non-successful tests to allow before stopping the run. """ if reporter is None: reporter = Counter() if stop_after is not None: reporter = _StopAfterWrapper(reporter=reporter, limit=stop_after) locator = ObjectLocator() cases = ( case for test in tests for loader in locator.locate_by_name(name=test) for case in loader.load() ) suite = unittest.TestSuite(cases) getattr(reporter, "startTestRun", lambda: None)() suite.run(reporter) getattr(reporter, "stopTestRun", lambda: None)() return reporter
python
{ "resource": "" }
q278003
defaults_docstring
test
def defaults_docstring(defaults, header=None, indent=None, footer=None): """Return a docstring from a list of defaults. """ if indent is None: indent = '' if header is None: header = '' if footer is None: footer = '' width = 60 #hbar = indent + width * '=' + '\n' # horizontal bar hbar = '\n' s = hbar + (header) + hbar for key, value, desc in defaults: if isinstance(value, basestring): value = "'" + value + "'" if hasattr(value, '__call__'): value = "<" + value.__name__ + ">" s += indent +'%-12s\n' % ("%s :" % key) s += indent + indent + (indent + 23 * ' ').join(desc.split('\n')) s += ' [%s]\n\n' % str(value) s += hbar s += footer return s
python
{ "resource": "" }
q278004
defaults_decorator
test
def defaults_decorator(defaults):
    """Decorator to append default kwargs to a function.
    """
    def decorator(func):
        """Function that appends default kwargs to a function.
        """
        kwargs = dict(header='Keyword arguments\n-----------------\n',
                      indent=' ',
                      footer='\n')
        doc = defaults_docstring(defaults, **kwargs)
        if func.__doc__ is None:
            func.__doc__ = ''
        func.__doc__ += doc
        return func

    return decorator
python
{ "resource": "" }
q278005
Property.defaults_docstring
test
def defaults_docstring(cls, header=None, indent=None, footer=None):
    """Add the default values to the class docstring"""
    return defaults_docstring(cls.defaults, header=header,
                              indent=indent, footer=footer)
python
{ "resource": "" }
q278006
Property.set_value
test
def set_value(self, value):
    """Set the value

    This invokes hooks for type-checking and bounds-checking that
    may be implemented by sub-classes.
    """
    self.check_bounds(value)
    self.check_type(value)
    self.__value__ = value
python
{ "resource": "" }
q278007
Property.check_type
test
def check_type(self, value):
    """Hook for type-checking, invoked during assignment.

    Raises TypeError if neither value nor self.dtype are None and they
    do not match.  Will not raise an exception if either value or
    self.dtype is None.
    """
    if self.__dict__['dtype'] is None:
        return
    elif value is None:
        return
    elif isinstance(value, self.__dict__['dtype']):
        return
    msg = "Value of type %s, when %s was expected." % (
        type(value), self.__dict__['dtype'])
    raise TypeError(msg)
python
{ "resource": "" }
q278008
Derived.value
test
def value(self):
    """Return the current value.

    This first checks if the value is cached (i.e., if `self.__value__`
    is not None).  If it is not cached then it invokes the `loader`
    function to compute the value, and caches the computed value.
    """
    if self.__value__ is None:
        try:
            loader = self.__dict__['loader']
        except KeyError:
            raise AttributeError("Loader is not defined")
        # Try to run the loader.
        # Don't catch exceptions here, let the Model class figure it out
        val = loader()
        # Try to set the value
        try:
            self.set_value(val)
        except TypeError:
            msg = "Loader must return variable of type %s or None, got %s" % (
                self.__dict__['dtype'], type(val))
            raise TypeError(msg)
    return self.__value__
python
{ "resource": "" }
q278009
Parameter.check_type
test
def check_type(self, value):
    """Hook for type-checking, invoked during assignment.

    Allows size 1 numpy arrays and lists, but raises TypeError
    if value can not be cast to a scalar.
    """
    try:
        scalar = asscalar(value)
    except ValueError as e:
        raise TypeError(e)
    super(Parameter, self).check_type(scalar)
python
{ "resource": "" }
q278010
Parameter.symmetric_error
test
def symmetric_error(self):
    """Return the symmetric error

    Similar to above, but zero implies no error estimate, and otherwise
    this will either be the symmetric error, or the average of the
    low, high asymmetric errors.
    """
    # ADW: Should this be `np.nan`?
    if self.__errors__ is None:
        return 0.
    if np.isscalar(self.__errors__):
        return self.__errors__
    return 0.5 * (self.__errors__[0] + self.__errors__[1])
python
{ "resource": "" }
q278011
Parameter.set_errors
test
def set_errors(self, errors):
    """Set parameter error estimate"""
    if errors is None:
        self.__errors__ = None
        return
    self.__errors__ = [asscalar(e) for e in errors]
python
{ "resource": "" }
q278012
Parameter.set
test
def set(self, **kwargs):
    """Set the value, bounds, free, errors based on corresponding kwargs

    This invokes hooks for type-checking and bounds-checking that
    may be implemented by sub-classes.
    """
    # Probably want to reset bounds if set fails
    if 'bounds' in kwargs:
        self.set_bounds(kwargs.pop('bounds'))
    if 'free' in kwargs:
        self.set_free(kwargs.pop('free'))
    if 'errors' in kwargs:
        self.set_errors(kwargs.pop('errors'))
    if 'value' in kwargs:
        self.set_value(kwargs.pop('value'))
python
{ "resource": "" }
q278013
MetricCreateBulk.import_metrics
test
def import_metrics(self): """ 1) Get command line arguments 2) Read the JSON file 3) Parse into a dictionary 4) Create or update definitions using API call """ self.v2Metrics = self.metricDefinitionV2(self.metrics) if self.v2Metrics: metrics = self.metrics else: metrics = self.metrics['result'] # Loop through the metrics and call the API # to create/update for m in metrics: if self.v2Metrics: metric = metrics[m] metric['name'] = m else: metric = m self.create_update(metric)
python
{ "resource": "" }
q278014
MetricExport.extract_dictionary
test
def extract_dictionary(self, metrics):
    """Extract required fields from an array"""
    new_metrics = {}
    for m in metrics:
        metric = self.extract_fields(m)
        new_metrics[m['name']] = metric
    return new_metrics
python
{ "resource": "" }
q278015
MetricExport.filter
test
def filter(self):
    """Apply the criteria to filter out on the metrics required"""
    if self.filter_expression is not None:
        new_metrics = []
        metrics = self.metrics['result']
        for m in metrics:
            if self.filter_expression.search(m['name']):
                new_metrics.append(m)
    else:
        new_metrics = self.metrics['result']
    self.metrics = self.extract_dictionary(new_metrics)
python
{ "resource": "" }
q278016
MeterClient._call_api
test
def _call_api(self): """ Make a call to the meter via JSON RPC """ # Allocate a socket and connect to the meter sockobj = socket(AF_INET, SOCK_STREAM) sockobj.connect((self.rpc_host, self.rpc_port)) self.get_json() message = [self.rpc_message.encode('utf-8')] for line in message: sockobj.send(line) data = sockobj.recv(self.MAX_LINE) print(data) self.rpc_data.append(data) sockobj.close()
python
{ "resource": "" }
q278017
Parser.expression_terminal
test
def expression_terminal(self, text):
    """expression_terminal = identifier | terminal | option_group |
                             repetition_group | grouping_group | special_handling ;
    """
    self._attempting(text)
    return alternation([
        self.identifier,
        self.terminal,
        self.option_group,
        self.repetition_group,
        self.grouping_group,
        self.special_handling
    ])(text)
python
{ "resource": "" }
q278018
Parser.operator
test
def operator(self, text):
    """operator = "|" | "." | "," | "-";"""
    self._attempting(text)
    return alternation([
        "|",
        ".",
        ",",
        "-"
    ])(text).retyped(TokenType.operator)
python
{ "resource": "" }
q278019
Parser.op_add
test
def op_add(self, text):
    """op_add = "+" ;"""
    self._attempting(text)
    return terminal("+")(text).retyped(TokenType.op_add)
python
{ "resource": "" }
q278020
Model._init_properties
test
def _init_properties(self): """ Loop through the list of Properties, extract the derived and required properties and do the appropriate book-keeping """ self._missing = {} for k, p in self.params.items(): if p.required: self._missing[k] = p if isinstance(p, Derived): if p.loader is None: # Default to using _<param_name> p.loader = self.__getattribute__("_%s" % k) elif isinstance(p.loader, str): p.loader = self.__getattribute__(p.loader)
python
{ "resource": "" }
q278021
Model.get_params
test
def get_params(self, pnames=None): """ Return a list of Parameter objects Parameters ---------- pname : list or None If a list get the Parameter objects with those names If none, get all the Parameter objects Returns ------- params : list list of Parameters """ l = [] if pnames is None: pnames = self.params.keys() for pname in pnames: p = self.params[pname] if isinstance(p, Parameter): l.append(p) return l
python
{ "resource": "" }
q278022
Model.param_values
test
def param_values(self, pnames=None): """ Return an array with the parameter values Parameters ---------- pname : list or None If a list, get the values of the `Parameter` objects with those names If none, get all values of all the `Parameter` objects Returns ------- values : `np.array` Parameter values """ l = self.get_params(pnames) v = [p.value for p in l] return np.array(v)
python
{ "resource": "" }
q278023
Model.param_errors
test
def param_errors(self, pnames=None): """ Return an array with the parameter errors Parameters ---------- pname : list of string or none If a list of strings, get the Parameter objects with those names If none, get all the Parameter objects Returns ------- ~numpy.array of parameter errors Note that this is a N x 2 array. """ l = self.get_params(pnames) v = [p.errors for p in l] return np.array(v)
python
{ "resource": "" }
q278024
Model.clear_derived
test
def clear_derived(self):
    """Reset the value of all Derived properties to None

    This is called by setp (and by extension __setattr__)
    """
    for p in self.params.values():
        if isinstance(p, Derived):
            p.clear_value()
python
{ "resource": "" }
q278025
ApiCall.method
test
def method(self, value):
    """Before assigning the value, validate that it is one of the
    HTTP methods we implement.
    """
    keys = self._methods.keys()
    if value not in keys:
        raise AttributeError("Method value not in " + str(keys))
    else:
        self._method = value
python
{ "resource": "" }
q278026
ApiCall._get_environment
test
def _get_environment(self):
    """Gets the configuration stored in environment variables"""
    if 'TSP_EMAIL' in os.environ:
        self._email = os.environ['TSP_EMAIL']
    if 'TSP_API_TOKEN' in os.environ:
        self._api_token = os.environ['TSP_API_TOKEN']
    if 'TSP_API_HOST' in os.environ:
        self._api_host = os.environ['TSP_API_HOST']
    else:
        self._api_host = 'api.truesight.bmc.com'
python
{ "resource": "" }
q278027
ApiCall._get_url_parameters
test
def _get_url_parameters(self):
    """Encode URL parameters"""
    url_parameters = ''
    if self._url_parameters is not None:
        url_parameters = '?' + urllib.urlencode(self._url_parameters)
    return url_parameters
python
{ "resource": "" }
q278028
ApiCall._do_get
test
def _do_get(self):
    """HTTP Get Request"""
    return requests.get(self._url, data=self._data, headers=self._headers,
                        auth=(self._email, self._api_token))
python
{ "resource": "" }
q278029
ApiCall._do_delete
test
def _do_delete(self):
    """HTTP Delete Request"""
    return requests.delete(self._url, data=self._data, headers=self._headers,
                           auth=(self._email, self._api_token))
python
{ "resource": "" }
q278030
ApiCall._do_post
test
def _do_post(self):
    """HTTP Post Request"""
    return requests.post(self._url, data=self._data, headers=self._headers,
                         auth=(self._email, self._api_token))
python
{ "resource": "" }
q278031
ApiCall._do_put
test
def _do_put(self):
    """HTTP Put Request"""
    return requests.put(self._url, data=self._data, headers=self._headers,
                        auth=(self._email, self._api_token))
python
{ "resource": "" }
q278032
ApiCall._call_api
test
def _call_api(self): """ Make an API call to get the metric definition """ self._url = self.form_url() if self._headers is not None: logging.debug(self._headers) if self._data is not None: logging.debug(self._data) if len(self._get_url_parameters()) > 0: logging.debug(self._get_url_parameters()) result = self._methods[self._method]() if not self.good_response(result.status_code): logging.error(self._url) logging.error(self._method) if self._data is not None: logging.error(self._data) logging.error(result) self._api_result = result
python
{ "resource": "" }
q278033
USGSDownload.validate_sceneInfo
test
def validate_sceneInfo(self):
    """Check scene name and whether remote file exists.

    Raises WrongSceneNameError if the scene name is wrong.
    """
    if self.sceneInfo.prefix not in self.__satellitesMap:
        raise WrongSceneNameError('USGS Downloader: Prefix of %s (%s) is invalid'
                                  % (self.sceneInfo.name, self.sceneInfo.prefix))
python
{ "resource": "" }
q278034
USGSDownload.verify_type_product
test
def verify_type_product(self, satellite): """Gets satellite id """ if satellite == 'L5': id_satellite = '3119' stations = ['GLC', 'ASA', 'KIR', 'MOR', 'KHC', 'PAC', 'KIS', 'CHM', 'LGS', 'MGR', 'COA', 'MPS'] elif satellite == 'L7': id_satellite = '3373' stations = ['EDC', 'SGS', 'AGS', 'ASN', 'SG1'] elif satellite == 'L8': id_satellite = '4923' stations = ['LGN'] else: raise ProductInvalidError('Type product invalid. the permitted types are: L5, L7, L8. ') typ_product = dict(id_satelite=id_satellite, stations=stations) return typ_product
python
{ "resource": "" }
q278035
USGSDownload.get_remote_file_size
test
def get_remote_file_size(self, url):
    """Gets the filesize of a remote file"""
    try:
        req = urllib.request.urlopen(url)
        return int(req.getheader('Content-Length').strip())
    except urllib.error.HTTPError as error:
        logger.error('Error retrieving size of the remote file %s' % error)
        print('Error retrieving size of the remote file %s' % error)
        self.connect_earthexplorer()
        self.get_remote_file_size(url)
python
{ "resource": "" }
q278036
USGSDownload.download
test
def download(self, bands=None, download_dir=None, metadata=False): """Download remote .tar.bz file.""" if not download_dir: download_dir = DOWNLOAD_DIR if bands is None: bands = list(range(1, 12)) + ['BQA'] else: self.validate_bands(bands) pattern = re.compile('^[^\s]+_(.+)\.tiff?', re.I) band_list = ['B%i' % (i,) if isinstance(i, int) else i for i in bands] image_list = [] # Connect Earth explore self.connect_earthexplorer() # tgz name tgzname = self.sceneInfo.name + '.tgz' dest_dir = check_create_folder(join(download_dir, self.sceneInfo.name)) # Download File downloaded = self.download_file(self.url, dest_dir, tgzname) # Log logger.debug('Status downloaded %s' % downloaded) print('\n Status downloaded %s' % downloaded) if downloaded['sucess']: # Log print('\n Downloaded sucess') logger.debug('Downloaded sucess of scene: %s' % self.sceneInfo.name) try: tar = tarfile.open(downloaded['file_path'], 'r') folder_path = join(download_dir, self.sceneInfo.name) tar.extractall(folder_path) remove(downloaded['file_path']) images_path = listdir(folder_path) for image_path in images_path: matched = pattern.match(image_path) file_path = join(folder_path, image_path) if matched and matched.group(1) in band_list: image_list.append([file_path, getsize(file_path)]) elif matched: remove(file_path) except tarfile.ReadError as error: print('\nError when extracting files. %s' % error) logger.error('Error when extracting files. %s' % error) return image_list else: logger.debug('Info downloaded: %s' % downloaded) print('\n Info downloaded: %s' % downloaded) return downloaded
python
{ "resource": "" }
q278037
USGSDownload.validate_bands
test
def validate_bands(bands):
    """Validate bands parameter."""
    if not isinstance(bands, list):
        raise TypeError('Parameter bands must be a "list"')
    valid_bands = list(range(1, 12)) + ['BQA']
    for band in bands:
        if band not in valid_bands:
            raise InvalidBandError('%s is not a valid band' % band)
python
{ "resource": "" }
q278038
USGSDownload.connect_earthexplorer
test
def connect_earthexplorer(self):
    """Connection to Earth explorer without proxy"""
    logger.info("Establishing connection to Earthexplorer")
    print("\n Establishing connection to Earthexplorer")
    try:
        opener = urllib.request.build_opener(urllib.request.HTTPCookieProcessor())
        urllib.request.install_opener(opener)
        params = urllib.parse.urlencode(dict(username=self.user, password=self.password))
        params = params.encode('utf-8')
        f = opener.open("https://ers.cr.usgs.gov/login", params)
        data = f.read().decode('utf-8')
        f.close()
        if data.find('You must sign in as a registered user to download data or '
                     'place orders for USGS EROS products') > 0:
            print("\n Authentication failed")
            logger.error("Authentication failed")
            raise AutenticationUSGSFailed('Authentication USGS failed')
        print('User %s connected with USGS' % self.user)
        logger.debug('User %s connected with USGS' % self.user)
        return
    except Exception as e:
        print('\nError when trying to connect USGS: %s' % e)
        logger.error('Error when trying to connect USGS: %s' % e)
        raise
python
{ "resource": "" }
q278039
prefixed_by
test
def prefixed_by(prefix):
    """Make a callable returning True for names starting with the given prefix.

    The returned callable takes two arguments, the attribute or name of the
    object, and possibly its corresponding value (which is ignored), as
    suitable for use with :meth:`ObjectLocator.is_test_module` and
    :meth:`ObjectLocator.is_test_method`\ .
    """
    def prefixed_by_(name, value=None):
        return name.startswith(prefix)
    prefixed_by_.__name__ += prefix
    return prefixed_by_
python
{ "resource": "" }
q278040
timezone
test
def timezone(zone): r''' Return a datetime.tzinfo implementation for the given timezone >>> from datetime import datetime, timedelta >>> utc = timezone('UTC') >>> eastern = timezone('US/Eastern') >>> eastern.zone 'US/Eastern' >>> timezone(unicode('US/Eastern')) is eastern True >>> utc_dt = datetime(2002, 10, 27, 6, 0, 0, tzinfo=utc) >>> loc_dt = utc_dt.astimezone(eastern) >>> fmt = '%Y-%m-%d %H:%M:%S %Z (%z)' >>> loc_dt.strftime(fmt) '2002-10-27 01:00:00 EST (-0500)' >>> (loc_dt - timedelta(minutes=10)).strftime(fmt) '2002-10-27 00:50:00 EST (-0500)' >>> eastern.normalize(loc_dt - timedelta(minutes=10)).strftime(fmt) '2002-10-27 01:50:00 EDT (-0400)' >>> (loc_dt + timedelta(minutes=10)).strftime(fmt) '2002-10-27 01:10:00 EST (-0500)' Raises UnknownTimeZoneError if passed an unknown zone. >>> try: ... timezone('Asia/Shangri-La') ... except UnknownTimeZoneError: ... print('Unknown') Unknown >>> try: ... timezone(unicode('\N{TRADE MARK SIGN}')) ... except UnknownTimeZoneError: ... print('Unknown') Unknown ''' if zone.upper() == 'UTC': return utc try: zone = ascii(zone) except UnicodeEncodeError: # All valid timezones are ASCII raise UnknownTimeZoneError(zone) zone = _unmunge_zone(zone) if zone not in _tzinfo_cache: if zone in all_timezones_set: # fp = open_resource(zone) # try: _tzinfo_cache[zone] = build_tzinfo(zone)#, fp) # finally: # fp.close() else: raise UnknownTimeZoneError(zone) return _tzinfo_cache[zone]
python
{ "resource": "" }
q278041
_FixedOffset.normalize
test
def normalize(self, dt, is_dst=False):
    '''Correct the timezone information on the given datetime'''
    if dt.tzinfo is None:
        raise ValueError('Naive time - no tzinfo set')
    return dt.replace(tzinfo=self)
python
{ "resource": "" }
q278042
esc_join
test
def esc_join(iterable, delimiter=" ", escape="\\"):
    """Join an iterable by a delimiter, replacing instances of delimiter
    in items with escape + delimiter.
    """
    rep = escape + delimiter
    return delimiter.join(i.replace(delimiter, rep) for i in iterable)
python
{ "resource": "" }
q278043
get_newline_positions
test
def get_newline_positions(text):
    """Returns a list of the positions in the text where all new lines occur.

    This is used by get_line_and_char to efficiently find coordinates
    represented by offset positions.
    """
    pos = []
    for i, c in enumerate(text):
        if c == "\n":
            pos.append(i)
    return pos
python
{ "resource": "" }
q278044
point_to_source
test
def point_to_source(source, position, fmt=(2, True, "~~~~~", "^")): """Point to a position in source code. source is the text we're pointing in. position is a 2-tuple of (line_number, character_number) to point to. fmt is a 4-tuple of formatting parameters, they are: name default description ---- ------- ----------- surrounding_lines 2 the number of lines above and below the target line to print show_line_numbers True if true line numbers will be generated for the output_lines tail_body "~~~~~" the body of the tail pointer_char "^" the character that will point to the position """ surrounding_lines, show_line_numbers, tail_body, pointer_char = fmt line_no, char_no = position lines = source.split("\n") line = lines[line_no] if char_no >= len(tail_body): tail = " " * (char_no - len(tail_body)) + tail_body + pointer_char else: tail = " " * char_no + pointer_char + tail_body if show_line_numbers: line_no_width = int(math.ceil(math.log10(max(1, line_no + surrounding_lines))) + 1) line_fmt = "{0:" + str(line_no_width) + "}: {1}" else: line_fmt = "{1}" pivot = line_no + 1 output_lines = [(pivot, line), ("", tail)] for i in range(surrounding_lines): upper_ofst = i + 1 upper_idx = line_no + upper_ofst lower_ofst = -upper_ofst lower_idx = line_no + lower_ofst if lower_idx >= 0: output_lines.insert(0, (pivot + lower_ofst, lines[lower_idx])) if upper_idx < len(lines): output_lines.append((pivot + upper_ofst, lines[upper_idx])) return "\n".join(line_fmt.format(n, c) for n, c in output_lines)
python
{ "resource": "" }
q278045
RelayGetOutput._dump_text
test
def _dump_text(self):
    """Send output in textual format"""
    results = self._relay_output['result']
    for l in results:
        dt = time.strftime("%Y-%m-%dT%H:%M:%SZ", time.gmtime(int(l[1]['ts'])))
        print("{0} {1} {2} {3}".format(l[0], dt, l[1]['type'], l[1]['msg']))
python
{ "resource": "" }
q278046
RelayList._filter
test
def _filter(self):
    """Apply the criteria to filter out on the output required"""
    if self._metrics or self._control or self._plugins:
        relays = self._relays['result']['relays']
        for relay in relays:
            if self._metrics:
                del relays[relay]['metrics']
            if self._control:
                del relays[relay]['control']
            if self._plugins:
                if 'plugins' in relays[relay]:
                    del relays[relay]['plugins']
python
{ "resource": "" }
q278047
Chooser.fromlist
test
def fromlist(cls, files, equal=False, offensive=False, lang=None): """Initialize based on a list of fortune files""" self = cls.__new__(cls) self.files = fortunes = [] count = 0 for file in files: fortune = load_fortune(file, offensive=offensive, lang=lang) if fortune is None: logger.warn("Can't load: %s", file) continue count += 1 if equal else fortune.size fortunes.append((fortune, count)) if not fortunes: raise ValueError('All fortune files specified are invalid') self.count = count self.keys = [i[1] for i in self.files] return self
python
{ "resource": "" }
q278048
Chooser.set_chance
test
def set_chance(cls, files, equal=False, offensive=False, lang=None): # where files are (name, chance) """Initialize based on a list of fortune files with set chances""" self = cls.__new__(cls) total = 0. file = [] leftover = [] for name, chance in files: if total >= 1: break fortune = load_fortune(name, offensive=offensive, lang=lang) if fortune is None or not fortune.size: continue if chance: file.append((fortune, chance)) total += chance else: leftover.append(fortune) if leftover and total < 1: left = 1 - total if equal: perfile = left / len(leftover) for fortune in leftover: file.append((fortune, perfile)) else: entries = sum(map(attrgetter('size'), leftover)) logger.debug('%d entries left', entries) for fortune in leftover: chance = left * fortune.size / entries file.append((fortune, chance)) # Arbitrary limit to calculate upper bound with, nice round number self.count = count = 65536 bound = 0 self.files = fortunes = [] for file, chance in file: bound += int(chance * count) fortunes.append((file, bound)) self.keys = [i[1] for i in self.files] return self
python
{ "resource": "" }
q278049
main
test
def main(context, **kwargs):
    """virtue discovers and runs tests found in the given objects.

    Provide it with one or more tests (packages, modules or objects) to run.
    """
    result = run(**kwargs)
    context.exit(not result.wasSuccessful())
python
{ "resource": "" }
q278050
Parser.rule
test
def rule(self, text):
    """rule = identifier , "=" , expression , ";" ;"""
    self._attempting(text)
    return concatenation([
        self.identifier,
        "=",
        self.expression,
        ";",
    ], ignore_whitespace=True)(text).retyped(TokenType.rule)
python
{ "resource": "" }
q278051
Parser.special_handling
test
def special_handling(self, text):
    """special_handling = "?" , identifier , "?" ;"""
    self._attempting(text)
    return concatenation([
        "?",
        self.identifier,
        "?",
    ], ignore_whitespace=True)(text).retyped(TokenType.special_handling)
python
{ "resource": "" }
q278052
Compiler.grammar
test
def grammar(self):
    """The parse tree generated by the source."""
    if self._grammar is None:
        self.parser = Parser()
        grammar = self.parser.parse(self.input_source)
        self._grammar = grammar.trimmed().flattened().flattened(self._flatten)
    return self._grammar
python
{ "resource": "" }
q278053
Compiler.rules
test
def rules(self):
    """The AST rules."""
    if self._rules is None:
        self._rules = []
        for child in self.grammar.children:
            if child.is_type(TokenType.rule):
                name, expression = child.children
                self._rules.append(Rule(name.value,
                                        self._expression_to_asn(expression),
                                        name.position,
                                        child.consumed))
    return self._rules
python
{ "resource": "" }
q278054
Compiler.comments
test
def comments(self):
    """The AST comments."""
    if self._comments is None:
        self._comments = [c for c in self.grammar.children
                          if c.is_type(TokenType.comment)]
    return self._comments
python
{ "resource": "" }
q278055
Compiler.directives
test
def directives(self):
    """The directives parsed from the comments."""
    if self._directives is None:
        self._directives = []
        for comment in self.comments:
            self._directives.extend(self.directives_from_comment(comment))
    return self._directives
python
{ "resource": "" }
q278056
Compiler.output_source
test
def output_source(self):
    """The python source of the parser generated from the input source."""
    if self._output_source is None:
        self._output_source = self._compile()
    return self._output_source
python
{ "resource": "" }
q278057
Compiler._compile
test
def _compile(self): """Returns the python source code for the generated parser.""" fmt = """\"\"\"This parser was generated by pyebnf on {date}.\"\"\" from enum import Enum from pyebnf import parser_base as PB from pyebnf.primitive import alternation, concatenation, exclusion, one_or_more from pyebnf.primitive import option, repeated, repetition, terminal, zero_or_more {imports} {token_type_enum} {class_definition} """ fmt = self._clean_fmt(fmt) return fmt.format(date=datetime.utcnow().isoformat(), imports=self._get_imports(), token_type_enum=self._get_token_type_enum(), class_definition=self._get_class_definition())
python
{ "resource": "" }
q278058
Compiler._get_imports
test
def _get_imports(self):
    """Reads the directives and generates source code for custom imports."""
    import_directives = [d for d in self.directives if d.name == "import"]
    if import_directives:
        return "\n" + "\n".join(d.args["value"] for d in import_directives)
    else:
        return ""
python
{ "resource": "" }
q278059
Compiler._get_token_type_enum
test
def _get_token_type_enum(self):
    """Builds the python source code for the Parser TokenType enum."""
    fmt = "class TokenType(Enum):\n" \
          "{indent}\"\"\"The token types for parse nodes generated by the Parser.\"\"\"\n" \
          "{indent}" + \
          "\n{indent}".join("{1} = {0}".format(num + 1, r.name)
                            for num, r in enumerate(self.rules))
    return fmt.format(indent=self.indent)
python
{ "resource": "" }
q278060
Compiler._get_class_definition
test
def _get_class_definition(self): """Builds the class definition of the parser.""" fmt = """class Parser({parser_base}): {indent}\"\"\"This class contains methods for reading source code and generating a parse tree.\"\"\" {indent}entry_point = "{entry_point}" {rule_definitions} """ fmt = self._clean_fmt(fmt) return fmt.format(parser_base=self._get_parser_base(), indent=self.indent, entry_point=self._get_entry_point(), rule_definitions="\n".join(self._get_rule_definitions()))
python
{ "resource": "" }
q278061
Compiler._get_entry_point
test
def _get_entry_point(self):
    """Gets the entry_point value for the parser."""
    ep = self._find_directive("entry_point")
    if ep:
        return ep.args["value"]
    else:
        return self.rules[0].name
python
{ "resource": "" }
q278062
Compiler._get_rule_definition
test
def _get_rule_definition(self, rule): """Generates the source code for a rule.""" fmt = """def {rule_fxn_name}(self, text): {indent}\"\"\"{rule_source}\"\"\" {indent}self._attempting(text) {indent}return {rule_definition}(text){transform} """ fmt = self._clean_fmt(fmt) source = self._indent(self._ast_to_code(rule.expression), skip_first_line=True) # All the primitives will accept a string x in place of terminal(x). This is terminal shorthand. # However, if a rule is only a wrapper around a single terminal, we have to actually make a # terminal call. This handles that situation. if self.use_terminal_shorthand and len(source) == 1 and source[0].startswith(("'", '"')): source = ["terminal({})".format(source[0])] rule_source = fmt.format(rule_fxn_name=self._get_rule_fxn_name(rule.name), indent=self.indent, rule_source=self._get_rule_source(rule), rule_definition="\n".join(source), transform=self._get_rule_transform(rule)) return self._indent(rule_source, 1)
python
{ "resource": "" }
q278063
Compiler._get_rule_source
test
def _get_rule_source(self, rule):
    """Gets the variable part of the source code for a rule."""
    p = len(self.input_source) + rule.position
    source = self.input_source[p:p + rule.consumed].rstrip()
    return self._indent(source, depth=self.indent + " ", skip_first_line=True)
python
{ "resource": "" }
q278064
Compiler._get_rule_transform
test
def _get_rule_transform(self, rule): """The return value for each rule can be either retyped, compressed or left alone. This method determines that and returns the source code text for accomplishing it. """ rd = self._find_directive(lambda d: d.name == "rule" and d.args.get("name") == rule.name) if rd: args = rd.args else: args = {} transform = args.get("transform", "retype") if transform == "retype": new_name = args.get("to_type", "TokenType.{0}".format(rule.name)) return ".retyped({0})".format(new_name) elif transform == "compress": new_name = args.get("to_type", "TokenType.{0}".format(rule.name)) if new_name == "identity": return ".compressed()" else: return ".compressed({0})".format(new_name) elif transform == "identity": return ""
python
{ "resource": "" }
q278065
Compiler._expression_to_asn
test
def _expression_to_asn(self, expression):
    """Convert an expression to an Abstract Syntax Tree Node."""
    new_children = [self._node_to_asn(c) for c in expression.children]
    return self._remove_grouping_groups(infix_to_optree(new_children))
python
{ "resource": "" }
q278066
Compiler._node_to_asn
test
def _node_to_asn(self, node): """Convert a parse tree node into an absract syntax tree node.""" if node.is_type(TokenType.identifier): return Identifier(node.svalue) elif node.is_type(TokenType.terminal): return Terminal(node.svalue) elif node.is_type(TokenType.option_group): expr = node.children[0] return OptionGroup(self._expression_to_asn(expr)) elif node.is_type(TokenType.repetition_group): expr = node.children[0] return RepetitionGroup(self._expression_to_asn(expr)) elif node.is_type(TokenType.grouping_group): expr = node.children[0] return GroupingGroup(self._expression_to_asn(expr)) elif node.is_type(TokenType.special_handling): ident = node.children[0] return SpecialHandling(ident) elif node.is_type(TokenType.number): return Number(node.svalue) elif node.is_type((TokenType.operator, TokenType.op_mult, TokenType.op_add)): return OperatorNode(OPERATOR_INDEX[node.svalue], node.position) else: raise Exception("Unhandled parse tree node: {0}".format(node))
python
{ "resource": "" }
q278067
Compiler._hoist_operands
test
def _hoist_operands(self, operands, pred): """Flattens a list of optree operands based on a pred. This is used to convert concatenation([x, concatenation[y, ...]]) (or alternation) to concatenation([x, y, ...]). """ hopper = list(operands) new_operands = [] while hopper: target = hopper.pop(0) if pred(target): hopper = list(target.operands) + hopper else: new_operands.append(target) return new_operands
python
{ "resource": "" }
q278068
Compiler._remove_grouping_groups
test
def _remove_grouping_groups(self, optree): """Grouping groups are implied by optrees, this function hoists grouping group expressions up to their parent node. """ new_operands = [] for operand in optree.operands: if isinstance(operand, OptreeNode): new_operands.append(self._remove_grouping_groups(operand)) elif isinstance(operand, GroupingGroup): new_operands.append(operand.expression) else: new_operands.append(operand) return OptreeNode(optree.opnode, new_operands)
python
{ "resource": "" }
q278069
Compiler._ast_to_code
test
def _ast_to_code(self, node, **kwargs): """Convert an abstract syntax tree to python source code.""" if isinstance(node, OptreeNode): return self._ast_optree_node_to_code(node, **kwargs) elif isinstance(node, Identifier): return self._ast_identifier_to_code(node, **kwargs) elif isinstance(node, Terminal): return self._ast_terminal_to_code(node, **kwargs) elif isinstance(node, OptionGroup): return self._ast_option_group_to_code(node, **kwargs) elif isinstance(node, RepetitionGroup): return self._ast_repetition_group_to_code(node, **kwargs) elif isinstance(node, SpecialHandling): return self._ast_special_handling_to_code(node, **kwargs) elif isinstance(node, Number): return self._ast_number_to_code(node, **kwargs) else: raise Exception("Unhandled ast node: {0}".format(node))
python
{ "resource": "" }
q278070
Compiler._ast_optree_node_to_code
test
def _ast_optree_node_to_code(self, node, **kwargs): """Convert an abstract syntax operator tree to python source code.""" opnode = node.opnode if opnode is None: return self._ast_to_code(node.operands[0]) else: operator = opnode.operator if operator is OP_ALTERNATE: return self._ast_op_alternate_to_code(node, **kwargs) elif operator is OP_WS_CONCAT: kwargs["ignore_whitespace"] = False return self._ast_op_concat_to_code(node, **kwargs) elif operator is OP_CONCAT: kwargs["ignore_whitespace"] = True return self._ast_op_concat_to_code(node, **kwargs) elif operator is OP_EXCLUDE: return self._ast_op_exclude_to_code(node, **kwargs) elif operator is OP_MULTIPLY: return self._ast_op_multiply_to_code(node, **kwargs) elif operator is OP_REPEAT: return self._ast_op_repeat_to_code(node, **kwargs) else: raise Exception("Unhandled optree node: {0}".format(node))
python
{ "resource": "" }
q278071
Compiler._ast_terminal_to_code
test
def _ast_terminal_to_code(self, terminal, **kwargs):
    """Convert an AST terminal to python source code."""
    value = _replace(terminal.value)
    if self.use_terminal_shorthand:
        return [value]
    else:
        return ["terminal({})".format(value)]
python
{ "resource": "" }
q278072
Compiler._ast_option_group_to_code
test
def _ast_option_group_to_code(self, option_group, **kwargs):
    """Convert an AST option group to python source code."""
    lines = ["option("]
    lines.extend(self._indent(self._ast_to_code(option_group.expression)))
    lines.append(")")
    return lines
python
{ "resource": "" }
q278073
Compiler._ast_repetition_group_to_code
test
def _ast_repetition_group_to_code(self, repetition_group, ignore_whitespace=False, **kwargs):
    """Convert an AST repetition group to python source code."""
    lines = ["zero_or_more("]
    lines.extend(self._indent(self._ast_to_code(repetition_group.expression)))
    lines[-1] += ","
    lines.append(self._indent("ignore_whitespace={}".format(bool(ignore_whitespace))))
    lines.append(")")
    return lines
python
{ "resource": "" }
q278074
Compiler._ast_special_handling_to_code
test
def _ast_special_handling_to_code(self, special_handling, **kwargs):
    """Convert an AST special handling to python source code."""
    ident = special_handling.value.svalue
    if ident in PB_SPECIAL_HANDLING:
        return ["PB.{0}".format(ident)]
    else:
        return ["self.{0}".format(ident)]
python
{ "resource": "" }
q278075
Compiler._ast_op_alternate_to_code
test
def _ast_op_alternate_to_code(self, opr, **kwargs): """Convert an AST alternate op to python source code.""" hoist_target = OP_ALTERNATE operands = self._hoist_operands(opr.operands, lambda t: isinstance(t, OptreeNode) and t.opnode.operator is hoist_target) lines = ["alternation(["] for op in operands: lines.extend(self._indent(self._ast_to_code(op))) lines[-1] += "," lines.append("])") return lines
python
{ "resource": "" }
q278076
Compiler._ast_op_concat_to_code
test
def _ast_op_concat_to_code(self, opr, *, ignore_whitespace, **kwargs): """Convert an AST concatenate op to python source code.""" hoist_target = OP_CONCAT if ignore_whitespace else OP_WS_CONCAT operands = self._hoist_operands(opr.operands, lambda t: isinstance(t, OptreeNode) and t.opnode.operator is hoist_target) lines = ["concatenation(["] for op in operands: lines.extend(self._indent(self._ast_to_code(op, ignore_whitespace=ignore_whitespace))) lines[-1] += "," lines.append("], ignore_whitespace={})".format(bool(ignore_whitespace))) return lines
python
{ "resource": "" }
q278077
Compiler._ast_op_exclude_to_code
test
def _ast_op_exclude_to_code(self, opr, **kwargs): """Convert an AST exclude op to python source code.""" opl, opr = opr.operands lines = ["exclusion("] lines.extend(self._indent(self._ast_to_code(opl))) lines[-1] += "," lines.extend(self._indent(self._ast_to_code(opr))) lines.append(")") return lines
python
{ "resource": "" }
q278078
Compiler._ast_op_multiply_to_code
test
def _ast_op_multiply_to_code(self, opr, ignore_whitespace=False, **kwargs): """Convert an AST multiply op to python source code.""" opl, opr = opr.operands if isinstance(opl, Number): times = opl.value subject = self._ast_to_code(opr) else: times = opr.value subject = self._ast_to_code(opl) lines = ["repeated("] lines.extend(self._indent(subject)) lines[-1] += "," lines.append("{0}times={1},".format(self.indent, times)) lines.append("{0}ignore_whitespace={1}".format(self.indent, bool(ignore_whitespace))) lines.append(")") return lines
python
{ "resource": "" }
q278079
Compiler._ast_op_repeat_to_code
test
def _ast_op_repeat_to_code(self, opr, ignore_whitespace=False, **kwargs):
    """Convert an AST repeat op to python source code."""
    lines = ["one_or_more("]
    lines.extend(self._indent(self._ast_to_code(opr.operands[0])))
    lines[-1] += ","
    lines.append(self._indent("ignore_whitespace={}".format(bool(ignore_whitespace))))
    lines.append(")")
    return lines
python
{ "resource": "" }
q278080
Compiler._find_directives
test
def _find_directives(self, pred):
    """Finds all directives with a certain name, or that passes a predicate."""
    if isinstance(pred, str):
        return [d for d in self.directives if d.name == pred]
    else:
        return [d for d in self.directives if pred(d)]
python
{ "resource": "" }
q278081
Compiler._flatten
test
def _flatten(child, parent):
    """Custom flattening method for the parse tree."""
    return parent.is_type(TokenType.expression) and child.node_type == parent.node_type
python
{ "resource": "" }
q278082
Compiler.directives_from_comment
test
def directives_from_comment(cls, comment):
    """A directive is a line in a comment that begins with '!'."""
    comment_contents = comment.value[2:-2].strip()
    comment_lines = (l.strip() for l in comment_contents.split("\n"))
    directives = (l[1:].strip() for l in comment_lines if l.startswith("!"))
    for directive_def in directives:
        yield cls.parse_directive_def(directive_def)
python
{ "resource": "" }
q278083
AlarmDelete._handle_results
test
def _handle_results(self):
    """Handle the results of the API call"""
    # Only process if we get an HTTP return code other than 200.
    if self._api_result.status_code != requests.codes.ok:
        print(self.colorize_json(self._api_result.text))
python
{ "resource": "" }
q278084
get_id
test
def get_id(id):
    """Get a new id if the provided one is None."""
    if id is None:
        id = wx.NewId()
        logger.debug('Generated new ID %s.', id)
    else:
        logger.debug('Using provided id %s.', id)
    return id
python
{ "resource": "" }
q278085
remove_hotkey
test
def remove_hotkey(control, key): """ Remove a global hotkey. control - The control to affect key - The key to remove. """ l = _hotkeys.get(control, []) for a in l: key_str, id = a if key_str == key: control.Unbind(wx.EVT_HOTKEY, id = id) control.UnregisterHotKey(id) l.remove(a) if l: _hotkeys[control] = l else: del _hotkeys[control]
python
{ "resource": "" }
q278086
ApiCli.add_arguments
test
def add_arguments(self): """ Configure handling of command line arguments. """ self.add_logging_argument() self.parser.add_argument('-a', '--api-host', dest='api_host', action='store', metavar="api_host", help='{0} API host endpoint'.format(self.product_name)) self.parser.add_argument('-e', '--email', dest='email', action='store', metavar="e_mail", help='e-mail that has access to the {0} account'.format(self.product_name)) self.parser.add_argument('-t', '--api-token', dest='api_token', required=False, action='store', metavar="api_token", help='API token for given e-mail that has access to the {0} account'.format( self.product_name)) self.parser.add_argument('-z', '--curl', dest='curl', required=False, action='store_true', default=False, help='Output the corresponding curl command line and exit')
python
{ "resource": "" }
q278087
ApiCli._configure_logging
test
def _configure_logging(self):
    """Configure logging based on command line options"""
    if self.args.logLevel is not None:
        logging.basicConfig(level=self.levels[self.args.logLevel])
        logging.info("Set logging level to {0}".format(self.args.logLevel))
python
{ "resource": "" }
q278088
ApiCli._validate_arguments
test
def _validate_arguments(self):
    """Validates the command line arguments passed to the CLI

    Derived classes that override need to call this method before
    validating their arguments.
    """
    if self._email is None:
        self.set_error_message("E-mail for the account not provided")
        return False
    if self._api_token is None:
        self.set_error_message("API Token for the account not provided")
        return False
    return True
python
{ "resource": "" }
q278089
infix_to_postfix
test
def infix_to_postfix(nodes, *, recurse_types=None):
    """Convert a list of nodes in infix order to a list of nodes in postfix order.

    E.g. with normal algebraic precedence, 3 + 4 * 5 -> 3 4 5 * +
    """
    output = []
    operators = []
    for node in nodes:
        if isinstance(node, OperatorNode):
            # Drain out all operators whose precedence is gte the node's...
            cmp_operator = node.operator
            while operators:
                current_operator = operators[-1].operator
                if current_operator.precedence > cmp_operator.precedence or \
                   current_operator.precedence == cmp_operator.precedence and \
                   current_operator.association == Association.left:
                    output.append(operators.pop())
                else:
                    break
            operators.append(node)
        else:
            if recurse_types is not None and node.node_type in recurse_types:
                output.extend(infix_to_postfix(node.children, recurse_types=recurse_types))
            else:
                output.append(node)
    return output + list(reversed(operators))
python
{ "resource": "" }
q278090
postfix_to_optree
test
def postfix_to_optree(nodes):
    """Convert a list of nodes in postfix order to an Optree."""
    while len(nodes) > 1:
        nodes = _reduce(nodes)
    if len(nodes) == 0:
        raise OperatorError("Empty node list")
    node = nodes[0]
    if isinstance(node, OperatorNode):
        raise OperatorError("Operator without operands")
    if isinstance(node, OptreeNode):
        return node
    return OptreeNode(None, (node, ))
python
{ "resource": "" }
q278091
_reduce
test
def _reduce(nodes): """Finds the first operator in the list, converts it and its operands to a OptreeNode, then returns a new list with the operator and operands replaced by the new OptreeNode. """ i = 0 while i < len(nodes): if isinstance(nodes[i], OperatorNode): break else: i += 1 if i == len(nodes): raise OperatorError("No operator found") operator_node = nodes[i] operator = operator_node.operator operands_lbound = i - operator.cardinality if operands_lbound < 0: raise OperatorError("Insufficient operands for operator {0}".format(operator.symbol)) return nodes[:operands_lbound] + \ [OptreeNode(operator_node, tuple(nodes[operands_lbound:i]))] + \ nodes[i+1:]
python
{ "resource": "" }
q278092
MetricModify.add_arguments
test
def add_arguments(self): """ Add the specific arguments of this CLI """ MetricCommon.add_arguments(self) self.parser.add_argument('-n', '--metric-name', dest='metricName', action='store', required=True, metavar='metric_name', help='Metric identifier') self.parser.add_argument('-d', '--display-name', dest='displayName', action='store', required=True, metavar='display_name', help='Metric display name') self.parser.add_argument('-s', '--display-name-short', dest='displayNameShort', action='store', required=True, metavar='display_short_name', help='Metric short display name') self.parser.add_argument('-i', '--description', dest='description', action='store', required=not self.update, metavar='description', help='Metric description') self.parser.add_argument('-g', '--aggregate', dest='aggregate', action='store', required=True, choices=['avg', 'max', 'min', 'sum'], help='Metric default aggregate') self.parser.add_argument('-u', '--unit', dest='unit', action='store', required=False, choices=['percent', 'number', 'bytecount', 'duration'], help='Metric unit') self.parser.add_argument('-r', '--resolution', dest='resolution', action='store', metavar='resolution', required=False, help='Metric default resolution') self.parser.add_argument('-y', '--type', dest='type', action='store', default=None, required=False, metavar='type', help='Sets the type metadata field') self.parser.add_argument('-x', '--is-disabled', dest='isDisabled', action='store', default=None, required=False, choices=['true', 'false'], help='Enable or disable the metric definition')
python
{ "resource": "" }
q278093
MetricMarkdown.load
test
def load(self):
    """Read the file and parse JSON into dictionary"""
    manifest = PluginManifest(self.file_path)
    manifest.get()
    self.manifest = manifest.get_manifest()
python
{ "resource": "" }
q278094
MetricMarkdown.getMetricDefinition
test
def getMetricDefinition(self, name):
    """Looks up the metric definition from the definitions from the API call"""
    metric = None
    for m in self.metric_definitions:
        if m['name'] == name:
            metric = m
            break
    return metric
python
{ "resource": "" }
q278095
MetricMarkdown.getFieldsColumnLengths
test
def getFieldsColumnLengths(self):
    """Gets the maximum length of each column in the field table"""
    nameLen = 0
    descLen = 0
    for f in self.fields:
        nameLen = max(nameLen, len(f['title']))
        descLen = max(descLen, len(f['description']))
    return (nameLen, descLen)
python
{ "resource": "" }
q278096
MetricMarkdown.getMetricsColumnLengths
test
def getMetricsColumnLengths(self):
    """Gets the maximum length of each column"""
    displayLen = 0
    descLen = 0
    for m in self.metrics:
        displayLen = max(displayLen, len(m['displayName']))
        descLen = max(descLen, len(m['description']))
    return (displayLen, descLen)
python
{ "resource": "" }
q278097
MetricMarkdown.escapeUnderscores
test
def escapeUnderscores(self):
    """Escape underscores so that the markdown is correct"""
    new_metrics = []
    for m in self.metrics:
        m['name'] = m['name'].replace("_", "\_")
        new_metrics.append(m)
    self.metrics = new_metrics
python
{ "resource": "" }
q278098
MetricMarkdown.outputFieldMarkdown
test
def outputFieldMarkdown(self):
    """Sends the field definitions to standard out"""
    f, d = self.getFieldsColumnLengths()
    fc, dc = self.printFieldsHeader(f, d)
    f = max(fc, f)
    d = max(dc, d)
    self.printFields(f, d)
python
{ "resource": "" }
q278099
MetricMarkdown.outputMetricMarkdown
test
def outputMetricMarkdown(self):
    """Sends the markdown of the metric definitions to standard out"""
    self.escapeUnderscores()
    m, d = self.getMetricsColumnLengths()
    self.printMetricsHeader(m, d)
    self.printMetrics(m, d)
python
{ "resource": "" }