column            type    stats
_id               string  lengths 2-7
title             string  lengths 1-88
partition         string  3 classes
text              string  lengths 31-13.1k
language          string  1 class
meta_information  dict
q278100
MetricMarkdown.generateMarkdown
test
def generateMarkdown(self): """ Look up each of the metrics and then output in Markdown """ self.generateMetricDefinitions()
python
{ "resource": "" }
q278101
ParserBase.parse
test
def parse(self, text): """Attempt to parse source code.""" self.original_text = text try: return getattr(self,
python
{ "resource": "" }
q278102
ParserBase._attempting
test
def _attempting(self, text): """Keeps track of the furthest point in the source code the parser has reached to this point."""
python
{ "resource": "" }
q278103
MeasurementGet.add_arguments
test
def add_arguments(self):
    """ Add specific command line arguments for this command """
    # Call our parent to add the default arguments
    ApiCli.add_arguments(self)

    # Command specific arguments
    self.parser.add_argument('-f', '--format', dest='format', action='store', required=False,
                             choices=['csv', 'json', 'raw', 'xml'], help='Output format. Default is raw')
    self.parser.add_argument('-n', '--name', dest='metric_name', action='store', required=True,
                             metavar="metric_name", help='Metric identifier')
    self.parser.add_argument('-g', '--aggregate', dest='aggregate', action='store', required=False,
                             choices=['sum', 'avg', 'max', 'min'], help='Metric default aggregate')
    self.parser.add_argument('-r', '--sample', dest='sample', action='store', type=int,
                             metavar="sample", help='Down sample rate sample in seconds')
    self.parser.add_argument('-s', '--source', dest='source', action='store', metavar="source",
                             required=True, help='Source of measurement')
    self.parser.add_argument('-b', '--start', dest='start', action='store', required=True, metavar="start",
python
{ "resource": "" }
q278104
MeasurementGet.parse_time_date
test
def parse_time_date(self, s): """ Attempt to parse the passed in string into a valid datetime. If we get a parse error then assume the string is an epoch time
python
{ "resource": "" }
q278105
MeasurementGet.output_csv
test
def output_csv(self, text):
    """ Output results in CSV format """
    payload = json.loads(text)

    # Print CSV header
    print("{0},{1},{2},{3},{4}".format('timestamp', 'metric', 'aggregate', 'source', 'value'))

    metric_name = self._metric_name

    # Loop through the aggregates one row per timestamp, and 1 or more source/value pairs
    for r in payload['result']['aggregates']['key']:
python
{ "resource": "" }
q278106
MeasurementGet.output_json
test
def output_json(self, text):
    """ Output results in structured JSON format """
    payload = json.loads(text)
    data = []
    metric_name = self._metric_name
    for r in payload['result']['aggregates']['key']:
        timestamp = self._format_timestamp(r[0][0])
        for s in r[1]:
            data.append({
                "timestamp": timestamp,
python
{ "resource": "" }
q278107
MeasurementGet.output_raw
test
def output_raw(self, text): """ Output results in raw JSON format """ payload = json.loads(text)
python
{ "resource": "" }
q278108
MeasurementGet.output_xml
test
def output_xml(self, text):
    """ Output results in JSON format """
    # Create the main document nodes
    document = Element('results')
    comment = Comment('Generated by TrueSight Pulse measurement-get CLI')
    document.append(comment)
    aggregates = SubElement(document, 'aggregates')
    aggregate = SubElement(aggregates, 'aggregate')
    measurements = SubElement(aggregate, 'measurements')

    # Parse the JSON result so we can translate to XML
    payload = json.loads(text)

    # Current only support a single metric, if we move to the batch API then
    # we can handle multiple
    metric_name = self._metric_name

    # Loop through the aggregates one row per timestamp, and 1 or more source/value pairs
    for r in payload['result']['aggregates']['key']:
        timestamp = self._format_timestamp(r[0][0])
        for s in r[1]:
            # Each timestamp, metric, source, values is placed in a measure tag
            measure_node = SubElement(measurements, 'measure')
            source = s[0]
            value = str(s[1])
python
{ "resource": "" }
q278109
trimmed_pred_default
test
def trimmed_pred_default(node, parent): """The default predicate used in Node.trimmed.""" return isinstance(node,
python
{ "resource": "" }
q278110
pprint
test
def pprint(root, depth=0, space_unit=" ", *, source_len=0, file=None):
    """Pretting print a parse tree."""
    spacing = space_unit * depth
    if isinstance(root, str):
        print("{0}terminal@(?): {1}".format(spacing, root), file=file)
    else:
        if root.position is None:
            position = -1
        elif root.position < 0:
            position = source_len + root.position
        else:
            position = root.position
        if root.is_value:
python
{ "resource": "" }
q278111
repetition
test
def repetition(extractor, bounds, *, ignore_whitespace=False): """Returns a partial of _get_repetition that accepts only a text argument.""" return
python
{ "resource": "" }
q278112
_get_terminal
test
def _get_terminal(value, text): """Checks the beginning of text for a value. If it is found, a terminal ParseNode is returned filled out appropriately for the value it found. DeadEnd is raised if
python
{ "resource": "" }
q278113
_get_repetition
test
def _get_repetition(extractor, text, *, bounds=(0, None), ignore_whitespace=False):
    """Tries to pull text with extractor repeatedly.

    Bounds is a 2-tuple of (lbound, ubound) where lbound is a number and ubound is a number
    or None. If the ubound is None, this method will execute extractor on text until extrator
    raises DeadEnd. Otherwise, extractor will be called until it raises DeadEnd, or it has
    extracted ubound times. If the number of children extracted is >= lbound, then a ParseNode
    with type repetition is returned. Otherwise, DeadEnd is raised.

    Bounds are interpreted as (lbound, ubound]

    This method is used to implement:
    - option (0, 1)
    - zero_or_more (0, None)
    - one_or_more (1, None)
    - exact_repeat (n, n)
    """
    minr, maxr = bounds
    children = []
    while maxr is None or
python
{ "resource": "" }
q278114
_get_exclusion
test
def _get_exclusion(extractor, exclusion, text): """Returns extractor's result if exclusion does not match. If exclusion raises DeadEnd (meaning it did not match) then the result of extractor(text) is returned. Otherwise, if
python
{ "resource": "" }
q278115
_count_leading_whitespace
test
def _count_leading_whitespace(text): """Returns the number of characters at the beginning of text
python
{ "resource": "" }
q278116
_call_extractor
test
def _call_extractor(extractor, text): """This method calls an extractor on some text. If extractor is just a string, it is passed as the first value to _get_terminal. Otherwise it is treated as a callable and text is passed directly to it.
python
{ "resource": "" }
q278117
ParseNode.position
test
def position(self): """Gets the position of the text the ParseNode processed. If the ParseNode does not have its own position, it looks to its first child for its position. 'Value Nodes' (terminals) must have their own position, otherwise this method will throw an exception when it tries to get the position property of the string child. """
python
{ "resource": "" }
q278118
ParseNode.is_empty
test
def is_empty(self): """Returns True if this node has no children, or if all of its children are ParseNode instances and are empty. """
python
{ "resource": "" }
q278119
ParseNode.add_ignored
test
def add_ignored(self, ignored): """Add ignored text to the node. This will add the length of the ignored text to the node's
python
{ "resource": "" }
q278120
ParseNode.is_type
test
def is_type(self, value): """Returns True if node_type == value. If value is a tuple, node_type is checked against each member and True is returned if any of them match. """ if isinstance(value, tuple): for opt in value:
python
{ "resource": "" }
q278121
ParseNode.flattened
test
def flattened(self, pred=flattened_pred_default):
    """Flattens nodes by hoisting children up to ancestor nodes.

    A node is hoisted if pred(node) returns True.
    """
    if self.is_value:
        return self
    new_children = []
    for child in self.children:
        if child.is_empty:
            continue
        new_child = child.flattened(pred)
        if pred(new_child, self):
python
{ "resource": "" }
q278122
ParseNode.trimmed
test
def trimmed(self, pred=trimmed_pred_default):
    """Trim a ParseTree.

    A node is trimmed if pred(node) returns True.
    """
    new_children = []
    for child in self.children:
        if isinstance(child, ParseNode):
            new_child = child.trimmed(pred)
        else:
            new_child = child
        if not pred(new_child, self):
python
{ "resource": "" }
q278123
ParseNode.merged
test
def merged(self, other):
    """Returns a new ParseNode whose type is this node's type, and whose children are all the
    children from this node and the other whose length is not 0.
    """
    children = [c for c in itertools.chain(self.children, other.children) if len(c) > 0]
    # NOTE: Only terminals should have ignored text attached to them, and terminals shouldn't be
    # merged (probably) so it shouldn't be necessary to copy of ignored -- it should always
python
{ "resource": "" }
q278124
ParseNode.retyped
test
def retyped(self, new_type): """Returns a new node with the same contents as self, but with a new node_type.""" return ParseNode(new_type, children=list(self.children),
python
{ "resource": "" }
q278125
ParseNode.compressed
test
def compressed(self, new_type=None, *, include_ignored=False):
    """Turns the node into a value node, whose single string child is the concatenation of all
    its children.
    """
    values = []
    consumed = 0
    ignored = None
    for i, child in enumerate(self.children):
        consumed += child.consumed
        if i == 0 and not include_ignored:
            ignored = child.ignored
        if child.is_value:
            if include_ignored:
                values.append("{0}{1}".format(child.ignored or "", child.value))
            else:
                values.append(child.value)
        else:
python
{ "resource": "" }
q278126
Cursor.position
test
def position(self) -> Position: """The current position of the cursor."""
python
{ "resource": "" }
q278127
Cursor.max_readed_position
test
def max_readed_position(self) -> Position: """The index of the deepest character readed."""
python
{ "resource": "" }
q278128
Cursor.step_next_char
test
def step_next_char(self): """Puts the cursor on the next character.""" self._index += 1 self._col_offset += 1 if self._index > self._maxindex:
python
{ "resource": "" }
q278129
Cursor.step_next_line
test
def step_next_line(self): """Sets cursor as beginning of next line."""
python
{ "resource": "" }
q278130
Cursor.step_prev_line
test
def step_prev_line(self): """Sets cursor as end of previous line.""" #TODO(bps): raise explicit error
python
{ "resource": "" }
q278131
Stream.last_readed_line
test
def last_readed_line(self) -> str: """Usefull string to compute error message.""" mpos = self._cursor.max_readed_position mindex = mpos.index # search last \n prevline = mindex - 1 if mindex == self.eos_index else mindex
python
{ "resource": "" }
q278132
Stream.incpos
test
def incpos(self, length: int=1) -> int: """Increment the cursor to the next character.""" if length < 0: raise ValueError("length must be positive") i =
python
{ "resource": "" }
q278133
Stream.save_context
test
def save_context(self) -> bool: """Save current position."""
python
{ "resource": "" }
q278134
Stream.restore_context
test
def restore_context(self) -> bool: """Rollback to previous saved position."""
python
{ "resource": "" }
q278135
to_fmt
test
def to_fmt(self, with_from=False) -> fmt.indentable: """ Return a Fmt representation of Translator for pretty-printing """ txt = fmt.sep("\n", [ fmt.sep( " ", [ self._type_source, "to",
python
{ "resource": "" }
q278136
Scope.set_name
test
def set_name(self, name: str): """ You could set the name after construction """ self.name = name
python
{ "resource": "" }
q278137
Scope.count_vars
test
def count_vars(self) -> int: """ Count var define by this scope """ n = 0 for s in self._hsig.values():
python
{ "resource": "" }
q278138
Scope.count_funs
test
def count_funs(self) -> int: """ Count function define by this scope """ n = 0 for s in self._hsig.values():
python
{ "resource": "" }
q278139
Scope.__update_count
test
def __update_count(self): """ Update internal counters """ self._ntypes = self.count_types()
python
{ "resource": "" }
q278140
Scope.update
test
def update(self, sig: list or Scope) -> Scope: """ Update the Set with values of another Set """ values = sig if hasattr(sig, 'values'): values = sig.values() for s in values: if self.is_namespace:
python
{ "resource": "" }
q278141
Scope.union
test
def union(self, sig: Scope) -> Scope: """ Create a new Set produce by the union
python
{ "resource": "" }
q278142
Scope.intersection_update
test
def intersection_update(self, oset: Scope) -> Scope: """ Update Set with common values of another Set """ keys = list(self._hsig.keys()) for k in keys: if k not in oset:
python
{ "resource": "" }
q278143
Scope.intersection
test
def intersection(self, sig: Scope) -> Scope: """ Create a new Set produce by the intersection
python
{ "resource": "" }
q278144
Scope.difference_update
test
def difference_update(self, oset: Scope) -> Scope: """ Remove values common with another Set """ keys = list(self._hsig.keys())
python
{ "resource": "" }
q278145
Scope.difference
test
def difference(self, sig: Scope) -> Scope: """ Create a new Set produce by a Set subtracted
python
{ "resource": "" }
q278146
Scope.symmetric_difference_update
test
def symmetric_difference_update(self, oset: Scope) -> Scope: """ Remove common values and Update specific values from another Set """ skey = set() keys = list(self._hsig.keys()) for k in keys: if k in oset:
python
{ "resource": "" }
q278147
Scope.symmetric_difference
test
def symmetric_difference(self, sig: Scope) -> Scope: """ Create a new Set with values present
python
{ "resource": "" }
q278148
Scope.add
test
def add(self, it: Signature) -> bool: """ Add it to the Set """ if isinstance(it, Scope): it.state = StateScope.EMBEDDED txt = it.internal_name() it.set_parent(self)
python
{ "resource": "" }
q278149
Scope.remove
test
def remove(self, it: Signature) -> bool: """ Remove it but raise KeyError if not found """ txt = it.internal_name() if txt not in self._hsig: raise KeyError(it.show_name() + ' not in Set') sig = self._hsig[txt]
python
{ "resource": "" }
q278150
Scope.discard
test
def discard(self, it: Signature) -> bool: """ Remove it only if present """ txt = it.internal_name() if txt in self._hsig: sig = self._hsig[txt] if isinstance(sig, Scope):
python
{ "resource": "" }
q278151
Scope.values
test
def values(self) -> [Signature]: """ Retrieve all values """ if self.state == StateScope.EMBEDDED and self.parent is not None: return
python
{ "resource": "" }
q278152
Scope.first
test
def first(self) -> Signature: """ Retrieve the first Signature ordered
python
{ "resource": "" }
q278153
Scope.last
test
def last(self) -> Signature: """ Retrieve the last Signature ordered by mangling descendant """
python
{ "resource": "" }
q278154
Scope.get
test
def get(self, key: str, default=None) -> Signature: """ Get a signature instance by its internal_name """ item = default
python
{ "resource": "" }
q278155
Scope.get_by_symbol_name
test
def get_by_symbol_name(self, name: str) -> Scope:
    """ Retrieve a Set of all signature by symbol name """
    lst = []
    for s in self.values():
        if s.name == name:
            # create an EvalCtx only when necessary
            lst.append(EvalCtx.from_sig(s))
    # include parent
    # TODO: see all case of local redefinition for
    # global overloads
    # possible algos... take all with different internal_name
    if len(lst) == 0:
python
{ "resource": "" }
q278156
Scope.getsig_by_symbol_name
test
def getsig_by_symbol_name(self, name: str) -> Signature: """ Retrieve the unique Signature of a symbol. Fail if the Signature is not unique """ subscope = self.get_by_symbol_name(name) if len(subscope) != 1:
python
{ "resource": "" }
q278157
Scope.get_all_polymorphic_return
test
def get_all_polymorphic_return(self) -> bool: """ For now, polymorphic return type are handle by symbol artefact. --> possible multi-polymorphic but with different constraint attached! """ lst = [] for s in self.values(): if hasattr(s, 'tret') and s.tret.is_polymorphic: # encapsulate s
python
{ "resource": "" }
q278158
Scope.callInjector
test
def callInjector(self, old: Node, trans: Translator) -> Node: """ If don't have injector call from parent """ if self.astTranslatorInjector is None: if self.parent is not None: # TODO: think if we forward for all StateScope
python
{ "resource": "" }
q278159
normalize
test
def normalize(ast: Node) -> Node:
    """ Normalize an AST nodes.

    all builtins containers are replace by referencable subclasses
    """
    res = ast
    typemap = {DictNode, ListNode, TupleNode}
    if type(ast) is dict:
        res = DictNode(ast)
    elif type(ast) is list:
        res = ListNode(ast)
    elif type(ast) is tuple:
        res = TupleNode(ast)
    # in-depth change
    if hasattr(res, 'items'):
        for k, v in res.items():
            res[k] = normalize(v)
    elif hasattr(res, '__getitem__'):
        for
python
{ "resource": "" }
q278160
Node.set
test
def set(self, othernode): """allow to completly mutate the node into any subclasses of Node""" self.__class__ = othernode.__class__ self.clean() if len(othernode) > 0: for k, v
python
{ "resource": "" }
q278161
ListNodeItem.rvalues
test
def rvalues(self): """ in reversed order """ tmp = self
python
{ "resource": "" }
q278162
_hit_ok
test
def _hit_ok(hit, min_hit_charge, max_hit_charge): ''' Check if given hit is withing the limits. ''' # Omit hits with charge < min_hit_charge if hit['charge'] < min_hit_charge: return False
python
{ "resource": "" }
q278163
EvalCtx.get_compute_sig
test
def get_compute_sig(self) -> Signature:
    """ Compute a signature Using resolution!!!

    TODO: discuss of relevance of a final generation for a signature
    """
    tret = []
    tparams = []
    for t in self.tret.components:
        if t in self.resolution and self.resolution[t] is not None:
            tret.append(self.resolution[t]().show_name())
        else:
            tret.append(t)
    if hasattr(self, 'tparams'):
        for p in self.tparams:
            tp = []
            for t in p.components:
                if t in self.resolution and self.resolution[t] is not None:
                    tp.append(self.resolution[t]().show_name())
python
{ "resource": "" }
q278164
EvalCtx.resolve
test
def resolve(self):
    """ Process the signature and find definition for type. """
    # collect types for resolution
    t2resolv = []
    if hasattr(self._sig, 'tret'):
        t2resolv.append(self._sig.tret)
    if hasattr(self._sig, 'tparams') and self._sig.tparams is not None:
        for p in self._sig.tparams:
            t2resolv.append(p)
    if self._translate_to is not None:
        t2resolv.append(self._translate_to.target)
    if self._variadic_types is not None:
        for t in self._variadic_types:
            t2resolv.append(t)
    for t in t2resolv:
        for c in t.components:
            if c not in self.resolution or self.resolution[c] is None:
                # try to find what is c
                parent = self.get_parent()
                if parent is not None:
python
{ "resource": "" }
q278165
EvalCtx.get_resolved_names
test
def get_resolved_names(self, type_name: TypeName) -> list: """ Use self.resolution to subsitute type_name. Allow to instanciate polymorphic type ?1, ?toto """ if not isinstance(type_name, TypeName): raise Exception("Take a TypeName as parameter not a %s" % type(type_name)) rnames = [] for name in type_name.components: if name not in self.resolution:
python
{ "resource": "" }
q278166
EvalCtx.set_resolved_name
test
def set_resolved_name(self, ref: dict, type_name2solve: TypeName, type_name_ref: TypeName): """ Warning!!! Need to rethink it when global poly type
python
{ "resource": "" }
q278167
S3Saver._delete_local
test
def _delete_local(self, filename): """Deletes the specified file from the local filesystem."""
python
{ "resource": "" }
q278168
S3Saver._delete_s3
test
def _delete_s3(self, filename, bucket_name): """Deletes the specified file from the given S3 bucket.""" conn = S3Connection(self.access_key_id, self.access_key_secret) bucket = conn.get_bucket(bucket_name) if type(filename).__name__ == 'Key': filename = '/' + filename.name
python
{ "resource": "" }
q278169
S3Saver.delete
test
def delete(self, filename, storage_type=None, bucket_name=None): """Deletes the specified file, either locally or from S3, depending on the file's storage type.""" if not (storage_type and bucket_name): self._delete_local(filename) else: if storage_type
python
{ "resource": "" }
q278170
S3Saver._save_local
test
def _save_local(self, temp_file, filename, obj):
    """Saves the specified file to the local file system."""
    path = self._get_path(filename)
    if not os.path.exists(os.path.dirname(path)):
        os.makedirs(os.path.dirname(path), self.permission | 0o111)

    fd = open(path, 'wb')

    # Thanks to:
    # http://stackoverflow.com/a/3253276/2066849
    temp_file.seek(0)
    t = temp_file.read(1048576)
    while t:
python
{ "resource": "" }
q278171
S3Saver._save_s3
test
def _save_s3(self, temp_file, filename, obj): """Saves the specified file to the configured S3 bucket.""" conn = S3Connection(self.access_key_id, self.access_key_secret) bucket = conn.get_bucket(self.bucket_name)
python
{ "resource": "" }
q278172
S3Saver.save
test
def save(self, temp_file, filename, obj):
    """Saves the specified file to either S3 or the local filesystem, depending on the
    currently enabled storage type."""
    if not (self.storage_type and self.bucket_name):
        ret = self._save_local(temp_file, filename, obj)
    else:
        if self.storage_type != 's3':
            raise ValueError('Storage type "%s" is invalid, the only supported storage type (apart from default local storage) is s3.' % self.storage_type)
        ret = self._save_s3(temp_file, filename, obj)

    if self.field_name:
        setattr(obj, self.field_name, ret)

    if self.storage_type == 's3':
python
{ "resource": "" }
q278173
S3Saver._find_by_path_s3
test
def _find_by_path_s3(self, path, bucket_name): """Finds files by licking an S3 bucket's contents by prefix.""" conn = S3Connection(self.access_key_id, self.access_key_secret)
python
{ "resource": "" }
q278174
enum
test
def enum(*sequential, **named): """ Build an enum statement """ #: build enums from parameter enums = dict(zip(sequential, range(len(sequential))), **named) enums['map'] = copy.copy(enums) #: build reverse mapping
python
{ "resource": "" }
q278175
checktypes
test
def checktypes(func):
    """Decorator to verify arguments and return types."""
    sig = inspect.signature(func)

    types = {}
    for param in sig.parameters.values():
        # Iterate through function's parameters and build the list of
        # arguments types
        param_type = param.annotation
        if param_type is param.empty or not inspect.isclass(param_type):
            # Missing annotation or not a type, skip it
            continue

        types[param.name] = param_type

        # If the argument has a type specified, let's check that its
        # default value (if present) conforms with the type.
        if (param.default is not param.empty and
                not isinstance(param.default, param_type)):
            raise ValueError(
                "{func}: wrong type of a default value for {arg!r}".format(
                    func=func.__qualname__, arg=param.name)
            )

    def check_type(sig, arg_name, arg_type, arg_value):
        # Internal function that encapsulates arguments type checking
        if not isinstance(arg_value, arg_type):
            raise ValueError("{func}: wrong type of {arg!r} argument, "
                             "{exp!r} expected, got {got!r}".
                             format(func=func.__qualname__, arg=arg_name,
                                    exp=arg_type.__name__,
                                    got=type(arg_value).__name__))

    @functools.wraps(func)
    def wrapper(*args, **kwargs):
        # Let's bind the arguments
        ba = sig.bind(*args, **kwargs)
        for arg_name, arg in ba.arguments.items():
            # And iterate through the bound arguments
            try:
                type_ = types[arg_name]
            except KeyError:
                continue
            else:
                # OK, we have a type for the argument, lets get the
                # corresponding parameter description from the signature object
                param = sig.parameters[arg_name]
                if param.kind == param.VAR_POSITIONAL:
python
{ "resource": "" }
q278176
set_one
test
def set_one(chainmap, thing_name, callobject): """ Add a mapping with key thing_name for callobject in chainmap with namespace handling. """ namespaces = reversed(thing_name.split(".")) lstname =
python
{ "resource": "" }
q278177
add_method
test
def add_method(cls): """Attach a method to a class.""" def wrapper(f): #if hasattr(cls, f.__name__): # raise AttributeError("{} already has a '{}' attribute".format(
python
{ "resource": "" }
q278178
hook
test
def hook(cls, hookname=None, erase=False):
    """Attach a method to a parsing class and register it as a parser hook.

    The method is registered with its name unless hookname is provided.
    """
    if not hasattr(cls, '_hooks'):
        raise TypeError(
            "%s didn't seems to be a BasicParser subsclasse" % cls.__name__)
    class_hook_list = cls._hooks
    class_rule_list = cls._rules

    def wrapper(f):
        nonlocal hookname
        add_method(cls)(f)
        if hookname is None:
            hookname = f.__name__
        if not erase
python
{ "resource": "" }
q278179
rule
test
def rule(cls, rulename=None, erase=False):
    """Attach a method to a parsing class and register it as a parser rule.

    The method is registered with its name unless rulename is provided.
    """
    if not hasattr(cls, '_rules'):
        raise TypeError(
            "%s didn't seems to be a BasicParser subsclasse" % cls.__name__)
    class_hook_list = cls._hooks
    class_rule_list = cls._rules

    def wrapper(f):
        nonlocal rulename
        add_method(cls)(f)
        if rulename is None:
            rulename = f.__name__
        if not erase
python
{ "resource": "" }
q278180
directive
test
def directive(directname=None): """Attach a class to a parsing class and register it as a parser directive. The class is registered with its name unless directname is provided. """ global _directives class_dir_list = _directives def wrapper(f): nonlocal directname
python
{ "resource": "" }
q278181
decorator
test
def decorator(directname=None): """ Attach a class to a parsing decorator and register it to the global decorator list. The class is registered with its name unless directname is provided """ global _decorators class_deco_list = _decorators def wrapper(f): nonlocal
python
{ "resource": "" }
q278182
bind
test
def bind(self, dst: str, src: Node) -> bool:
    """Allow to alias a node to another name.

    Useful to bind a node to _ as return of Rule::

        R = [ __scope__:L [item:I #add_item(L, I]* #bind('_', L) ]

    It's also the default behaviour of ':>'
    """
python
{ "resource": "" }
q278183
read_eol
test
def read_eol(self) -> bool: """Return True if the parser can consume an EOL byte sequence.""" if self.read_eof(): return False self._stream.save_context() self.read_char('\r')
python
{ "resource": "" }
q278184
BasicParser.push_rule_nodes
test
def push_rule_nodes(self) -> bool: """Push context variable to store rule nodes.""" if self.rule_nodes is None: self.rule_nodes = collections.ChainMap() self.tag_cache = collections.ChainMap() self.id_cache = collections.ChainMap() else:
python
{ "resource": "" }
q278185
BasicParser.pop_rule_nodes
test
def pop_rule_nodes(self) -> bool: """Pop context variable that store rule nodes""" self.rule_nodes = self.rule_nodes.parents
python
{ "resource": "" }
q278186
BasicParser.value
test
def value(self, n: Node) -> str:
    """Return the text value of the node"""
    id_n = id(n)
    idcache = self.id_cache
    if id_n not in idcache:
        return ""
    name = idcache[id_n]
    tag_cache = self.tag_cache
    if name not in tag_cache:
        raise Exception("Incoherent tag cache")
python
{ "resource": "" }
q278187
BasicParser.parsed_stream
test
def parsed_stream(self, content: str, name: str=None): """Push a new Stream into the parser. All subsequent called functions will parse this new stream, until the
python
{ "resource": "" }
q278188
BasicParser.begin_tag
test
def begin_tag(self, name: str) -> Node: """Save the current index under the given name.""" # Check if we could attach tag cache to current rule_nodes scope
python
{ "resource": "" }
q278189
BasicParser.end_tag
test
def end_tag(self, name: str) -> Node: """Extract the string between saved and current index."""
python
{ "resource": "" }
q278190
BasicParser.set_rules
test
def set_rules(cls, rules: dict) -> bool: """ Merge internal rules set with the given rules """ cls._rules = cls._rules.new_child() for rule_name, rule_pt in rules.items(): if '.' not in rule_name:
python
{ "resource": "" }
q278191
BasicParser.set_hooks
test
def set_hooks(cls, hooks: dict) -> bool: """ Merge internal hooks set with the given hooks """ cls._hooks = cls._hooks.new_child() for hook_name, hook_pt in hooks.items(): if '.' not in hook_name:
python
{ "resource": "" }
q278192
BasicParser.set_directives
test
def set_directives(cls, directives: dict) -> bool: """ Merge internal directives set with the given directives. For working directives, attach it only in the dsl.Parser class """ meta._directives = meta._directives.new_child() for dir_name, dir_pt in
python
{ "resource": "" }
q278193
BasicParser.eval_rule
test
def eval_rule(self, name: str) -> Node:
    """Evaluate a rule by name."""
    # context created by caller
    n = Node()
    id_n = id(n)
    self.rule_nodes['_'] = n
    self.id_cache[id_n] = '_'
    # TODO: other behavior for empty rules?
    if name not in self.__class__._rules:
        self.diagnostic.notify(
            error.Severity.ERROR,
            "Unknown rule : %s" % name,
            error.LocationInfo.from_stream(self._stream, is_error=True)
        )
python
{ "resource": "" }
q278194
BasicParser.eval_hook
test
def eval_hook(self, name: str, ctx: list) -> Node:
    """Evaluate the hook by its name"""
    if name not in self.__class__._hooks:
        # TODO: don't always throw error, could have return True by default
        self.diagnostic.notify(
            error.Severity.ERROR,
            "Unknown hook : %s" % name,
            error.LocationInfo.from_stream(self._stream, is_error=True)
        )
        raise self.diagnostic
python
{ "resource": "" }
q278195
BasicParser.peek_text
test
def peek_text(self, text: str) -> bool: """Same as readText but doesn't consume the stream.""" start = self._stream.index stop = start + len(text)
python
{ "resource": "" }
q278196
BasicParser.one_char
test
def one_char(self) -> bool: """Read one byte in stream""" if self.read_eof():
python
{ "resource": "" }
q278197
BasicParser.read_char
test
def read_char(self, c: str) -> bool: """ Consume the c head byte, increment current index and return True else return False. It use peekchar and it's the same as '' in BNF. """ if self.read_eof(): return False
python
{ "resource": "" }
q278198
BasicParser.read_until_eof
test
def read_until_eof(self) -> bool: """Consume all the stream. Same as EOF in BNF.""" if self.read_eof(): return True # TODO: read ALL self._stream.save_context() while not
python
{ "resource": "" }
q278199
BasicParser.ignore_blanks
test
def ignore_blanks(self) -> bool: """Consume whitespace characters.""" self._stream.save_context() if not self.read_eof() and self._stream.peek_char in " \t\v\f\r\n": while (not self.read_eof()
python
{ "resource": "" }