Code
stringlengths
103
85.9k
Summary
sequencelengths
0
94
def ParseConfig(self, command, function=None, unique=1):
    """Run *command* and use *function* to merge its output into this environment.

    ``command`` may be a string or a list of strings.  When ``function``
    is None the output is handed to ``MergeFlags``, which treats it like
    the output of a typical ``*-config`` tool (e.g. gtk-config).
    """
    if function is None:
        def default_parser(env, cmd_output, unique=unique):
            return env.MergeFlags(cmd_output, unique)
        function = default_parser
    if SCons.Util.is_List(command):
        command = ' '.join(command)
    return function(self, self.backtick(self.subst(command)))
[ "\n Use the specified function to parse the output of the command\n in order to modify the current environment. The 'command' can\n be a string or a list of strings representing a command and\n its arguments. 'Function' is an optional argument that takes\n the environment, the output of the command, and the unique flag.\n If no function is specified, MergeFlags, which treats the output\n as the result of a typical 'X-config' command (i.e. gtk-config),\n will merge the output into the appropriate variables.\n " ]
def ParseDepends(self, filename, must_exist=None, only_one=0):
    """Parse a mkdep-style file for explicit dependencies.

    Mostly useful when migrating from a Make hierarchy, or when
    consuming ``.d`` files emitted by a compiler.  If ``must_exist``
    is false a missing file is silently ignored; if ``only_one`` is
    true, more than one dependency target in the file raises a
    SCons.Errors.UserError.
    """
    filename = self.subst(filename)
    try:
        fp = open(filename, 'r')
    except IOError:
        if must_exist:
            raise
        return
    # FIX: the original never closed fp; use a context manager so the
    # file handle is released even if parsing raises.
    with fp:
        lines = SCons.Util.LogicalLines(fp).readlines()
    tdlist = []
    for line in [l for l in lines if l[0] != '#']:
        try:
            target, depends = line.split(':', 1)
        except (AttributeError, ValueError):
            # AttributeError: line is not a string.
            # ValueError: line has no ':' separator.
            continue
        tdlist.append((target.split(), depends.split()))
    if only_one:
        targets = []
        for td in tdlist:
            targets.extend(td[0])
        if len(targets) > 1:
            raise SCons.Errors.UserError(
                "More than one dependency target found in `%s': %s"
                % (filename, targets))
    for target, depends in tdlist:
        self.Depends(target, depends)
[ "\n Parse a mkdep-style file for explicit dependencies. This is\n completely abusable, and should be unnecessary in the \"normal\"\n case of proper SCons configuration, but it may help make\n the transition from a Make hierarchy easier for some people\n to swallow. It can also be genuinely useful when using a tool\n that can write a .d file, but for which writing a scanner would\n be too complicated.\n " ]
def Prepend(self, **kw):
    """Prepend values to existing construction variables in an Environment.

    Each keyword argument is combined with the current value of the
    construction variable of the same name using type-appropriate rules:
    sequences are concatenated (new value first), dict-like originals
    are updated in place, and scalars are inserted at the front.
    """
    kw = copy_non_reserved_keywords(kw)
    for key, val in kw.items():
        try:
            orig = self._dict[key]
        except KeyError:
            # No existing variable in the environment, so just set
            # it to the new value.
            self._dict[key] = val
        else:
            try:
                # Check if the original looks like a dictionary.
                # If it is, we can't just try adding the value because
                # dictionaries don't have __add__() methods, and
                # things like UserList will incorrectly coerce the
                # original dict to a list (which we don't want).
                update_dict = orig.update
            except AttributeError:
                try:
                    # Most straightforward: just try to add them
                    # together.  This works when original and new
                    # values are of compatible types.
                    self._dict[key] = val + orig
                except (KeyError, TypeError):
                    try:
                        # Check if the added value is a list.
                        add_to_val = val.append
                    except AttributeError:
                        # The added value isn't a list, but the
                        # original is (by process of elimination),
                        # so insert the new value in the original
                        # (if there's one to insert).
                        if val:
                            orig.insert(0, val)
                    else:
                        # The added value is a list, so append
                        # the original to it (if there's a value
                        # to append).
                        if orig:
                            add_to_val(orig)
                        self._dict[key] = val
            else:
                # The original looks like a dictionary, so update it
                # based on what we think the value looks like.
                if SCons.Util.is_List(val):
                    for v in val:
                        orig[v] = None
                else:
                    try:
                        update_dict(val)
                    except (AttributeError, TypeError, ValueError):
                        if SCons.Util.is_Dict(val):
                            for k, v in val.items():
                                orig[k] = v
                        else:
                            orig[val] = None
    # Any scanner keyed on one of these variables is now stale.
    self.scanner_map_delete(kw)
[ "Prepend values to existing construction variables\n in an Environment.\n " ]
def PrependENVPath(self, name, newpath, envname='ENV', sep=os.pathsep, delete_existing=1):
    """Prepend path elements to ``self[envname][name]``.

    Each path is added only once (paths are normpath/normcase'd to help
    assure this); the existing value may be a list or a string.  When
    ``delete_existing`` is 0, a path already present keeps its position
    instead of moving to the front.
    """
    current = ''
    if envname in self._dict and name in self._dict[envname]:
        current = self._dict[envname][name]
    merged = SCons.Util.PrependPath(current, newpath, sep, delete_existing,
                                    canonicalize=self._canonicalize)
    self._dict.setdefault(envname, {})[name] = merged
[ "Prepend path elements to the path 'name' in the 'ENV'\n dictionary for this environment. Will only add any particular\n path once, and will normpath and normcase all paths to help\n assure this. This can also handle the case where the env\n variable is a list instead of a string.\n\n If delete_existing is 0, a newpath which is already in the path\n will not be moved to the front (it will be left where it is).\n " ]
def PrependUnique(self, delete_existing=0, **kw):
    """Prepend values to existing construction variables, skipping values
    that are already present.

    If delete_existing is 1, existing copies of a value are removed
    first so the prepended values move to the front.
    """
    kw = copy_non_reserved_keywords(kw)
    for key, val in kw.items():
        if SCons.Util.is_List(val):
            val = _delete_duplicates(val, not delete_existing)
        if key not in self._dict or self._dict[key] in ('', None):
            # Nothing (meaningful) there yet; just set it.
            self._dict[key] = val
        elif SCons.Util.is_Dict(self._dict[key]) and \
             SCons.Util.is_Dict(val):
            self._dict[key].update(val)
        elif SCons.Util.is_List(val):
            dk = self._dict[key]
            if not SCons.Util.is_List(dk):
                dk = [dk]
            if delete_existing:
                # Drop existing copies so the new values move to front.
                dk = [x for x in dk if x not in val]
            else:
                # Keep existing positions; only prepend genuinely new values.
                val = [x for x in val if x not in dk]
            self._dict[key] = val + dk
        else:
            dk = self._dict[key]
            if SCons.Util.is_List(dk):
                # By elimination, val is not a list.  Since dk is a
                # list, wrap val in a list first.
                if delete_existing:
                    dk = [x for x in dk if x not in val]
                    self._dict[key] = [val] + dk
                else:
                    if not val in dk:
                        self._dict[key] = [val] + dk
            else:
                # Neither side is a list; rely on '+' (e.g. strings).
                if delete_existing:
                    dk = [x for x in dk if x not in val]
                self._dict[key] = val + dk
    # Any scanner keyed on one of these variables is now stale.
    self.scanner_map_delete(kw)
[ "Prepend values to existing construction variables\n in an Environment, if they're not already there.\n If delete_existing is 1, removes existing values first, so\n values move to front.\n " ]
def Replace(self, **kw):
    """Replace construction variables in this Environment with new values.

    BUILDERS gets special treatment: it is wrapped in a BuilderDict and
    installed via __setitem__ rather than the bulk update path.
    """
    if 'BUILDERS' in kw:
        kwbd = BuilderDict(kw.pop('BUILDERS'), self)
        self.__setitem__('BUILDERS', kwbd)
    kw = copy_non_reserved_keywords(kw)
    self._update(semi_deepcopy(kw))
    self.scanner_map_delete(kw)
[ "Replace existing construction variables in an Environment\n with new construction variables and/or values.\n " ]
def ReplaceIxes(self, path, old_prefix, old_suffix, new_prefix, new_suffix):
    """Swap one prefix/suffix pair for another on a path's basename.

    Each *fix argument names a construction variable whose substituted
    value supplies the actual prefix or suffix text.
    """
    old_prefix, old_suffix, new_prefix, new_suffix = [
        self.subst('$' + var)
        for var in (old_prefix, old_suffix, new_prefix, new_suffix)]
    dirname, name = os.path.split(str(path))
    # Deliberate slice comparisons (not startswith/endswith): an empty
    # old prefix/suffix must strip nothing, which slicing guarantees.
    if name[:len(old_prefix)] == old_prefix:
        name = name[len(old_prefix):]
    if name[-len(old_suffix):] == old_suffix:
        name = name[:-len(old_suffix)]
    return os.path.join(dirname, new_prefix + name + new_suffix)
[ "\n Replace old_prefix with new_prefix and old_suffix with new_suffix.\n\n env - Environment used to interpolate variables.\n path - the path that will be modified.\n old_prefix - construction variable for the old prefix.\n old_suffix - construction variable for the old suffix.\n new_prefix - construction variable for the new prefix.\n new_suffix - construction variable for the new suffix.\n " ]
def WhereIs(self, prog, path=None, pathext=None, reject=[]):
    """Find *prog* in *path*, returning its full path or None.

    ``path``/``pathext`` default to the environment's ENV settings;
    string arguments are substituted first.
    """
    if path is None:
        try:
            path = self['ENV']['PATH']
        except KeyError:
            pass
    elif SCons.Util.is_String(path):
        path = self.subst(path)
    if pathext is None:
        try:
            pathext = self['ENV']['PATHEXT']
        except KeyError:
            pass
    elif SCons.Util.is_String(pathext):
        pathext = self.subst(pathext)
    # "program --with-args" is allowed; only the program word is searched.
    prog = SCons.Util.CLVar(self.subst(prog))
    found = SCons.Util.WhereIs(prog[0], path, pathext, reject)
    return found if found else None
[ "Find prog in the path.\n " ]
def Command(self, target, source, action, **kw):
    """Build *target* from *source* with a one-off Builder wrapping *action*.

    ``action`` may be anything the Builder constructor accepts; an
    optional ``source_scanner`` keyword is forwarded to the Builder.
    """
    builder_kw = {
        'action': action,
        'target_factory': self.fs.Entry,
        'source_factory': self.fs.Entry,
    }
    if 'source_scanner' in kw:
        builder_kw['source_scanner'] = kw.pop('source_scanner')
    bld = SCons.Builder.Builder(**builder_kw)
    return bld(self, target, source, **kw)
[ "Builds the supplied target files from the supplied\n source files using the supplied action. Action may\n be any type that the Builder constructor will accept\n for an action." ]
def Depends(self, target, dependency):
    """Explicitly make every node in *target* depend on *dependency*."""
    tlist = self.arg2nodes(target, self.fs.Entry)
    dlist = self.arg2nodes(dependency, self.fs.Entry)
    for node in tlist:
        node.add_dependency(dlist)
    return tlist
[ "Explicity specify that 'target's depend on 'dependency'." ]
def NoClean(self, *targets):
    """Mark targets so that the -c option will not remove them."""
    nodes = []
    for target in targets:
        nodes.extend(self.arg2nodes(target, self.fs.Entry))
    for node in nodes:
        node.set_noclean()
    return nodes
[ "Tags a target so that it will not be cleaned by -c" ]
def NoCache(self, *targets):
    """Mark targets so that they will never be put in the cache."""
    nodes = []
    for target in targets:
        nodes.extend(self.arg2nodes(target, self.fs.Entry))
    for node in nodes:
        node.set_nocache()
    return nodes
[ "Tags a target so that it will not be cached" ]
def Execute(self, action, *args, **kw):
    """Execute an action immediately through this Environment.

    On failure the error is printed to stderr and the build status is
    returned; otherwise the action's own result is returned.
    """
    action = self.Action(action, *args, **kw)
    result = action([], [], self)
    if not isinstance(result, SCons.Errors.BuildError):
        return result
    errstr = result.errstr
    if result.filename:
        errstr = result.filename + ': ' + errstr
    sys.stderr.write("scons: *** %s\n" % errstr)
    return result.status
[ "Directly execute an action through an Environment\n " ]
def Ignore(self, target, dependency):
    """Ignore *dependency* when deciding whether *target* must be rebuilt."""
    tlist = self.arg2nodes(target, self.fs.Entry)
    dlist = self.arg2nodes(dependency, self.fs.Entry)
    for node in tlist:
        node.add_ignore(dlist)
    return tlist
[ "Ignore a dependency." ]
def Requires(self, target, prerequisite):
    """Order-only dependency: build *prerequisite* before *target*.

    *target* does not actually depend on *prerequisite* and need not be
    rebuilt when it changes.
    """
    tlist = self.arg2nodes(target, self.fs.Entry)
    plist = self.arg2nodes(prerequisite, self.fs.Entry)
    for node in tlist:
        node.add_prerequisite(plist)
    return tlist
[ "Specify that 'prerequisite' must be built before 'target',\n (but 'target' does not actually depend on 'prerequisite'\n and need not be rebuilt if it changes)." ]
def SideEffect(self, side_effect, target):
    """Declare that building *target* also produces *side_effect* files.

    Side-effect nodes are marked precious and cross-linked with every
    target node; a side effect that already has a real builder is a
    user error.
    """
    side_effects = self.arg2nodes(side_effect, self.fs.Entry)
    targets = self.arg2nodes(target, self.fs.Entry)
    for se in side_effects:
        if se.multiple_side_effect_has_builder():
            raise SCons.Errors.UserError("Multiple ways to build the same target were specified for: %s" % str(se))
        se.add_source(targets)
        se.side_effect = 1
        self.Precious(se)
        for tgt in targets:
            tgt.side_effects.append(se)
    return side_effects
[ "Tell scons that side_effects are built as side\n effects of building targets." ]
def SourceCode(self, entry, builder):
    """Arrange for a source code builder for (part of) a tree.

    Deprecated: a DeprecatedSourceCodeWarning is emitted before the
    builder is attached to each node.
    """
    # BUG FIX: the original read `msg = SCons.Warnings.warn(..., msg)`,
    # passing `msg` before it was ever assigned -- a guaranteed
    # NameError.  Define the deprecation message first and do not
    # assign warn()'s (None) return value.
    msg = ("SourceCode() has been deprecated and there is no replacement.\n"
           "\tIf you need this function, please contact [email protected]")
    SCons.Warnings.warn(SCons.Warnings.DeprecatedSourceCodeWarning, msg)
    entries = self.arg2nodes(entry, self.fs.Entry)
    for entry in entries:
        entry.set_src_builder(builder)
    return entries
[ "Arrange for a source code builder for (part of) a tree.", "SourceCode() has been deprecated and there is no replacement.\n\\tIf you need this function, please contact [email protected]" ]
def Split(self, arg):
    """Convert a string, Node, or list thereof into a list.

    A single string is substituted and split on whitespace; list items
    are substituted individually but never split; anything else is
    substituted and wrapped in a one-element list.
    """
    if SCons.Util.is_String(arg):
        return self.subst(arg).split()
    if SCons.Util.is_List(arg):
        return [self.subst(item) for item in arg]
    return [self.subst(arg)]
[ "This function converts a string or list into a list of strings\n or Nodes. This makes things easier for users by allowing files to\n be specified as a white-space separated list to be split.\n\n The input rules are:\n - A single string containing names separated by spaces. These will be\n split apart at the spaces.\n - A single Node instance\n - A list containing either strings or Node instances. Any strings\n in the list are not split at spaces.\n\n In all cases, the function returns a list of Nodes and strings." ]
def FindSourceFiles(self, node='.'):
    """Return all leaf source files below *node*, without duplicates."""
    node = self.arg2nodes(node, self.fs.Entry)[0]
    sources = []

    def collect(children):
        # Walk the graph: descend into directories and built nodes,
        # collect plain files.
        for child in children:
            if isinstance(child, SCons.Node.FS.Dir):
                collect(child.all_children())
            elif child.has_builder():
                collect(child.sources)
            elif isinstance(child.disambiguate(), SCons.Node.FS.File):
                sources.append(child)

    collect(node.all_children())

    def to_src(n):
        # Follow variant-dir links back to the real source node.
        while n != n.srcnode():
            n = n.srcnode()
        return n

    # De-duplicate via a set.
    return list(set(map(to_src, sources)))
[ " returns a list of all source files.\n " ]
def FindInstalledFiles(self):
    """Return all targets of the Install and InstallAs builders.

    The de-duplicated list is memoized in the install tool module and
    rebuilt lazily whenever it has been invalidated.
    """
    from SCons.Tool import install
    if install._UNIQUE_INSTALLED_FILES is None:
        install._UNIQUE_INSTALLED_FILES = SCons.Util.uniquer_hashables(
            install._INSTALLED_FILES)
    return install._UNIQUE_INSTALLED_FILES
[ " returns the list of all targets of the Install and InstallAs Builder.\n " ]
def get(self, key, default=None):
    """Emulate dict.get(): check overrides first, then the wrapped subject."""
    overrides = self.__dict__['overrides']
    if key in overrides:
        return overrides[key]
    return self.__dict__['__subject'].get(key, default)
[ "Emulates the get() method of dictionaries." ]
def Dictionary(self):
    """Return a merged copy of the subject's dict with overrides applied."""
    merged = dict(self.__dict__['__subject'].Dictionary())
    merged.update(self.__dict__['overrides'])
    return merged
[ "Emulates the items() method of dictionaries." ]
def FromBinary(cls, record_data, record_count=1):
    """Create a SetConstantRecord from binary record data.

    Args:
        record_data (bytearray): Raw record data, NOT including its
            8-byte record header.
        record_count (int): Number of records in record_data.

    Raises:
        ArgumentError: If the record_data is malformed.

    Returns:
        SetConstantRecord: The decoded record.
    """
    _cmd, address, _resp_length, payload = cls._parse_rpc_info(record_data)
    try:
        value, encoded_stream = struct.unpack("<LH", payload)
        stream = DataStream.FromEncoded(encoded_stream)
    except (ValueError, struct.error):
        # BUG FIX: struct.unpack signals a wrong-sized payload with
        # struct.error, which the original `except ValueError` missed.
        raise ArgumentError("Could not parse set_constant payload", payload=payload)
    return SetConstantRecord(stream, value, address=address)
[ "Create an UpdateRecord subclass from binary record data.\n\n This should be called with a binary record blob (NOT including the\n record type header) and it will decode it into a SetConstantRecord.\n\n Args:\n record_data (bytearray): The raw record data that we wish to parse\n into an UpdateRecord subclass NOT including its 8 byte record header.\n record_count (int): The number of records included in record_data.\n\n Raises:\n ArgumentError: If the record_data is malformed and cannot be parsed.\n\n Returns:\n SetConstantRecord: The decoded reflash tile record.\n " ]
def generate(env):
    """Add Builders and construction variables for pdflatex to an Environment."""
    global PDFLaTeXAction, PDFLaTeXAuxAction
    # Lazily create the shared module-level actions.
    if PDFLaTeXAction is None:
        PDFLaTeXAction = SCons.Action.Action('$PDFLATEXCOM', '$PDFLATEXCOMSTR')
    if PDFLaTeXAuxAction is None:
        PDFLaTeXAuxAction = SCons.Action.Action(
            PDFLaTeXAuxFunction, strfunction=SCons.Tool.tex.TeXLaTeXStrFunction)
    env.AppendUnique(LATEXSUFFIXES=SCons.Tool.LaTeXSuffixes)
    from . import pdf
    pdf.generate(env)
    bld = env['BUILDERS']['PDF']
    for suffix in ('.ltx', '.latex'):
        bld.add_action(suffix, PDFLaTeXAuxAction)
        bld.add_emitter(suffix, SCons.Tool.tex.tex_pdf_emitter)
    SCons.Tool.tex.generate_common(env)
[ "Add Builders and construction variables for pdflatex to an Environment." ]
def scons_copytree(src, dst, symlinks=False):
    """Recursively copy a directory tree using copy2().

    If exception(s) occur, a CopytreeError is raised carrying a list of
    (src, dst, reason) tuples.  If *symlinks* is true, symbolic links in
    the source tree result in symbolic links in the destination tree;
    otherwise the linked-to contents are copied.
    """
    names = os.listdir(src)
    # Check for dir before making dirs ([email protected] fix).
    if not os.path.exists(dst):
        os.makedirs(dst)
    errors = []
    for name in names:
        srcname = os.path.join(src, name)
        dstname = os.path.join(dst, name)
        try:
            if symlinks and os.path.islink(srcname):
                linkto = os.readlink(srcname)
                os.symlink(linkto, dstname)
            elif os.path.isdir(srcname):
                scons_copytree(srcname, dstname, symlinks)
            else:
                shutil.copy2(srcname, dstname)
            # XXX What about devices, sockets etc.?
        except (IOError, os.error) as why:
            errors.append((srcname, dstname, str(why)))
        # Catch the CopytreeError from the recursive copytree so that we
        # can continue with other files.
        except CopytreeError as err:
            errors.extend(err.args[0])
    try:
        shutil.copystat(src, dst)
    except SCons.Util.WinError:
        # Can't copy file access times on Windows.
        pass
    except OSError as why:
        # BUG FIX: was errors.extend(...), which splattered the tuple's
        # three elements into the list as separate items instead of
        # appending one (src, dst, reason) record like the branch above.
        errors.append((src, dst, str(why)))
    if errors:
        raise CopytreeError(errors)
[ "Recursively copy a directory tree using copy2().\n\n The destination directory must not already exist.\n If exception(s) occur, an CopytreeError is raised with a list of reasons.\n\n If the optional symlinks flag is true, symbolic links in the\n source tree result in symbolic links in the destination tree; if\n it is false, the contents of the files pointed to by symbolic\n links are copied.\n\n XXX Consider this example code rather than the ultimate tool.\n\n " ]
def copyFunc(dest, source, env):
    """Install *source* (file or directory) at *dest* by copying,
    including permission/mode bits.  Returns 0 on success."""
    if os.path.isdir(source):
        if os.path.exists(dest):
            if not os.path.isdir(dest):
                raise SCons.Errors.UserError("cannot overwrite non-directory `%s' with a directory `%s'" % (str(dest), str(source)))
        else:
            parent = os.path.dirname(dest)
            if not os.path.exists(parent):
                os.makedirs(parent)
        scons_copytree(source, dest)
        return 0
    shutil.copy2(source, dest)
    # Preserve the source mode bits, but always keep the copy writable.
    mode = stat.S_IMODE(os.stat(source)[stat.ST_MODE])
    os.chmod(dest, mode | stat.S_IWRITE)
    return 0
[ "Install a source file or directory into a destination by copying,\n (including copying permission/mode bits)." ]
def copyFuncVersionedLib(dest, source, env):
    """Install a versioned library by copying (including mode bits) and
    then creating the required symlinks.  Returns 0 on success."""
    if os.path.isdir(source):
        raise SCons.Errors.UserError("cannot install directory `%s' as a version library" % str(source))
    # Remove any pre-existing link/file at the destination first.
    try:
        os.remove(dest)
    except OSError:
        # BUG FIX: was a bare `except:`, which also swallowed
        # KeyboardInterrupt/SystemExit.  Only OS-level failures
        # (typically "file not found") should be ignored here.
        pass
    shutil.copy2(source, dest)
    st = os.stat(source)
    os.chmod(dest, stat.S_IMODE(st[stat.ST_MODE]) | stat.S_IWRITE)
    installShlibLinks(dest, source, env)
    return 0
[ "Install a versioned library into a destination by copying,\n (including copying permission/mode bits) and then creating\n required symlinks." ]
def installShlibLinks(dest, source, env):
    """Create the symlinks required when installing a versioned shared library."""
    Verbose = False
    symlinks = listShlibLinksToInstall(dest, source, env)
    if Verbose:
        # BUG FIX: '{:r}' is not a valid format specification and raises
        # ValueError at runtime; the repr conversion flag '{!r}' was meant.
        print('installShlibLinks: symlinks={!r}'.format(
            SCons.Tool.StringizeLibSymlinks(symlinks)))
    if symlinks:
        SCons.Tool.CreateLibSymlinks(env, symlinks)
    return
[ "If we are installing a versioned shared library create the required links." ]
def installFunc(target, source, env):
    """Install each source into its target using the callable stored in
    the INSTALL construction variable.  Returns 0 on success, 1 on the
    first failure."""
    try:
        install = env['INSTALL']
    except KeyError:
        raise SCons.Errors.UserError('Missing INSTALL construction variable.')
    assert len(target) == len(source), \
        "Installing source %s into target %s: target and source lists must have same length." % (list(map(str, source)), list(map(str, target)))
    for tgt, src in zip(target, source):
        if install(tgt.get_path(), src.get_path(), env):
            return 1
    return 0
[ "Install a source file into a target using the function specified\n as the INSTALL construction variable." ]
def installFuncVersionedLib(target, source, env):
    """Install each versioned library using the callable stored in the
    INSTALLVERSIONEDLIB construction variable.  Returns 0 on success,
    1 on the first failure."""
    try:
        install = env['INSTALLVERSIONEDLIB']
    except KeyError:
        raise SCons.Errors.UserError('Missing INSTALLVERSIONEDLIB construction variable.')
    assert len(target) == len(source), \
        "Installing source %s into target %s: target and source lists must have same length." % (list(map(str, source)), list(map(str, target)))
    for tgt, src in zip(target, source):
        # Versioned shared libraries install under their shlibname, when set.
        if hasattr(tgt.attributes, 'shlibname'):
            tpath = os.path.join(tgt.get_dir(), tgt.attributes.shlibname)
        else:
            tpath = tgt.get_path()
        if install(tpath, src.get_path(), env):
            return 1
    return 0
[ "Install a versioned library into a target using the function specified\n as the INSTALLVERSIONEDLIB construction variable." ]
def add_targets_to_INSTALLED_FILES(target, source, env):
    """Emitter that records every installed target file.

    Appends all targets to the module-global _INSTALLED_FILES list so
    the installed files of one scons invocation can be collected, and
    invalidates the memoized _UNIQUE_INSTALLED_FILES cache.
    """
    global _INSTALLED_FILES, _UNIQUE_INSTALLED_FILES
    _INSTALLED_FILES.extend(target)
    # Force FindInstalledFiles() to rebuild the de-duplicated list.
    _UNIQUE_INSTALLED_FILES = None
    return (target, source)
[ " An emitter that adds all target files to the list stored in the\n _INSTALLED_FILES global variable. This way all installed files of one\n scons call will be collected.\n " ]
def add_versioned_targets_to_INSTALLED_FILES(target, source, env):
    """Emitter that records versioned-library targets and emits their symlinks.

    Appends all targets to the module-global _INSTALLED_FILES list and
    invalidates the memoized _UNIQUE_INSTALLED_FILES cache.
    """
    global _INSTALLED_FILES, _UNIQUE_INSTALLED_FILES
    Verbose = False
    _INSTALLED_FILES.extend(target)
    if Verbose:
        # BUG FIX: '{:r}' is an invalid format spec (raises ValueError);
        # the repr conversion flag '{!r}' was intended.
        print("add_versioned_targets_to_INSTALLED_FILES: target={!r}".format(
            list(map(str, target))))
    symlinks = listShlibLinksToInstall(target[0], source, env)
    if symlinks:
        SCons.Tool.EmitLibSymlinks(env, symlinks, target[0])
    _UNIQUE_INSTALLED_FILES = None
    return (target, source)
[ " An emitter that adds all target files to the list stored in the\n _INSTALLED_FILES global variable. This way all installed files of one\n scons call will be collected.\n " ]
def encode_contents(self):
    """Encode this update record's contents, excluding the record header.

    Returns:
        bytearray: packed (rpc_id, address, length byte) header followed
        by the raw payload.
    """
    # Bit 0 of the length byte flags a variable-size response; a fixed
    # response size lives in the upper seven bits.
    resp_length = 1 if self.variable_size else self.fixed_response_size << 1
    header = struct.pack("<HBB", self.rpc_id, self.address, resp_length)
    return bytearray(header) + self.payload
[ "Encode the contents of this update record without including a record header.\n\n Returns:\n bytearray: The encoded contents.\n " ]
def FromBinary(cls, record_data, record_count=1):
    """Create a SendRPCRecord from binary record data.

    Args:
        record_data (bytearray): Raw record data, NOT including its
            8-byte record header.
        record_count (int): Number of records in record_data.

    Returns:
        SendRPCRecord: The decoded record.
    """
    cmd, address, resp_length, payload = cls._parse_rpc_info(record_data)
    # Bit 0 set means the response length is variable; otherwise the
    # upper seven bits hold the fixed length.
    fixed_length = None if resp_length & 0b1 else resp_length >> 1
    return cls(address, cmd, payload, fixed_length)
[ "Create an UpdateRecord subclass from binary record data.\n\n This should be called with a binary record blob (NOT including the\n record type header) and it will decode it into a SendRPCRecord.\n\n Args:\n record_data (bytearray): The raw record data that we wish to parse\n into an UpdateRecord subclass NOT including its 8 byte record header.\n record_count (int): The number of records included in record_data.\n\n Raises:\n ArgumentError: If the record_data is malformed and cannot be parsed.\n\n Returns:\n SendRPCRecord: The decoded reflash tile record.\n " ]
def parse_multiple_rpcs(cls, record_data):
    """Parse record_data into a list of parsed error-checking RPCs.

    Raises:
        ArgumentError: If a record has the wrong type or a corrupt
            length field.
    """
    rpcs = []
    while len(record_data) > 0:
        total_length, record_type = struct.unpack_from("<LB3x", record_data)
        if record_type != SendErrorCheckingRPCRecord.RecordType:
            raise ArgumentError("Record set contains a record that is not an error checking RPC", record_type=record_type)
        # ROBUSTNESS FIX: a corrupt total_length < 8 previously produced an
        # empty slice and a record_data that never shrank -- infinite loop.
        if total_length < 8:
            raise ArgumentError("Record set contains a record with an invalid length", total_length=total_length)
        record_contents = record_data[8:total_length]
        rpcs.append(cls._parse_rpc_info(record_contents))
        record_data = record_data[total_length:]
    return rpcs
[ "Parse record_data into multiple error checking rpcs." ]
def execute(self, sensor_graph, scope_stack):
    """Add this metadata declaration to *sensor_graph*.

    Metadata may only be declared at root (global) scope; any other
    scope raises SensorGraphSemanticError.
    """
    current_scope = scope_stack[-1]
    if not isinstance(current_scope, RootScope):
        raise SensorGraphSemanticError(
            "You may only declare metadata at global scope in a sensorgraph.",
            identifier=self.identifier, value=self.value)
    sensor_graph.add_metadata(self.identifier, self.value)
[ "Execute this statement on the sensor_graph given the current scope tree.\n\n This function will likely modify the sensor_graph and will possibly\n also add to or remove from the scope_tree. If there are children nodes\n they will be called after execute_before and before execute_after,\n allowing block statements to sandwich their children in setup and teardown\n functions.\n\n Args:\n sensor_graph (SensorGraph): The sensor graph that we are building or\n modifying\n scope_stack (list(Scope)): A stack of nested scopes that may influence\n how this statement allocates clocks or other stream resources.\n " ]
def generate(env):
    """Add Builders and construction variables for clang to an Environment."""
    SCons.Tool.cc.generate(env)
    env['CC'] = env.Detect(compilers) or 'clang'
    shared_flags = '$CCFLAGS' if env['PLATFORM'] in ('cygwin', 'win32') else '$CCFLAGS -fPIC'
    env['SHCCFLAGS'] = SCons.Util.CLVar(shared_flags)
    if not env['CC']:
        return
    # clang's -dumpversion is of no use; scrape `clang --version` instead.
    pipe = SCons.Action._subproc(env, [env['CC'], '--version'],
                                 stdin='devnull', stderr='devnull',
                                 stdout=subprocess.PIPE)
    if pipe.wait() != 0:
        return
    line = pipe.stdout.readline()
    if sys.version_info[0] > 2:
        line = line.decode()
    match = re.search(r'clang +version +([0-9]+(?:\.[0-9]+)+)', line)
    if match:
        env['CCVERSION'] = match.group(1)
[ "Add Builders and construction variables for clang to an Environment." ]
def run(self):
    """Thread entry point: iterate a generator routine or loop a callable.

    In generator mode the generator is stepped once per cycle; in
    callable mode the routine is invoked once per cycle.  Between cycles
    the wait interval is sliced into _wait_count sleeps of _wait seconds
    so a stop request is noticed promptly.  Exceptions are printed with
    a traceback rather than propagating out of the thread.
    """
    if self._generator:
        try:
            gen = self._routine(*self._worker_args, **self._worker_kwargs)
            while True:
                if self._stop_condition.is_set():
                    return
                # Signal wait_running() that the routine has started.
                self._running.set()
                next(gen)
                for _i in range(0, self._wait_count):
                    if self._stop_condition.is_set():
                        return
                    time.sleep(self._wait)
        except StopIteration:
            # Generator exhausted itself; a normal exit.
            pass
        except Exception:
            print("Exception occurred in background worker thread")
            traceback.print_exc()
    else:
        try:
            while True:
                if self._stop_condition.is_set():
                    break
                self._running.set()
                self._routine(*self._worker_args, **self._worker_kwargs)
                # Wait for the desired interval, checking if we should exit.
                for _i in range(0, self._wait_count):
                    if self._stop_condition.is_set():
                        return
                    time.sleep(self._wait)
        except Exception:
            print("Exception occurred in background worker thread")
            traceback.print_exc()
[ "The target routine called to start thread activity.\n\n If the thread is created with a generator function, this iterates\n the generator and checks for a stop condition between each iteration.\n\n If the thread is created with a normal function, that function is called\n in a loop with the stop condition checked between each invocation.\n " ]
def wait_running(self, timeout=None):
    """Block until the thread has passed control to its routine.

    Args:
        timeout (float): Maximum seconds to wait.

    Raises:
        TimeoutExpiredError: If the thread did not start in time.
    """
    if self._running.wait(timeout) is False:
        raise TimeoutExpiredError("Timeout waiting for thread to start running")
[ "Wait for the thread to pass control to its routine.\n\n Args:\n timeout (float): The maximum amount of time to wait\n " ]
def create_event(self, register=False):
    """Create an asyncio.Event bound to the emulation loop.

    Args:
        register (bool): When True the event is added to the set of
            events that must be set before wait_idle() considers the
            loop idle.

    Returns:
        asyncio.Event: The new event.
    """
    # NOTE(review): the explicit loop= argument was removed from
    # asyncio.Event in Python 3.10 -- confirm the supported versions.
    event = asyncio.Event(loop=self._loop)
    if register:
        self._events.add(event)
    return event
[ "Create an asyncio.Event inside the emulation loop.\n\n This method exists as a convenience to create an Event object that is\n associated with the correct EventLoop(). If you pass register=True,\n then the event will be registered as an event that must be set for the\n EmulationLoop to be considered idle. This means that whenever\n wait_idle() is called, it will block until this event is set.\n\n Examples of when you may want this behavior is when the event is\n signaling whether a tile has completed restarting itself. The reset()\n rpc cannot block until the tile has initialized since it may need to\n send its own rpcs as part of the initialization process. However, we\n want to retain the behavior that once the reset() rpc returns the tile\n has been completely reset.\n\n The cleanest way of achieving this is to have the tile set its\n self.initialized Event when it has finished rebooting and register\n that event so that wait_idle() nicely blocks until the reset process\n is complete.\n\n Args:\n register (bool): Whether to register the event so that wait_idle\n blocks until it is set.\n\n Returns:\n asyncio.Event: The Event object.\n " ]
def create_queue(self, register=False):
    """Create a work queue attached to the emulation loop.

    Args:
        register (bool): When True the queue is registered so that
            wait_idle() blocks until it is empty.

    Returns:
        asyncio.Queue: The new queue.
    """
    # NOTE(review): the explicit loop= argument was removed from
    # asyncio.Queue in Python 3.10 -- confirm the supported versions.
    queue = asyncio.Queue(loop=self._loop)
    if register:
        self._work_queues.add(queue)
    return queue
[ "Create a new work queue and optionally register it.\n\n This will make sure the queue is attached to the correct event loop.\n You can optionally choose to automatically register it so that\n wait_idle() will block until the queue is empty.\n\n Args:\n register (bool): Whether to call register_workqueue() automatically.\n\n Returns:\n asyncio.Queue: The newly created queue.\n " ]
def finish_async_rpc(self, address, rpc_id, *response):
    """Complete an RPC that previously responded asynchronously.

    ``response`` may be empty, a single bytes/bytearray payload, or a
    struct format string followed by the values to pack with
    pack_rpc_payload().  Must be called from inside the emulation loop.
    """
    self.verify_calling_thread(True, "All asynchronous rpcs must be finished from within the emulation loop")
    if not response:
        response_bytes = b''
    elif len(response) == 1:
        response_bytes = response[0]
        if not isinstance(response_bytes, (bytes, bytearray)):
            raise ArgumentError("When passing a binary response to finish_async_rpc, you must "
                                "pass a bytes or bytearray object", response=response_bytes)
    else:
        resp_format, resp_args = response[0], response[1:]
        if not isinstance(resp_format, str):
            raise ArgumentError("When passing a formatted response to finish_async_rpc, you must "
                                "pass a str object with the format code as the first parameter after "
                                "the rpc id.", resp_format=resp_format, additional_args=resp_args)
        response_bytes = pack_rpc_payload(resp_format, resp_args)
    self._rpc_queue.finish_async_rpc(address, rpc_id, response_bytes)
[ "Finish a previous asynchronous RPC.\n\n This method should be called by a peripheral tile that previously\n had an RPC called on it and chose to response asynchronously by\n raising ``AsynchronousRPCResponse`` in the RPC handler itself.\n\n The response passed to this function will be returned to the caller\n as if the RPC had returned it immediately.\n\n This method must only ever be called from a coroutine inside the\n emulation loop that is handling background work on behalf of a tile.\n\n Args:\n address (int): The tile address the RPC was called on.\n rpc_id (int): The ID of the RPC that was called.\n *response: The response that should be returned to the caller.\n\n This can either be a single bytes or bytearray object or a\n str object containing the format code followed by the required\n number of python objects that will then be packed using\n pack_rpc_payload(format, args).\n\n If you pass no additional response arguments then an\n empty response will be given.\n " ]
def start(self):
    """Start the background emulation loop thread.

    Raises:
        ArgumentError: If called more than once.
    """
    if self._started is True:
        raise ArgumentError("EmulationLoop.start() called multiple times")
    worker = threading.Thread(target=self._loop_thread_main)
    worker.start()
    self._thread = worker
    self._started = True
[ "Start the background emulation loop." ]
def stop(self):
    """Stop the background emulation loop and join its thread.

    Must be called from outside the event loop; raises ArgumentError if
    start() was never called.
    """
    if self._started is False:
        raise ArgumentError("EmulationLoop.stop() called without calling start()")
    self.verify_calling_thread(False, "Cannot call EmulationLoop.stop() from inside the event loop")
    if not self._thread.is_alive():
        return
    # Schedule the shutdown coroutine on the loop, then wait it out.
    self._loop.call_soon_threadsafe(self._loop.create_task, self._clean_shutdown())
    self._thread.join()
[ "Stop the background emulation loop." ]
def wait_idle(self, timeout=1.0):
    """Wait until all registered queues are empty and registered events set.

    May be called from inside or outside the event loop.  Outside, it
    blocks the calling thread; inside, it returns an awaitable that the
    caller must await.

    Args:
        timeout (float): The maximum number of seconds to wait.

    Raises:
        TimeoutExpiredError: If the loop does not become idle in time.
    """
    async def _awaiter():
        # One join() per registered work queue, plus a wait() for every
        # registered event that is not yet set.
        background_work = {x.join() for x in self._work_queues}
        for event in self._events:
            if not event.is_set():
                background_work.add(event.wait())
        _done, pending = await asyncio.wait(background_work, timeout=timeout)
        if len(pending) > 0:
            raise TimeoutExpiredError("Timeout waiting for event loop to become idle", pending=pending)
    if self._on_emulation_thread():
        # NOTE(review): returns an un-awaited coroutine for the caller to
        # await; the timeout is applied both here and in asyncio.wait.
        return asyncio.wait_for(_awaiter(), timeout=timeout)
    self.run_task_external(_awaiter())
    return None
[ "Wait until the rpc queue is empty.\n\n This method may be called either from within the event loop or from\n outside of it. If it is called outside of the event loop it will\n block the calling thread until the rpc queue is temporarily empty.\n\n If it is called from within the event loop it will return an awaitable\n object that can be used to wait for the same condition.\n\n The awaitable object will already have a timeout if the timeout\n parameter is passed.\n\n Args:\n timeout (float): The maximum number of seconds to wait.\n " ]
def run_task_external(self, coroutine):
    """Inject a coroutine into the emulation loop and block until it finishes.

    Must be called from outside the emulation thread.  The coroutine's
    return value is passed back to the caller; any exception it raises is
    re-raised in the calling thread.

    Args:
        coroutine (coroutine): The task to inject into the event loop.

    Returns:
        object: Whatever the coroutine returned.
    """
    self.verify_calling_thread(False, 'run_task_external must not be called from the emulation thread')

    # run_coroutine_threadsafe hands the work to the loop's thread; blocking
    # on the returned future bridges the result back to this thread.
    return asyncio.run_coroutine_threadsafe(coroutine, self._loop).result()
[ "Inject a task into the emulation loop and wait for it to finish.\n\n The coroutine parameter is run as a Task inside the EmulationLoop\n until it completes and the return value (or any raised Exception) is\n pased back into the caller's thread.\n\n Args:\n coroutine (coroutine): The task to inject into the event loop.\n\n Returns:\n object: Whatever the coroutine returned.\n " ]
def call_rpc_external(self, address, rpc_id, arg_payload, timeout=10.0):
    """Call an RPC from outside of the event loop and block until it finishes.

    Args:
        address (int): The address of the tile this RPC is for.
        rpc_id (int): The 16-bit number of the RPC.
        arg_payload (bytes): The argument payload for the RPC.
        timeout (float): The maximum time to wait for the RPC to finish.

    Returns:
        bytes: The response payload from the RPC.
    """
    self.verify_calling_thread(False, "call_rpc_external is for use **outside** of the event loop")

    reply = CrossThreadResponse()

    # Hand the RPC to the dispatch queue from this foreign thread.
    queue_rpc = self._rpc_queue.put_rpc
    self._loop.call_soon_threadsafe(queue_rpc, address, rpc_id, arg_payload, reply)

    try:
        result = reply.wait(timeout)
    except RPCRuntimeError as err:
        # Runtime errors carry their encoded payload, which is the answer.
        result = err.binary_error

    return result
[ "Call an RPC from outside of the event loop and block until it finishes.\n\n This is the main method by which a caller outside of the EmulationLoop\n can inject an RPC into the EmulationLoop and wait for it to complete.\n This method is synchronous so it blocks until the RPC completes or the\n timeout expires.\n\n Args:\n address (int): The address of the mock tile this RPC is for\n rpc_id (int): The number of the RPC\n payload (bytes): A byte string of payload parameters up to 20 bytes\n timeout (float): The maximum time to wait for the RPC to finish.\n\n Returns:\n bytes: The response payload from the RPC\n " ]
async def await_rpc(self, address, rpc_id, *args, **kwargs):
    """Send an RPC from inside the EmulationLoop and await its completion.

    Must only be called from inside the emulation loop.  The RPC is queued
    for the dispatch task and this coroutine blocks until it completes.

    Args:
        address (int): The address of the tile that has the RPC.
        rpc_id (int or RPCDeclaration): The 16-bit id of the rpc to call.
            An RPCDeclaration carries its own argument/response formats.
        *args: Any required arguments for the RPC as python objects.
        **kwargs: Only ``arg_format`` and ``resp_format`` are recognized.

    Returns:
        list: The decoded response members, or [] when resp_format is None.
    """
    self.verify_calling_thread(True, "await_rpc must be called from **inside** the event loop")

    # An RPCDeclaration bundles the id together with its payload formats.
    if isinstance(rpc_id, RPCDeclaration):
        arg_format = rpc_id.arg_format
        resp_format = rpc_id.resp_format
        rpc_id = rpc_id.rpc_id
    else:
        arg_format = kwargs.get('arg_format', None)
        resp_format = kwargs.get('resp_format', None)

    arg_payload = b''
    if arg_format is not None:
        arg_payload = pack_rpc_payload(arg_format, args)

    self._logger.debug("Sending rpc to %d:%04X, payload=%s", address, rpc_id, args)

    response = AwaitableResponse()
    self._rpc_queue.put_rpc(address, rpc_id, arg_payload, response)

    try:
        # NOTE(review): the response wait is hard-coded to 1 second, ignoring
        # any caller expectation -- confirm this is long enough for all RPCs.
        resp_payload = await response.wait(1.0)
    except RPCRuntimeError as err:
        # A failed RPC encodes its error status as the response payload.
        resp_payload = err.binary_error

    if resp_format is None:
        return []

    resp = unpack_rpc_payload(resp_format, resp_payload)
    return resp
[ "Send an RPC from inside the EmulationLoop.\n\n This is the primary method by which tasks running inside the\n EmulationLoop dispatch RPCs. The RPC is added to the queue of waiting\n RPCs to be drained by the RPC dispatch task and this coroutine will\n block until it finishes.\n\n **This method must only be called from inside the EmulationLoop**\n\n Args:\n address (int): The address of the tile that has the RPC.\n rpc_id (int): The 16-bit id of the rpc we want to call\n *args: Any required arguments for the RPC as python objects.\n **kwargs: Only two keyword arguments are supported:\n - arg_format: A format specifier for the argument list\n - result_format: A format specifier for the result\n\n Returns:\n list: A list of the decoded response members from the RPC.\n " ]
def verify_calling_thread(self, should_be_emulation, message=None):
    """Verify whether the caller is (or is not) on the emulation thread.

    Args:
        should_be_emulation (bool): True if this call must be on the
            emulation thread, False if it must not be.
        message (str): Optional message for the raised exception; a generic
            message is used when omitted.

    Raises:
        InternalError: When called from the wrong thread.
    """
    on_loop = self._on_emulation_thread()
    if should_be_emulation != on_loop:
        raise InternalError(message if message is not None else "Operation performed on invalid thread")
[ "Verify if the calling thread is or is not the emulation thread.\n\n This method can be called to make sure that an action is being taken\n in the appropriate context such as not blocking the event loop thread\n or modifying an emulate state outside of the event loop thread.\n\n If the verification fails an InternalError exception is raised,\n allowing this method to be used to protect other methods from being\n called in a context that could deadlock or cause race conditions.\n\n Args:\n should_be_emulation (bool): True if this call should be taking place\n on the emulation, thread, False if it must not take place on\n the emulation thread.\n message (str): Optional message to include when raising the exception.\n Otherwise a generic message is used.\n\n Raises:\n InternalError: When called from the wrong thread.\n " ]
def add_task(self, tile_address, coroutine):
    """Register a background task with the event loop from any thread.

    The task is associated with a tile so it can be cancelled when that
    tile is reset; pass None for tile_address to opt out of that behavior.

    Args:
        tile_address (int): The address of the tile running the task, or
            None for tasks not tied to any tile.
        coroutine (coroutine): The coroutine to add to the event loop.
    """
    # Defer to the loop's thread; _add_task itself is not thread safe.
    loop = self._loop
    loop.call_soon_threadsafe(self._add_task, tile_address, coroutine)
[ "Add a task into the event loop.\n\n This is the main entry point for registering background tasks that are\n associated with a tile. The tasks are added to the EmulationLoop and\n the tile they are a part of is recorded. When the tile is reset, all\n of its background tasks are canceled as part of the reset process.\n\n If you have a task that should not be associated with any tile, you\n may pass `None` for tile_address and the task will not be cancelled\n when any tile is reset.\n\n Args:\n tile_address (int): The address of the tile running\n the task.\n coroutine (coroutine): A coroutine that will be added\n to the event loop.\n " ]
async def stop_tasks(self, address):
    """Cancel all tasks pertaining to a tile and wait for them to stop.

    Args:
        address (int): The address of the tile whose tasks should stop.
    """
    tasks = self._tasks.get(address, [])
    for task in tasks:
        task.cancel()

    # Bug fix: the gather must be awaited; previously the aggregate future
    # was discarded, so this coroutine returned before the cancelled tasks
    # had actually finished (contradicting the documented contract).
    await asyncio.gather(*tasks, return_exceptions=True)

    self._tasks[address] = []
[ "Clear all tasks pertaining to a tile.\n\n This coroutine will synchronously cancel all running tasks that were\n attached to the given tile and wait for them to stop before returning.\n\n Args:\n address (int): The address of the tile we should stop.\n " ]
Please provide a description of the function:async def _clean_shutdown(self): # Cleanly stop any other outstanding tasks not associated with tiles remaining_tasks = [] for task in self._tasks.get(None, []): self._logger.debug("Cancelling task at shutdown %s", task) task.cancel() remaining_tasks.append(task) asyncio.gather(*remaining_tasks, return_exceptions=True) if len(remaining_tasks) > 0: del self._tasks[None] # Shutdown tasks associated with each tile remaining_tasks = [] for address in sorted(self._tasks, reverse=True): if address is None: continue self._logger.debug("Shutting down tasks for tile at %d", address) for task in self._tasks.get(address, []): task.cancel() remaining_tasks.append(task) asyncio.gather(*remaining_tasks, return_exceptions=True) await self._rpc_queue.stop() self._loop.stop()
[ "Cleanly shutdown the emulation loop." ]
Please provide a description of the function:def _add_task(self, tile_address, coroutine): self.verify_calling_thread(True, "_add_task is not thread safe") if tile_address not in self._tasks: self._tasks[tile_address] = [] task = self._loop.create_task(coroutine) self._tasks[tile_address].append(task)
[ "Add a task from within the event loop.\n\n All tasks are associated with a tile so that they can be cleanly\n stopped when that tile is reset.\n " ]
def key_rule(self, regex, verifier):
    """Add a verification rule applied to keys not otherwise declared.

    Keys that are neither required nor optional must match at least one of
    the registered key rules (the rules are OR'ed together).

    Args:
        regex (str): Regular expression selecting the keys this rule
            applies to, or None to apply to every key.
        verifier (Verifier): The verification rule for matching keys.
    """
    pattern = re.compile(regex) if regex is not None else None
    self._additional_key_rules.append((pattern, verifier))
[ "Add a rule with a pattern that should apply to all keys.\n\n Any key not explicitly listed in an add_required or add_optional rule\n must match ONE OF the rules given in a call to key_rule().\n So these rules are all OR'ed together.\n\n In this case you should pass a raw string specifying a regex that is\n used to determine if the rule is used to check a given key.\n\n\n Args:\n regex (str): The regular expression used to match the rule or None\n if this should apply to all\n verifier (Verifier): The verification rule\n " ]
def verify(self, obj):
    """Verify that *obj* conforms to this verifier's dictionary schema.

    Required keys are checked first, then optional keys; any remaining key
    must match at least one additional key rule registered via key_rule().

    Args:
        obj (object): A python object to verify.

    Returns:
        dict: A new dict with the verified (possibly converted) values.

    Raises:
        ValidationError: If the object does not validate, with at least the
            ``reason`` keyword describing why.
    """
    if not isinstance(obj, dict):
        raise ValidationError("Invalid dictionary", reason="object is not a dictionary")

    if self._fixed_length is not None and len(obj) != self._fixed_length:
        # Bug fix: report the dictionary's actual length; previously the
        # expected length was passed for both values.
        raise ValidationError("Dictionary did not have the correct length",
                              expected_length=self._fixed_length, actual_length=len(obj))

    out_obj = {}
    unmatched_keys = set(obj.keys())
    required_keys = set(self._required_keys.keys())

    # First check that all required keys are included and verify them.
    for key in required_keys:
        if key not in unmatched_keys:
            raise ValidationError("Required key not found in dictionary",
                                  reason="required key %s not found" % key, key=key)

        out_obj[key] = self._required_keys[key].verify(obj[key])
        unmatched_keys.remove(key)

    # Now verify any optional keys that are present.
    to_remove = set()
    for key in unmatched_keys:
        if key not in self._optional_keys:
            continue

        out_obj[key] = self._optional_keys[key].verify(obj[key])
        to_remove.add(key)

    unmatched_keys -= to_remove

    # Any leftover keys must match at least one additional key rule.
    if len(unmatched_keys) > 0:
        if len(self._additional_key_rules) == 0:
            raise ValidationError("Extra key found in dictionary that does not allow extra keys",
                                  reason="extra keys found that were not expected", keys=unmatched_keys)

        to_remove = set()
        for key in unmatched_keys:
            for key_match, rule in self._additional_key_rules:
                # Bug fix: compiled re patterns expose match(), not matches();
                # the old call raised AttributeError for any regex rule.
                if key_match is None or key_match.match(key):
                    out_obj[key] = rule.verify(obj[key])
                    to_remove.add(key)
                    break

        unmatched_keys -= to_remove
        if len(unmatched_keys) > 0:
            raise ValidationError("Extra key found in dictionary that did not match any extra key rule",
                                  reason="extra keys found that did not match any rule", keys=unmatched_keys)

    return out_obj
[ "Verify that the object conforms to this verifier's schema\n\n Args:\n obj (object): A python object to verify\n\n Raises:\n ValidationError: If there is a problem verifying the dictionary, a\n ValidationError is thrown with at least the reason key set indicating\n the reason for the lack of validation.\n " ]
def stream(self, report, callback=None):
    """Queue a report for streaming to a connected client.

    Broadcast reports are always delivered; other reports are only
    delivered while a client is connected.

    Args:
        report (IOTileReport): The report to stream to a client.
        callback (callable): Optional; called with True if the report was
            actually streamed and False if it was dropped.
    """
    conn_id = self._find_connection(self.conn_string)
    is_broadcast = isinstance(report, BroadcastReport)

    if is_broadcast:
        self.adapter.notify_event_nowait(self.conn_string, 'broadcast', report)
    elif conn_id is not None:
        self.adapter.notify_event_nowait(self.conn_string, 'report', report)

    if callback is not None:
        callback(is_broadcast or conn_id is not None)
[ "Queue data for streaming\n\n Args:\n report (IOTileReport): A report object to stream to a client\n callback (callable): An optional callback that will be called with\n a bool value of True when this report actually gets streamed.\n If the client disconnects and the report is dropped instead,\n callback will be called with False\n " ]
def trace(self, data, callback=None):
    """Queue unstructured tracing data for a connected client.

    Args:
        data (bytearray, string): Unstructured data to trace to any
            connected client.
        callback (callable): Optional; called with True if the data was
            actually traced and False if it was dropped (no client).
    """
    conn_id = self._find_connection(self.conn_string)
    connected = conn_id is not None

    if connected:
        self.adapter.notify_event_nowait(self.conn_string, 'trace', data)

    if callback is not None:
        callback(connected)
[ "Queue data for tracing\n\n Args:\n data (bytearray, string): Unstructured data to trace to any\n connected client.\n callback (callable): An optional callback that will be called with\n a bool value of True when this data actually gets traced.\n If the client disconnects and the data is dropped instead,\n callback will be called with False.\n " ]
def _load_device(self, name, config):
    """Load a virtual device either from a script or from an installed module.

    Args:
        name (str): The name of an installed iotile.virtual_device entry
            point, or a path to a .py file declaring the device class.
        config (str or dict or None): Device configuration.  May be a dict
            used directly, a path to a json file with a 'device' key, or a
            '#'-prefixed base64-encoded json blob (testing convenience).

    Returns:
        VirtualIOTileDevice: The instantiated device.

    Raises:
        ArgumentError: If the config file cannot be read or lacks a
            'device' key, or if no virtual device matches ``name``.
    """
    if config is None:
        config_dict = {}
    elif isinstance(config, dict):
        config_dict = config
    elif config[0] == '#':
        # Allow passing base64 encoded json directly in the port string to ease testing.
        import base64
        config_str = str(base64.b64decode(config[1:]), 'utf-8')
        config_dict = json.loads(config_str)
    else:
        try:
            with open(config, "r") as conf:
                data = json.load(conf)
        except IOError as exc:
            raise ArgumentError("Could not open config file", error=str(exc), path=config)

        if 'device' not in data:
            raise ArgumentError("Invalid configuration file passed to VirtualDeviceAdapter",
                                device_name=name, config_path=config, missing_key='device')

        config_dict = data['device']

    reg = ComponentRegistry()

    if name.endswith('.py'):
        # A script path: load the single VirtualIOTileDevice subclass it defines.
        _name, device_factory = reg.load_extension(name, class_filter=VirtualIOTileDevice, unique=True)
        return device_factory(config_dict)

    # Otherwise search the installed virtual_device entry points by name,
    # remembering the names we saw for a helpful error message.
    seen_names = []
    for device_name, device_factory in reg.load_extensions('iotile.virtual_device',
                                                           class_filter=VirtualIOTileDevice,
                                                           product_name="virtual_device"):
        if device_name == name:
            return device_factory(config_dict)

        seen_names.append(device_name)

    raise ArgumentError("Could not find virtual_device by name", name=name, known_names=seen_names)
[ "Load a device either from a script or from an installed module" ]
async def connect(self, conn_id, connection_string):
    """Asynchronously connect to a virtual device.

    Args:
        conn_id (int): A unique identifier for this connection.
        connection_string (str): The device's iotile id as a string.

    Raises:
        DeviceAdapterError: If the device does not exist or is already
            connected.
    """
    device = self.devices.get(int(connection_string))
    if device is None:
        raise DeviceAdapterError(conn_id, 'connect', 'device not found')

    # Reject both an existing adapter connection and a device-side flag.
    if self._get_conn_id(connection_string) is not None or device.connected:
        raise DeviceAdapterError(conn_id, 'connect', 'device already connected')

    device.connected = True
    self._setup_connection(conn_id, connection_string)
    self._track_property(conn_id, 'device', device)
[ "Asynchronously connect to a device\n\n Args:\n conn_id (int): A unique identifer that will refer to this connection\n connection_string (string): A DeviceAdapter specific string that can be used to connect to\n a device using this DeviceAdapter.\n callback (callable): A function that will be called when the connection attempt finishes as\n callback(conection_id, adapter_id, success: bool, failure_reason: string or None)\n " ]
async def disconnect(self, conn_id):
    """Asynchronously disconnect from a connected virtual device.

    Args:
        conn_id (int): The unique identifier of the connection to close.
    """
    self._ensure_connection(conn_id, True)

    # Clear the device-side flag before tearing down adapter state.
    device = self._get_property(conn_id, 'device')
    device.connected = False

    self._teardown_connection(conn_id)
[ "Asynchronously disconnect from a connected device\n\n Args:\n conn_id (int): A unique identifier that will refer to this connection\n callback (callback): A callback that will be called as\n callback(conn_id, adapter_id, success, failure_reason)\n " ]
async def send_rpc(self, conn_id, address, rpc_id, payload, timeout):
    """Asynchronously send an RPC to a connected virtual device.

    Known RPC error types propagate unchanged; any other exception is
    logged before being re-raised.

    Args:
        conn_id (int): The unique identifier of the connection.
        address (int): The address of the tile to send the RPC to.
        rpc_id (int): The 16-bit id of the RPC to call.
        payload (bytearray): The argument payload of the command.
        timeout (float): Seconds to wait for the RPC to execute.
    """
    self._ensure_connection(conn_id, True)
    dev = self._get_property(conn_id, 'device')

    try:
        result = dev.call_rpc(address, rpc_id, bytes(payload))
        # The device may implement the RPC as a coroutine.
        if inspect.iscoroutine(result):
            result = await result
        return result
    except (RPCInvalidIDError, RPCNotFoundError, TileNotFoundError, RPCErrorCode, BusyRPCResponse):
        raise
    except Exception:
        self._logger.exception("Exception inside rpc %d:0x%04X, payload=%s", address, rpc_id, payload)
        raise
[ "Asynchronously send an RPC to this IOTile device\n\n Args:\n conn_id (int): A unique identifier that will refer to this connection\n address (int): the address of the tile that we wish to send the RPC to\n rpc_id (int): the 16-bit id of the RPC we want to call\n payload (bytearray): the payload of the command\n timeout (float): the number of seconds to wait for the RPC to execute\n " ]
async def send_script(self, conn_id, data):
    """Asynchronously send a script to a connected virtual device.

    Emits simulated progress notifications at 0%, 50% and 100% before
    storing the script on the device.

    Args:
        conn_id (int): The unique identifier of the connection.
        data (bytes or bytearray): The script to send to the device.
    """
    self._ensure_connection(conn_id, True)

    dev = self._get_property(conn_id, 'device')
    conn_string = self._get_property(conn_id, 'connection_string')

    # Simulate progress callbacks at the start, middle and end.
    total = len(data)
    for done in (0, total // 2, total):
        await self.notify_progress(conn_string, 'script', done, total)

    dev.script = data
[ "Asynchronously send a a script to this IOTile device\n\n Args:\n conn_id (int): A unique identifier that will refer to this connection\n data (bytes or bytearray): the script to send to the device\n " ]
async def debug(self, conn_id, name, cmd_args):
    """Send a debug command to a virtual device.

    Supports a single command, 'inspect_property': ``cmd_args`` must
    contain a 'properties' list of attribute names to read from the
    device.  A progress event is emitted per property (primarily so the
    device server's progress plumbing can be tested).

    Args:
        conn_id (int): The unique identifier of the connection.
        name (str): The debug command; only 'inspect_property' is valid.
        cmd_args (dict): {'properties': [str, ...]}.

    Returns:
        dict: Maps each property name to its value on the device.

    Raises:
        DeviceAdapterError: For an unknown command or missing property.
    """
    self._ensure_connection(conn_id, True)

    dev = self._get_property(conn_id, 'device')
    conn_string = self._get_property(conn_id, 'connection_string')

    if name != 'inspect_property':
        raise DeviceAdapterError(conn_id, 'debug', 'operation {} not supported'.format(name))

    properties = cmd_args.get('properties', [])
    total = len(properties)
    result = {}

    for index, prop in enumerate(properties, start=1):
        if not hasattr(dev, prop):
            raise DeviceAdapterError(conn_id, 'debug', 'property {} did not exist'.format(prop))

        result[prop] = getattr(dev, prop)
        await self.notify_progress(conn_string, 'debug', index, total)

    return result
[ "Send a debug command to a device.\n\n This method responds to a single command 'inspect_property' that takes\n the name of a propery on the device and returns its value. The\n ``cmd_args`` dict should have a single key: 'properties' that is a\n list of strings with the property names that should be returned.\n\n Those properties are all queried and their result returned.\n\n The result is a dict that maps property name to value. There is a\n progress event generated for every property whose purpose is primarily\n to allow for testing the progress system of a device server.\n\n See :meth:`AbstractDeviceAdapter.debug`.\n " ]
Please provide a description of the function:async def _send_scan_event(self, device): conn_string = str(device.iotile_id) info = { 'connection_string': conn_string, 'uuid': device.iotile_id, 'signal_strength': 100, 'validity_period': self.ExpirationTime } await self.notify_event(conn_string, 'device_seen', info)
[ "Send a scan event from a device." ]
def rpc_name(rpc_id):
    """Map an RPC id to a human readable name.

    The id is looked up in the map of globally declared RPCs; unknown ids
    get a generic 'RPC 0x%04X' string instead.

    Args:
        rpc_id (int): The id of the RPC to look up.

    Returns:
        str: The nice name of the RPC.
    """
    known = _RPC_NAME_MAP.get(rpc_id)
    if known is not None:
        return known

    return 'RPC 0x%04X' % rpc_id
[ "Map an RPC id to a string name.\n\n This function looks the RPC up in a map of all globally declared RPCs,\n and returns a nice name string. if the RPC is not found in the global\n name map, returns a generic name string such as 'rpc 0x%04X'.\n\n Args:\n rpc_id (int): The id of the RPC that we wish to look up.\n\n Returns:\n str: The nice name of the RPC.\n " ]
def stream_name(stream_id):
    """Map a stream id to a human readable name.

    Globally known streams use their registered name; otherwise the string
    form of the decoded DataStream is used.  The hex id is always appended
    as '<name> (0x<stream id in hex>)'.

    Args:
        stream_id (int): An integer stream id.

    Returns:
        str: The nice name of the stream.
    """
    known = _STREAM_NAME_MAP.get(stream_id)
    if known is None:
        known = str(DataStream.FromEncoded(stream_id))

    return f"{known} (0x{stream_id:04X})"
[ "Map a stream id to a human readable name.\n\n The mapping process is as follows:\n\n If the stream id is globally known, its global name is used as <name>\n otherwise a string representation of the stream is used as <name>.\n\n In both cases the hex representation of the stream id is appended as a\n number:\n\n <name> (0x<stream id in hex>)\n\n Args:\n stream_id (int): An integer stream id.\n\n Returns:\n str: The nice name of the stream.\n " ]
def Parser(version):
    """Create and return the command-line option parser used by SCons.

    Args:
        version (str): The version string printed by -v/--version.

    Returns:
        SConsOptionParser: The parser, populated with every supported
        option (plus compatibility no-ops and not-yet-implemented stubs).
    """
    formatter = SConsIndentedHelpFormatter(max_help_position=30)

    op = SConsOptionParser(option_class=SConsOption,
                           add_help_option=False,
                           formatter=formatter,
                           usage="usage: scons [OPTION] [TARGET] ...",)

    op.preserve_unknown_options = True
    op.version = version

    # Add the options to the parser we just created.
    #
    # These are in the order we want them to show up in the -H help
    # text, basically alphabetical.  Each op.add_option() call below
    # should have a consistent format:
    #
    #   op.add_option("-L", "--long-option-name",
    #                 nargs=1, type="string",
    #                 dest="long_option_name", default='foo',
    #                 action="callback", callback=opt_long_option,
    #                 help="help text goes here",
    #                 metavar="VAR")
    #
    # Even though the optparse module constructs reasonable default
    # destination names from the long option names, we're going to be
    # explicit about each one for easier readability and so this code
    # will at least show up when grepping the source for option attribute
    # names, or otherwise browsing the source code.

    # options ignored for compatibility
    def opt_ignore(option, opt, value, parser):
        sys.stderr.write("Warning: ignoring %s option\n" % opt)

    op.add_option("-b", "-d", "-e", "-m", "-S", "-t", "-w",
                  "--environment-overrides",
                  "--no-keep-going",
                  "--no-print-directory",
                  "--print-directory",
                  "--stop",
                  "--touch",
                  action="callback", callback=opt_ignore,
                  help="Ignored for compatibility.")

    op.add_option('-c', '--clean', '--remove',
                  dest="clean", default=False,
                  action="store_true",
                  help="Remove specified targets and dependencies.")

    op.add_option('-C', '--directory',
                  nargs=1, type="string",
                  dest="directory", default=[],
                  action="append",
                  help="Change to DIR before doing anything.",
                  metavar="DIR")

    op.add_option('--cache-debug',
                  nargs=1,
                  dest="cache_debug", default=None,
                  action="store",
                  help="Print CacheDir debug info to FILE.",
                  metavar="FILE")

    op.add_option('--cache-disable', '--no-cache',
                  dest='cache_disable', default=False,
                  action="store_true",
                  help="Do not retrieve built targets from CacheDir.")

    op.add_option('--cache-force', '--cache-populate',
                  dest='cache_force', default=False,
                  action="store_true",
                  help="Copy already-built targets into the CacheDir.")

    op.add_option('--cache-readonly',
                  dest='cache_readonly', default=False,
                  action="store_true",
                  help="Do not update CacheDir with built targets.")

    op.add_option('--cache-show',
                  dest='cache_show', default=False,
                  action="store_true",
                  help="Print build actions for files from CacheDir.")

    # Shared helper producing the error text for an invalid choice.
    def opt_invalid(group, value, options):
        errmsg = "`%s' is not a valid %s option type, try:\n" % (value, group)
        return errmsg + " %s" % ", ".join(options)

    config_options = ["auto", "force", "cache"]

    opt_config_help = "Controls Configure subsystem: %s." \
                      % ", ".join(config_options)

    op.add_option('--config',
                  nargs=1, choices=config_options,
                  dest="config", default="auto",
                  help=opt_config_help,
                  metavar="MODE")

    op.add_option('-D',
                  dest="climb_up", default=None,
                  action="store_const", const=2,
                  help="Search up directory tree for SConstruct, "
                       "build all Default() targets.")

    deprecated_debug_options = {
        "dtree": '; please use --tree=derived instead',
        "nomemoizer": ' and has no effect',
        "stree": '; please use --tree=all,status instead',
        "tree": '; please use --tree=all instead',
    }

    debug_options = ["count", "duplicate", "explain", "findlibs",
                     "includes", "memoizer", "memory", "objects",
                     "pdb", "prepare", "presub", "stacktrace",
                     "time"]

    def opt_debug(option, opt, value__, parser,
                  debug_options=debug_options,
                  deprecated_debug_options=deprecated_debug_options):
        # The option accepts a comma-separated list; deprecated values
        # still work but queue a deprecation warning for later emission.
        for value in value__.split(','):
            if value in debug_options:
                parser.values.debug.append(value)
            elif value in list(deprecated_debug_options.keys()):
                parser.values.debug.append(value)
                try:
                    parser.values.delayed_warnings
                except AttributeError:
                    parser.values.delayed_warnings = []
                msg = deprecated_debug_options[value]
                w = "The --debug=%s option is deprecated%s." % (value, msg)
                t = (SCons.Warnings.DeprecatedDebugOptionsWarning, w)
                parser.values.delayed_warnings.append(t)
            else:
                raise OptionValueError(opt_invalid('debug', value, debug_options))

    opt_debug_help = "Print various types of debugging information: %s." \
                     % ", ".join(debug_options)

    op.add_option('--debug',
                  nargs=1, type="string",
                  dest="debug", default=[],
                  action="callback", callback=opt_debug,
                  help=opt_debug_help,
                  metavar="TYPE")

    def opt_diskcheck(option, opt, value, parser):
        try:
            diskcheck_value = diskcheck_convert(value)
        except ValueError as e:
            raise OptionValueError("`%s' is not a valid diskcheck type" % e)
        setattr(parser.values, option.dest, diskcheck_value)

    op.add_option('--diskcheck',
                  nargs=1, type="string",
                  dest='diskcheck', default=None,
                  action="callback", callback=opt_diskcheck,
                  help="Enable specific on-disk checks.",
                  metavar="TYPE")

    def opt_duplicate(option, opt, value, parser):
        if not value in SCons.Node.FS.Valid_Duplicates:
            raise OptionValueError(opt_invalid('duplication', value,
                                               SCons.Node.FS.Valid_Duplicates))
        setattr(parser.values, option.dest, value)
        # Set the duplicate style right away so it can affect linking
        # of SConscript files.
        SCons.Node.FS.set_duplicate(value)

    opt_duplicate_help = "Set the preferred duplication methods. Must be one of " \
                         + ", ".join(SCons.Node.FS.Valid_Duplicates)

    op.add_option('--duplicate',
                  nargs=1, type="string",
                  dest="duplicate", default='hard-soft-copy',
                  action="callback", callback=opt_duplicate,
                  help=opt_duplicate_help)

    op.add_option('-f', '--file', '--makefile', '--sconstruct',
                  nargs=1, type="string",
                  dest="file", default=[],
                  action="append",
                  help="Read FILE as the top-level SConstruct file.")

    op.add_option('-h', '--help',
                  dest="help", default=False,
                  action="store_true",
                  help="Print defined help message, or this one.")

    op.add_option("-H", "--help-options",
                  action="help",
                  help="Print this message and exit.")

    op.add_option('-i', '--ignore-errors',
                  dest='ignore_errors', default=False,
                  action="store_true",
                  help="Ignore errors from build actions.")

    op.add_option('-I', '--include-dir',
                  nargs=1,
                  dest='include_dir', default=[],
                  action="append",
                  help="Search DIR for imported Python modules.",
                  metavar="DIR")

    op.add_option('--implicit-cache',
                  dest='implicit_cache', default=False,
                  action="store_true",
                  help="Cache implicit dependencies")

    # Both --implicit-deps-* flags also imply --implicit-cache.
    def opt_implicit_deps(option, opt, value, parser):
        setattr(parser.values, 'implicit_cache', True)
        setattr(parser.values, option.dest, True)

    op.add_option('--implicit-deps-changed',
                  dest="implicit_deps_changed", default=False,
                  action="callback", callback=opt_implicit_deps,
                  help="Ignore cached implicit dependencies.")

    op.add_option('--implicit-deps-unchanged',
                  dest="implicit_deps_unchanged", default=False,
                  action="callback", callback=opt_implicit_deps,
                  help="Ignore changes in implicit dependencies.")

    op.add_option('--interact', '--interactive',
                  dest='interactive', default=False,
                  action="store_true",
                  help="Run in interactive mode.")

    op.add_option('-j', '--jobs',
                  nargs=1, type="int",
                  dest="num_jobs", default=1,
                  action="store",
                  help="Allow N jobs at once.",
                  metavar="N")

    op.add_option('-k', '--keep-going',
                  dest='keep_going', default=False,
                  action="store_true",
                  help="Keep going when a target can't be made.")

    op.add_option('--max-drift',
                  nargs=1, type="int",
                  dest='max_drift', default=SCons.Node.FS.default_max_drift,
                  action="store",
                  help="Set maximum system clock drift to N seconds.",
                  metavar="N")

    op.add_option('--md5-chunksize',
                  nargs=1, type="int",
                  dest='md5_chunksize', default=SCons.Node.FS.File.md5_chunksize,
                  action="store",
                  help="Set chunk-size for MD5 signature computation to N kilobytes.",
                  metavar="N")

    op.add_option('-n', '--no-exec', '--just-print', '--dry-run', '--recon',
                  dest='no_exec', default=False,
                  action="store_true",
                  help="Don't build; just print commands.")

    op.add_option('--no-site-dir',
                  dest='no_site_dir', default=False,
                  action="store_true",
                  help="Don't search or use the usual site_scons dir.")

    op.add_option('--profile',
                  nargs=1,
                  dest="profile_file", default=None,
                  action="store",
                  help="Profile SCons and put results in FILE.",
                  metavar="FILE")

    op.add_option('-q', '--question',
                  dest="question", default=False,
                  action="store_true",
                  help="Don't build; exit status says if up to date.")

    op.add_option('-Q',
                  dest='no_progress', default=False,
                  action="store_true",
                  help="Suppress \"Reading/Building\" progress messages.")

    op.add_option('--random',
                  dest="random", default=False,
                  action="store_true",
                  help="Build dependencies in random order.")

    op.add_option('-s', '--silent', '--quiet',
                  dest="silent", default=False,
                  action="store_true",
                  help="Don't print commands.")

    op.add_option('--site-dir',
                  nargs=1,
                  dest='site_dir', default=None,
                  action="store",
                  help="Use DIR instead of the usual site_scons dir.",
                  metavar="DIR")

    op.add_option('--stack-size',
                  nargs=1, type="int",
                  dest='stack_size',
                  action="store",
                  help="Set the stack size of the threads used to run jobs to N kilobytes.",
                  metavar="N")

    op.add_option('--taskmastertrace',
                  nargs=1,
                  dest="taskmastertrace_file", default=None,
                  action="store",
                  help="Trace Node evaluation to FILE.",
                  metavar="FILE")

    tree_options = ["all", "derived", "prune", "status"]

    def opt_tree(option, opt, value, parser, tree_options=tree_options):
        from . import Main
        tp = Main.TreePrinter()
        for o in value.split(','):
            if o == 'all':
                tp.derived = False
            elif o == 'derived':
                tp.derived = True
            elif o == 'prune':
                tp.prune = True
            elif o == 'status':
                tp.status = True
            else:
                raise OptionValueError(opt_invalid('--tree', o, tree_options))
        parser.values.tree_printers.append(tp)

    opt_tree_help = "Print a dependency tree in various formats: %s." \
                    % ", ".join(tree_options)

    op.add_option('--tree',
                  nargs=1, type="string",
                  dest="tree_printers", default=[],
                  action="callback", callback=opt_tree,
                  help=opt_tree_help,
                  metavar="OPTIONS")

    op.add_option('-u', '--up', '--search-up',
                  dest="climb_up", default=0,
                  action="store_const", const=1,
                  help="Search up directory tree for SConstruct, "
                       "build targets at or below current directory.")

    op.add_option('-U',
                  dest="climb_up", default=0,
                  action="store_const", const=3,
                  help="Search up directory tree for SConstruct, "
                       "build Default() targets from local SConscript.")

    def opt_version(option, opt, value, parser):
        sys.stdout.write(parser.version + '\n')
        sys.exit(0)

    op.add_option("-v", "--version",
                  action="callback", callback=opt_version,
                  help="Print the SCons version number and exit.")

    # NOTE(review): the tree_options keyword default here looks like a
    # copy/paste leftover from opt_tree -- it is unused in this function.
    def opt_warn(option, opt, value, parser, tree_options=tree_options):
        if SCons.Util.is_String(value):
            value = value.split(',')
        parser.values.warn.extend(value)

    op.add_option('--warn', '--warning',
                  nargs=1, type="string",
                  dest="warn", default=[],
                  action="callback", callback=opt_warn,
                  help="Enable or disable warnings.",
                  metavar="WARNING-SPEC")

    op.add_option('-Y', '--repository', '--srcdir',
                  nargs=1,
                  dest="repository", default=[],
                  action="append",
                  help="Search REPOSITORY for source and target files.")

    # Options from Make and Cons classic that we do not yet support,
    # but which we may support someday and whose (potential) meanings
    # we don't want to change.  These all get a "the -X option is not
    # yet implemented" message and don't show up in the help output.

    def opt_not_yet(option, opt, value, parser):
        msg = "Warning: the %s option is not yet implemented\n" % opt
        sys.stderr.write(msg)

    op.add_option('-l', '--load-average', '--max-load',
                  nargs=1, type="float",
                  dest="load_average", default=0,
                  action="callback", callback=opt_not_yet,
                  # action="store",
                  # help="Don't start multiple jobs unless load is below "
                  #      "LOAD-AVERAGE."
                  help=SUPPRESS_HELP)

    op.add_option('--list-actions',
                  dest="list_actions",
                  action="callback", callback=opt_not_yet,
                  # help="Don't build; list files and build actions."
                  help=SUPPRESS_HELP)

    op.add_option('--list-derived',
                  dest="list_derived",
                  action="callback", callback=opt_not_yet,
                  # help="Don't build; list files that would be built."
                  help=SUPPRESS_HELP)

    op.add_option('--list-where',
                  dest="list_where",
                  action="callback", callback=opt_not_yet,
                  # help="Don't build; list files and where defined."
                  help=SUPPRESS_HELP)

    op.add_option('-o', '--old-file', '--assume-old',
                  nargs=1, type="string",
                  dest="old_file", default=[],
                  action="callback", callback=opt_not_yet,
                  # action="append",
                  # help = "Consider FILE to be old; don't rebuild it."
                  help=SUPPRESS_HELP)

    op.add_option('--override',
                  nargs=1, type="string",
                  action="callback", callback=opt_not_yet,
                  dest="override",
                  # help="Override variables as specified in FILE."
                  help=SUPPRESS_HELP)

    op.add_option('-p',
                  action="callback", callback=opt_not_yet,
                  dest="p",
                  # help="Print internal environments/objects."
                  help=SUPPRESS_HELP)

    op.add_option('-r', '-R', '--no-builtin-rules', '--no-builtin-variables',
                  action="callback", callback=opt_not_yet,
                  dest="no_builtin_rules",
                  # help="Clear default environments and variables."
                  help=SUPPRESS_HELP)

    op.add_option('--write-filenames',
                  nargs=1, type="string",
                  dest="write_filenames",
                  action="callback", callback=opt_not_yet,
                  # help="Write all filenames examined into FILE."
                  help=SUPPRESS_HELP)

    op.add_option('-W', '--new-file', '--assume-new', '--what-if',
                  nargs=1, type="string",
                  dest="new_file",
                  action="callback", callback=opt_not_yet,
                  # help="Consider FILE to be changed."
                  help=SUPPRESS_HELP)

    op.add_option('--warn-undefined-variables',
                  dest="warn_undefined_variables",
                  action="callback", callback=opt_not_yet,
                  # help="Warn when an undefined variable is referenced."
                  help=SUPPRESS_HELP)

    return op
[ "\n Returns an options parser object initialized with the standard\n SCons options.\n " ]
Please provide a description of the function:def set_option(self, name, value): if not name in self.settable: raise SCons.Errors.UserError("This option is not settable from a SConscript file: %s"%name) if name == 'num_jobs': try: value = int(value) if value < 1: raise ValueError except ValueError: raise SCons.Errors.UserError("A positive integer is required: %s"%repr(value)) elif name == 'max_drift': try: value = int(value) except ValueError: raise SCons.Errors.UserError("An integer is required: %s"%repr(value)) elif name == 'duplicate': try: value = str(value) except ValueError: raise SCons.Errors.UserError("A string is required: %s"%repr(value)) if not value in SCons.Node.FS.Valid_Duplicates: raise SCons.Errors.UserError("Not a valid duplication style: %s" % value) # Set the duplicate style right away so it can affect linking # of SConscript files. SCons.Node.FS.set_duplicate(value) elif name == 'diskcheck': try: value = diskcheck_convert(value) except ValueError as v: raise SCons.Errors.UserError("Not a valid diskcheck value: %s"%v) if 'diskcheck' not in self.__dict__: # No --diskcheck= option was specified on the command line. # Set this right away so it can affect the rest of the # file/Node lookups while processing the SConscript files. SCons.Node.FS.set_diskcheck(value) elif name == 'stack_size': try: value = int(value) except ValueError: raise SCons.Errors.UserError("An integer is required: %s"%repr(value)) elif name == 'md5_chunksize': try: value = int(value) except ValueError: raise SCons.Errors.UserError("An integer is required: %s"%repr(value)) elif name == 'warn': if SCons.Util.is_String(value): value = [value] value = self.__SConscript_settings__.get(name, []) + value SCons.Warnings.process_warn_strings(value) self.__SConscript_settings__[name] = value
[ "\n Sets an option from an SConscript file.\n " ]
Please provide a description of the function:def format_help(self, formatter): formatter.dedent() result = formatter.format_heading(self.title) formatter.indent() result = result + optparse.OptionContainer.format_help(self, formatter) return result
[ "\n Format an option group's help text, outdenting the title so it's\n flush with the \"SCons Options\" title we print at the top.\n " ]
Please provide a description of the function:def _process_long_opt(self, rargs, values): arg = rargs.pop(0) # Value explicitly attached to arg? Pretend it's the next # argument. if "=" in arg: (opt, next_arg) = arg.split("=", 1) rargs.insert(0, next_arg) had_explicit_value = True else: opt = arg had_explicit_value = False try: opt = self._match_long_opt(opt) except optparse.BadOptionError: if self.preserve_unknown_options: # SCons-specific: if requested, add unknown options to # the "leftover arguments" list for later processing. self.largs.append(arg) if had_explicit_value: # The unknown option will be re-processed later, # so undo the insertion of the explicit value. rargs.pop(0) return raise option = self._long_opt[opt] if option.takes_value(): nargs = option.nargs if nargs == '?': if had_explicit_value: value = rargs.pop(0) else: value = option.const elif len(rargs) < nargs: if nargs == 1: if not option.choices: self.error(_("%s option requires an argument") % opt) else: msg = _("%s option requires an argument " % opt) msg += _("(choose from %s)" % ', '.join(option.choices)) self.error(msg) else: self.error(_("%s option requires %d arguments") % (opt, nargs)) elif nargs == 1: value = rargs.pop(0) else: value = tuple(rargs[0:nargs]) del rargs[0:nargs] elif had_explicit_value: self.error(_("%s option does not take a value") % opt) else: value = None option.process(opt, value, values, self)
[ "\n SCons-specific processing of long options.\n\n This is copied directly from the normal\n optparse._process_long_opt() method, except that, if configured\n to do so, we catch the exception thrown when an unknown option\n is encountered and just stick it back on the \"leftover\" arguments\n for later (re-)processing.\n " ]
Please provide a description of the function:def reparse_local_options(self): rargs = [] largs_restore = [] # Loop over all remaining arguments skip = False for l in self.largs: if skip: # Accept all remaining arguments as they are largs_restore.append(l) else: if len(l) > 2 and l[0:2] == "--": # Check long option lopt = (l,) if "=" in l: # Split into option and value lopt = l.split("=", 1) if lopt[0] in self._long_opt: # Argument is already known rargs.append('='.join(lopt)) else: # Not known yet, so reject for now largs_restore.append('='.join(lopt)) else: if l == "--" or l == "-": # Stop normal processing and don't # process the rest of the command-line opts largs_restore.append(l) skip = True else: rargs.append(l) # Parse the filtered list self.parse_args(rargs, self.values) # Restore the list of remaining arguments for the # next call of AddOption/add_local_option... self.largs = self.largs + largs_restore
[ "\n Re-parse the leftover command-line options stored\n in self.largs, so that any value overridden on the\n command line is immediately available if the user turns\n around and does a GetOption() right away.\n \n We mimic the processing of the single args\n in the original OptionParser._process_args(), but here we\n allow exact matches for long-opts only (no partial\n argument names!).\n\n Else, this would lead to problems in add_local_option()\n below. When called from there, we try to reparse the\n command-line arguments that\n 1. haven't been processed so far (self.largs), but\n 2. are possibly not added to the list of options yet.\n \n So, when we only have a value for \"--myargument\" yet,\n a command-line argument of \"--myarg=test\" would set it.\n Responsible for this behaviour is the method\n _match_long_opt(), which allows for partial matches of\n the option name, as long as the common prefix appears to\n be unique.\n This would lead to further confusion, because we might want\n to add another option \"--myarg\" later on (see issue #2929).\n \n " ]
Please provide a description of the function:def add_local_option(self, *args, **kw): try: group = self.local_option_group except AttributeError: group = SConsOptionGroup(self, 'Local Options') group = self.add_option_group(group) self.local_option_group = group result = group.add_option(*args, **kw) if result: # The option was added successfully. We now have to add the # default value to our object that holds the default values # (so that an attempt to fetch the option's attribute will # yield the default value when not overridden) and then # we re-parse the leftover command-line options, so that # any value overridden on the command line is immediately # available if the user turns around and does a GetOption() # right away. setattr(self.values.__defaults__, result.dest, result.default) self.reparse_local_options() return result
[ "\n Adds a local option to the parser.\n\n This is initiated by a SetOption() call to add a user-defined\n command-line option. We add the option to a separate option\n group for the local options, creating the group if necessary.\n " ]
Please provide a description of the function:def format_heading(self, heading): if heading == 'Options': heading = "SCons Options" return optparse.IndentedHelpFormatter.format_heading(self, heading)
[ "\n This translates any heading of \"options\" or \"Options\" into\n \"SCons Options.\" Unfortunately, we have to do this here,\n because those titles are hard-coded in the optparse calls.\n " ]
Please provide a description of the function:def format_option(self, option): # The help for each option consists of two parts: # * the opt strings and metavars # eg. ("-x", or "-fFILENAME, --file=FILENAME") # * the user-supplied help string # eg. ("turn on expert mode", "read data from FILENAME") # # If possible, we write both of these on the same line: # -x turn on expert mode # # But if the opt string list is too long, we put the help # string on a second line, indented to the same column it would # start in if it fit on the first line. # -fFILENAME, --file=FILENAME # read data from FILENAME result = [] opts = self.option_strings[option] opt_width = self.help_position - self.current_indent - 2 if len(opts) > opt_width: wrapper = textwrap.TextWrapper(width=self.width, initial_indent = ' ', subsequent_indent = ' ') wrapper.wordsep_re = no_hyphen_re opts = wrapper.fill(opts) + '\n' indent_first = self.help_position else: # start help on same line as opts opts = "%*s%-*s " % (self.current_indent, "", opt_width, opts) indent_first = 0 result.append(opts) if option.help: help_text = self.expand_default(option) # SCons: indent every line of the help text but the first. wrapper = textwrap.TextWrapper(width=self.help_width, subsequent_indent = ' ') wrapper.wordsep_re = no_hyphen_re help_lines = wrapper.wrap(help_text) result.append("%*s%s\n" % (indent_first, "", help_lines[0])) for line in help_lines[1:]: result.append("%*s%s\n" % (self.help_position, "", line)) elif opts[-1] != "\n": result.append("\n") return "".join(result)
[ "\n A copy of the normal optparse.IndentedHelpFormatter.format_option()\n method. This has been snarfed so we can modify text wrapping to\n out liking:\n\n -- add our own regular expression that doesn't break on hyphens\n (so things like --no-print-directory don't get broken);\n\n -- wrap the list of options themselves when it's too long\n (the wrapper.fill(opts) call below);\n\n -- set the subsequent_indent when wrapping the help_text.\n " ]
Please provide a description of the function:def to_dict(self): out_dict = {} out_dict['commands'] = self.commands out_dict['configs'] = self.configs out_dict['short_name'] = self.name out_dict['versions'] = { 'module': self.module_version, 'api': self.api_version } return out_dict
[ "Convert this object into a dictionary.\n\n Returns:\n dict: A dict with the same information as this object.\n " ]
Please provide a description of the function:def set_api_version(self, major, minor): if not self._is_byte(major) or not self._is_byte(minor): raise ArgumentError("Invalid API version number with component that does not fit in 1 byte", major=major, minor=minor) self.api_version = (major, minor)
[ "Set the API version this module was designed for.\n\n Each module must declare the mib12 API version it was compiled with as a\n 2 byte major.minor number. This information is used by the pic12_executive\n to decide whether the application is compatible.\n " ]
Please provide a description of the function:def set_module_version(self, major, minor, patch): if not (self._is_byte(major) and self._is_byte(minor) and self._is_byte(patch)): raise ArgumentError("Invalid module version number with component that does not fit in 1 byte", major=major, minor=minor, patch=patch) self.module_version = (major, minor, patch)
[ "Set the module version for this module.\n\n Each module must declare a semantic version number in the form:\n major.minor.patch\n\n where each component is a 1 byte number between 0 and 255.\n " ]
Please provide a description of the function:def set_name(self, name): if len(name) > 6: raise ArgumentError("Name must be at most 6 characters long", name=name) if len(name) < 6: name += ' '*(6 - len(name)) self.name = name
[ "Set the module name to a 6 byte string\n\n If the string is too short it is appended with space characters.\n " ]
Please provide a description of the function:def add_command(self, cmd_id, handler): if cmd_id < 0 or cmd_id >= 2**16: raise ArgumentError("Command ID in mib block is not a non-negative 2-byte number", cmd_id=cmd_id, handler=handler) if cmd_id in self.commands: raise ArgumentError("Attempted to add the same command ID twice.", cmd_id=cmd_id, existing_handler=self.commands[cmd_id], new_handler=handler) self.commands[cmd_id] = handler
[ "Add a command to the TBBlock.\n\n The cmd_id must be a non-negative 2 byte number.\n handler should be the command handler\n " ]
Please provide a description of the function:def add_config(self, config_id, config_data): if config_id < 0 or config_id >= 2**16: raise ArgumentError("Config ID in mib block is not a non-negative 2-byte number", config_data=config_id, data=config_data) if config_id in self.configs: raise ArgumentError("Attempted to add the same command ID twice.", config_data=config_id, old_data=self.configs[config_id], new_data=config_data) self.configs[config_id] = config_data
[ "Add a configuration variable to the MIB block" ]
Please provide a description of the function:def _parse_hwtype(self): self.chip_name = KNOWN_HARDWARE_TYPES.get(self.hw_type, "Unknown Chip (type=%d)" % self.hw_type)
[ "Convert the numerical hardware id to a chip name." ]
Please provide a description of the function:def render_template(self, template_name, out_path=None): return render_template(template_name, self.to_dict(), out_path=out_path)
[ "Render a template based on this TileBus Block.\n\n The template has access to all of the attributes of this block as a\n dictionary (the result of calling self.to_dict()).\n\n You can optionally render to a file by passing out_path.\n\n Args:\n template_name (str): The name of the template to load. This must\n be a file in config/templates inside this package\n out_path (str): An optional path of where to save the output\n file, otherwise it is just returned as a string.\n\n Returns:\n string: The rendered template data.\n " ]
Please provide a description of the function:def Tag(env, target, source, *more_tags, **kw_tags): if not target: target=source first_tag=None else: first_tag=source if first_tag: kw_tags[first_tag[0]] = '' if len(kw_tags) == 0 and len(more_tags) == 0: raise UserError("No tags given.") # XXX: sanity checks for x in more_tags: kw_tags[x] = '' if not SCons.Util.is_List(target): target=[target] else: # hmm, sometimes the target list, is a list of a list # make sure it is flattened prior to processing. # TODO: perhaps some bug ?!? target=env.Flatten(target) for t in target: for (k,v) in kw_tags.items(): # all file tags have to start with PACKAGING_, so we can later # differentiate between "normal" object attributes and the # packaging attributes. As the user should not be bothered with # that, the prefix will be added here if missing. if k[:10] != 'PACKAGING_': k='PACKAGING_'+k t.Tag(k, v)
[ " Tag a file with the given arguments, just sets the accordingly named\n attribute on the file object.\n\n TODO: FIXME\n " ]
Please provide a description of the function:def Package(env, target=None, source=None, **kw): # check if we need to find the source files ourself if not source: source = env.FindInstalledFiles() if len(source)==0: raise UserError("No source for Package() given") # decide which types of packages shall be built. Can be defined through # four mechanisms: command line argument, keyword argument, # environment argument and default selection( zip or tar.gz ) in that # order. try: kw['PACKAGETYPE']=env['PACKAGETYPE'] except KeyError: pass if not kw.get('PACKAGETYPE'): from SCons.Script import GetOption kw['PACKAGETYPE'] = GetOption('package_type') if kw['PACKAGETYPE'] == None: if 'Tar' in env['BUILDERS']: kw['PACKAGETYPE']='targz' elif 'Zip' in env['BUILDERS']: kw['PACKAGETYPE']='zip' else: raise UserError("No type for Package() given") PACKAGETYPE=kw['PACKAGETYPE'] if not is_List(PACKAGETYPE): PACKAGETYPE=PACKAGETYPE.split(',') # load the needed packagers. def load_packager(type): try: file,path,desc=imp.find_module(type, __path__) return imp.load_module(type, file, path, desc) except ImportError as e: raise EnvironmentError("packager %s not available: %s"%(type,str(e))) packagers=list(map(load_packager, PACKAGETYPE)) # set up targets and the PACKAGEROOT try: # fill up the target list with a default target name until the PACKAGETYPE # list is of the same size as the target list. if not target: target = [] size_diff = len(PACKAGETYPE)-len(target) default_name = "%(NAME)s-%(VERSION)s" if size_diff>0: default_target = default_name%kw target.extend( [default_target]*size_diff ) if 'PACKAGEROOT' not in kw: kw['PACKAGEROOT'] = default_name%kw except KeyError as e: raise SCons.Errors.UserError( "Missing Packagetag '%s'"%e.args[0] ) # setup the source files source=env.arg2nodes(source, env.fs.Entry) # call the packager to setup the dependencies. 
targets=[] try: for packager in packagers: t=[target.pop(0)] t=packager.package(env,t,source, **kw) targets.extend(t) assert( len(target) == 0 ) except KeyError as e: raise SCons.Errors.UserError( "Missing Packagetag '%s' for %s packager"\ % (e.args[0],packager.__name__) ) except TypeError as e: # this exception means that a needed argument for the packager is # missing. As our packagers get their "tags" as named function # arguments we need to find out which one is missing. from inspect import getargspec args,varargs,varkw,defaults=getargspec(packager.package) if defaults!=None: args=args[:-len(defaults)] # throw away arguments with default values args.remove('env') args.remove('target') args.remove('source') # now remove any args for which we have a value in kw. args=[x for x in args if x not in kw] if len(args)==0: raise # must be a different error, so re-raise elif len(args)==1: raise SCons.Errors.UserError( "Missing Packagetag '%s' for %s packager"\ % (args[0],packager.__name__) ) else: raise SCons.Errors.UserError( "Missing Packagetags '%s' for %s packager"\ % (", ".join(args),packager.__name__) ) target=env.arg2nodes(target, env.fs.Entry) targets.extend(env.Alias( 'package', targets )) return targets
[ " Entry point for the package tool.\n " ]
Please provide a description of the function:def copy_attr(f1, f2): copyit = lambda x: not hasattr(f2, x) and x[:10] == 'PACKAGING_' if f1._tags: pattrs = [tag for tag in f1._tags if copyit(tag)] for attr in pattrs: f2.Tag(attr, f1.GetTag(attr))
[ " copies the special packaging file attributes from f1 to f2.\n " ]
Please provide a description of the function:def putintopackageroot(target, source, env, pkgroot, honor_install_location=1): # make sure the packageroot is a Dir object. if SCons.Util.is_String(pkgroot): pkgroot=env.Dir(pkgroot) if not SCons.Util.is_List(source): source=[source] new_source = [] for file in source: if SCons.Util.is_String(file): file = env.File(file) if file.is_under(pkgroot): new_source.append(file) else: if file.GetTag('PACKAGING_INSTALL_LOCATION') and\ honor_install_location: new_name=make_path_relative(file.GetTag('PACKAGING_INSTALL_LOCATION')) else: new_name=make_path_relative(file.get_path()) new_file=pkgroot.File(new_name) new_file=env.CopyAs(new_file, file)[0] copy_attr(file, new_file) new_source.append(new_file) return (target, new_source)
[ " Uses the CopyAs builder to copy all source files to the directory given\n in pkgroot.\n\n If honor_install_location is set and the copied source file has an\n PACKAGING_INSTALL_LOCATION attribute, the PACKAGING_INSTALL_LOCATION is\n used as the new name of the source file under pkgroot.\n\n The source file will not be copied if it is already under the the pkgroot\n directory.\n\n All attributes of the source file will be copied to the new file.\n " ]
Please provide a description of the function:def stripinstallbuilder(target, source, env): def has_no_install_location(file): return not (file.has_builder() and\ hasattr(file.builder, 'name') and\ (file.builder.name=="InstallBuilder" or\ file.builder.name=="InstallAsBuilder")) if len([src for src in source if has_no_install_location(src)]): warn(Warning, "there are files to package which have no\ InstallBuilder attached, this might lead to irreproducible packages") n_source=[] for s in source: if has_no_install_location(s): n_source.append(s) else: for ss in s.sources: n_source.append(ss) copy_attr(s, ss) ss.Tag('PACKAGING_INSTALL_LOCATION', s.get_path()) return (target, n_source)
[ " Strips the install builder action from the source list and stores\n the final installation location as the \"PACKAGING_INSTALL_LOCATION\" of\n the source of the source file. This effectively removes the final installed\n files from the source list while remembering the installation location.\n\n It also warns about files which have no install builder attached.\n " ]
Please provide a description of the function:def restore(self, state): selector = DataStreamSelector.FromString(state.get(u'selector')) if selector != self.selector: raise ArgumentError("Attempted to restore a BufferedStreamWalker with a different selector", selector=self.selector, serialized_data=state) self.seek(state.get(u'offset'), target="offset")
[ "Restore a previous state of this stream walker.\n\n Raises:\n ArgumentError: If the state refers to a different selector or the\n offset is invalid.\n " ]
Please provide a description of the function:def pop(self): if self._count == 0: raise StreamEmptyError("Pop called on buffered stream walker without any data", selector=self.selector) while True: curr = self.engine.get(self.storage_type, self.offset) self.offset += 1 stream = DataStream.FromEncoded(curr.stream) if self.matches(stream): self._count -= 1 return curr
[ "Pop a reading off of this stream and return it." ]
Please provide a description of the function:def seek(self, value, target="offset"): if target not in (u'offset', u'id'): raise ArgumentError("You must specify target as either offset or id", target=target) if target == u'offset': self._verify_offset(value) self.offset = value else: self.offset = self._find_id(value) self._count = self.engine.count_matching(self.selector, offset=self.offset) curr = self.engine.get(self.storage_type, self.offset) return self.matches(DataStream.FromEncoded(curr.stream))
[ "Seek this stream to a specific offset or reading id.\n\n There are two modes of use. You can seek to a specific reading id,\n which means the walker will be positioned exactly at the reading\n pointed to by the reading ID. If the reading id cannot be found\n an exception will be raised. The reading id can be found but corresponds\n to a reading that is not selected by this walker, the walker will\n be moved to point at the first reading after that reading and False\n will be returned.\n\n If target==\"offset\", the walker will be positioned at the specified\n offset in the sensor log. It will also update the count of available\n readings based on that new location so that the count remains correct.\n\n The offset does not need to correspond to a reading selected by this\n walker. If offset does not point to a selected reading, the effective\n behavior will be as if the walker pointed to the next selected reading\n after `offset`.\n\n Args:\n value (int): The identifier to seek, either an offset or a\n reading id.\n target (str): The type of thing to seek. Can be offset or id.\n If id is given, then a reading with the given ID will be\n searched for. If offset is given then the walker will\n be positioned at the given offset.\n\n Returns:\n bool: True if an exact match was found, False otherwise.\n\n An exact match means that the offset or reading ID existed and\n corresponded to a reading selected by this walker.\n\n An inexact match means that the offset or reading ID existed but\n corresponded to reading that was not selected by this walker.\n\n If the offset or reading ID could not be found an Exception is\n thrown instead.\n\n Raises:\n ArgumentError: target is an invalid string, must be offset or\n id.\n UnresolvedIdentifierError: the desired offset or reading id\n could not be found.\n " ]
Please provide a description of the function:def skip_all(self): storage, streaming = self.engine.count() if self.selector.output: self.offset = streaming else: self.offset = storage self._count = 0
[ "Skip all readings in this walker." ]
Please provide a description of the function:def notify_rollover(self, stream): self.offset -= 1 if not self.matches(stream): return if self._count == 0: raise InternalError("BufferedStreamWalker out of sync with storage engine, count was wrong.") self._count -= 1
[ "Notify that a reading in the given stream was overwritten.\n\n Args:\n stream (DataStream): The stream that had overwritten data.\n " ]
Please provide a description of the function:def dump(self): reading = self.reading if reading is not None: reading = reading.asdict() return { u'selector': str(self.selector), u'reading': reading }
[ "Serialize the state of this stream walker.\n\n Returns:\n dict: The serialized state.\n " ]
Please provide a description of the function:def restore(self, state): reading = state.get(u'reading') if reading is not None: reading = IOTileReading.FromDict(reading) selector = DataStreamSelector.FromString(state.get(u'selector')) if self.selector != selector: raise ArgumentError("Attempted to restore a VirtualStreamWalker with a different selector", selector=self.selector, serialized_data=state) self.reading = reading
[ "Restore the contents of this virtual stream walker.\n\n Args:\n state (dict): The previously serialized state.\n\n Raises:\n ArgumentError: If the serialized state does not have\n a matching selector.\n " ]
Please provide a description of the function:def push(self, stream, value): if not self.matches(stream): raise ArgumentError("Attempting to push reading to stream walker that does not match", selector=self.selector, stream=stream) self.reading = value
[ "Update this stream walker with a new responsive reading.\n\n Virtual stream walkers keep at most one reading so this function\n just overwrites whatever was previously stored.\n " ]
Please provide a description of the function:def pop(self): if self.reading is None: raise StreamEmptyError("Pop called on virtual stream walker without any data", selector=self.selector) reading = self.reading # If we're not a constant stream, we just exhausted ourselves if self.selector.match_type != DataStream.ConstantType: self.reading = None return reading
[ "Pop a reading off of this virtual stream and return it." ]
Please provide a description of the function:def pop(self): if self._count == 0: raise StreamEmptyError("Pop called on virtual stream walker without any data", selector=self.selector) self._count = self._count - 1 return self.reading
[ "Pop a reading off of this virtual stream and return it." ]
Please provide a description of the function:def peek(self): if self.reading is None: raise StreamEmptyError("peek called on virtual stream walker without any data", selector=self.selector) return self.reading
[ "Peek at the oldest reading in this virtual stream." ]
Please provide a description of the function:def restore(self, state): selector = DataStreamSelector.FromString(state.get(u'selector')) if self.selector != selector: raise ArgumentError("Attempted to restore an InvalidStreamWalker with a different selector", selector=self.selector, serialized_data=state) if state.get(u'type') != u'invalid': raise ArgumentError("Invalid serialized state for InvalidStreamWalker", serialized_data=state)
[ "Restore the contents of this virtual stream walker.\n\n Args:\n state (dict): The previously serialized state.\n\n Raises:\n ArgumentError: If the serialized state does not have\n a matching selector.\n " ]
Please provide a description of the function:def push(self, stream, value): raise ArgumentError("Attempting to push reading to an invalid stream walker that cannot hold data", selector=self.selector, stream=stream)
[ "Update this stream walker with a new responsive reading.\n\n Args:\n stream (DataStream): The stream that we're pushing\n value (IOTileReading): The reading tha we're pushing\n " ]