Code
stringlengths 103
85.9k
| Summary
sequencelengths 0
94
|
---|---|
Please provide a description of the function:def notify_event_nowait(self, conn_string, name, event):
if self._loop.stopping:
self._logger.debug("Ignoring notification %s from %s because loop is shutting down", name, conn_string)
return
self._loop.log_coroutine(self._notify_event_internal, conn_string, name, event) | [
"Notify an event.\n\n This will move the notification to the background event loop and\n return immediately. It is useful for situations where you cannot\n await notify_event but keep in mind that it prevents back-pressure\n when you are notifying too fast so should be used sparingly.\n\n Note that calling this method will push the notification to a\n background task so it can be difficult to reason about when it will\n precisely occur. For that reason, :meth:`notify_event` should be\n preferred when possible since that method guarantees that all\n callbacks will be called synchronously before it finishes.\n\n Args:\n conn_string (str): The connection string for the device that the\n event is associated with.\n name (str): The name of the event. Must be in SUPPORTED_EVENTS.\n event (object): The event object. The type of this object will\n depend on what is being notified.\n "
] |
Please provide a description of the function:def notify_progress(self, conn_string, operation, finished, total, wait=True):
if operation not in self.PROGRESS_OPERATIONS:
raise ArgumentError("Invalid operation for progress event: {}".format(operation))
event = dict(operation=operation, finished=finished, total=total)
if wait:
return self.notify_event(conn_string, 'progress', event)
self.notify_event_nowait(conn_string, 'progress', event)
return None | [
"Send a progress event.\n\n Progress events can be sent for ``debug`` and ``script`` operations and\n notify the caller about the progress of these potentially long-running\n operations. They have two integer properties that specify what fraction\n of the operation has been completed.\n\n Args:\n conn_string (str): The device that is sending the event.\n operations (str): The operation that is in progress: debug or script\n finished (int): The number of \"steps\" that have finished.\n total (int): The total number of steps to perform.\n wait (bool): Whether to return an awaitable that we can use to\n block until the notification has made it to all callbacks.\n\n Returns:\n awaitable or None: An awaitable if wait=True.\n\n If wait is False, the notification is run in the background with\n no way to check its progress and None is returned.\n "
] |
Please provide a description of the function:def generate(env):
cplusplus.generate(env)
env['CXX'] = 'CC'
env['CXXFLAGS'] = SCons.Util.CLVar('-LANG:std')
env['SHCXX'] = '$CXX'
env['SHOBJSUFFIX'] = '.o'
env['STATIC_AND_SHARED_OBJECTS_ARE_THE_SAME'] = 1 | [
"Add Builders and construction variables for SGI MIPS C++ to an Environment."
] |
Please provide a description of the function:def read_packet(self, timeout=3.0):
try:
return self.queue.get(timeout=timeout)
except Empty:
raise InternalTimeoutError("Timeout waiting for packet in AsyncPacketBuffer") | [
"read one packet, timeout if one packet is not available in the timeout period"
] |
Please provide a description of the function:def modify_dict(data, key, value, create_if_missing=False):
data_copy = copy.deepcopy(data)
key_copy = copy.deepcopy(key)
delver = data_copy
current_key = key_copy
last_key = "Root"
# Dig through the json, setting delver to the dict that contains the last key in "key"
while len(current_key) > 1:
if current_key[0] not in delver:
raise KeyError("ModifyJsonStep Key Couldn't find Subkey {} in {}.".format(current_key[0], last_key))
if len(current_key) > 2 and not isinstance(delver[current_key[0]], dict):
raise ValueError("ModifyJsonStep The Value of {} is a {}, not a dict".format(current_key[0], type(delver[current_key[0]])))
last_key = current_key[0]
delver = delver[current_key[0]]
current_key.pop(0)
if current_key[0] not in delver and not create_if_missing:
raise KeyError("ModifyJsonStep Key Couldn't find Subkey {} in {}.".format(current_key[0], last_key))
delver[current_key[0]] = value
return data_copy | [
" Change (or add) a json key/value pair.\n\n Args:\n data (dict): The original data. This will not be modified.\n key (list): A list of keys and subkeys specifing the key to change (list can be one)\n value (str): The value to change for the above key\n create_if_missing (bool): Set to true to create key if the last key in the list is not found\n Otherwise the function will throw a KeyError\n Returns:\n (dict): the final modified dict\n "
] |
Please provide a description of the function:def emit_java_classes(target, source, env):
java_suffix = env.get('JAVASUFFIX', '.java')
class_suffix = env.get('JAVACLASSSUFFIX', '.class')
target[0].must_be_same(SCons.Node.FS.Dir)
classdir = target[0]
s = source[0].rentry().disambiguate()
if isinstance(s, SCons.Node.FS.File):
sourcedir = s.dir.rdir()
elif isinstance(s, SCons.Node.FS.Dir):
sourcedir = s.rdir()
else:
raise SCons.Errors.UserError("Java source must be File or Dir, not '%s'" % s.__class__)
slist = []
js = _my_normcase(java_suffix)
for entry in source:
entry = entry.rentry().disambiguate()
if isinstance(entry, SCons.Node.FS.File):
slist.append(entry)
elif isinstance(entry, SCons.Node.FS.Dir):
result = SCons.Util.OrderedDict()
dirnode = entry.rdir()
def find_java_files(arg, dirpath, filenames):
java_files = sorted([n for n in filenames
if _my_normcase(n).endswith(js)])
mydir = dirnode.Dir(dirpath)
java_paths = [mydir.File(f) for f in java_files]
for jp in java_paths:
arg[jp] = True
for dirpath, dirnames, filenames in os.walk(dirnode.get_abspath()):
find_java_files(result, dirpath, filenames)
entry.walk(find_java_files, result)
slist.extend(list(result.keys()))
else:
raise SCons.Errors.UserError("Java source must be File or Dir, not '%s'" % entry.__class__)
version = env.get('JAVAVERSION', '1.4')
full_tlist = []
for f in slist:
tlist = []
source_file_based = True
pkg_dir = None
if not f.is_derived():
pkg_dir, classes = parse_java_file(f.rfile().get_abspath(), version)
if classes:
source_file_based = False
if pkg_dir:
d = target[0].Dir(pkg_dir)
p = pkg_dir + os.sep
else:
d = target[0]
p = ''
for c in classes:
t = d.File(c + class_suffix)
t.attributes.java_classdir = classdir
t.attributes.java_sourcedir = sourcedir
t.attributes.java_classname = classname(p + c)
tlist.append(t)
if source_file_based:
base = f.name[:-len(java_suffix)]
if pkg_dir:
t = target[0].Dir(pkg_dir).File(base + class_suffix)
else:
t = target[0].File(base + class_suffix)
t.attributes.java_classdir = classdir
t.attributes.java_sourcedir = f.dir
t.attributes.java_classname = classname(base)
tlist.append(t)
for t in tlist:
t.set_specific_source([f])
full_tlist.extend(tlist)
return full_tlist, slist | [
"Create and return lists of source java files\n and their corresponding target class files.\n "
] |
Please provide a description of the function:def Java(env, target, source, *args, **kw):
if not SCons.Util.is_List(target):
target = [target]
if not SCons.Util.is_List(source):
source = [source]
# Pad the target list with repetitions of the last element in the
# list so we have a target for every source element.
target = target + ([target[-1]] * (len(source) - len(target)))
java_suffix = env.subst('$JAVASUFFIX')
result = []
for t, s in zip(target, source):
if isinstance(s, SCons.Node.FS.Base):
if isinstance(s, SCons.Node.FS.File):
b = env.JavaClassFile
else:
b = env.JavaClassDir
else:
if os.path.isfile(s):
b = env.JavaClassFile
elif os.path.isdir(s):
b = env.JavaClassDir
elif s[-len(java_suffix):] == java_suffix:
b = env.JavaClassFile
else:
b = env.JavaClassDir
result.extend(b(t, s, *args, **kw))
return result | [
"\n A pseudo-Builder wrapper around the separate JavaClass{File,Dir}\n Builders.\n "
] |
Please provide a description of the function:def generate(env):
java_file = SCons.Tool.CreateJavaFileBuilder(env)
java_class = SCons.Tool.CreateJavaClassFileBuilder(env)
java_class_dir = SCons.Tool.CreateJavaClassDirBuilder(env)
java_class.add_emitter(None, emit_java_classes)
java_class.add_emitter(env.subst('$JAVASUFFIX'), emit_java_classes)
java_class_dir.emitter = emit_java_classes
env.AddMethod(Java)
env['JAVAC'] = 'javac'
env['JAVACFLAGS'] = SCons.Util.CLVar('')
env['JAVABOOTCLASSPATH'] = []
env['JAVACLASSPATH'] = []
env['JAVASOURCEPATH'] = []
env['_javapathopt'] = pathopt
env['_JAVABOOTCLASSPATH'] = '${_javapathopt("-bootclasspath", "JAVABOOTCLASSPATH")} '
env['_JAVACLASSPATH'] = '${_javapathopt("-classpath", "JAVACLASSPATH")} '
env['_JAVASOURCEPATH'] = '${_javapathopt("-sourcepath", "JAVASOURCEPATH", "_JAVASOURCEPATHDEFAULT")} '
env['_JAVASOURCEPATHDEFAULT'] = '${TARGET.attributes.java_sourcedir}'
env['_JAVACCOM'] = '$JAVAC $JAVACFLAGS $_JAVABOOTCLASSPATH $_JAVACLASSPATH -d ${TARGET.attributes.java_classdir} $_JAVASOURCEPATH $SOURCES'
env['JAVACCOM'] = "${TEMPFILE('$_JAVACCOM','$JAVACCOMSTR')}"
env['JAVACLASSSUFFIX'] = '.class'
env['JAVASUFFIX'] = '.java' | [
"Add Builders and construction variables for javac to an Environment."
] |
Please provide a description of the function:def Add(self, key, help="", default=None, validator=None, converter=None, **kw):
if SCons.Util.is_List(key) or isinstance(key, tuple):
self._do_add(*key)
return
if not SCons.Util.is_String(key) or \
not SCons.Environment.is_valid_construction_var(key):
raise SCons.Errors.UserError("Illegal Variables.Add() key `%s'" % str(key))
self._do_add(key, help, default, validator, converter) | [
"\n Add an option.\n\n\n @param key: the name of the variable, or a list or tuple of arguments\n @param help: optional help text for the options\n @param default: optional default value\n @param validator: optional function that is called to validate the option's value\n @type validator: Called with (key, value, environment)\n @param converter: optional function that is called to convert the option's value before putting it in the environment.\n "
] |
Please provide a description of the function:def Update(self, env, args=None):
values = {}
# first set the defaults:
for option in self.options:
if not option.default is None:
values[option.key] = option.default
# next set the value specified in the options file
for filename in self.files:
if os.path.exists(filename):
dir = os.path.split(os.path.abspath(filename))[0]
if dir:
sys.path.insert(0, dir)
try:
values['__name__'] = filename
with open(filename, 'r') as f:
contents = f.read()
exec(contents, {}, values)
finally:
if dir:
del sys.path[0]
del values['__name__']
# set the values specified on the command line
if args is None:
args = self.args
for arg, value in args.items():
added = False
for option in self.options:
if arg in list(option.aliases) + [ option.key ]:
values[option.key] = value
added = True
if not added:
self.unknown[arg] = value
# put the variables in the environment:
# (don't copy over variables that are not declared as options)
for option in self.options:
try:
env[option.key] = values[option.key]
except KeyError:
pass
# Call the convert functions:
for option in self.options:
if option.converter and option.key in values:
value = env.subst('${%s}'%option.key)
try:
try:
env[option.key] = option.converter(value)
except TypeError:
env[option.key] = option.converter(value, env)
except ValueError as x:
raise SCons.Errors.UserError('Error converting option: %s\n%s'%(option.key, x))
# Finally validate the values:
for option in self.options:
if option.validator and option.key in values:
option.validator(option.key, env.subst('${%s}'%option.key), env) | [
"\n Update an environment with the option variables.\n\n env - the environment to update.\n "
] |
Please provide a description of the function:def Save(self, filename, env):
# Create the file and write out the header
try:
fh = open(filename, 'w')
try:
# Make an assignment in the file for each option
# within the environment that was assigned a value
# other than the default.
for option in self.options:
try:
value = env[option.key]
try:
prepare = value.prepare_to_store
except AttributeError:
try:
eval(repr(value))
except KeyboardInterrupt:
raise
except:
# Convert stuff that has a repr() that
# cannot be evaluated into a string
value = SCons.Util.to_String(value)
else:
value = prepare()
defaultVal = env.subst(SCons.Util.to_String(option.default))
if option.converter:
defaultVal = option.converter(defaultVal)
if str(env.subst('${%s}' % option.key)) != str(defaultVal):
fh.write('%s = %s\n' % (option.key, repr(value)))
except KeyError:
pass
finally:
fh.close()
except IOError as x:
raise SCons.Errors.UserError('Error writing options to file: %s\n%s' % (filename, x)) | [
"\n Saves all the options in the given file. This file can\n then be used to load the options next run. This can be used\n to create an option cache file.\n\n filename - Name of the file to save into\n env - the environment get the option values from\n "
] |
Please provide a description of the function:def GenerateHelpText(self, env, sort=None):
if callable(sort):
options = sorted(self.options, key=cmp_to_key(lambda x,y: sort(x.key,y.key)))
elif sort is True:
options = sorted(self.options, key=lambda x: x.key)
else:
options = self.options
def format(opt, self=self, env=env):
if opt.key in env:
actual = env.subst('${%s}' % opt.key)
else:
actual = None
return self.FormatVariableHelpText(env, opt.key, opt.help, opt.default, actual, opt.aliases)
lines = [_f for _f in map(format, options) if _f]
return ''.join(lines) | [
"\n Generate the help text for the options.\n\n env - an environment that is used to get the current values\n of the options.\n cmp - Either a function as follows: The specific sort function should take two arguments and return -1, 0 or 1 \n or a boolean to indicate if it should be sorted.\n "
] |
Please provide a description of the function:def splitext(path):
"Same as os.path.splitext() but faster."
sep = rightmost_separator(path, os.sep)
dot = path.rfind('.')
# An ext is only real if it has at least one non-digit char
if dot > sep and not containsOnly(path[dot:], "0123456789."):
return path[:dot],path[dot:]
else:
return path,"" | [] |
Please provide a description of the function:def updrive(path):
drive, rest = os.path.splitdrive(path)
if drive:
path = drive.upper() + rest
return path | [
"\n Make the drive letter (if any) upper case.\n This is useful because Windows is inconsistent on the case\n of the drive letter, which can cause inconsistencies when\n calculating command signatures.\n "
] |
Please provide a description of the function:def get_environment_var(varstr):
mo=_get_env_var.match(to_String(varstr))
if mo:
var = mo.group(1)
if var[0] == '{':
return var[1:-1]
else:
return var
else:
return None | [
"Given a string, first determine if it looks like a reference\n to a single environment variable, like \"$FOO\" or \"${FOO}\".\n If so, return that variable with no decorations (\"FOO\").\n If not, return None."
] |
Please provide a description of the function:def render_tree(root, child_func, prune=0, margin=[0], visited=None):
rname = str(root)
# Initialize 'visited' dict, if required
if visited is None:
visited = {}
children = child_func(root)
retval = ""
for pipe in margin[:-1]:
if pipe:
retval = retval + "| "
else:
retval = retval + " "
if rname in visited:
return retval + "+-[" + rname + "]\n"
retval = retval + "+-" + rname + "\n"
if not prune:
visited = copy.copy(visited)
visited[rname] = 1
for i in range(len(children)):
margin.append(i < len(children)-1)
retval = retval + render_tree(children[i], child_func, prune, margin, visited)
margin.pop()
return retval | [
"\n Render a tree of nodes into an ASCII tree view.\n\n :Parameters:\n - `root`: the root node of the tree\n - `child_func`: the function called to get the children of a node\n - `prune`: don't visit the same node twice\n - `margin`: the format of the left margin to use for children of root. 1 results in a pipe, and 0 results in no pipe.\n - `visited`: a dictionary of visited nodes in the current branch if not prune, or in the whole tree if prune.\n "
] |
Please provide a description of the function:def print_tree(root, child_func, prune=0, showtags=0, margin=[0], visited=None):
rname = str(root)
# Initialize 'visited' dict, if required
if visited is None:
visited = {}
if showtags:
if showtags == 2:
legend = (' E = exists\n' +
' R = exists in repository only\n' +
' b = implicit builder\n' +
' B = explicit builder\n' +
' S = side effect\n' +
' P = precious\n' +
' A = always build\n' +
' C = current\n' +
' N = no clean\n' +
' H = no cache\n' +
'\n')
sys.stdout.write(legend)
tags = ['[']
tags.append(' E'[IDX(root.exists())])
tags.append(' R'[IDX(root.rexists() and not root.exists())])
tags.append(' BbB'[[0,1][IDX(root.has_explicit_builder())] +
[0,2][IDX(root.has_builder())]])
tags.append(' S'[IDX(root.side_effect)])
tags.append(' P'[IDX(root.precious)])
tags.append(' A'[IDX(root.always_build)])
tags.append(' C'[IDX(root.is_up_to_date())])
tags.append(' N'[IDX(root.noclean)])
tags.append(' H'[IDX(root.nocache)])
tags.append(']')
else:
tags = []
def MMM(m):
return [" ","| "][m]
margins = list(map(MMM, margin[:-1]))
children = child_func(root)
if prune and rname in visited and children:
sys.stdout.write(''.join(tags + margins + ['+-[', rname, ']']) + '\n')
return
sys.stdout.write(''.join(tags + margins + ['+-', rname]) + '\n')
visited[rname] = 1
if children:
margin.append(1)
idx = IDX(showtags)
for C in children[:-1]:
print_tree(C, child_func, prune, idx, margin, visited)
margin[-1] = 0
print_tree(children[-1], child_func, prune, idx, margin, visited)
margin.pop() | [
"\n Print a tree of nodes. This is like render_tree, except it prints\n lines directly instead of creating a string representation in memory,\n so that huge trees can be printed.\n\n :Parameters:\n - `root` - the root node of the tree\n - `child_func` - the function called to get the children of a node\n - `prune` - don't visit the same node twice\n - `showtags` - print status information to the left of each node line\n - `margin` - the format of the left margin to use for children of root. 1 results in a pipe, and 0 results in no pipe.\n - `visited` - a dictionary of visited nodes in the current branch if not prune, or in the whole tree if prune.\n "
] |
Please provide a description of the function:def flatten(obj, isinstance=isinstance, StringTypes=StringTypes,
SequenceTypes=SequenceTypes, do_flatten=do_flatten):
if isinstance(obj, StringTypes) or not isinstance(obj, SequenceTypes):
return [obj]
result = []
for item in obj:
if isinstance(item, StringTypes) or not isinstance(item, SequenceTypes):
result.append(item)
else:
do_flatten(item, result)
return result | [
"Flatten a sequence to a non-nested list.\n\n Flatten() converts either a single scalar or a nested sequence\n to a non-nested list. Note that flatten() considers strings\n to be scalars instead of sequences like Python would.\n "
] |
Please provide a description of the function:def PrependPath(oldpath, newpath, sep = os.pathsep,
delete_existing=1, canonicalize=None):
orig = oldpath
is_list = 1
paths = orig
if not is_List(orig) and not is_Tuple(orig):
paths = paths.split(sep)
is_list = 0
if is_String(newpath):
newpaths = newpath.split(sep)
elif not is_List(newpath) and not is_Tuple(newpath):
newpaths = [ newpath ] # might be a Dir
else:
newpaths = newpath
if canonicalize:
newpaths=list(map(canonicalize, newpaths))
if not delete_existing:
# First uniquify the old paths, making sure to
# preserve the first instance (in Unix/Linux,
# the first one wins), and remembering them in normpaths.
# Then insert the new paths at the head of the list
# if they're not already in the normpaths list.
result = []
normpaths = []
for path in paths:
if not path:
continue
normpath = os.path.normpath(os.path.normcase(path))
if normpath not in normpaths:
result.append(path)
normpaths.append(normpath)
newpaths.reverse() # since we're inserting at the head
for path in newpaths:
if not path:
continue
normpath = os.path.normpath(os.path.normcase(path))
if normpath not in normpaths:
result.insert(0, path)
normpaths.append(normpath)
paths = result
else:
newpaths = newpaths + paths # prepend new paths
normpaths = []
paths = []
# now we add them only if they are unique
for path in newpaths:
normpath = os.path.normpath(os.path.normcase(path))
if path and not normpath in normpaths:
paths.append(path)
normpaths.append(normpath)
if is_list:
return paths
else:
return sep.join(paths) | [
"This prepends newpath elements to the given oldpath. Will only\n add any particular path once (leaving the first one it encounters\n and ignoring the rest, to preserve path order), and will\n os.path.normpath and os.path.normcase all paths to help assure\n this. This can also handle the case where the given old path\n variable is a list instead of a string, in which case a list will\n be returned instead of a string.\n\n Example:\n Old Path: \"/foo/bar:/foo\"\n New Path: \"/biz/boom:/foo\"\n Result: \"/biz/boom:/foo:/foo/bar\"\n\n If delete_existing is 0, then adding a path that exists will\n not move it to the beginning; it will stay where it is in the\n list.\n\n If canonicalize is not None, it is applied to each element of\n newpath before use.\n "
] |
Please provide a description of the function:def AddPathIfNotExists(env_dict, key, path, sep=os.pathsep):
try:
is_list = 1
paths = env_dict[key]
if not is_List(env_dict[key]):
paths = paths.split(sep)
is_list = 0
if os.path.normcase(path) not in list(map(os.path.normcase, paths)):
paths = [ path ] + paths
if is_list:
env_dict[key] = paths
else:
env_dict[key] = sep.join(paths)
except KeyError:
env_dict[key] = path | [
"This function will take 'key' out of the dictionary\n 'env_dict', then add the path 'path' to that key if it is not\n already there. This treats the value of env_dict[key] as if it\n has a similar format to the PATH variable...a list of paths\n separated by tokens. The 'path' will get added to the list if it\n is not already there."
] |
Please provide a description of the function:def unique(s):
n = len(s)
if n == 0:
return []
# Try using a dict first, as that's the fastest and will usually
# work. If it doesn't work, it will usually fail quickly, so it
# usually doesn't cost much to *try* it. It requires that all the
# sequence elements be hashable, and support equality comparison.
u = {}
try:
for x in s:
u[x] = 1
except TypeError:
pass # move on to the next method
else:
return list(u.keys())
del u
# We can't hash all the elements. Second fastest is to sort,
# which brings the equal elements together; then duplicates are
# easy to weed out in a single pass.
# NOTE: Python's list.sort() was designed to be efficient in the
# presence of many duplicate elements. This isn't true of all
# sort functions in all languages or libraries, so this approach
# is more effective in Python than it may be elsewhere.
try:
t = sorted(s)
except TypeError:
pass # move on to the next method
else:
assert n > 0
last = t[0]
lasti = i = 1
while i < n:
if t[i] != last:
t[lasti] = last = t[i]
lasti = lasti + 1
i = i + 1
return t[:lasti]
del t
# Brute force is all that's left.
u = []
for x in s:
if x not in u:
u.append(x)
return u | [
"Return a list of the elements in s, but without duplicates.\n\n For example, unique([1,2,3,1,2,3]) is some permutation of [1,2,3],\n unique(\"abcabc\") some permutation of [\"a\", \"b\", \"c\"], and\n unique(([1, 2], [2, 3], [1, 2])) some permutation of\n [[2, 3], [1, 2]].\n\n For best speed, all sequence elements should be hashable. Then\n unique() will usually work in linear time.\n\n If not possible, the sequence elements should enjoy a total\n ordering, and if list(s).sort() doesn't raise TypeError it's\n assumed that they do enjoy a total ordering. Then unique() will\n usually work in O(N*log2(N)) time.\n\n If that's not possible either, the sequence elements must support\n equality-testing. Then unique() will usually work in quadratic\n time.\n "
] |
Please provide a description of the function:def make_path_relative(path):
if os.path.isabs(path):
drive_s,path = os.path.splitdrive(path)
import re
if not drive_s:
path=re.compile("/*(.*)").findall(path)[0]
else:
path=path[1:]
assert( not os.path.isabs( path ) ), path
return path | [
" makes an absolute path name to a relative pathname.\n "
] |
Please provide a description of the function:def AddMethod(obj, function, name=None):
if name is None:
name = function.__name__
else:
function = RenameFunction(function, name)
# Note the Python version checks - WLB
# Python 3.3 dropped the 3rd parameter from types.MethodType
if hasattr(obj, '__class__') and obj.__class__ is not type:
# "obj" is an instance, so it gets a bound method.
if sys.version_info[:2] > (3, 2):
method = MethodType(function, obj)
else:
method = MethodType(function, obj, obj.__class__)
else:
# Handle classes
method = function
setattr(obj, name, method) | [
"\n Adds either a bound method to an instance or the function itself (or an unbound method in Python 2) to a class.\n If name is ommited the name of the specified function\n is used by default.\n\n Example::\n\n a = A()\n def f(self, x, y):\n self.z = x + y\n AddMethod(f, A, \"add\")\n a.add(2, 4)\n print(a.z)\n AddMethod(lambda self, i: self.l[i], a, \"listIndex\")\n print(a.listIndex(5))\n "
] |
Please provide a description of the function:def RenameFunction(function, name):
return FunctionType(function.__code__,
function.__globals__,
name,
function.__defaults__) | [
"\n Returns a function identical to the specified function, but with\n the specified name.\n "
] |
Please provide a description of the function:def _create_old_return_value(payload, num_ints, buff):
parsed = {'ints': payload[:num_ints], 'buffer': None, 'error': 'No Error',
'is_error': False, 'return_value': 0}
if buff:
parsed['buffer'] = bytearray(payload[-1])
return parsed | [
"Parse the response of an RPC call into a dictionary with integer and buffer results"
] |
Please provide a description of the function:def rpc(self, feature, cmd, *args, **kw):
rpc_id = (feature << 8 | cmd)
result_format = _create_resp_format(kw.get('result_type'), kw.get('result_format'))
arg_format = kw.get('arg_format')
if arg_format is None:
arg_format = _create_arg_format(args)
passed_kw = {}
if 'timeout' in kw:
passed_kw['timeout'] = kw['timeout']
response = self.rpc_v2(rpc_id, arg_format, result_format, *args, **passed_kw)
old_return = kw.get('result_type')
if old_return is not None:
return _create_old_return_value(response, *old_return)
return response | [
"Send an RPC call to this module, interpret the return value\n according to the result_type kw argument. Unless raise keyword\n is passed with value False, raise an RPCException if the command\n is not successful.\n "
] |
Please provide a description of the function:def rpc_v2(self, cmd, arg_format, result_format, *args, **kw):
if args:
packed_args = pack_rpc_payload(arg_format, list(args))
elif arg_format == "":
packed_args = b''
else:
raise RPCInvalidArgumentsError("Arg format expects arguments to be present", arg_format=arg_format, args=args)
passed_kw = dict()
if 'timeout' in kw:
passed_kw['timeout'] = kw['timeout']
try:
should_retry = False
payload = self.stream.send_rpc(self.addr, cmd, packed_args, **passed_kw)
except BusyRPCResponse:
if "retries" not in kw:
kw['retries'] = 10
# Sleep 100 ms and try again unless we've exhausted our retry attempts
if kw["retries"] == 0:
raise BusyRPCResponse("Could not complete RPC %d:%04X after 10 attempts due to busy tile" %
(self.addr, cmd))
should_retry = True
# If the tile was busy, automatically retry up to 10 times
if should_retry:
kw['retries'] -= 1
sleep(0.1)
return self.rpc_v2(cmd, arg_format, result_format, *args, **kw)
return unpack_rpc_payload(result_format, payload) | [
"Send an RPC call to this module, interpret the return value\n according to the result_type kw argument. Unless raise keyword\n is passed with value False, raise an RPCException if the command\n is not successful.\n\n v2 enforces the use of arg_format and result_format\n v2 combines the feature+cmd chunks in to a single 2-byte chunk\n "
] |
Please provide a description of the function:def hardware_version(self):
res = self.rpc(0x00, 0x02, result_type=(0, True))
# Result is a string but with zero appended to the end to make it a fixed 10 byte size
binary_version = res['buffer']
ver = ""
for x in binary_version:
if x != 0:
ver += chr(x)
return ver | [
"Return the embedded hardware version string for this tile.\n\n The hardware version is an up to 10 byte user readable string that is\n meant to encode any necessary information about the specific hardware\n that this tile is running on. For example, if you have multiple\n assembly variants of a given tile, you could encode that information\n here.\n\n Returns:\n str: The hardware version read from the tile.\n "
] |
Please provide a description of the function:def check_hardware(self, expected):
if len(expected) < 10:
expected += '\0'*(10 - len(expected))
err, = self.rpc(0x00, 0x03, expected, result_format="L")
if err == 0:
return True
return False | [
"Make sure the hardware version is what we expect.\n\n This convenience function is meant for ensuring that we are talking to\n a tile that has the correct hardware version.\n\n Args:\n expected (str): The expected hardware string that is compared\n against what is reported by the hardware_version RPC.\n\n Returns:\n bool: true if the hardware is the expected version, false otherwise\n "
] |
Please provide a description of the function:def status(self):
hw_type, name, major, minor, patch, status = self.rpc(0x00, 0x04, result_format="H6sBBBB")
status = {
'hw_type': hw_type,
'name': name.decode('utf-8'),
'version': (major, minor, patch),
'status': status
}
return status | [
"Query the status of an IOTile including its name and version"
] |
Please provide a description of the function:def tile_status(self):
stat = self.status()
flags = stat['status']
# FIXME: This needs to stay in sync with lib_common: cdb_status.h
status = {}
status['debug_mode'] = bool(flags & (1 << 3))
status['configured'] = bool(flags & (1 << 1))
status['app_running'] = bool(flags & (1 << 0))
status['trapped'] = bool(flags & (1 << 2))
return status | [
"Get the current status of this tile"
] |
Please provide a description of the function:async def client_event_handler(self, client_id, event_tuple, user_data):
conn_string, event_name, _event = event_tuple
self._logger.debug("Ignoring event %s from device %s forwarded for client %s",
event_name, conn_string, client_id)
return None | [
"Method called to actually send an event to a client.\n\n Users of this class should override this method to actually forward\n device events to their clients. It is called with the client_id\n passed to (or returned from) :meth:`setup_client` as well as the\n user_data object that was included there.\n\n The event tuple is a 3-tuple of:\n\n - connection string\n - event name\n - event object\n\n If you override this to be acoroutine, it will be awaited. The\n default implementation just logs the event.\n\n Args:\n client_id (str): The client_id that this event should be forwarded\n to.\n event_tuple (tuple): The connection_string, event_name and event_object\n that should be forwarded.\n user_data (object): Any user data that was passed to setup_client.\n "
] |
Please provide a description of the function:def setup_client(self, client_id=None, user_data=None, scan=True, broadcast=False):
# Generate a random unique client id when none is supplied.
if client_id is None:
client_id = str(uuid.uuid4())
if client_id in self._clients:
raise ArgumentError("Duplicate client_id: {}".format(client_id))
# Per-client monitor closure that forwards adapter events to this client.
async def _client_callback(conn_string, _, event_name, event):
event_tuple = (conn_string, event_name, event)
await self._forward_client_event(client_id, event_tuple)
client_monitor = self.adapter.register_monitor([], [], _client_callback)
# Track user data, open connections and the installed monitor per client.
self._clients[client_id] = dict(user_data=user_data, connections={},
monitor=client_monitor)
# Enable/disable scan (device_seen) and broadcast events per the flags.
self._adjust_global_events(client_id, scan, broadcast)
return client_id
"Setup a newly connected client.\n\n ``client_id`` must be unique among all connected clients. If it is\n passed as None, a random client_id will be generated as a string and\n returned.\n\n This method reserves internal resources for tracking what devices this\n client has connected to and installs a monitor into the adapter on\n behalf of the client.\n\n It should be called whenever a new client connects to the device server\n before any other activities by that client are allowed. By default,\n all clients start receiving ``device_seen`` events but if you want\n your client to also receive broadcast events, you can pass broadcast=True.\n\n Args:\n client_id (str): A unique identifier for this client that will be\n used to refer to it in all future interactions. If this is\n None, then a random string will be generated for the client_id.\n user_data (object): An arbitrary object that you would like to store\n with this client and will be passed to your event handler when\n events are forwarded to this client.\n scan (bool): Whether to install a monitor to listen for device_found\n events.\n broadcast (bool): Whether to install a monitor to list for broadcast\n events.\n\n Returns:\n str: The client_id.\n\n If a client id was passed in, it will be the same as what was passed\n in. If no client id was passed in then it will be a random unique\n string.\n "
] |
Please provide a description of the function:async def stop(self):
# Iterate over a copy of the keys because teardown_client mutates self._clients.
clients = list(self._clients)
for client in clients:
self._logger.info("Tearing down client %s at server stop()", client)
await self.teardown_client(client)
"Stop the server and teardown any remaining clients.\n\n If your subclass overrides this method, make sure to call\n super().stop() to ensure that all devices with open connections from\n thie server are properly closed.\n\n See :meth:`AbstractDeviceServer.stop`.\n "
] |
Please provide a description of the function:async def teardown_client(self, client_id):
# Raises ArgumentError if client_id is unknown.
client_info = self._client_info(client_id)
self.adapter.remove_monitor(client_info['monitor'])
conns = client_info['connections']
# Best-effort disconnect of every device this client still holds open;
# errors are logged but never propagated from this finalization path.
for conn_string, conn_id in conns.items():
try:
self._logger.debug("Disconnecting client %s from conn %s at teardown", client_id, conn_string)
await self.adapter.disconnect(conn_id)
except: #pylint:disable=bare-except; This is a finalization method that should not raise unexpectedly
self._logger.exception("Error disconnecting device during teardown_client: conn_string=%s", conn_string)
del self._clients[client_id]
"Release all resources held by a client.\n\n This method must be called and awaited whenever a client is\n disconnected. It ensures that all of the client's resources are\n properly released and any devices they have connected to are\n disconnected cleanly.\n\n Args:\n client_id (str): The client that we should tear down.\n\n Raises:\n ArgumentError: The client_id is unknown.\n "
] |
Please provide a description of the function:async def connect(self, client_id, conn_string):
conn_id = self.adapter.unique_conn_id()
# Validates the client_id (raises on unknown client) before connecting.
self._client_info(client_id)
await self.adapter.connect(conn_id, conn_string)
# Record the connection so it is released in teardown_client().
self._hook_connect(conn_string, conn_id, client_id)
"Connect to a device on behalf of a client.\n\n See :meth:`AbstractDeviceAdapter.connect`.\n\n Args:\n client_id (str): The client we are working for.\n conn_string (str): A connection string that will be\n passed to the underlying device adapter to connect.\n\n Raises:\n DeviceServerError: There is an issue with your client_id.\n DeviceAdapterError: The adapter had an issue connecting.\n "
] |
Please provide a description of the function:async def disconnect(self, client_id, conn_string):
conn_id = self._client_connection(client_id, conn_string)
try:
await self.adapter.disconnect(conn_id)
finally:
# Always drop our bookkeeping, even if the adapter disconnect raised.
self._hook_disconnect(conn_string, client_id)
"Disconnect from a device on behalf of a client.\n\n See :meth:`AbstractDeviceAdapter.disconnect`.\n\n Args:\n client_id (str): The client we are working for.\n conn_string (str): A connection string that will be\n passed to the underlying device adapter to connect.\n\n Raises:\n DeviceServerError: There is an issue with your client_id such\n as not being connected to the device.\n DeviceAdapterError: The adapter had an issue disconnecting.\n "
] |
Please provide a description of the function:async def open_interface(self, client_id, conn_string, interface):
# Resolve the client's connection id (raises if not connected).
conn_id = self._client_connection(client_id, conn_string)
# Hook first so there is no race on getting the first event
self._hook_open_interface(conn_string, interface, client_id)
await self.adapter.open_interface(conn_id, interface)
"Open a device interface on behalf of a client.\n\n See :meth:`AbstractDeviceAdapter.open_interface`.\n\n Args:\n client_id (str): The client we are working for.\n conn_string (str): A connection string that will be\n passed to the underlying device adapter.\n interface (str): The name of the interface to open.\n\n Raises:\n DeviceServerError: There is an issue with your client_id such\n as not being connected to the device.\n DeviceAdapterError: The adapter had an issue opening the interface.\n "
] |
Please provide a description of the function:async def close_interface(self, client_id, conn_string, interface):
conn_id = self._client_connection(client_id, conn_string)
await self.adapter.close_interface(conn_id, interface)
# Unhook only after the adapter has successfully closed the interface.
self._hook_close_interface(conn_string, interface, client_id)
"Close a device interface on behalf of a client.\n\n See :meth:`AbstractDeviceAdapter.close_interface`.\n\n Args:\n client_id (str): The client we are working for.\n conn_string (str): A connection string that will be\n passed to the underlying device adapter.\n interface (str): The name of the interface to close.\n\n Raises:\n DeviceServerError: There is an issue with your client_id such\n as not being connected to the device.\n DeviceAdapterError: The adapter had an issue closing the interface.\n "
] |
Please provide a description of the function:async def send_rpc(self, client_id, conn_string, address, rpc_id, payload, timeout):
# Resolve the client's connection id, then delegate the RPC to the adapter.
conn_id = self._client_connection(client_id, conn_string)
return await self.adapter.send_rpc(conn_id, address, rpc_id, payload, timeout)
"Send an RPC on behalf of a client.\n\n See :meth:`AbstractDeviceAdapter.send_rpc`.\n\n Args:\n client_id (str): The client we are working for.\n conn_string (str): A connection string that will be\n passed to the underlying device adapter to connect.\n address (int): The RPC address.\n rpc_id (int): The ID number of the RPC\n payload (bytes): The RPC argument payload\n timeout (float): The RPC's expected timeout to hand to the underlying\n device adapter.\n\n Returns:\n bytes: The RPC response.\n\n Raises:\n DeviceServerError: There is an issue with your client_id such\n as not being connected to the device.\n TileNotFoundError: The destination tile address does not exist\n RPCNotFoundError: The rpc_id does not exist on the given tile\n RPCErrorCode: The RPC was invoked successfully and wishes to fail\n with a non-zero status code.\n RPCInvalidIDError: The rpc_id is too large to fit in 16-bits.\n TileBusSerror: The tile was busy and could not respond to the RPC.\n Exception: The rpc raised an exception during processing.\n DeviceAdapterError: If there is a hardware or communication issue\n invoking the RPC.\n "
] |
Please provide a description of the function:async def send_script(self, client_id, conn_string, script):
# Resolve the client's connection id, then delegate to the adapter.
conn_id = self._client_connection(client_id, conn_string)
await self.adapter.send_script(conn_id, script)
"Send a script to a device on behalf of a client.\n\n See :meth:`AbstractDeviceAdapter.send_script`.\n\n Args:\n client_id (str): The client we are working for.\n conn_string (str): A connection string that will be\n passed to the underlying device adapter.\n script (bytes): The script that we wish to send.\n\n Raises:\n DeviceServerError: There is an issue with your client_id such\n as not being connected to the device.\n DeviceAdapterError: The adapter had a protocol issue sending the script.\n "
] |
Please provide a description of the function:async def debug(self, client_id, conn_string, command, args):
conn_id = self._client_info(client_id, 'connections')[conn_string]
return await self.adapter.debug(conn_id, command, args) | [
"Send a debug command to a device on behalf of a client.\n\n See :meth:`AbstractDeviceAdapter.send_script`.\n\n Args:\n client_id (str): The client we are working for.\n conn_string (str): A connection string that will be\n passed to the underlying device adapter.\n command (str): The name of the debug command to run.\n args (dict): Any command arguments.\n\n Returns:\n object: The response to the debug command.\n\n Raises:\n DeviceServerError: There is an issue with your client_id such\n as not being connected to the device.\n DeviceAdapterError: The adapter had a protocol issue sending the debug\n command.\n "
] |
Please provide a description of the function:def registration_packet(self):
return (self.hw_type, self.api_info[0], self.api_info[1], self.name, self.fw_info[0], self.fw_info[1], self.fw_info[2],
self.exec_info[0], self.exec_info[0], self.exec_info[0], self.slot, self.unique_id) | [
"Serialize this into a tuple suitable for returning from an RPC.\n\n Returns:\n tuple: The serialized values.\n "
] |
Please provide a description of the function:def clear_to_reset(self, config_vars):
super(TileManagerState, self).clear_to_reset(config_vars)
# Drop all but the first cached tile entry and leave both modes cleared.
self.registered_tiles = self.registered_tiles[:1]
self.safe_mode = False
self.debug_mode = False
"Clear to the state immediately after a reset."
] |
Please provide a description of the function:def insert_tile(self, tile_info):
# Replace an existing cache entry occupying the same slot, otherwise append.
for i, tile in enumerate(self.registered_tiles):
if tile.slot == tile_info.slot:
self.registered_tiles[i] = tile_info
return
self.registered_tiles.append(tile_info)
"Add or replace an entry in the tile cache.\n\n Args:\n tile_info (TileInfo): The newly registered tile.\n "
] |
Please provide a description of the function:def register_tile(self, hw_type, api_major, api_minor, name, fw_major, fw_minor, fw_patch, exec_major, exec_minor, exec_patch, slot, unique_id):
# Pack the loose version components into tuples for TileInfo.
api_info = (api_major, api_minor)
fw_info = (fw_major, fw_minor, fw_patch)
exec_info = (exec_major, exec_minor, exec_patch)
# Tile addresses start at 10 and are indexed by slot number.
address = 10 + slot
info = TileInfo(hw_type, name, api_info, fw_info, exec_info, slot, unique_id, state=TileState.JUST_REGISTERED, address=address)
self.tile_manager.insert_tile(info)
debug = int(self.tile_manager.debug_mode)
# In safe mode the tile is held with no config RPCs; otherwise queue the
# matching config variables and make it wait for an explicit start command.
if self.tile_manager.safe_mode:
run_level = RunLevel.SAFE_MODE
info.state = TileState.SAFE_MODE
config_rpcs = []
else:
run_level = RunLevel.START_ON_COMMAND
info.state = TileState.BEING_CONFIGURED
config_rpcs = self.config_database.stream_matching(address, name)
self.tile_manager.queue.put_nowait((info, config_rpcs))
return [address, run_level, debug]
"Register a tile with this controller.\n\n This function adds the tile immediately to its internal cache of registered tiles\n and queues RPCs to send all config variables and start tile rpcs back to the tile.\n "
] |
Please provide a description of the function:def describe_tile(self, index):
# Out-of-range indices return an invalid placeholder packet instead of raising.
if index >= len(self.tile_manager.registered_tiles):
tile = TileInfo.CreateInvalid()
else:
tile = self.tile_manager.registered_tiles[index]
return tile.registration_packet()
"Get the registration information for the tile at the given index."
] |
Please provide a description of the function:def execute_before(self, sensor_graph, scope_stack):
# Anchor the gate stream with a constant 0 so it has a defined value, then
# push a clock scope gated on (self.stream, self.trigger) for child statements.
sensor_graph.add_constant(self.stream, 0)
new_scope = GatedClockScope(sensor_graph, scope_stack, (self.stream, self.trigger))
scope_stack.append(new_scope)
"Execute statement before children are executed.\n\n Args:\n sensor_graph (SensorGraph): The sensor graph that we are building or\n modifying\n scope_stack (list(Scope)): A stack of nested scopes that may influence\n how this statement allocates clocks or other stream resources.\n "
] |
Please provide a description of the function:def ParseHeader(cls, script_data):
if len(script_data) < UpdateScript.SCRIPT_HEADER_LENGTH:
raise ArgumentError("Script is too short to contain a script header",
length=len(script_data), header_length=UpdateScript.SCRIPT_HEADER_LENGTH)
# Header layout: 16-byte hash, 4-byte magic, 4-byte total length (little-endian).
embedded_hash, magic, total_length = struct.unpack_from("<16sLL", script_data)
if magic != UpdateScript.SCRIPT_MAGIC:
raise ArgumentError("Script has invalid magic value", expected=UpdateScript.SCRIPT_MAGIC, found=magic)
if total_length != len(script_data):
raise ArgumentError("Script length does not match embedded length",
embedded_length=total_length, length=len(script_data))
# The integrity hash covers everything after the 16-byte embedded hash,
# truncated to the first 16 bytes of the SHA-256 digest.
hashed_data = script_data[16:]
sha = hashlib.sha256()
sha.update(hashed_data)
hash_value = sha.digest()[:16]
# compare_digest avoids timing side channels on the hash comparison.
if not compare_digest(embedded_hash, hash_value):
raise ArgumentError("Script has invalid embedded hash", embedded_hash=hexlify(embedded_hash),
calculated_hash=hexlify(hash_value))
return ScriptHeader(UpdateScript.SCRIPT_HEADER_LENGTH, False, True, False)
"Parse a script integrity header.\n\n This function makes sure any integrity hashes are correctly parsed and\n returns a ScriptHeader structure containing the information that it\n was able to parse out.\n\n Args:\n script_data (bytearray): The script that we should parse.\n\n Raises:\n ArgumentError: If the script contains malformed data that\n cannot be parsed.\n\n Returns:\n ScriptHeader: The parsed script header information\n "
] |
Please provide a description of the function:def FromBinary(cls, script_data, allow_unknown=True, show_rpcs=False):
curr = 0
records = []
# Validate and skip the integrity header before parsing records.
header = cls.ParseHeader(script_data)
curr = header.header_length
cls.logger.debug("Parsed script header: %s, skipping %d bytes", header, curr)
# Records are accumulated in record_data until a matcher accepts them;
# partial_match/match_offset remember the best match so far so we can
# rewind when a longer match fails.
record_count = 0
record_data = bytearray()
partial_match = None
match_offset = 0
while curr < len(script_data):
if len(script_data) - curr < UpdateRecord.HEADER_LENGTH:
raise ArgumentError("Script ended with a partial record", remaining_length=len(script_data) - curr)
# Add another record to our current list of records that we're parsing
total_length, record_type = struct.unpack_from("<LB", script_data[curr:])
cls.logger.debug("Found record of type %d, length %d", record_type, total_length)
record_data += script_data[curr:curr+total_length]
record_count += 1
curr += total_length
try:
if show_rpcs and record_type == SendRPCRecord.MatchType():
cls.logger.debug("  {0}".format(hexlify(record_data)))
record = SendRPCRecord.FromBinary(record_data[UpdateRecord.HEADER_LENGTH:], record_count)
elif show_rpcs and record_type == SendErrorCheckingRPCRecord.MatchType():
cls.logger.debug("  {0}".format(hexlify(record_data)))
record = SendErrorCheckingRPCRecord.FromBinary(record_data[UpdateRecord.HEADER_LENGTH:],
record_count)
else:
record = UpdateRecord.FromBinary(record_data, record_count)
except DeferMatching as defer:
# If we're told to defer matching, continue accumulating record_data
# until we get a complete match. If a partial match is available, keep track of
# that partial match so that we can use it once the record no longer matches.
if defer.partial_match is not None:
partial_match = defer.partial_match
match_offset = curr
continue
except DataError:
# Matching failed: fall back to the saved partial match (rewinding
# curr), or treat the record as unknown if that is allowed.
if record_count > 1 and partial_match:
record = partial_match
curr = match_offset
elif not allow_unknown:
raise
elif allow_unknown and record_count > 1:
raise ArgumentError("A record matched an initial record subset but failed"
" matching a subsequent addition without leaving a partial_match")
else:
record = UnknownRecord(record_type, record_data[UpdateRecord.HEADER_LENGTH:])
# Reset our record accumulator since we successfully matched one or more records
record_count = 0
record_data = bytearray()
partial_match = None
match_offset = 0
records.append(record)
return UpdateScript(records)
"Parse a binary update script.\n\n Args:\n script_data (bytearray): The binary data containing the script.\n allow_unknown (bool): Allow the script to contain unknown records\n so long as they have correct headers to allow us to skip them.\n show_rpcs (bool): Show SendRPCRecord matches for each record rather than\n the more specific operation\n Raises:\n ArgumentError: If the script contains malformed data that cannot\n be parsed.\n DataError: If the script contains unknown records and allow_unknown=False\n\n Returns:\n UpdateScript: The parsed update script.\n "
] |
Please provide a description of the function:def encode(self):
# Concatenate every record's binary encoding.
blob = bytearray()
for record in self.records:
blob += record.encode()
# Header: magic plus total script length including the full script header.
header = struct.pack("<LL", self.SCRIPT_MAGIC, len(blob) + self.SCRIPT_HEADER_LENGTH)
blob = header + blob
# Prepend the first 16 bytes of SHA-256 over header+records, matching
# the layout that ParseHeader() validates.
sha = hashlib.sha256()
sha.update(blob)
hash_value = sha.digest()[:16]
return bytearray(hash_value) + blob
"Encode this record into a binary blob.\n\n This binary blob could be parsed via a call to FromBinary().\n\n Returns:\n bytearray: The binary encoded script.\n "
] |
Please provide a description of the function:def create_worker(self, func, interval, *args, **kwargs):
thread = StoppableWorkerThread(func, interval, args, kwargs)
self._workers.append(thread)
# If the runnable is already started, launch the worker immediately;
# otherwise start_workers() will launch it later.
if self._started:
thread.start()
"Spawn a worker thread running func.\n\n The worker will be automatically be started when start() is called\n and terminated when stop() is called on this object.\n This must be called only from the main thread, not from a worker thread.\n\n create_worker must not be called after stop() has been called. If it\n is called before start() is called, the thread is started when start()\n is called, otherwise it is started immediately.\n\n Args:\n func (callable): Either a function that will be called in a loop\n with a sleep of interval seconds with *args and **kwargs or\n a generator function that will be called once and expected to\n yield periodically so that the worker can check if it should\n be killed.\n interval (float): The time interval between invocations of func.\n This should not be 0 so that the thread doesn't peg the CPU\n and should be short enough so that the worker checks if it\n should be killed in a timely fashion.\n *args: Arguments that are passed to func as positional args\n **kwargs: Arguments that are passed to func as keyword args\n "
] |
Please provide a description of the function:def start_workers(self):
# Guard against double-start, which would start threads twice.
if self._started:
raise InternalError("The method start() was called twice on a BaseRunnable object.")
self._started = True
for worker in self._workers:
worker.start()
"Start running this virtual device including any necessary worker threads."
] |
Please provide a description of the function:def stop_workers(self):
# Synchronous stop: each worker's stop() is invoked in turn.
self._started = False
for worker in self._workers:
worker.stop()
"Synchronously stop any potential workers."
] |
Please provide a description of the function:def stop_workers_async(self):
# Signal every worker to stop without waiting for them to finish.
self._started = False
for worker in self._workers:
worker.signal_stop()
"Signal that all workers should stop without waiting."
] |
Please provide a description of the function:def clock(self, interval, basis):
# Each distinct clock kind is requested from the parent at most once and
# cached; subsequent requests reuse the gated stream with a count trigger.
cache_name = self._classify_clock(interval, basis)
cache_data = self.clock_cache.get(cache_name)
if cache_data is None:
parent_stream, trigger = self.parent.clock(interval, basis)
if trigger.use_count is False:
raise SensorGraphSemanticError("Unsupported clock trigger in GatedClockScope", trigger=trigger)
elif interval % trigger.reference != 0:
raise SensorGraphSemanticError("Unsupported trigger ratio in GatedClockScope", trigger=trigger, interval=interval)
# Number of parent ticks per requested interval.
ratio = interval // trigger.reference
stream = self.allocator.allocate_stream(DataStream.CounterType)
latch_stream = self.allocator.attach_stream(self.latch_stream)
# Combine the parent clock with our latch stream so ticks only pass
# through while the latch condition holds.
self.sensor_graph.add_node(u'({} always && {} {}) => {} using copy_latest_a'.format(parent_stream, latch_stream, self.latch_trigger, stream))
self.clock_cache[cache_name] = (stream, ratio)
else:
stream, ratio = cache_data
if interval % ratio != 0:
raise SensorGraphSemanticError("Unsupported trigger ratio in GatedClockScope", ratio=ratio, interval=interval)
count = interval // ratio
clock_stream = self.allocator.attach_stream(stream)
return clock_stream, InputTrigger(u'count', '>=', count)
"Return a NodeInput tuple for triggering an event every interval.\n\n We request each distinct type of clock at most once and combine it with our\n latch stream each time it is requested.\n\n Args:\n interval (int): The interval (in seconds) at which this input should\n trigger.\n "
] |
Please provide a description of the function:def _download_ota_script(script_url):
# Fetch the OTA script and return its raw bytes.
# NOTE(review): returns False (not None/raises) on any failure, so callers
# must truth-test the result before use.
try:
blob = requests.get(script_url, stream=True)
return blob.content
except Exception as e:
iprint("Failed to download OTA script")
iprint(e)
return False
return False | [
"Download the script from the cloud service and store to temporary file location"
] |
Please provide a description of the function:def import_as(module, name):
# Load `module` from this file's directory and register it under `name`.
# NOTE: the `imp` module is deprecated in Python 3 in favor of importlib.
dir = os.path.split(__file__)[0]
return imp.load_module(name, *imp.find_module(module, [dir]))
"\n Imports the specified module (from our local directory) as the\n specified name, returning the loaded module object.\n "
] |
Please provide a description of the function:def rename_module(new, old):
# Import `old` and expose it in sys.modules under `new`; returns False when
# the old module does not exist instead of raising.
try:
sys.modules[new] = imp.load_module(old, *imp.find_module(old))
return True
except ImportError:
return False
"\n Attempts to import the old module and load it under the new name.\n Used for purely cosmetic name changes in Python 3.x.\n "
] |
Please provide a description of the function:def execute_before(self, sensor_graph, scope_stack):
# Push a child scope that records the slot being configured so nested
# statements can look it up via the 'current_slot' identifier.
parent = scope_stack[-1]
new_scope = Scope("Configuration Scope", sensor_graph, parent.allocator, parent)
new_scope.add_identifier('current_slot', self.slot)
scope_stack.append(new_scope)
"Execute statement before children are executed.\n\n Args:\n sensor_graph (SensorGraph): The sensor graph that we are building or\n modifying\n scope_stack (list(Scope)): A stack of nested scopes that may influence\n how this statement allocates clocks or other stream resources.\n "
] |
Please provide a description of the function:def _parse_conn_string(self, conn_string):
# Parse a 'name=value;name=value' connection string and update the debug
# port settings. Returns True when anything changed that requires the
# current J-Link session to be torn down and reopened.
disconnection_required = False
# If device not in conn_string, fall back to the default device info.
if conn_string is None or 'device' not in conn_string:
if self._default_device_info is not None and self._device_info != self._default_device_info:
disconnection_required = True
self._device_info = self._default_device_info
if conn_string is None or len(conn_string) == 0:
return disconnection_required
if '@' in conn_string:
raise ArgumentError("Configuration files are not yet supported as part of a connection string argument",
conn_string=conn_string)
pairs = conn_string.split(';')
for pair in pairs:
name, _, value = pair.partition('=')
# Silently skip malformed pairs with an empty name or value.
if len(name) == 0 or len(value) == 0:
continue
name = name.strip()
value = value.strip()
if name == 'device':
# Resolve the alias to a concrete known device description.
if value in DEVICE_ALIASES:
device_name = DEVICE_ALIASES[value]
if device_name in KNOWN_DEVICES:
device_info = KNOWN_DEVICES.get(device_name)
if self._device_info != device_info:
self._device_info = device_info
disconnection_required = True
else:
raise ArgumentError("Unknown device name or alias, please select from known_devices",
device_name=value, known_devices=[x for x in DEVICE_ALIASES.keys()])
elif name == 'channel':
# Channel selection only applies when a multiplexer function is set.
if self._mux_func is not None:
if self._channel != int(value):
self._channel = int(value)
disconnection_required = True
else:
print("Warning: multiplexing architecture not selected, channel will not be set")
return disconnection_required
"Parse a connection string passed from 'debug -c' or 'connect_direct'\n Returns True if any settings changed in the debug port, which\n would require a jlink disconnection ",
"If device not in conn_string, set to default info"
] |
Please provide a description of the function:def _try_connect(self, connection_string):
# If parsing the connection string changed any settings, drop the current
# session before reconnecting with the new configuration.
if self._parse_conn_string(connection_string):
self._trigger_callback('on_disconnect', self.id, self._connection_id)
self.stop_sync()
# Switch the multiplexer to the selected channel if one is configured.
if self._mux_func is not None:
self._mux_func(self._channel)
if self._device_info is None:
raise ArgumentError("Missing device name or alias, specify using device=name in port string "
"or -c device=name in connect_direct or debug command",
known_devices=[x for x in DEVICE_ALIASES.keys()])
try:
# Open the J-Link probe over SWD in little-endian mode.
self.jlink = pylink.JLink()
self.jlink.open(serial_no=self._jlink_serial)
self.jlink.set_tif(pylink.enums.JLinkInterfaces.SWD)
self.jlink.connect(self._device_info.jlink_name)
self.jlink.set_little_endian()
except pylink.errors.JLinkException as exc:
# Translate a missing-target-power error into an actionable message.
if exc.code == exc.VCC_FAILURE:
raise HardwareError("No target power detected", code=exc.code,
suggestion="Check jlink connection and power wiring")
raise
except:
raise
# Background thread that services debug commands against the probe.
self._control_thread = JLinkControlThread(self.jlink)
self._control_thread.start()
self.set_config('probe_required', True)
self.set_config('probe_supported', True)
"If the connection string settings are different, try and connect to an attached device"
] |
Please provide a description of the function:def stop_sync(self):
# Stop and join the control thread (if running), then close the probe.
if self._control_thread is not None and self._control_thread.is_alive():
self._control_thread.stop()
self._control_thread.join()
if self.jlink is not None:
self.jlink.close()
"Synchronously stop this adapter and release all resources."
] |
Please provide a description of the function:def probe_async(self, callback):
def _on_finished(_name, control_info, exception):
if exception is not None:
callback(self.id, False, str(exception))
return
self._control_info = control_info
try:
# Advertise the located device; connection_string is always "direct"
# since this adapter talks to a single wired target.
info = {
'connection_string': "direct",
'uuid': control_info.uuid,
'signal_strength': 100
}
self._trigger_callback('on_scan', self.id, info, self.ExpirationTime)
finally:
# Report probe completion even if the scan callback raised.
callback(self.id, True, None)
# Scan target RAM for the control structure asynchronously.
self._control_thread.command(JLinkControlThread.FIND_CONTROL, _on_finished, self._device_info.ram_start, self._device_info.ram_size)
"Send advertisements for all connected devices.\n\n Args:\n callback (callable): A callback for when the probe operation has completed.\n callback should have signature callback(adapter_id, success, failure_reason) where:\n success: bool\n failure_reason: None if success is True, otherwise a reason for why we could not probe\n "
] |
Please provide a description of the function:def debug_async(self, conn_id, cmd_name, cmd_args, progress_callback, callback):
known_commands = {
'dump_ram': JLinkControlThread.DUMP_ALL_RAM,
'program_flash': JLinkControlThread.PROGRAM_FLASH,
}
cmd_code = known_commands.get(cmd_name)
if cmd_code is None:
callback(conn_id, self.id, False, None, "Unsupported command: %s" % cmd_name)
def _on_finished(_name, retval, exception):
if exception is not None:
callback(conn_id, self.id, False, None, str(exception))
return
callback(conn_id, self.id, True, retval, None)
self._control_thread.command(cmd_code, _on_finished, self._device_info, self._control_info, cmd_args, progress_callback) | [
"Asynchronously complete a named debug command.\n\n The command name and arguments are passed to the underlying device adapter\n and interpreted there. If the command is long running, progress_callback\n may be used to provide status updates. Callback is called when the command\n has finished.\n\n Args:\n conn_id (int): A unique identifer that will refer to this connection\n cmd_name (string): the name of the debug command we want to invoke\n cmd_args (dict): any arguments that we want to send with this command.\n progress_callback (callable): A function to be called with status on our progress, called as:\n progress_callback(done_count, total_count)\n callback (callable): A callback for when we have finished the debug command, called as:\n callback(connection_id, adapter_id, success, retval, failure_reason)\n 'connection_id': the connection id\n 'adapter_id': this adapter's id\n 'success': a bool indicating whether we received a response to our attempted RPC\n 'failure_reason': a string with the reason for the failure if success == False\n 'retval': A command specific dictionary of return value information\n "
] |
Please provide a description of the function:def connect_async(self, connection_id, connection_string, callback):
# (Re)establish the J-Link session if the connection string changed settings.
self._try_connect(connection_string)
def _on_finished(_name, control_info, exception):
if exception is not None:
callback(connection_id, self.id, False, str(exception))
return
# Cache the verified control structure for later commands.
if control_info is not None:
self._control_info = control_info
callback(connection_id, self.id, True, None)
self._connection_id = connection_id
self._control_thread.command(JLinkControlThread.VERIFY_CONTROL, _on_finished, self._device_info, self._control_info)
"Connect to a device by its connection_string\n\n This function asynchronously connects to a device by its BLE address\n passed in the connection_string parameter and calls callback when\n finished. Callback is called on either success or failure with the\n signature:\n\n callback(conection_id, adapter_id, success: bool, failure_reason: string or None)\n\n Args:\n connection_string (string): A unique connection string that identifies\n which device to connect to, if many are possible.\n connection_id (int): A unique integer set by the caller for\n referring to this connection once created\n callback (callable): A callback function called when the\n connection has succeeded or failed\n "
] |
Please provide a description of the function:def _open_debug_interface(self, conn_id, callback, connection_string=None):
# Ensure the J-Link session matches the requested settings, then report success.
self._try_connect(connection_string)
callback(conn_id, self.id, True, None)
"Enable debug interface for this IOTile device\n\n Args:\n conn_id (int): the unique identifier for the connection\n callback (callback): Callback to be called when this command finishes\n callback(conn_id, adapter_id, success, failure_reason)\n "
] |
Please provide a description of the function:def send_rpc_async(self, conn_id, address, rpc_id, payload, timeout, callback):
# Queue the RPC on the control thread and translate its completion into
# the adapter callback signature.
def _on_finished(_name, retval, exception):
if exception is not None:
callback(conn_id, self.id, False, str(exception), None, None)
return
callback(conn_id, self.id, True, None, retval['status'], retval['payload'])
# Default to polling for the response every 1 millisecond
# FIXME, add an exponential polling backoff so that we wait 1, 2, 4, 8, etc ms
self._control_thread.command(JLinkControlThread.SEND_RPC, _on_finished, self._device_info, self._control_info, address, rpc_id, payload, 0.001, timeout)
"Asynchronously send an RPC to this IOTile device.\n\n Args:\n conn_id (int): A unique identifer that will refer to this connection\n address (int): the addres of the tile that we wish to send the RPC to\n rpc_id (int): the 16-bit id of the RPC we want to call\n payload (bytearray): the payload of the command\n timeout (float): the number of seconds to wait for the RPC to execute\n callback (callable): A callback for when we have finished the RPC. The callback will be called as\"\n callback(connection_id, adapter_id, success, failure_reason, status, payload)\n 'connection_id': the connection id\n 'adapter_id': this adapter's id\n 'success': a bool indicating whether we received a response to our attempted RPC\n 'failure_reason': a string with the reason for the failure if success == False\n 'status': the one byte status code returned for the RPC if success == True else None\n 'payload': a bytearray with the payload returned by RPC if success == True else None\n "
] |
Please provide a description of the function:def send_script_async(self, conn_id, data, progress_callback, callback):
# Queue the script transfer on the control thread; progress_callback is
# forwarded so the caller can track long transfers.
def _on_finished(_name, _retval, exception):
if exception is not None:
callback(conn_id, self.id, False, str(exception))
return
callback(conn_id, self.id, True, None)
self._control_thread.command(JLinkControlThread.SEND_SCRIPT, _on_finished, self._device_info, self._control_info, data, progress_callback)
"Asynchronously send a a script to this IOTile device\n\n Args:\n conn_id (int): A unique identifer that will refer to this connection\n data (string): the script to send to the device\n progress_callback (callable): A function to be called with status on our progress, called as:\n progress_callback(done_count, total_count)\n callback (callable): A callback for when we have finished sending the script. The callback will be called as\"\n callback(connection_id, adapter_id, success, failure_reason)\n 'connection_id': the connection id\n 'adapter_id': this adapter's id\n 'success': a bool indicating whether we received a response to our attempted RPC\n 'failure_reason': a string with the reason for the failure if success == False\n "
] |
def _handle_reset(self):
    """Reset this peripheral tile.

    Clears the registration, start and hosted-app events so the tile
    will reregister with the controller and fetch fresh configuration
    variables, then delegates the rest of the reset to the base class.
    """
    for event in (self._registered, self._start_received, self._hosted_app_running):
        event.clear()

    super(EmulatedPeripheralTile, self)._handle_reset()
"Reset this tile.\n\n This process needs to trigger the peripheral tile to reregister itself\n with the controller and get new configuration variables. It also\n needs to clear app_running.\n "
] |
def dump_state(self):
    """Dump the current state of this emulated tile as a dictionary.

    Extends the base-class state with this tile's app-started flag,
    debug mode and run level, so subclasses can chain serialization.

    Returns:
        dict: State suitable for a later load_state/restore_state call.
    """
    state = super(EmulatedPeripheralTile, self).dump_state()
    state.update(
        app_started=self._hosted_app_running.is_set(),
        debug_mode=self.debug_mode,
        run_level=self.run_level,
    )
    return state
"Dump the current state of this emulated tile as a dictionary.\n\n This function just dumps the status of the config variables. It is\n designed to be called in a chained fashion to serialize the complete\n state of a tile subclass.\n\n Returns:\n dict: The current state of the object that could be passed to load_state.\n "
] |
def restore_state(self, state):
    """Restore a previously dumped state onto this emulated tile.

    Args:
        state (dict): A state dictionary produced by dump_state.
    """
    super(EmulatedPeripheralTile, self).restore_state(state)

    self.debug_mode = state.get('debug_mode', False)
    self.run_level = state.get('run_level')

    app_started = state.get('app_started', False)
    if app_started:
        self._hosted_app_running.set()
"Restore the current state of this emulated object.\n\n Args:\n state (dict): A previously dumped state produced by dump_state.\n "
] |
async def start(self):
    """Start the gateway.

    Device adapters are brought up before the servers so the servers
    start with a working device manager behind them.
    """
    self._logger.info("Starting all device adapters")
    await self.device_manager.start()

    self._logger.info("Starting all servers")
    for srv in self.servers:
        await srv.start()
"Start the gateway."
] |
async def stop(self):
    """Stop the gateway manager and wait for it to shut down.

    Servers are stopped before the device adapters — the reverse of the
    start() ordering.
    """
    self._logger.info("Stopping all servers")
    for srv in self.servers:
        await srv.stop()

    self._logger.info("Stopping all device adapters")
    await self.device_manager.stop()
"Stop the gateway manager and synchronously wait for it to stop."
] |
def build_args():
    """Create the command line argument parser for the recipe runner.

    Returns:
        argparse.ArgumentParser: The configured parser.
    """
    arg_parser = argparse.ArgumentParser(description=DESCRIPTION,
                                         formatter_class=argparse.RawDescriptionHelpFormatter)
    arg_parser.add_argument('recipe', type=str, help="The recipe file to load and run.")
    arg_parser.add_argument('-d', '--define', action="append", default=[], help="Set a free variable in the recipe")
    arg_parser.add_argument('-l', '--loop', default=None, help="Loop over a free variable")
    arg_parser.add_argument('-i', '--info', action='store_true', help="Lists out all the steps of that recipe, doesn't run the recipe steps")
    arg_parser.add_argument('-a', '--archive', help="Archive the passed yaml recipe and do not run it")
    arg_parser.add_argument('-c', '--config', default=None, help="A YAML config file with variable definitions")
    return arg_parser
"Create command line argument parser."
] |
def load_variables(defines, config_file):
    """Load all variables from cmdline args and/or a config file.

    Args:
        defines (list of str): A list of name=value pairs that define
            free variables.
        config_file (str): An optional path to a yaml config file that
            defines a single dict with name=value variable definitions.

    Returns:
        dict: A map of variable names to their (string) values.
    """
    variables = {}

    if config_file is not None:
        with open(config_file, "r") as conf_file:
            # safe_load avoids instantiating arbitrary python objects
            # that a bare yaml.load() would construct from the file.
            loaded = yaml.safe_load(conf_file)

        # An empty yaml file parses to None; treat it as "no variables".
        if loaded is not None:
            variables.update(loaded)

    for define in defines:
        name, equ, value = define.partition('=')
        if equ != '=':
            print("Invalid variable definition")
            print("- expected name=value")
            print("- found: '%s'" % define)
            sys.exit(1)

        variables[name] = value

    return variables
"Load all variables from cmdline args and/or a config file.\n\n Args:\n defines (list of str): A list of name=value pairs that\n define free variables.\n config_file (str): An optional path to a yaml config\n file that defines a single dict with name=value\n variable definitions.\n "
] |
Please provide a description of the function:def main(argv=None):
    """Main entry point for the iotile-ship recipe runner.

    Args:
        argv (list of str): Optional command line parameters.  When not
            passed, sys.argv[1:] is used.

    Returns:
        int: 0 on success, 1 when a single (non-loop) run fails.
    """
    if argv is None:
        argv = sys.argv[1:]

    parser = build_args()
    args = parser.parse_args(args=argv)

    recipe_name, _ext = os.path.splitext(os.path.basename(args.recipe))

    rm = RecipeManager()
    # Whitelist so that only the named recipe file is loaded from its folder.
    rm.add_recipe_folder(os.path.dirname(args.recipe), whitelist=[os.path.basename(args.recipe)])
    recipe = rm.get_recipe(recipe_name)

    # --archive and --info are inspection modes: neither runs the recipe.
    if args.archive is not None:
        print("Archiving recipe into %s" % args.archive)
        recipe.archive(args.archive)
        return 0

    if args.info:
        print(recipe)
        return 0

    variables = load_variables(args.define, args.config)

    success = 0
    start_time = time.time()

    if args.loop is None:
        try:
            recipe.run(variables)
            success += 1
        except IOTileException as exc:
            print("Error running recipe: %s" % str(exc))
            return 1
    else:
        # Interactive loop mode: rerun the recipe once per user-entered
        # value of the loop variable until a blank line is entered.
        while True:
            value = input("Enter value for loop variable %s (return to stop): " % args.loop)
            if value == '':
                break

            # Copy so each iteration gets a fresh variable set.
            local_vars = dict(**variables)
            local_vars[args.loop] = value

            try:
                recipe.run(local_vars)
                success += 1
            except IOTileException as exc:
                print("--> ERROR processing loop variable %s: %s" % (value, str(exc)))

    end_time = time.time()
    total_time = end_time - start_time
    # Avoid a division by zero when every run failed.
    if success == 0:
        per_time = 0.0
    else:
        per_time = total_time / success

    print("Performed %d runs in %.1f seconds (%.1f seconds / run)" % (success, total_time, per_time))
    return 0
"Main entry point for iotile-ship recipe runner.\n\n This is the iotile-ship command line program.\n\n Args:\n argv (list of str): An optional set of command line\n parameters. If not passed, these are taken from\n sys.argv.\n "
] |
def MatchQuality(cls, record_data, record_count=1):
    """Check how well this record type matches the given binary data.

    Only a single binary record whose RPC command matches
    PersistGraphRecord.RPC_ID is a perfect match; anything else does
    not match at all.

    Args:
        record_data (bytearay): The raw record that we should check for
            a match.
        record_count (int): The number of binary records included in
            record_data.

    Returns:
        int: A MatchQuality constant (PerfectMatch or NoMatch).
    """
    if record_count == 1:
        cmd = cls._parse_rpc_info(record_data)[0]
        if cmd == PersistGraphRecord.RPC_ID:
            return MatchQuality.PerfectMatch

    return MatchQuality.NoMatch
"Check how well this record matches the given binary data.\n\n This function will only be called if the record matches the type code\n given by calling MatchType() and this functon should check how well\n this record matches and return a quality score between 0 and 100, with\n higher quality matches having higher scores. The default value should\n be MatchQuality.GenericMatch which is 50. If this record does not\n match at all, it should return MatchQuality.NoMatch.\n\n Many times, only a single record type will match a given binary record\n but there are times when multiple different logical records produce\n the same type of record in a script, such as set_version and\n set_userkey both producing a call_rpc record with different RPC\n values. The MatchQuality method is used to allow for rich decoding\n of such scripts back to the best possible record that created them.\n\n Args:\n record_data (bytearay): The raw record that we should check for\n a match.\n record_count (int): The number of binary records that are included\n in record_data.\n\n Returns:\n int: The match quality between 0 and 100. You should use the\n constants defined in MatchQuality as much as possible.\n "
] |
def FromBinary(cls, record_data, record_count=1):
    """Create a PersistGraphRecord from binary record data.

    Args:
        record_data (bytearray): The raw record data to parse, NOT
            including its 8 byte record header.
        record_count (int): The number of records included in record_data.

    Returns:
        PersistGraphRecord: The decoded record.
    """
    # Only the tile address is needed to rebuild the logical record.
    address = cls._parse_rpc_info(record_data)[1]
    return PersistGraphRecord(address=address)
"Create an UpdateRecord subclass from binary record data.\n\n This should be called with a binary record blob (NOT including the\n record type header) and it will decode it into a PersistGraphRecord.\n\n Args:\n record_data (bytearray): The raw record data that we wish to parse\n into an UpdateRecord subclass NOT including its 8 byte record header.\n record_count (int): The number of records included in record_data.\n\n Raises:\n ArgumentError: If the record_data is malformed and cannot be parsed.\n\n Returns:\n PersistGraphRecord: The decoded reflash tile record.\n "
] |
def escape_list(mylist, escape_func):
    """Escape a list of arguments by running the specified escape_func
    on every object in the list that has an escape() method.

    Objects without an escape() method are passed through unchanged.
    """
    result = []
    for item in mylist:
        try:
            escape_method = item.escape
        except AttributeError:
            result.append(item)
        else:
            result.append(escape_method(escape_func))
    return result
"Escape a list of arguments by running the specified escape_func\n on every object in the list that has an escape() method."
] |
def subst_dict(target, source):
    """Create a dictionary for substitution of special
    construction variables.

    This translates the following special arguments:

    target - the target (object or array of objects),
             used to generate the TARGET and TARGETS
             construction variables

    source - the source (object or array of objects),
             used to generate the SOURCES and SOURCE
             construction variables
    """
    # Renamed from 'dict' so we no longer shadow the builtin.
    result = {}

    if target:
        def get_tgt_subst_proxy(thing):
            try:
                subst_proxy = thing.get_subst_proxy()
            except AttributeError:
                subst_proxy = thing      # probably a string, just return it
            return subst_proxy
        tnl = NLWrapper(target, get_tgt_subst_proxy)
        result['TARGETS'] = Targets_or_Sources(tnl)
        result['TARGET'] = Target_or_Source(tnl)

        # This is a total cheat, but hopefully this dictionary goes
        # away soon anyway.  We just let these expand to $TARGETS
        # because that's "good enough" for the use of ToolSurrogates
        # (see test/ToolSurrogate.py) to generate documentation.
        result['CHANGED_TARGETS'] = '$TARGETS'
        result['UNCHANGED_TARGETS'] = '$TARGETS'
    else:
        result['TARGETS'] = NullNodesList
        result['TARGET'] = NullNodesList

    if source:
        def get_src_subst_proxy(node):
            try:
                rfile = node.rfile
            except AttributeError:
                pass
            else:
                node = rfile()
            try:
                return node.get_subst_proxy()
            except AttributeError:
                return node     # probably a String, just return it
        snl = NLWrapper(source, get_src_subst_proxy)
        result['SOURCES'] = Targets_or_Sources(snl)
        result['SOURCE'] = Target_or_Source(snl)

        # This is a total cheat, but hopefully this dictionary goes
        # away soon anyway.  We just let these expand to $TARGETS
        # because that's "good enough" for the use of ToolSurrogates
        # (see test/ToolSurrogate.py) to generate documentation.
        result['CHANGED_SOURCES'] = '$SOURCES'
        result['UNCHANGED_SOURCES'] = '$SOURCES'
    else:
        result['SOURCES'] = NullNodesList
        result['SOURCE'] = NullNodesList

    return result
"Create a dictionary for substitution of special\n construction variables.\n\n This translates the following special arguments:\n\n target - the target (object or array of objects),\n used to generate the TARGET and TARGETS\n construction variables\n\n source - the source (object or array of objects),\n used to generate the SOURCES and SOURCE\n construction variables\n "
] |
Please provide a description of the function:def scons_subst(strSubst, env, mode=SUBST_RAW, target=None, source=None, gvars={}, lvars={}, conv=None):
    """Expand a string or list containing construction variable
    substitutions, returning the expanded result.

    NOTE(review): the mutable defaults gvars={} / lvars={} are part of
    the public signature; '__builtins__' is inserted into and then
    deleted from gvars below, so the shared default appears deliberate.
    """
    # Fast path: a plain string with no '$' needs no expansion at all.
    if isinstance(strSubst, str) and strSubst.find('$') < 0:
        return strSubst

    class StringSubber(object):
        """Construct the results of a scons_subst() call.

        Binds a construction environment, mode, conversion function and
        global variable dictionary with the expand()/substitute()
        methods that perform the actual expansion.
        """
        def __init__(self, env, mode, conv, gvars):
            self.env = env
            self.mode = mode
            self.conv = conv
            self.gvars = gvars

        def expand(self, s, lvars):
            """Expand a single "token" as necessary, returning an
            appropriate string containing the expansion.

            Handles strings, sequences and callables, recursing through
            substitute() so side-by-side expansions stay separate.
            """
            if is_String(s):
                try:
                    s0, s1 = s[:2]
                except (IndexError, ValueError):
                    return s
                if s0 != '$':
                    return s
                if s1 == '$':
                    # In this case keep the double $'s which we'll later
                    # swap for a single dollar sign as we need to retain
                    # this information to properly avoid matching "$("" when
                    # the actual text was "$$("" (or "$)"" when "$$)"" )
                    return '$$'
                elif s1 in '()':
                    return s
                else:
                    key = s[1:]
                    if key[0] == '{' or '.' in key:
                        if key[0] == '{':
                            key = key[1:-1]
                        # ${...} and dotted keys are full python expressions.
                        try:
                            s = eval(key, self.gvars, lvars)
                        except KeyboardInterrupt:
                            raise
                        except Exception as e:
                            if e.__class__ in AllowableExceptions:
                                return ''
                            raise_exception(e, lvars['TARGETS'], s)
                    else:
                        # Simple keys: locals win over globals.
                        if key in lvars:
                            s = lvars[key]
                        elif key in self.gvars:
                            s = self.gvars[key]
                        elif not NameError in AllowableExceptions:
                            raise_exception(NameError(key), lvars['TARGETS'], s)
                        else:
                            return ''
                    # Before re-expanding the result, handle
                    # recursive expansion by copying the local
                    # variable dictionary and overwriting a null
                    # string for the value of the variable name
                    # we just expanded.
                    #
                    # This could potentially be optimized by only
                    # copying lvars when s contains more expansions,
                    # but lvars is usually supposed to be pretty
                    # small, and deeply nested variable expansions
                    # are probably more the exception than the norm,
                    # so it should be tolerable for now.
                    lv = lvars.copy()
                    var = key.split('.')[0]
                    lv[var] = ''
                    return self.substitute(s, lv)
            elif is_Sequence(s):
                def func(l, conv=self.conv, substitute=self.substitute, lvars=lvars):
                    return conv(substitute(l, lvars))
                return list(map(func, s))
            elif callable(s):
                try:
                    s = s(target=lvars['TARGETS'],
                          source=lvars['SOURCES'],
                          env=self.env,
                          for_signature=(self.mode != SUBST_CMD))
                except TypeError:
                    # This probably indicates that it's a callable
                    # object that doesn't match our calling arguments
                    # (like an Action).
                    if self.mode == SUBST_RAW:
                        return s
                    s = self.conv(s)
                return self.substitute(s, lvars)
            elif s is None:
                return ''
            else:
                return s

        def substitute(self, args, lvars):
            """Substitute expansions in an argument or list of arguments.

            Wraps expand(), splitting a string up into separate tokens.
            """
            if is_String(args) and not isinstance(args, CmdStringHolder):
                args = str(args)        # In case it's a UserString.
                try:
                    def sub_match(match):
                        return self.conv(self.expand(match.group(1), lvars))
                    result = _dollar_exps.sub(sub_match, args)
                except TypeError:
                    # If the internal conversion routine doesn't return
                    # strings (it could be overridden to return Nodes, for
                    # example), then the 1.5.2 re module will throw this
                    # exception.  Back off to a slower, general-purpose
                    # algorithm that works for all data types.
                    args = _separate_args.findall(args)
                    result = []
                    for a in args:
                        result.append(self.conv(self.expand(a, lvars)))
                    if len(result) == 1:
                        result = result[0]
                    else:
                        result = ''.join(map(str, result))
                return result
            else:
                return self.expand(args, lvars)

    if conv is None:
        conv = _strconv[mode]

    # Doing this every time is a bit of a waste, since the Executor
    # has typically already populated the OverrideEnvironment with
    # $TARGET/$SOURCE variables.  We're keeping this (for now), though,
    # because it supports existing behavior that allows us to call
    # an Action directly with an arbitrary target+source pair, which
    # we use in Tool/tex.py to handle calling $BIBTEX when necessary.
    # If we dropped that behavior (or found another way to cover it),
    # we could get rid of this call completely and just rely on the
    # Executor setting the variables.
    if 'TARGET' not in lvars:
        d = subst_dict(target, source)
        if d:
            lvars = lvars.copy()
            lvars.update(d)

    # We're (most likely) going to eval() things.  If Python doesn't
    # find a __builtins__ value in the global dictionary used for eval(),
    # it copies the current global values for you.  Avoid this by
    # setting it explicitly and then deleting, so we don't pollute the
    # construction environment Dictionary(ies) that are typically used
    # for expansion.
    gvars['__builtins__'] = __builtins__

    ss = StringSubber(env, mode, conv, gvars)
    result = ss.substitute(strSubst, lvars)

    try:
        del gvars['__builtins__']
    except KeyError:
        pass

    res = result
    if is_String(result):
        # Remove $(-$) pairs and any stuff in between,
        # if that's appropriate.
        remove = _regex_remove[mode]
        if remove:
            if mode == SUBST_SIG:
                result = _list_remove[mode](remove.split(result))
                if result is None:
                    raise SCons.Errors.UserError("Unbalanced $(/$) in: " + res)
                result = ' '.join(result)
            else:
                result = remove.sub('', result)
        if mode != SUBST_RAW:
            # Compress strings of white space characters into
            # a single space.
            result = _space_sep.sub(' ', result).strip()
        # Now replace escaped $'s currently "$$"
        # This is needed because we now retain $$ instead of
        # replacing them during substition to avoid
        # improperly trying to escape "$$(" as being "$("
        result = result.replace('$$','$')
    elif is_Sequence(result):
        remove = _list_remove[mode]
        if remove:
            result = remove(result)
        if result is None:
            raise SCons.Errors.UserError("Unbalanced $(/$) in: " + str(res))

    return result
"Expand a string or list containing construction variable\n substitutions.\n\n This is the work-horse function for substitutions in file names\n and the like. The companion scons_subst_list() function (below)\n handles separating command lines into lists of arguments, so see\n that function if that's what you're looking for.\n ",
"A class to construct the results of a scons_subst() call.\n\n This binds a specific construction environment, mode, target and\n source with two methods (substitute() and expand()) that handle\n the expansion.\n ",
"Expand a single \"token\" as necessary, returning an\n appropriate string containing the expansion.\n\n This handles expanding different types of things (strings,\n lists, callables) appropriately. It calls the wrapper\n substitute() method to re-expand things as necessary, so that\n the results of expansions of side-by-side strings still get\n re-evaluated separately, not smushed together.\n ",
"Substitute expansions in an argument or list of arguments.\n\n This serves as a wrapper for splitting up a string into\n separate tokens.\n "
] |
Please provide a description of the function:def scons_subst_list(strSubst, env, mode=SUBST_RAW, target=None, source=None, gvars={}, lvars={}, conv=None):
    """Substitute construction variables in a string (or list or other
    object) and separate the arguments into a command list.

    See scons_subst() for plain string substitution.

    NOTE(review): the mutable defaults gvars={} / lvars={} are part of
    the public signature; '__builtins__' is inserted into and then
    deleted from gvars below, so the shared default appears deliberate.
    """
    class ListSubber(collections.UserList):
        """Construct the results of a scons_subst_list() call.

        Like StringSubber, binds a construction environment, mode,
        conversion function and globals with expand()/substitute().
        Additionally tracks the output state by rebinding self.append
        to the right word/line handler, so callers only ever call
        self.append() and the object does the right thing internally.
        """
        def __init__(self, env, mode, conv, gvars):
            collections.UserList.__init__(self, [])
            self.env = env
            self.mode = mode
            self.conv = conv
            self.gvars = gvars

            # Only raw mode keeps the $( / $) markers in the output.
            if self.mode == SUBST_RAW:
                self.add_strip = lambda x: self.append(x)
            else:
                self.add_strip = lambda x: None
            self.in_strip = None
            self.next_line()

        def expand(self, s, lvars, within_list):
            """Expand a single "token" as necessary, appending the
            expansion to the current result.

            Handles strings, sequences and callables, recursing through
            substitute() so results get re-evaluated separately.
            """
            if is_String(s):
                try:
                    s0, s1 = s[:2]
                except (IndexError, ValueError):
                    self.append(s)
                    return
                if s0 != '$':
                    self.append(s)
                    return
                if s1 == '$':
                    self.append('$')
                elif s1 == '(':
                    self.open_strip('$(')
                elif s1 == ')':
                    self.close_strip('$)')
                else:
                    key = s[1:]
                    if key[0] == '{' or key.find('.') >= 0:
                        if key[0] == '{':
                            key = key[1:-1]
                        # ${...} and dotted keys are full python expressions.
                        try:
                            s = eval(key, self.gvars, lvars)
                        except KeyboardInterrupt:
                            raise
                        except Exception as e:
                            if e.__class__ in AllowableExceptions:
                                return
                            raise_exception(e, lvars['TARGETS'], s)
                    else:
                        # Simple keys: locals win over globals.
                        if key in lvars:
                            s = lvars[key]
                        elif key in self.gvars:
                            s = self.gvars[key]
                        elif not NameError in AllowableExceptions:
                            raise_exception(NameError(), lvars['TARGETS'], s)
                        else:
                            return
                    # Before re-expanding the result, handle
                    # recursive expansion by copying the local
                    # variable dictionary and overwriting a null
                    # string for the value of the variable name
                    # we just expanded.
                    lv = lvars.copy()
                    var = key.split('.')[0]
                    lv[var] = ''
                    self.substitute(s, lv, 0)
                    self.this_word()
            elif is_Sequence(s):
                for a in s:
                    self.substitute(a, lvars, 1)
                    self.next_word()
            elif callable(s):
                try:
                    s = s(target=lvars['TARGETS'],
                          source=lvars['SOURCES'],
                          env=self.env,
                          for_signature=(self.mode != SUBST_CMD))
                except TypeError:
                    # This probably indicates that it's a callable
                    # object that doesn't match our calling arguments
                    # (like an Action).
                    if self.mode == SUBST_RAW:
                        self.append(s)
                        return
                    s = self.conv(s)
                self.substitute(s, lvars, within_list)
            elif s is None:
                self.this_word()
            else:
                self.append(s)

        def substitute(self, args, lvars, within_list):
            """Substitute expansions in an argument or list of arguments.

            Wraps expand(), splitting a string up into separate tokens
            and turning whitespace into word/line breaks.
            """
            if is_String(args) and not isinstance(args, CmdStringHolder):
                args = str(args)        # In case it's a UserString.
                args = _separate_args.findall(args)
                for a in args:
                    if a[0] in ' \t\n\r\f\v':
                        if '\n' in a:
                            self.next_line()
                        elif within_list:
                            self.append(a)
                        else:
                            self.next_word()
                    else:
                        self.expand(a, lvars, within_list)
            else:
                self.expand(args, lvars, within_list)

        def next_line(self):
            """Arrange for the next word to start a new line, appending
            another line list to the result."""
            collections.UserList.append(self, [])
            self.next_word()

        def this_word(self):
            """Arrange for the next word to append to the end of the
            current last word in the result."""
            self.append = self.add_to_current_word

        def next_word(self):
            """Arrange for the next word to start a new word."""
            self.append = self.add_new_word

        def add_to_current_word(self, x):
            """Append the string x to the end of the current last word
            in the result, or start a new word when that is not possible
            (empty result, or after a redirection character)."""
            if not self.in_strip or self.mode != SUBST_SIG:
                try:
                    current_word = self[-1][-1]
                except IndexError:
                    self.add_new_word(x)
                else:
                    # All right, this is a hack and it should probably
                    # be refactored out of existence in the future.
                    # The issue is that we want to smoosh words together
                    # and make one file name that gets escaped if
                    # we're expanding something like foo$EXTENSION,
                    # but we don't want to smoosh them together if
                    # it's something like >$TARGET, because then we'll
                    # treat the '>' like it's part of the file name.
                    # So for now, just hard-code looking for the special
                    # command-line redirection characters...
                    try:
                        last_char = str(current_word)[-1]
                    except IndexError:
                        last_char = '\0'
                    if last_char in '<>|':
                        self.add_new_word(x)
                    else:
                        y = current_word + x
                        # We used to treat a word appended to a literal
                        # as a literal itself, but this caused problems
                        # with interpreting quotes around space-separated
                        # targets on command lines.  Removing this makes
                        # none of the "substantive" end-to-end tests fail,
                        # so we'll take this out but leave it commented
                        # for now in case there's a problem not covered
                        # by the test cases and we need to resurrect this.
                        #literal1 = self.literal(self[-1][-1])
                        #literal2 = self.literal(x)
                        y = self.conv(y)
                        if is_String(y):
                            #y = CmdStringHolder(y, literal1 or literal2)
                            y = CmdStringHolder(y, None)
                        self[-1][-1] = y

        def add_new_word(self, x):
            """Append x as a new word on the current line, then switch
            back to appending subsequent pieces onto this word."""
            if not self.in_strip or self.mode != SUBST_SIG:
                literal = self.literal(x)
                x = self.conv(x)
                if is_String(x):
                    x = CmdStringHolder(x, literal)
                self[-1].append(x)
            self.append = self.add_to_current_word

        def literal(self, x):
            # A value is literal if it says so via is_literal().
            try:
                l = x.is_literal
            except AttributeError:
                return None
            else:
                return l()

        def open_strip(self, x):
            """Handle the "open strip" $( token."""
            self.add_strip(x)
            self.in_strip = 1

        def close_strip(self, x):
            """Handle the "close strip" $) token."""
            self.add_strip(x)
            self.in_strip = None

    if conv is None:
        conv = _strconv[mode]

    # Doing this every time is a bit of a waste, since the Executor
    # has typically already populated the OverrideEnvironment with
    # $TARGET/$SOURCE variables.  We're keeping this (for now), though,
    # because it supports existing behavior that allows us to call
    # an Action directly with an arbitrary target+source pair, which
    # we use in Tool/tex.py to handle calling $BIBTEX when necessary.
    # If we dropped that behavior (or found another way to cover it),
    # we could get rid of this call completely and just rely on the
    # Executor setting the variables.
    if 'TARGET' not in lvars:
        d = subst_dict(target, source)
        if d:
            lvars = lvars.copy()
            lvars.update(d)

    # We're (most likely) going to eval() things.  If Python doesn't
    # find a __builtins__ value in the global dictionary used for eval(),
    # it copies the current global values for you.  Avoid this by
    # setting it explicitly and then deleting, so we don't pollute the
    # construction environment Dictionary(ies) that are typically used
    # for expansion.
    gvars['__builtins__'] = __builtins__

    ls = ListSubber(env, mode, conv, gvars)
    ls.substitute(strSubst, lvars, 0)

    try:
        del gvars['__builtins__']
    except KeyError:
        pass

    return ls.data
"Substitute construction variables in a string (or list or other\n object) and separate the arguments into a command list.\n\n The companion scons_subst() function (above) handles basic\n substitutions within strings, so see that function instead\n if that's what you're looking for.\n ",
"A class to construct the results of a scons_subst_list() call.\n\n Like StringSubber, this class binds a specific construction\n environment, mode, target and source with two methods\n (substitute() and expand()) that handle the expansion.\n\n In addition, however, this class is used to track the state of\n the result(s) we're gathering so we can do the appropriate thing\n whenever we have to append another word to the result--start a new\n line, start a new word, append to the current word, etc. We do\n this by setting the \"append\" attribute to the right method so\n that our wrapper methods only need ever call ListSubber.append(),\n and the rest of the object takes care of doing the right thing\n internally.\n ",
"Expand a single \"token\" as necessary, appending the\n expansion to the current result.\n\n This handles expanding different types of things (strings,\n lists, callables) appropriately. It calls the wrapper\n substitute() method to re-expand things as necessary, so that\n the results of expansions of side-by-side strings still get\n re-evaluated separately, not smushed together.\n ",
"Substitute expansions in an argument or list of arguments.\n\n This serves as a wrapper for splitting up a string into\n separate tokens.\n ",
"Arrange for the next word to start a new line. This\n is like starting a new word, except that we have to append\n another line to the result.",
"Arrange for the next word to append to the end of the\n current last word in the result.",
"Arrange for the next word to start a new word.",
"Append the string x to the end of the current last word\n in the result. If that is not possible, then just add\n it as a new word. Make sure the entire concatenated string\n inherits the object attributes of x (in particular, the\n escape function) by wrapping it as CmdStringHolder.",
"Handle the \"open strip\" $( token.",
"Handle the \"close strip\" $) token."
] |
def scons_subst_once(strSubst, env, key):
    """Perform a single (non-recursive) substitution of a single
    construction variable keyword.

    Used when setting a variable while copying or overriding values in
    an Environment, so the old value can be captured (expanded) before
    it is replaced, e.g.:

        env2 = env.Clone(CCFLAGS = '$CCFLAGS -g')
    """
    if isinstance(strSubst, str) and '$' not in strSubst:
        return strSubst

    patterns = ['$' + key, '${' + key + '}']
    replacement = env.get(key, '')

    def _expand_match(match, replacement=replacement, patterns=patterns):
        # Swap in the captured value only when the matched expansion is
        # the keyword we are substituting; everything else stays put.
        token = match.group(1)
        if token in patterns:
            token = replacement
        if is_Sequence(token):
            return ' '.join(map(str, token))
        return str(token)

    if is_Sequence(strSubst):
        expanded = []
        for element in strSubst:
            if not is_String(element):
                expanded.append(element)
            elif element in patterns:
                if is_Sequence(replacement):
                    expanded.extend(replacement)
                else:
                    expanded.append(replacement)
            else:
                expanded.append(_dollar_exps.sub(_expand_match, element))
        return expanded

    if is_String(strSubst):
        return _dollar_exps.sub(_expand_match, strSubst)

    return strSubst
"Perform single (non-recursive) substitution of a single\n construction variable keyword.\n\n This is used when setting a variable when copying or overriding values\n in an Environment. We want to capture (expand) the old value before\n we override it, so people can do things like:\n\n env2 = env.Clone(CCFLAGS = '$CCFLAGS -g')\n\n We do this with some straightforward, brute-force code here...\n "
] |
def escape(self, escape_func, quote_func=quote_spaces):
    """Escape the string with the supplied function.

    The function is expected to take an arbitrary string and return it
    with all special characters escaped, ready for passing to the
    command interpreter.  Literal strings are escaped outright;
    otherwise the string is only quoted when it contains whitespace.
    """
    if self.is_literal():
        return escape_func(self.data)

    if ' ' in self.data or '\t' in self.data:
        return quote_func(self.data)

    return self.data
"Escape the string with the supplied function. The\n function is expected to take an arbitrary string, then\n return it with all special characters escaped and ready\n for passing to the command interpreter.\n\n After calling this function, the next call to str() will\n return the escaped string.\n "
] |
def indent_list(inlist, level):
    """Join a list of strings, one per line with 'level' spaces before each one."""
    prefix = ' ' * level
    return prefix + ('\n' + prefix).join(inlist)
"Join a list of strings, one per line with 'level' spaces before each one"
] |
def process_warn_strings(arguments):
    """Process string specifications of enabling/disabling warnings,
    as passed to the --warn option or the SetOption('warn') function.

    An argument should be of the form <warning-class> or
    no-<warning-class>.  The <warning-class> is split on hyphens, each
    element is capitalized, and the pieces are joined back together
    with "Warning" appended to produce the name of one of the warning
    classes defined above, which is then enabled or disabled.

    For example, 'deprecated' enables the DeprecatedWarning class and
    'no-dependency' disables the DependencyWarning class.  As a special
    case, 'all' and 'no-all' enable or disable (respectively) the base
    Warning class of all warnings.
    """

    def _capitalize(s):
        # Keep the SCons brand capitalization ("SCons", not "Scons").
        if s[:5] == "scons":
            return "SCons" + s[5:]
        else:
            return s.capitalize()

    for arg in arguments:
        elems = arg.lower().split('-')
        enable = 1
        if elems[0] == 'no':
            enable = 0
            del elems[0]

        if len(elems) == 1 and elems[0] == 'all':
            class_name = "Warning"
        else:
            class_name = ''.join(map(_capitalize, elems)) + "Warning"

        try:
            clazz = globals()[class_name]
        except KeyError:
            sys.stderr.write("No warning type: '%s'\n" % arg)
        else:
            if enable:
                enableWarningClass(clazz)
            elif issubclass(clazz, MandatoryDeprecatedWarning):
                # Typo fix: message previously read "mandataory".
                fmt = "Can not disable mandatory warning: '%s'\n"
                sys.stderr.write(fmt % arg)
            else:
                suppressWarningClass(clazz)
"Process string specifications of enabling/disabling warnings,\n as passed to the --warn option or the SetOption('warn') function.\n \n\n An argument to this option should be of the form <warning-class>\n or no-<warning-class>. The warning class is munged in order\n to get an actual class name from the classes above, which we\n need to pass to the {enable,disable}WarningClass() functions.\n The supplied <warning-class> is split on hyphens, each element\n is capitalized, then smushed back together. Then the string\n \"Warning\" is appended to get the class name.\n\n For example, 'deprecated' will enable the DeprecatedWarning\n class. 'no-dependency' will disable the DependencyWarning class.\n\n As a special case, --warn=all and --warn=no-all will enable or\n disable (respectively) the base Warning class of all warnings.\n\n "
] |
Please provide a description of the function:def generate(env):
    """Add Builders and construction variables for gfortran to an Environment."""
    fortran.generate(env)
    # gfortran handles every Fortran dialect with the same front end, so
    # point each dialect's compiler variable at the one binary.
    for dialect in ['F77', 'F90', 'FORTRAN', 'F95', 'F03', 'F08']:
        env['%s' % dialect] = 'gfortran'
        env['SH%s' % dialect] = '$%s' % dialect
        if env['PLATFORM'] in ['cygwin', 'win32']:
            env['SH%sFLAGS' % dialect] = SCons.Util.CLVar('$%sFLAGS' % dialect)
        else:
            # Shared-object builds need position-independent code on
            # non-Windows platforms.
            env['SH%sFLAGS' % dialect] = SCons.Util.CLVar('$%sFLAGS -fPIC' % dialect)
        env['INC%sPREFIX' % dialect] = "-I"
        env['INC%sSUFFIX' % dialect] = ""
"Add Builders and construction variables for gfortran to an\n Environment."
] |
Please provide a description of the function:def _extract_device_uuid(cls, slug):
if len(slug) != 22:
raise ArgumentError("Invalid device slug", slug=slug)
hexdigits = slug[3:]
hexdigits = hexdigits.replace('-', '')
try:
rawbytes = binascii.unhexlify(hexdigits)
words = struct.unpack(">LL", rawbytes)
return (words[0] << 32) | (words[1])
except ValueError as exc:
raise ArgumentError("Could not convert device slug to hex integer", slug=slug, error=str(exc)) | [
"Turn a string slug into a UUID\n "
] |
Please provide a description of the function:def start(self):
    """Start this gateway agent.

    Prepares the agent and starts a 1-second periodic callback that
    disconnects devices whose connections have gone idle.
    """
    self._prepare()
    self._disconnector = tornado.ioloop.PeriodicCallback(self._disconnect_hanging_devices, 1000, self._loop)
    self._disconnector.start()
"Start this gateway agent."
] |
Please provide a description of the function:def stop(self):
    """Stop this gateway agent."""
    # The periodic disconnector only exists once start() has been called.
    if self._disconnector:
        self._disconnector.stop()
    self.client.disconnect()
"Stop this gateway agent."
] |
Please provide a description of the function:def _validate_connection(self, action, uuid, key):
    """Validate that a message received for a device carries the right key.

    Args:
        action (string): The name of the action being validated (context
            only; not otherwise used here).
        uuid (int): The uuid of the device the message is for.
        key (string): The key passed in when this device was first
            connected to.

    Returns:
        int: The internal connection id to use with the DeviceManager if
            the action is allowed, otherwise None.
    """
    if uuid not in self._connections:
        self._logger.warn("Received message for device with no connection 0x%X", uuid)
        return None
    data = self._connections[uuid]
    if key != data['key']:
        self._logger.warn("Received message for device with incorrect key, uuid=0x%X", uuid)
        return None
    return data['connection_id']
"Validate that a message received for a device has the right key\n\n If this action is valid the corresponding internal connection id to\n be used with the DeviceManager is returned, otherwise None is returned\n and an invalid message status is published.\n\n Args:\n slug (string): The slug for the device we're trying to connect to\n uuid (int): The uuid corresponding to the slug\n key (string): The key passed in when this device was first connected\n to\n\n Returns:\n int: if the action is allowed, otherwise None\n "
] |
Please provide a description of the function:def _publish_status(self, slug, data):
    """Publish a status message for a device.

    Args:
        slug (string): The device slug that we are publishing on behalf of.
        data (dict): The status message data to be sent back to the caller.
    """
    status_topic = self.topics.prefix + 'devices/{}/data/status'.format(slug)
    self._logger.debug("Publishing status message: (topic=%s) (message=%s)", status_topic, str(data))
    self.client.publish(status_topic, data)
"Publish a status message for a device\n\n Args:\n slug (string): The device slug that we are publishing on behalf of\n data (dict): The status message data to be sent back to the caller\n "
] |
Please provide a description of the function:def _publish_response(self, slug, message):
    """Publish a response message for a device.

    Args:
        slug (string): The device slug that we are publishing on behalf of.
        message (dict): The key/value pairs used to build the message
            that is sent.
    """
    resp_topic = self.topics.gateway_topic(slug, 'data/response')
    self._logger.debug("Publishing response message: (topic=%s) (message=%s)", resp_topic, message)
    self.client.publish(resp_topic, message)
"Publish a response message for a device\n\n Args:\n slug (string): The device slug that we are publishing on behalf of\n message (dict): A set of key value pairs that are used to create the message\n that is sent.\n "
] |
Please provide a description of the function:def _on_action(self, sequence, topic, message):
    """Process a command action that we received on behalf of a device.

    Dispatches disconnect / open-close interface / RPC / script commands
    to the event loop; unrecognized messages are logged and dropped.

    Args:
        sequence (int): The sequence number of the packet received.
        topic (string): The topic this message was received on; the
            device slug is the third-from-last path segment.
        message (dict): The message itself.
    """
    try:
        slug = None
        parts = topic.split('/')
        slug = parts[-3]
        uuid = self._extract_device_uuid(slug)
    except Exception as exc:
        self._logger.warn("Error parsing slug in action handler (slug=%s, topic=%s)", slug, topic)
        return
    if messages.DisconnectCommand.matches(message):
        self._logger.debug("Received disconnect command for device 0x%X", uuid)
        key = message['key']
        client = message['client']
        self._loop.add_callback(self._disconnect_from_device, uuid, key, client)
    elif messages.OpenInterfaceCommand.matches(message) or messages.CloseInterfaceCommand.matches(message):
        # Open and close share a schema; the 'operation' field picks the handler.
        self._logger.debug("Received %s command for device 0x%X", message['operation'], uuid)
        key = message['key']
        client = message['client']
        oper = message['operation']
        if oper == 'open_interface':
            self._loop.add_callback(self._open_interface, client, uuid, message['interface'], key)
        else:
            self._loop.add_callback(self._close_interface, client, uuid, message['interface'], key)
    elif messages.RPCCommand.matches(message):
        rpc_msg = messages.RPCCommand.verify(message)
        client = rpc_msg['client']
        address = rpc_msg['address']
        rpc = rpc_msg['rpc_id']
        payload = rpc_msg['payload']
        key = rpc_msg['key']
        timeout = rpc_msg['timeout']
        self._loop.add_callback(self._send_rpc, client, uuid, address, rpc, payload, timeout, key)
    elif messages.ScriptCommand.matches(message):
        script_msg = messages.ScriptCommand.verify(message)
        key = script_msg['key']
        client = script_msg['client']
        script = script_msg['script']
        # Scripts may arrive in fragments; pass the fragment position so
        # _send_script can accumulate them before forwarding.
        self._loop.add_callback(self._send_script, client, uuid, script, key, (script_msg['fragment_index'], script_msg['fragment_count']))
    else:
        self._logger.error("Unsupported message received (topic=%s) (message=%s)", topic, str(message))
"Process a command action that we received on behalf of a device.\n\n Args:\n sequence (int): The sequence number of the packet received\n topic (string): The topic this message was received on\n message (dict): The message itself\n "
] |
Please provide a description of the function:def _on_connect(self, sequence, topic, message):
    """Process a request to connect to an IOTile device.

    A connection message triggers an attempt to connect to a device; any
    error checking is done by the DeviceManager that actually manages
    the devices.

    Args:
        sequence (int): The sequence number of the packet received.
        topic (string): The topic this message was received on; the
            device slug is the third-from-last path segment.
        message (dict): The message itself.
    """
    try:
        slug = None
        parts = topic.split('/')
        slug = parts[-3]
        uuid = self._extract_device_uuid(slug)
    except Exception:
        self._logger.exception("Error parsing slug from connection request (slug=%s, topic=%s)", slug, topic)
        return
    if messages.ConnectCommand.matches(message):
        key = message['key']
        client = message['client']
        self._loop.add_callback(self._connect_to_device, uuid, key, client)
    else:
        self._logger.warn("Unknown message received on connect topic=%s, message=%s", topic, message)
"Process a request to connect to an IOTile device\n\n A connection message triggers an attempt to connect to a device,\n any error checking is done by the DeviceManager that is actually\n managing the devices.\n\n A disconnection message is checked to make sure its key matches\n what we except for this device and is either discarded or\n forwarded on to the DeviceManager.\n Args:\n sequence (int): The sequence number of the packet received\n topic (string): The topic this message was received on\n message_type (string): The type of the packet received\n message (dict): The message itself\n "
] |
Please provide a description of the function:def _send_rpc(self, client, uuid, address, rpc, payload, timeout, key):
    """Send an RPC to a connected device and publish the outcome.

    This is a coroutine (it yields on the manager call) and must run on
    the event loop.

    Args:
        client (string): The client that sent the rpc request.
        uuid (int): The id of the device we're talking to.
        address (int): The address of the tile that we want to send the RPC to.
        rpc (int): The 16-bit id of the rpc (high byte: feature, low byte: command).
        payload (bytearray): The argument payload to send.
        timeout (float): Seconds to wait for the response.
        key (string): The key to authenticate the caller.
    """
    conn_id = self._validate_connection('send_rpc', uuid, key)
    if conn_id is None:
        return
    conn_data = self._connections[uuid]
    # Touch the connection so the idle disconnector doesn't reap it.
    conn_data['last_touch'] = monotonic()
    slug = self._build_device_slug(uuid)
    try:
        # rpc >> 8 / rpc & 0xFF split the combined id into feature/command.
        resp = yield self._manager.send_rpc(conn_id, address, rpc >> 8, rpc & 0xFF, bytes(payload), timeout)
    except Exception as exc:
        self._logger.error("Error in manager send rpc: %s" % str(exc))
        resp = {'success': False, 'reason': "Internal error: %s" % str(exc)}
    payload = {'client': client, 'type': 'response', 'operation': 'rpc'}
    payload['success'] = resp['success']
    if resp['success'] is False:
        payload['failure_reason'] = resp['reason']
    else:
        # Binary response payload is hex-encoded for transport over MQTT.
        payload['status'] = resp['status']
        payload['payload'] = binascii.hexlify(resp['payload'])
    self._publish_response(slug, payload)
"Send an RPC to a connected device\n\n Args:\n client (string): The client that sent the rpc request\n uuid (int): The id of the device we're opening the interface on\n address (int): The address of the tile that we want to send the RPC to\n rpc (int): The id of the rpc that we want to send.\n payload (bytearray): The payload of arguments that we want to send\n timeout (float): The number of seconds to wait for the response\n key (string): The key to authenticate the caller\n "
] |
Please provide a description of the function:def _send_script(self, client, uuid, chunk, key, chunk_status):
    """Send a (possibly fragmented) script to the connected device.

    Fragments are accumulated on the connection until the final one
    arrives, then the whole script is forwarded to the device with
    progress notifications. This is a coroutine and must run on the
    event loop.

    Args:
        client (string): The client that sent the request.
        uuid (int): The id of the device we're sending to.
        chunk (bytes): This fragment of the binary script.
        key (string): The key to authenticate the caller.
        chunk_status (tuple): (fragment_index, fragment_count) so we know
            whether to accumulate or forward to the device.
    """
    conn_id = self._validate_connection('send_script', uuid, key)
    if conn_id is None:
        return
    conn_data = self._connections[uuid]
    conn_data['last_touch'] = monotonic()
    slug = self._build_device_slug(uuid)
    # Check and see if we have the entire script or if we need to accumulate it
    index, count = chunk_status
    if index == 0:
        conn_data['script'] = bytes()
    conn_data['script'] += chunk
    # If there is more than one chunk and we aren't on the last one, wait until we receive them
    # all before sending them on to the device as a unit
    if index != count - 1:
        return
    # Initialize our progress throttling system in case we need to throttle progress reports
    conn_data['last_progress'] = None
    try:
        resp = yield self._manager.send_script(conn_id, conn_data['script'], lambda x, y: self._notify_progress_async(uuid, client, x, y))
        yield None  # Make sure we give time for any progress notifications that may have been queued to flush out
        conn_data['script'] = bytes()
    except Exception as exc:
        self._logger.exception("Error in manager send_script")
        resp = {'success': False, 'reason': "Internal error: %s" % str(exc)}
    payload = {'client': client, 'type': 'response', 'operation': 'send_script', 'success': resp['success']}
    if resp['success'] is False:
        payload['failure_reason'] = resp['reason']
    self._publish_response(slug, payload)
"Send a script to the connected device.\n\n Args:\n client (string): The client that sent the rpc request\n uuid (int): The id of the device we're opening the interface on\n chunk (bytes): The binary script to send to the device\n key (string): The key to authenticate the caller\n last_chunk (tuple): the chunk index and count of chunks of this script\n so that we know to either accumulate it or send it on to the device\n immediately.\n "
] |
Please provide a description of the function:def _notify_progress_sync(self, uuid, client, done_count, total_count):
    """Publish a throttled progress notification for a script download.

    Must be called synchronously inside the event loop.

    Args:
        uuid (int): The id of the device that we are talking to.
        client (string): The client identifier.
        done_count (int): The number of items that have been finished.
        total_count (int): The total number of items.
    """
    # If the connection was closed, don't notify anything
    conn_data = self._connections.get(uuid, None)
    if conn_data is None:
        return
    last_progress = conn_data['last_progress']
    should_drop = False
    # We drop status updates that come faster than our configured update interval
    # unless those updates are the final update, which we send on. The first
    # update is always also sent since there would not have been an update before
    # that.
    now = monotonic()
    if last_progress is not None and (now - last_progress) < self.throttle_progress:
        should_drop = True
    if should_drop and (done_count != total_count):
        return
    conn_data['last_progress'] = now
    slug = self._build_device_slug(uuid)
    status_msg = {'type': 'notification', 'operation': 'send_script', 'client': client, 'done_count': done_count, 'total_count': total_count}
    self._publish_response(slug, status_msg)
"Notify progress reporting on the status of a script download.\n\n This function must be called synchronously inside of the event loop.\n\n Args:\n uuid (int): The id of the device that we are talking to\n client (string): The client identifier\n done_count (int): The number of items that have been finished\n total_count (int): The total number of items\n "
] |
Please provide a description of the function:def _notify_progress_async(self, uuid, client, done_count, total_count):
    """Schedule a progress notification from outside the event loop.

    Does no processing itself; it only queues _notify_progress_sync with
    the same arguments onto the event loop.

    Args:
        uuid (int): The id of the device that we are talking to.
        client (string): The client identifier.
        done_count (int): The number of items that have been finished.
        total_count (int): The total number of items.
    """
    self._loop.add_callback(self._notify_progress_sync, uuid, client, done_count, total_count)
"Notify progress reporting on the status of a script download.\n\n This function is called asynchronously to the event loop so it cannot\n do any processing on its own. It's job is to schedule the sync version\n of itself in the event loop.\n\n Args:\n uuid (int): The id of the device that we are talking to\n client (string): The client identifier\n done_count (int): The number of items that have been finished\n total_count (int): The total number of items\n "
] |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.