Code
stringlengths 103
85.9k
| Summary
sequencelengths 0
94
|
---|---|
Please provide a description of the function:def get_all_children(self):
result = SCons.Util.UniqueList([])
for target in self.get_all_targets():
result.extend(target.children())
return result | [
"Returns all unique children (dependencies) for all batches\n of this Executor.\n\n The Taskmaster can recognize when it's already evaluated a\n Node, so we don't have to make this list unique for its intended\n canonical use case, but we expect there to be a lot of redundancy\n (long lists of batched .cc files #including the same .h files\n over and over), so removing the duplicates once up front should\n save the Taskmaster a lot of work.\n "
] |
Please provide a description of the function:def get_all_prerequisites(self):
result = SCons.Util.UniqueList([])
for target in self.get_all_targets():
if target.prerequisites is not None:
result.extend(target.prerequisites)
return result | [
"Returns all unique (order-only) prerequisites for all batches\n of this Executor.\n "
] |
Please provide a description of the function:def get_action_side_effects(self):
result = SCons.Util.UniqueList([])
for target in self.get_action_targets():
result.extend(target.side_effects)
return result | [
"Returns all side effects for all batches of this\n Executor used by the underlying Action.\n "
] |
Please provide a description of the function:def get_build_env(self):
try:
return self._memo['get_build_env']
except KeyError:
pass
# Create the build environment instance with appropriate
# overrides. These get evaluated against the current
# environment's construction variables so that users can
# add to existing values by referencing the variable in
# the expansion.
overrides = {}
for odict in self.overridelist:
overrides.update(odict)
import SCons.Defaults
env = self.env or SCons.Defaults.DefaultEnvironment()
build_env = env.Override(overrides)
self._memo['get_build_env'] = build_env
return build_env | [
"Fetch or create the appropriate build Environment\n for this Executor.\n "
] |
Please provide a description of the function:def get_build_scanner_path(self, scanner):
env = self.get_build_env()
try:
cwd = self.batches[0].targets[0].cwd
except (IndexError, AttributeError):
cwd = None
return scanner.path(env, cwd,
self.get_all_targets(),
self.get_all_sources()) | [
"Fetch the scanner path for this executor's targets and sources.\n "
] |
Please provide a description of the function:def add_sources(self, sources):
# TODO(batch): extend to multiple batches
assert (len(self.batches) == 1)
# TODO(batch): remove duplicates?
sources = [x for x in sources if x not in self.batches[0].sources]
self.batches[0].sources.extend(sources) | [
"Add source files to this Executor's list. This is necessary\n for \"multi\" Builders that can be called repeatedly to build up\n a source file list for a given target."
] |
Please provide a description of the function:def add_batch(self, targets, sources):
self.batches.append(Batch(targets, sources)) | [
"Add pair of associated target and source to this Executor's list.\n This is necessary for \"batch\" Builders that can be called repeatedly\n to build up a list of matching target and source files that will be\n used in order to update multiple target files at once from multiple\n corresponding source files, for tools like MSVC that support it."
] |
Please provide a description of the function:def prepare(self):
for s in self.get_all_sources():
if s.missing():
msg = "Source `%s' not found, needed by target `%s'."
raise SCons.Errors.StopError(msg % (s, self.batches[0].targets[0])) | [
"\n Preparatory checks for whether this Executor can go ahead\n and (try to) build its targets.\n "
] |
Please provide a description of the function:def get_contents(self):
try:
return self._memo['get_contents']
except KeyError:
pass
env = self.get_build_env()
action_list = self.get_action_list()
all_targets = self.get_all_targets()
all_sources = self.get_all_sources()
result = bytearray("",'utf-8').join([action.get_contents(all_targets,
all_sources,
env)
for action in action_list])
self._memo['get_contents'] = result
return result | [
"Fetch the signature contents. This is the main reason this\n class exists, so we can compute this once and cache it regardless\n of how many target or source Nodes there are.\n "
] |
Please provide a description of the function:def scan(self, scanner, node_list):
env = self.get_build_env()
path = self.get_build_scanner_path
kw = self.get_kw()
# TODO(batch): scan by batches)
deps = []
for node in node_list:
node.disambiguate()
deps.extend(node.get_implicit_deps(env, scanner, path, kw))
deps.extend(self.get_implicit_deps())
for tgt in self.get_all_targets():
tgt.add_to_implicit(deps) | [
"Scan a list of this Executor's files (targets or sources) for\n implicit dependencies and update all of the targets with them.\n This essentially short-circuits an N*M scan of the sources for\n each individual target, which is a hell of a lot more efficient.\n "
] |
Please provide a description of the function:def get_implicit_deps(self):
result = []
build_env = self.get_build_env()
for act in self.get_action_list():
deps = act.get_implicit_deps(self.get_all_targets(),
self.get_all_sources(),
build_env)
result.extend(deps)
return result | [
"Return the executor's implicit dependencies, i.e. the nodes of\n the commands to be executed."
] |
Please provide a description of the function:def _morph(self):
batches = self.batches
self.__class__ = Executor
self.__init__([])
self.batches = batches | [
"Morph this Null executor to a real Executor object."
] |
Please provide a description of the function:def encode(self):
contents = self.encode_contents()
record_type = self.MatchType()
header = struct.pack("<LB3x", len(contents) + UpdateRecord.HEADER_LENGTH, record_type)
return bytearray(header) + contents | [
"Encode this record into binary, suitable for embedded into an update script.\n\n This function just adds the required record header and delegates all\n work to the subclass implementation of encode_contents().\n\n Returns:\n bytearary: The binary version of the record that could be parsed via\n a call to UpdateRecord.FromBinary()\n "
] |
Please provide a description of the function:def LoadPlugins(cls):
if cls.PLUGINS_LOADED:
return
reg = ComponentRegistry()
for _, record in reg.load_extensions('iotile.update_record'):
cls.RegisterRecordType(record)
cls.PLUGINS_LOADED = True | [
"Load all registered iotile.update_record plugins."
] |
Please provide a description of the function:def RegisterRecordType(cls, record_class):
record_type = record_class.MatchType()
if record_type not in UpdateRecord.KNOWN_CLASSES:
UpdateRecord.KNOWN_CLASSES[record_type] = []
UpdateRecord.KNOWN_CLASSES[record_type].append(record_class) | [
"Register a known record type in KNOWN_CLASSES.\n\n Args:\n record_class (UpdateRecord): An update record subclass.\n "
] |
Please provide a description of the function:def FromBinary(cls, record_data, record_count=1):
# Make sure any external record types are registered
cls.LoadPlugins()
if len(record_data) < UpdateRecord.HEADER_LENGTH:
raise ArgumentError("Record data is too short to contain a record header",
length=len(record_data), header_length=UpdateRecord.HEADER_LENGTH)
total_length, record_type = struct.unpack_from("<LB3x", record_data)
if record_count == 1 and len(record_data) != total_length:
raise ArgumentError("Record data is corrupt, embedded length does not agree with actual length",
length=len(record_data), embedded_length=total_length)
record_classes = UpdateRecord.KNOWN_CLASSES.get(record_type, [])
if len(record_classes) == 0:
raise DataError("No matching record type found for record", record_type=record_type,
known_types=[x for x in UpdateRecord.KNOWN_CLASSES])
best_match = MatchQuality.NoMatch
matching_class = None
for record_class in record_classes:
match_data = record_data[UpdateRecord.HEADER_LENGTH:]
if record_count > 1:
match_data = record_data
quality = record_class.MatchQuality(match_data, record_count)
if quality > best_match:
best_match = quality
matching_class = record_class
if best_match == MatchQuality.DeferMatch:
raise DeferMatching(matching_class)
elif best_match == MatchQuality.PartialMatch:
raise DeferMatching(matching_class, matching_class.FromBinary(match_data, record_count))
if matching_class is None:
raise DataError("Record type found but no specific class reported a match",
record_type=record_type, considered_classes=record_classes)
return matching_class.FromBinary(match_data, record_count) | [
"Create an UpdateRecord subclass from binary record data.\n\n This should be called with a binary record blob (including the record\n type header) and it will return the best record class match that it\n can find for that record.\n\n Args:\n record_data (bytearray): The raw record data that we wish to parse\n into an UpdateRecord subclass including its 4 byte record header.\n record_count (int): If we are asked to create a record from multiple\n records, the record_data will be passed to the record subclass\n with headers intact since there will be more than one header.\n\n Raises:\n ArgumentError: If the record_data is malformed and cannot be parsed.\n DataError: If there is no matching record type registered.\n\n Returns:\n UpdateRecord: A subclass of UpdateRecord based on what record\n type matches the best.\n "
] |
Please provide a description of the function:def _setup(self):
# Create a root system ticks and user configurable ticks
systick = self.allocator.allocate_stream(DataStream.CounterType, attach=True)
fasttick = self.allocator.allocate_stream(DataStream.CounterType, attach=True)
user1tick = self.allocator.allocate_stream(DataStream.CounterType, attach=True)
user2tick = self.allocator.allocate_stream(DataStream.CounterType, attach=True)
self.sensor_graph.add_node("({} always) => {} using copy_all_a".format(system_tick, systick))
self.sensor_graph.add_node("({} always) => {} using copy_all_a".format(fast_tick, fasttick))
self.sensor_graph.add_config(SlotIdentifier.FromString('controller'), config_fast_tick_secs, 'uint32_t', 1)
self.sensor_graph.add_node("({} always) => {} using copy_all_a".format(tick_1, user1tick))
self.sensor_graph.add_node("({} always) => {} using copy_all_a".format(tick_2, user2tick))
self.system_tick = systick
self.fast_tick = fasttick
self.user1_tick = user1tick
self.user2_tick = user2tick | [
"Prepare for code generation by setting up root clock nodes.\n\n These nodes are subsequently used as the basis for all clock operations.\n "
] |
Please provide a description of the function:def clock(self, interval, basis="system"):
if basis == "system":
if (interval % 10) == 0:
tick = self.allocator.attach_stream(self.system_tick)
count = interval // 10
else:
tick = self.allocator.attach_stream(self.fast_tick)
count = interval
trigger = InputTrigger(u'count', '>=', count)
return (tick, trigger)
elif basis == 'tick_1':
tick = self.allocator.attach_stream(self.user1_tick)
trigger = InputTrigger(u'count', '>=', interval)
return (tick, trigger)
elif basis == 'tick_2':
tick = self.allocator.attach_stream(self.user2_tick)
trigger = InputTrigger(u'count', '>=', interval)
return (tick, trigger)
raise SensorGraphSemanticError("Unkwown tick source specified in RootScope.clock", basis=basis) | [
"Return a NodeInput tuple for triggering an event every interval.\n\n Args:\n interval (int): The interval at which this input should\n trigger. If basis == system (the default), this interval must\n be in seconds. Otherwise it will be in units of whatever the\n basis tick is configured with.\n basis (str): The basis to use for calculating the interval. This\n can either be system, tick_1 or tick_2. System means that the\n clock will use either the fast or regular builtin tick. Passing\n tick_1 or tick_2 will cause the clock to be generated based on\n the selected tick.\n "
] |
Please provide a description of the function:def generate(env):
static_obj, shared_obj = SCons.Tool.createObjBuilders(env)
if 'CXX' not in env:
env['CXX'] = env.Detect(compilers) or compilers[0]
cxx.generate(env)
# platform specific settings
if env['PLATFORM'] == 'aix':
env['SHCXXFLAGS'] = SCons.Util.CLVar('$CXXFLAGS -mminimal-toc')
env['STATIC_AND_SHARED_OBJECTS_ARE_THE_SAME'] = 1
env['SHOBJSUFFIX'] = '$OBJSUFFIX'
elif env['PLATFORM'] == 'hpux':
env['SHOBJSUFFIX'] = '.pic.o'
elif env['PLATFORM'] == 'sunos':
env['SHOBJSUFFIX'] = '.pic.o'
# determine compiler version
version = gcc.detect_version(env, env['CXX'])
if version:
env['CXXVERSION'] = version | [
"Add Builders and construction variables for g++ to an Environment."
] |
Please provide a description of the function:def find_proxy_plugin(component, plugin_name):
reg = ComponentRegistry()
plugins = reg.load_extensions('iotile.proxy_plugin', comp_filter=component, class_filter=TileBusProxyPlugin,
product_name='proxy_plugin')
for _name, plugin in plugins:
if plugin.__name__ == plugin_name:
return plugin
raise DataError("Could not find proxy plugin module in registered components or installed distributions",
component=component, name=plugin_name) | [
" Attempt to find a proxy plugin provided by a specific component\n\n Args:\n component (string): The name of the component that provides the plugin\n plugin_name (string): The name of the plugin to load\n\n Returns:\n TileBuxProxyPlugin: The plugin, if found, otherwise raises DataError\n "
] |
Please provide a description of the function:def verify(self, obj):
if not isinstance(obj, bool):
raise ValidationError("Object is not a bool", reason='object is not a bool', object=obj)
if self._require_value is not None and obj != self._require_value:
raise ValidationError("Boolean is not equal to specified literal", reason='boolean value %s should be %s'
% (str(obj), str(self._require_value)))
return obj | [
"Verify that the object conforms to this verifier's schema\n\n Args:\n obj (object): A python object to verify\n\n Raises:\n ValidationError: If there is a problem verifying the dictionary, a\n ValidationError is thrown with at least the reason key set indicating\n the reason for the lack of validation.\n "
] |
Please provide a description of the function:def format(self, indent_level, indent_size=4):
name = self.format_name('Boolean', indent_size)
if self._require_value is not None:
if self.long_desc is not None:
name += '\n'
name += self.wrap_lines('must be %s\n' % str(self._require_value).lower(), 1, indent_size)
return self.wrap_lines(name, indent_level, indent_size) | [
"Format this verifier\n\n Returns:\n string: A formatted string\n "
] |
Please provide a description of the function:def FromBinary(cls, record_data, record_count=1):
_cmd, address, _resp_length, payload = cls._parse_rpc_info(record_data)
descriptor = parse_binary_descriptor(payload)
return AddNodeRecord(descriptor, address=address) | [
"Create an UpdateRecord subclass from binary record data.\n\n This should be called with a binary record blob (NOT including the\n record type header) and it will decode it into a AddNodeRecord.\n\n Args:\n record_data (bytearray): The raw record data that we wish to parse\n into an UpdateRecord subclass NOT including its 8 byte record header.\n record_count (int): The number of records included in record_data.\n\n Raises:\n ArgumentError: If the record_data is malformed and cannot be parsed.\n\n Returns:\n AddNodeRecord: The decoded reflash tile record.\n "
] |
Please provide a description of the function:def _convert_trigger(self, trigger_def, parent):
if trigger_def.explicit_stream is None:
stream = parent.resolve_identifier(trigger_def.named_event, DataStream)
trigger = TrueTrigger()
else:
stream = trigger_def.explicit_stream
trigger = trigger_def.explicit_trigger
return (stream, trigger) | [
"Convert a TriggerDefinition into a stream, trigger pair."
] |
Please provide a description of the function:def _parse_trigger(self, trigger_clause):
cond = trigger_clause[0]
named_event = None
explicit_stream = None
explicit_trigger = None
# Identifier parse tree is Group(Identifier)
if cond.getName() == 'identifier':
named_event = cond[0]
elif cond.getName() == 'stream_trigger':
trigger_type = cond[0]
stream = cond[1]
oper = cond[2]
ref = cond[3]
trigger = InputTrigger(trigger_type, oper, ref)
explicit_stream = stream
explicit_trigger = trigger
elif cond.getName() == 'stream_always':
stream = cond[0]
trigger = TrueTrigger()
explicit_stream = stream
explicit_trigger = trigger
else:
raise ArgumentError("OnBlock created from an invalid ParseResults object", parse_results=trigger_clause)
return TriggerDefinition(named_event, explicit_stream, explicit_trigger) | [
"Parse a named event or explicit stream trigger into a TriggerDefinition."
] |
Please provide a description of the function:def execute_before(self, sensor_graph, scope_stack):
parent = scope_stack[-1]
alloc = parent.allocator
stream_a, trigger_a = self._convert_trigger(self.trigger_a, parent)
if self.trigger_b is None:
new_scope = TriggerScope(sensor_graph, scope_stack, (stream_a, trigger_a))
else:
stream_b, trigger_b = self._convert_trigger(self.trigger_b, parent)
trigger_stream = alloc.allocate_stream(DataStream.UnbufferedType)
if self.combiner == u'and':
combiner = '&&'
else:
combiner = '||'
if stream_a.input and not stream_b.input:
unbuffered_stream = alloc.allocate_stream(DataStream.UnbufferedType, attach=True)
sensor_graph.add_node(u"({} always) => {} using copy_latest_a".format(stream_a, unbuffered_stream))
sensor_graph.add_node(u"({} {} {} {} {}) => {} using copy_latest_a".format(unbuffered_stream, trigger_a, combiner, stream_b, trigger_b, trigger_stream))
elif stream_b.input and not stream_a.input:
unbuffered_stream = alloc.allocate_stream(DataStream.UnbufferedType, attach=True)
sensor_graph.add_node(u"({} always) => {} using copy_latest_a".format(stream_b, unbuffered_stream))
sensor_graph.add_node(u"({} {} {} {} {}) => {} using copy_latest_a".format(stream_a, trigger_a, combiner, unbuffered_stream, trigger_b, trigger_stream))
else:
sensor_graph.add_node(u"({} {} {} {} {}) => {} using copy_latest_a".format(stream_a, trigger_a, combiner, stream_b, trigger_b, trigger_stream))
new_scope = TriggerScope(sensor_graph, scope_stack, (trigger_stream, TrueTrigger()))
scope_stack.append(new_scope) | [
"Execute statement before children are executed.\n\n Args:\n sensor_graph (SensorGraph): The sensor graph that we are building or\n modifying\n scope_stack (list(Scope)): A stack of nested scopes that may influence\n how this statement allocates clocks or other stream resources.\n "
] |
Please provide a description of the function:def platform_default():
osname = os.name
if osname == 'java':
osname = os._osType
if osname == 'posix':
if sys.platform == 'cygwin':
return 'cygwin'
elif sys.platform.find('irix') != -1:
return 'irix'
elif sys.platform.find('sunos') != -1:
return 'sunos'
elif sys.platform.find('hp-ux') != -1:
return 'hpux'
elif sys.platform.find('aix') != -1:
return 'aix'
elif sys.platform.find('darwin') != -1:
return 'darwin'
else:
return 'posix'
elif os.name == 'os2':
return 'os2'
else:
return sys.platform | [
"Return the platform string for our execution environment.\n\n The returned value should map to one of the SCons/Platform/*.py\n files. Since we're architecture independent, though, we don't\n care about the machine architecture.\n "
] |
Please provide a description of the function:def platform_module(name = platform_default()):
full_name = 'SCons.Platform.' + name
if full_name not in sys.modules:
if os.name == 'java':
eval(full_name)
else:
try:
file, path, desc = imp.find_module(name,
sys.modules['SCons.Platform'].__path__)
try:
mod = imp.load_module(full_name, file, path, desc)
finally:
if file:
file.close()
except ImportError:
try:
import zipimport
importer = zipimport.zipimporter( sys.modules['SCons.Platform'].__path__[0] )
mod = importer.load_module(full_name)
except ImportError:
raise SCons.Errors.UserError("No platform named '%s'" % name)
setattr(SCons.Platform, name, mod)
return sys.modules[full_name] | [
"Return the imported module for the platform.\n\n This looks for a module name that matches the specified argument.\n If the name is unspecified, we fetch the appropriate default for\n our execution environment.\n "
] |
Please provide a description of the function:def Platform(name = platform_default()):
module = platform_module(name)
spec = PlatformSpec(name, module.generate)
return spec | [
"Select a canned Platform specification.\n "
] |
Please provide a description of the function:def jarSources(target, source, env, for_signature):
try:
env['JARCHDIR']
except KeyError:
jarchdir_set = False
else:
jarchdir_set = True
jarchdir = env.subst('$JARCHDIR', target=target, source=source)
if jarchdir:
jarchdir = env.fs.Dir(jarchdir)
result = []
for src in source:
contents = src.get_text_contents()
if contents[:16] != "Manifest-Version":
if jarchdir_set:
_chdir = jarchdir
else:
try:
_chdir = src.attributes.java_classdir
except AttributeError:
_chdir = None
if _chdir:
# If we are changing the dir with -C, then sources should
# be relative to that directory.
src = SCons.Subst.Literal(src.get_path(_chdir))
result.append('-C')
result.append(_chdir)
result.append(src)
return result | [
"Only include sources that are not a manifest file."
] |
Please provide a description of the function:def jarManifest(target, source, env, for_signature):
for src in source:
contents = src.get_text_contents()
if contents[:16] == "Manifest-Version":
return src
return '' | [
"Look in sources for a manifest file, if any."
] |
Please provide a description of the function:def jarFlags(target, source, env, for_signature):
jarflags = env.subst('$JARFLAGS', target=target, source=source)
for src in source:
contents = src.get_text_contents()
if contents[:16] == "Manifest-Version":
if not 'm' in jarflags:
return jarflags + 'm'
break
return jarflags | [
"If we have a manifest, make sure that the 'm'\n flag is specified."
] |
Please provide a description of the function:def Jar(env, target = None, source = [], *args, **kw):
# jar target should not be a list so assume they passed
# no target and want implicit target to be made and the arg
# was actaully the list of sources
if SCons.Util.is_List(target) and source == []:
SCons.Warnings.Warning("Making implicit target jar file, " +
"and treating the list as sources")
source = target
target = None
# mutiple targets pass so build each target the same from the
# same source
#TODO Maybe this should only be done once, and the result copied
# for each target since it should result in the same?
if SCons.Util.is_List(target) and SCons.Util.is_List(source):
jars = []
for single_target in target:
jars += env.Jar( target = single_target, source = source, *args, **kw)
return jars
# they passed no target so make a target implicitly
if target == None:
try:
# make target from the first source file
target = os.path.splitext(str(source[0]))[0] + env.subst('$JARSUFFIX')
except:
# something strange is happening but attempt anyways
SCons.Warnings.Warning("Could not make implicit target from sources, using directory")
target = os.path.basename(str(env.Dir('.'))) + env.subst('$JARSUFFIX')
# make lists out of our target and sources
if not SCons.Util.is_List(target):
target = [target]
if not SCons.Util.is_List(source):
source = [source]
# setup for checking through all the sources and handle accordingly
java_class_suffix = env.subst('$JAVACLASSSUFFIX')
java_suffix = env.subst('$JAVASUFFIX')
target_classes = []
# function for determining what to do with a file and not a directory
# if its already a class file then it can be used as a
# source for jar, otherwise turn it into a class file then
# return the source
def file_to_class(s):
if(str(_my_normcase(s)).endswith(java_suffix)):
return env.JavaClassFile(source = s, *args, **kw)
else:
return [env.fs.File(s)]
# In the case that we are passed just string to a node which is directory
# but does not exist, we need to check all the current targets to see if
# that directory is going to exist so we can add it as a source to Jar builder
def get_all_targets(env, node='.'):
def get_all_targets_iter(env, node):
if node.has_builder():
yield node
for kid in node.all_children():
for kid in get_all_targets(env, kid):
yield kid
node = env.arg2nodes(node, env.fs.Entry)[0]
return list(get_all_targets_iter(env, node))
# loop through the sources and handle each accordingly
# the goal here is to get all the source files into a class
# file or a directory that contains class files
for s in source:
s = env.subst(s)
if isinstance(s, SCons.Node.FS.Base):
if isinstance(s, SCons.Node.FS.File):
# found a file so make sure its a class file
target_classes.extend(file_to_class(s))
else:
# found a dir so make sure its a dir of class files
target_classes.extend(env.JavaClassDir(source = env.fs.Dir(s), *args, **kw))
else:
if os.path.isfile(s):
# found a file that exists on the FS, make sure its a class file
target_classes.extend(file_to_class(s))
elif os.path.isdir(s):
# found a dir on the FS, add it as a dir of class files
target_classes.append(env.fs.Dir(s))
elif s[-len(java_suffix):] == java_suffix or s[-len(java_class_suffix):] == java_class_suffix:
# found a file that may not exists and is only a string
# so add it after converting it to a class file
target_classes.extend(file_to_class(s))
else:
# found a swig file so add it after converting it to class files
if(os.path.splitext(str(s))[1] == ".i"):
target_classes.extend(env.JavaClassFile(source = s, *args, **kw))
else:
# found a directory that does not yet exist, but can exist as a node
# check the target nodes to make sure it will be built, then add
# it as a source
for node in get_all_targets(env):
if(s in str(node) and os.path.splitext(str(node))[1] == ""):
target_classes.append(node)
# at this point all our sources have been converted to classes or directories of class
# so pass it to the Jar builder
return env.JarFile(target = target, source = target_classes, *args, **kw) | [
"\n A pseudo-Builder wrapper around the separate Jar sources{File,Dir}\n Builders.\n "
] |
Please provide a description of the function:def generate(env):
SCons.Tool.CreateJarBuilder(env)
SCons.Tool.CreateJavaFileBuilder(env)
SCons.Tool.CreateJavaClassFileBuilder(env)
SCons.Tool.CreateJavaClassDirBuilder(env)
env.AddMethod(Jar)
env['JAR'] = 'jar'
env['JARFLAGS'] = SCons.Util.CLVar('cf')
env['_JARFLAGS'] = jarFlags
env['_JARMANIFEST'] = jarManifest
env['_JARSOURCES'] = jarSources
env['_JARCOM'] = '$JAR $_JARFLAGS $TARGET $_JARMANIFEST $_JARSOURCES'
env['JARCOM'] = "${TEMPFILE('$_JARCOM','$JARCOMSTR')}"
env['JARSUFFIX'] = '.jar' | [
"Add Builders and construction variables for jar to an Environment."
] |
Please provide a description of the function:def mock(self, slot, rpc_id, value):
address = slot.address
if address not in self.mock_rpcs:
self.mock_rpcs[address] = {}
self.mock_rpcs[address][rpc_id] = value | [
"Store a mock return value for an RPC\n\n Args:\n slot (SlotIdentifier): The slot we are mocking\n rpc_id (int): The rpc we are mocking\n value (int): The value that should be returned\n when the RPC is called.\n "
] |
Please provide a description of the function:def rpc(self, address, rpc_id):
# Always allow mocking an RPC to override whatever the defaul behavior is
if address in self.mock_rpcs and rpc_id in self.mock_rpcs[address]:
value = self.mock_rpcs[address][rpc_id]
return value
result = self._call_rpc(address, rpc_id, bytes())
if len(result) != 4:
self.warn(u"RPC 0x%X on address %d: response had invalid length %d not equal to 4" % (rpc_id, address, len(result)))
if len(result) < 4:
raise HardwareError("Response from RPC was not long enough to parse as an integer", rpc_id=rpc_id, address=address, response_length=len(result))
if len(result) > 4:
result = result[:4]
res, = struct.unpack("<L", result)
return res | [
"Call an RPC and receive the result as an integer.\n\n If the RPC does not properly return a 32 bit integer, raise a warning\n unless it cannot be converted into an integer at all, in which case\n a HardwareError is thrown.\n\n Args:\n address (int): The address of the tile we want to call the RPC\n on\n rpc_id (int): The id of the RPC that we want to call\n\n Returns:\n int: The result of the RPC call. If the rpc did not succeed\n an error is thrown instead.\n "
] |
Please provide a description of the function:def _find_modules(src):
directors = 0
mnames = []
try:
matches = _reModule.findall(open(src).read())
except IOError:
# If the file's not yet generated, guess the module name from the file stem
matches = []
mnames.append(os.path.splitext(os.path.basename(src))[0])
for m in matches:
mnames.append(m[2])
directors = directors or m[0].find('directors') >= 0
return mnames, directors | [
"Find all modules referenced by %module lines in `src`, a SWIG .i file.\n Returns a list of all modules, and a flag set if SWIG directors have\n been requested (SWIG will generate an additional header file in this\n case.)"
] |
Please provide a description of the function:def _get_swig_version(env, swig):
swig = env.subst(swig)
pipe = SCons.Action._subproc(env, SCons.Util.CLVar(swig) + ['-version'],
stdin = 'devnull',
stderr = 'devnull',
stdout = subprocess.PIPE)
if pipe.wait() != 0: return
# MAYBE: out = SCons.Util.to_str (pipe.stdout.read())
out = SCons.Util.to_str(pipe.stdout.read())
match = re.search('SWIG Version\s+(\S+).*', out, re.MULTILINE)
if match:
if verbose: print("Version is:%s"%match.group(1))
return match.group(1)
else:
if verbose: print("Unable to detect version: [%s]"%out) | [
"Run the SWIG command line tool to get and return the version number"
] |
Please provide a description of the function:def generate(env):
c_file, cxx_file = SCons.Tool.createCFileBuilders(env)
c_file.suffix['.i'] = swigSuffixEmitter
cxx_file.suffix['.i'] = swigSuffixEmitter
c_file.add_action('.i', SwigAction)
c_file.add_emitter('.i', _swigEmitter)
cxx_file.add_action('.i', SwigAction)
cxx_file.add_emitter('.i', _swigEmitter)
java_file = SCons.Tool.CreateJavaFileBuilder(env)
java_file.suffix['.i'] = swigSuffixEmitter
java_file.add_action('.i', SwigAction)
java_file.add_emitter('.i', _swigEmitter)
if 'SWIG' not in env:
env['SWIG'] = env.Detect(swigs) or swigs[0]
env['SWIGVERSION'] = _get_swig_version(env, env['SWIG'])
env['SWIGFLAGS'] = SCons.Util.CLVar('')
env['SWIGDIRECTORSUFFIX'] = '_wrap.h'
env['SWIGCFILESUFFIX'] = '_wrap$CFILESUFFIX'
env['SWIGCXXFILESUFFIX'] = '_wrap$CXXFILESUFFIX'
env['_SWIGOUTDIR'] = r'${"-outdir \"%s\"" % SWIGOUTDIR}'
env['SWIGPATH'] = []
env['SWIGINCPREFIX'] = '-I'
env['SWIGINCSUFFIX'] = ''
env['_SWIGINCFLAGS'] = '$( ${_concat(SWIGINCPREFIX, SWIGPATH, SWIGINCSUFFIX, __env__, RDirs, TARGET, SOURCE)} $)'
env['SWIGCOM'] = '$SWIG -o $TARGET ${_SWIGOUTDIR} ${_SWIGINCFLAGS} $SWIGFLAGS $SOURCES' | [
"Add Builders and construction variables for swig to an Environment."
] |
Please provide a description of the function:def _select_ftdi_channel(channel):
if channel < 0 or channel > 8:
raise ArgumentError("FTDI-selected multiplexer only has channels 0-7 valid, "
"make sure you specify channel with -c channel=number", channel=channel)
from pylibftdi import BitBangDevice
bb = BitBangDevice(auto_detach=False)
bb.direction = 0b111
bb.port = channel | [
"Select multiplexer channel. Currently uses a FTDI chip via pylibftdi"
] |
def parse_binary_descriptor(bindata, sensor_log=None):
    """Decode a 14-byte packed binary streamer descriptor into a DataStreamer.

    Args:
        bindata (bytes): The 14-byte binary streamer descriptor to decode.
        sensor_log (SensorLog): Optional sensor log to attach the streamer
            to an underlying data store.

    Returns:
        DataStreamer: The decoded streamer object.

    Raises:
        ArgumentError: If the data has the wrong length or contains an
            unknown format, type or trigger code.
    """

    if len(bindata) != 14:
        raise ArgumentError("Invalid length of binary data in streamer descriptor",
                            length=len(bindata), expected=14, data=bindata)

    dest_tile, stream_id, trigger, format_code, type_code = struct.unpack("<8sHBBBx", bindata)

    dest_id = SlotIdentifier.FromEncoded(dest_tile)
    selector = DataStreamSelector.FromEncoded(stream_id)

    format_name = DataStreamer.KnownFormatCodes.get(format_code)
    if format_name is None:
        raise ArgumentError("Unknown format code", code=format_code, known_code=DataStreamer.KnownFormatCodes)

    type_name = DataStreamer.KnownTypeCodes.get(type_code)
    if type_name is None:
        raise ArgumentError("Unknown type code", code=type_code, known_codes=DataStreamer.KnownTypeCodes)

    # Bit 7 of the trigger marks a "with other streamer" trigger whose low
    # 7 bits carry the other streamer's index; otherwise 0/1 select
    # manual/automatic triggering.
    with_other = None
    auto = False
    if trigger & (1 << 7):
        with_other = trigger & 0x7F
    elif trigger == 1:
        auto = True
    elif trigger != 0:
        raise ArgumentError("Unknown trigger type for streamer", trigger_code=trigger)

    return DataStreamer(selector, dest_id, format_name, auto, type_name,
                        with_other=with_other, sensor_log=sensor_log)
"Convert a binary streamer descriptor into a string descriptor.\n\n Binary streamer descriptors are 20-byte binary structures that encode all\n information needed to create a streamer. They are used to communicate\n that information to an embedded device in an efficent format. This\n function exists to turn such a compressed streamer description back into\n an understandable string.\n\n Args:\n bindata (bytes): The binary streamer descriptor that we want to\n understand.\n sensor_log (SensorLog): Optional sensor_log to add this streamer to\n a an underlying data store.\n\n Returns:\n DataStreamer: A DataStreamer object representing the streamer.\n\n You can get a useful human readable string by calling str() on the\n return value.\n "
] |
def create_binary_descriptor(streamer):
    """Pack a DataStreamer object into its 14-byte binary descriptor.

    Args:
        streamer (DataStreamer): The streamer to create a packed descriptor for.

    Returns:
        bytes: A packed 14-byte streamer descriptor.
    """

    # Encode the trigger byte: 1 = automatic, bit 7 + index = triggered
    # with another streamer, 0 = manual.
    if streamer.automatic:
        trigger = 1
    elif streamer.with_other is not None:
        trigger = (1 << 7) | streamer.with_other
    else:
        trigger = 0

    return struct.pack("<8sHBBBx", streamer.dest.encode(), streamer.selector.encode(),
                       trigger, streamer.KnownFormats[streamer.format],
                       streamer.KnownTypes[streamer.report_type])
"Create a packed binary descriptor of a DataStreamer object.\n\n Args:\n streamer (DataStreamer): The streamer to create a packed descriptor for\n\n Returns:\n bytes: A packed 14-byte streamer descriptor.\n "
] |
def parse_string_descriptor(string_desc):
    """Parse a string descriptor of a streamer into a DataStreamer object.

    Args:
        string_desc (str): The string descriptor that we wish to parse.

    Returns:
        DataStreamer: A DataStreamer object representing the streamer.

    Raises:
        SensorGraphSemanticError: If an unsupported combination of options
            is specified (e.g. a signed realtime streamer).
    """

    if not isinstance(string_desc, str):
        string_desc = str(string_desc)

    # The parser grammar requires a trailing semicolon.
    if not string_desc.endswith(';'):
        string_desc += ';'

    parsed = get_streamer_parser().parseString(string_desc)[0]

    realtime = 'realtime' in parsed
    broadcast = 'broadcast' in parsed
    encrypted = 'security' in parsed and parsed['security'] == 'encrypted'
    signed = 'security' in parsed and parsed['security'] == 'signed'
    auto = 'manual' not in parsed

    with_other = None
    if 'with_other' in parsed:
        with_other = parsed['with_other']
        auto = False

    dest = SlotIdentifier.FromString('controller')
    if 'explicit_tile' in parsed:
        dest = parsed['explicit_tile']

    selector = parsed['selector']

    # Make sure all of the combinations are valid.
    # (Bug fix: removed the redundant no-op self-assignments
    # `dest = dest` and `selector = selector` that followed these checks.)
    if realtime and (encrypted or signed):
        raise SensorGraphSemanticError("Realtime streamers cannot be either signed or encrypted")

    if broadcast and (encrypted or signed):
        raise SensorGraphSemanticError("Broadcast streamers cannot be either signed or encrypted")

    report_type = 'broadcast' if broadcast else 'telegram'

    if realtime or broadcast:
        report_format = u'individual'
    elif signed:
        report_format = u'signedlist_userkey'
    elif encrypted:
        raise SensorGraphSemanticError("Encrypted streamers are not yet supported")
    else:
        report_format = u'hashedlist'

    return DataStreamer(selector, dest, report_format, auto, report_type=report_type, with_other=with_other)
"Parse a string descriptor of a streamer into a DataStreamer object.\n\n Args:\n string_desc (str): The string descriptor that we wish to parse.\n\n Returns:\n DataStreamer: A DataStreamer object representing the streamer.\n "
] |
def verify(self, obj):
    """Verify that the object is a float.

    Args:
        obj (object): A python object to verify.

    Returns:
        float: The validated object, unchanged.

    Raises:
        ValidationError: If obj is not a float instance; the `reason` key
            indicates why validation failed.
    """

    if isinstance(obj, float):
        return obj

    raise ValidationError("Object is not a float", reason='object is not a float', object=obj)
"Verify that the object conforms to this verifier's schema.\n\n Args:\n obj (object): A python object to verify\n\n Raises:\n ValidationError: If there is a problem verifying the dictionary, a\n ValidationError is thrown with at least the reason key set indicating\n the reason for the lack of validation.\n "
] |
def execute_before(self, sensor_graph, scope_stack):
    """Execute statement before children are executed.

    Sets up a GatedClockScope whose clock only ticks while a user is
    connected to the tile at ``self.slot_id`` and pushes it onto
    ``scope_stack``.

    Args:
        sensor_graph (SensorGraph): The sensor graph that we are building or
            modifying.
        scope_stack (list(Scope)): A stack of nested scopes that may influence
            how this statement allocates clocks or other stream resources.
    """

    parent = scope_stack[-1]
    alloc = parent.allocator

    # We want to create a gated clock that only fires when there is a connection
    # to a communication tile. So we create a latching constant stream that is used to gate the
    # clock passed through from the previous scope.
    connect_stream = alloc.allocate_stream(DataStream.UnbufferedType, attach=True)
    disconnect_stream = alloc.allocate_stream(DataStream.UnbufferedType, attach=True)
    latch_stream = alloc.allocate_stream(DataStream.ConstantType, attach=True)
    latch_on_stream = alloc.allocate_stream(DataStream.ConstantType, attach=True)
    latch_off_stream = alloc.allocate_stream(DataStream.ConstantType, attach=True)

    # Mirror the global user connect/disconnect events into local streams.
    # NOTE(review): user_connected/user_disconnected appear to be
    # module-level stream names defined elsewhere in this file — confirm.
    sensor_graph.add_node(u"({} always) => {} using copy_latest_a".format(user_connected, connect_stream))
    sensor_graph.add_node(u"({} always) => {} using copy_latest_a".format(user_disconnected, disconnect_stream))

    # When a connect/disconnect event carries our tile's address, copy the
    # constant 1/0 into latch_stream, latching the clock gate open/closed.
    sensor_graph.add_node(u"({} always && {} when value=={}) => {} using copy_latest_a".format(latch_on_stream, connect_stream, self.slot_id.address, latch_stream))
    sensor_graph.add_node(u"({} always && {} when value=={}) => {} using copy_latest_a".format(latch_off_stream, disconnect_stream, self.slot_id.address, latch_stream))

    sensor_graph.add_constant(latch_on_stream, 1)
    sensor_graph.add_constant(latch_off_stream, 0)
    # Start with the gate closed (no user connected yet).
    sensor_graph.add_constant(latch_stream, 0)

    new_scope = GatedClockScope(sensor_graph, scope_stack, (latch_stream, InputTrigger(u'value', u'==', 1)))

    # Add two new identifiers to the scope for supporting on connect and on disconnect events
    new_scope.add_identifier('connect', connect_stream)
    new_scope.add_identifier('disconnect', disconnect_stream)

    scope_stack.append(new_scope)
"Execute statement before children are executed.\n\n Args:\n sensor_graph (SensorGraph): The sensor graph that we are building or\n modifying\n scope_stack (list(Scope)): A stack of nested scopes that may influence\n how this statement allocates clocks or other stream resources.\n "
] |
def generate(env):
    """Add Builders and construction variables for applelink to an Environment."""

    # Start from the generic link tool, then layer macOS specifics on top.
    link.generate(env)

    env['FRAMEWORKPATHPREFIX'] = '-F'
    env['_FRAMEWORKPATH'] = '${_concat(FRAMEWORKPATHPREFIX, FRAMEWORKPATH, "", __env__)}'
    env['_FRAMEWORKS'] = '${_concat("-framework ", FRAMEWORKS, "", __env__)}'
    # Append framework search paths and -framework flags to the link lines
    # that link.generate() just set up.
    env['LINKCOM'] = env['LINKCOM'] + ' $_FRAMEWORKPATH $_FRAMEWORKS $FRAMEWORKSFLAGS'
    env['SHLINKFLAGS'] = SCons.Util.CLVar('$LINKFLAGS -dynamiclib')
    env['SHLINKCOM'] = env['SHLINKCOM'] + ' $_FRAMEWORKPATH $_FRAMEWORKS $FRAMEWORKSFLAGS'

    # TODO: Work needed to generate versioned shared libraries
    # Leaving this commented out, and also going to disable versioned library checking for now
    # see: http://docstore.mik.ua/orelly/unix3/mac/ch05_04.htm for proper naming
    #link._setup_versioned_lib_variables(env, tool = 'applelink')#, use_soname = use_soname)
    #env['LINKCALLBACKS'] = link._versioned_lib_callbacks()

    # override the default for loadable modules, which are different
    # on OS X than dynamic shared libs. echoing what XCode does for
    # pre/suffixes:
    env['LDMODULEPREFIX'] = ''
    env['LDMODULESUFFIX'] = ''
    env['LDMODULEFLAGS'] = SCons.Util.CLVar('$LINKFLAGS -bundle')
    env['LDMODULECOM'] = '$LDMODULE -o ${TARGET} $LDMODULEFLAGS $SOURCES $_LIBDIRFLAGS $_LIBFLAGS $_FRAMEWORKPATH $_FRAMEWORKS $FRAMEWORKSFLAGS'
"Add Builders and construction variables for applelink to an\n Environment."
] |
Please provide a description of the function:def _generateGUID(slnfile, name):
m = hashlib.md5()
# Normalize the slnfile path to a Windows path (\ separators) so
# the generated file has a consistent GUID even if we generate
# it on a non-Windows platform.
m.update(bytearray(ntpath.normpath(str(slnfile)) + str(name),'utf-8'))
solution = m.hexdigest().upper()
# convert most of the signature to GUID form (discard the rest)
solution = "{" + solution[:8] + "-" + solution[8:12] + "-" + solution[12:16] + "-" + solution[16:20] + "-" + solution[20:32] + "}"
return solution | [
"This generates a dummy GUID for the sln file to use. It is\n based on the MD5 signatures of the sln filename plus the name of\n the project. It basically just needs to be unique, and not\n change with each invocation."
] |
def msvs_parse_version(s):
    """Split a Visual Studio version, which may in fact be something like
    '7.0Exp', into its version number (returned as a float) and trailing
    "suite" portion.
    """

    match = version_re.match(s)
    number, suite = match.groups()
    return (float(number), suite)
"\n Split a Visual Studio version, which may in fact be something like\n '7.0Exp', into is version number (returned as a float) and trailing\n \"suite\" portion.\n "
] |
def makeHierarchy(sources):
    """Break a list of files into a hierarchy.

    Returns nested dictionaries: each dictionary value is a folder and each
    string value is a file (the original path of the file).

    Args:
        sources: Iterable of file paths.

    Returns:
        dict: Nested folder/file hierarchy.
    """

    # Renamed locals: the originals shadowed the builtins `dict` and `file`.
    hierarchy = {}
    for source in sources:
        path = splitFully(source)
        if len(path):
            node = hierarchy
            # Walk (creating as needed) the intermediate folder dictionaries.
            for part in path[:-1]:
                node = node.setdefault(part, {})
            node[path[-1]] = source
        # else: failed to decompose the path; silently skip it.
    return hierarchy
def GenerateDSP(dspfile, source, env):
    """Generates a Project file based on the version of MSVS that is being used."""

    version_num = 6.0
    if 'MSVS_VERSION' in env:
        version_num, suite = msvs_parse_version(env['MSVS_VERSION'])

    # Pick the generator class matching the MSVS version in use.
    if version_num >= 10.0:
        generator_class = _GenerateV10DSP
    elif version_num >= 7.0:
        generator_class = _GenerateV7DSP
    else:
        generator_class = _GenerateV6DSP

    generator_class(dspfile, source, env).Build()
"Generates a Project file based on the version of MSVS that is being used"
] |
def GenerateDSW(dswfile, source, env):
    """Generates a Solution/Workspace file based on the version of MSVS that is being used."""

    version_num = 6.0
    if 'MSVS_VERSION' in env:
        version_num, suite = msvs_parse_version(env['MSVS_VERSION'])

    # MSVS 7.0+ uses the .sln solution format; older versions use .dsw.
    generator_class = _GenerateV7DSW if version_num >= 7.0 else _GenerateV6DSW
    generator_class(dswfile, source, env).Build()
"Generates a Solution/Workspace file based on the version of MSVS that is being used"
] |
def projectEmitter(target, source, env):
    """Sets up the DSP (project file) dependencies.

    Rewrites the target to carry the correct MSVS project suffix and, when
    no explicit sources were given, synthesizes a signature Value node from
    the environment settings (defines, include paths, build targets, etc.)
    so the project file is regenerated whenever those change.
    """

    # todo: Not sure what sets source to what user has passed as target,
    # but this is what happens. When that is fixed, we also won't have
    # to make the user always append env['MSVSPROJECTSUFFIX'] to target.
    if source[0] == target[0]:
        source = []

    # make sure the suffix is correct for the version of MSVS we're running.
    (base, suff) = SCons.Util.splitext(str(target[0]))
    suff = env.subst('$MSVSPROJECTSUFFIX')
    target[0] = base + suff

    if not source:
        source = 'prj_inputs:'
        source = source + env.subst('$MSVSSCONSCOM', 1)
        source = source + env.subst('$MSVSENCODING', 1)

        # Project file depends on CPPDEFINES and CPPPATH
        preprocdefs = xmlify(';'.join(processDefines(env.get('CPPDEFINES', []))))
        includepath_Dirs = processIncludes(env.get('CPPPATH', []), env, None, None)
        includepath = xmlify(';'.join([str(x) for x in includepath_Dirs]))
        source = source + "; ppdefs:%s incpath:%s"%(preprocdefs, includepath)

        if 'buildtarget' in env and env['buildtarget'] != None:
            if SCons.Util.is_String(env['buildtarget']):
                source = source + ' "%s"' % env['buildtarget']
            elif SCons.Util.is_List(env['buildtarget']):
                for bt in env['buildtarget']:
                    if SCons.Util.is_String(bt):
                        source = source + ' "%s"' % bt
                    else:
                        try: source = source + ' "%s"' % bt.get_abspath()
                        except AttributeError: raise SCons.Errors.InternalError("buildtarget can be a string, a node, a list of strings or nodes, or None")
            else:
                try: source = source + ' "%s"' % env['buildtarget'].get_abspath()
                except AttributeError: raise SCons.Errors.InternalError("buildtarget can be a string, a node, a list of strings or nodes, or None")

        if 'outdir' in env and env['outdir'] != None:
            if SCons.Util.is_String(env['outdir']):
                source = source + ' "%s"' % env['outdir']
            elif SCons.Util.is_List(env['outdir']):
                for s in env['outdir']:
                    if SCons.Util.is_String(s):
                        source = source + ' "%s"' % s
                    else:
                        try: source = source + ' "%s"' % s.get_abspath()
                        except AttributeError: raise SCons.Errors.InternalError("outdir can be a string, a node, a list of strings or nodes, or None")
            else:
                try: source = source + ' "%s"' % env['outdir'].get_abspath()
                except AttributeError: raise SCons.Errors.InternalError("outdir can be a string, a node, a list of strings or nodes, or None")

        if 'name' in env:
            if SCons.Util.is_String(env['name']):
                source = source + ' "%s"' % env['name']
            else:
                raise SCons.Errors.InternalError("name must be a string")

        if 'variant' in env:
            if SCons.Util.is_String(env['variant']):
                source = source + ' "%s"' % env['variant']
            elif SCons.Util.is_List(env['variant']):
                for variant in env['variant']:
                    if SCons.Util.is_String(variant):
                        source = source + ' "%s"' % variant
                    else:
                        raise SCons.Errors.InternalError("name must be a string or a list of strings")
            else:
                raise SCons.Errors.InternalError("variant must be a string or a list of strings")
        else:
            raise SCons.Errors.InternalError("variant must be specified")

        for s in _DSPGenerator.srcargs:
            if s in env:
                if SCons.Util.is_String(env[s]):
                    # Bug fix: the format previously used an unbalanced
                    # opening quote (' "%s'), corrupting the signature string.
                    source = source + ' "%s"' % env[s]
                elif SCons.Util.is_List(env[s]):
                    for t in env[s]:
                        if SCons.Util.is_String(t):
                            source = source + ' "%s"' % t
                        else:
                            raise SCons.Errors.InternalError(s + " must be a string or a list of strings")
                else:
                    raise SCons.Errors.InternalError(s + " must be a string or a list of strings")

        source = source + ' "%s"' % str(target[0])
        source = [SCons.Node.Python.Value(source)]

    targetlist = [target[0]]
    sourcelist = source

    if env.get('auto_build_solution', 1):
        env['projects'] = [env.File(t).srcnode() for t in targetlist]
        t, s = solutionEmitter(target, target, env)
        targetlist = targetlist + t

    # Beginning with Visual Studio 2010 for each project file (.vcxproj) we have additional file (.vcxproj.filters)
    version_num = 6.0
    if 'MSVS_VERSION' in env:
        version_num, suite = msvs_parse_version(env['MSVS_VERSION'])
    if version_num >= 10.0:
        targetlist.append(targetlist[0] + '.filters')

    return (targetlist, sourcelist)
"Sets up the DSP dependencies."
] |
def solutionEmitter(target, source, env):
    """Sets up the DSW (solution file) dependencies.

    Rewrites the target to carry the correct MSVS solution suffix and, when
    no explicit sources were given, synthesizes a signature string from the
    environment settings so the solution is regenerated when they change.
    """

    # todo: Not sure what sets source to what user has passed as target,
    # but this is what happens. When that is fixed, we also won't have
    # to make the user always append env['MSVSSOLUTIONSUFFIX'] to target.
    if source[0] == target[0]:
        source = []

    # make sure the suffix is correct for the version of MSVS we're running.
    (base, suff) = SCons.Util.splitext(str(target[0]))
    suff = env.subst('$MSVSSOLUTIONSUFFIX')
    target[0] = base + suff

    if not source:
        source = 'sln_inputs:'

        # Fold each relevant setting into the signature string, validating
        # its type as we go.
        if 'name' in env:
            if SCons.Util.is_String(env['name']):
                source = source + ' "%s"' % env['name']
            else:
                raise SCons.Errors.InternalError("name must be a string")

        if 'variant' in env:
            if SCons.Util.is_String(env['variant']):
                source = source + ' "%s"' % env['variant']
            elif SCons.Util.is_List(env['variant']):
                for variant in env['variant']:
                    if SCons.Util.is_String(variant):
                        source = source + ' "%s"' % variant
                    else:
                        raise SCons.Errors.InternalError("name must be a string or a list of strings")
            else:
                raise SCons.Errors.InternalError("variant must be a string or a list of strings")
        else:
            raise SCons.Errors.InternalError("variant must be specified")

        if 'slnguid' in env:
            if SCons.Util.is_String(env['slnguid']):
                source = source + ' "%s"' % env['slnguid']
            else:
                raise SCons.Errors.InternalError("slnguid must be a string")

        if 'projects' in env:
            if SCons.Util.is_String(env['projects']):
                source = source + ' "%s"' % env['projects']
            elif SCons.Util.is_List(env['projects']):
                for t in env['projects']:
                    if SCons.Util.is_String(t):
                        source = source + ' "%s"' % t

        source = source + ' "%s"' % str(target[0])
        # Wrap the signature string in a Value node so SCons tracks it.
        source = [SCons.Node.Python.Value(source)]

    return ([target[0]], source)
"Sets up the DSW dependencies."
] |
def generate(env):
    """Add Builders and construction variables for Microsoft Visual
    Studio project files to an Environment."""

    # Register our builders only if the user has not already installed
    # replacements under the same names.
    try:
        env['BUILDERS']['MSVSProject']
    except KeyError:
        env['BUILDERS']['MSVSProject'] = projectBuilder

    try:
        env['BUILDERS']['MSVSSolution']
    except KeyError:
        env['BUILDERS']['MSVSSolution'] = solutionBuilder

    env['MSVSPROJECTCOM'] = projectAction
    env['MSVSSOLUTIONCOM'] = solutionAction

    if SCons.Script.call_stack:
        # XXX Need to find a way to abstract this; the build engine
        # shouldn't depend on anything in SCons.Script.
        env['MSVSSCONSCRIPT'] = SCons.Script.call_stack[0].sconscript
    else:
        global default_MSVS_SConscript
        if default_MSVS_SConscript is None:
            default_MSVS_SConscript = env.File('SConstruct')
        env['MSVSSCONSCRIPT'] = default_MSVS_SConscript

    # Command lines Visual Studio will use to drive SCons builds.
    env['MSVSSCONS'] = '"%s" -c "%s"' % (python_executable, getExecScriptMain(env))
    env['MSVSSCONSFLAGS'] = '-C "${MSVSSCONSCRIPT.dir.get_abspath()}" -f ${MSVSSCONSCRIPT.name}'
    env['MSVSSCONSCOM'] = '$MSVSSCONS $MSVSSCONSFLAGS'
    env['MSVSBUILDCOM'] = '$MSVSSCONSCOM "$MSVSBUILDTARGET"'
    env['MSVSREBUILDCOM'] = '$MSVSSCONSCOM "$MSVSBUILDTARGET"'
    env['MSVSCLEANCOM'] = '$MSVSSCONSCOM -c "$MSVSBUILDTARGET"'

    # Set-up ms tools paths for default version
    msvc_setup_env_once(env)

    if 'MSVS_VERSION' in env:
        version_num, suite = msvs_parse_version(env['MSVS_VERSION'])
    else:
        (version_num, suite) = (7.0, None) # guess at a default

    if 'MSVS' not in env:
        env['MSVS'] = {}

    # Project/solution file suffixes vary by MSVS version.
    if (version_num < 7.0):
        env['MSVS']['PROJECTSUFFIX'] = '.dsp'
        env['MSVS']['SOLUTIONSUFFIX'] = '.dsw'
    elif (version_num < 10.0):
        env['MSVS']['PROJECTSUFFIX'] = '.vcproj'
        env['MSVS']['SOLUTIONSUFFIX'] = '.sln'
    else:
        env['MSVS']['PROJECTSUFFIX'] = '.vcxproj'
        env['MSVS']['SOLUTIONSUFFIX'] = '.sln'

    if (version_num >= 10.0):
        env['MSVSENCODING'] = 'utf-8'
    else:
        env['MSVSENCODING'] = 'Windows-1252'

    env['GET_MSVSPROJECTSUFFIX'] = GetMSVSProjectSuffix
    env['GET_MSVSSOLUTIONSUFFIX'] = GetMSVSSolutionSuffix
    env['MSVSPROJECTSUFFIX'] = '${GET_MSVSPROJECTSUFFIX}'
    env['MSVSSOLUTIONSUFFIX'] = '${GET_MSVSSOLUTIONSUFFIX}'
    env['SCONS_HOME'] = os.environ.get('SCONS_HOME')
"Add Builders and construction variables for Microsoft Visual\n Studio project files to an Environment."
] |
def PrintSolution(self):
    """Writes a solution file.

    Emits the version header, one Project entry per project file, and the
    Global sections (source control, configurations, project
    configurations, properties), then optionally appends a pickled,
    base64-encoded copy of the configuration dictionary for later reuse.
    """

    self.file.write('Microsoft Visual Studio Solution File, Format Version %s\n' % self.versionstr)
    if self.version_num >= 12.0:
        self.file.write('# Visual Studio 14\n')
    elif self.version_num >= 11.0:
        self.file.write('# Visual Studio 11\n')
    elif self.version_num >= 10.0:
        self.file.write('# Visual Studio 2010\n')
    elif self.version_num >= 9.0:
        self.file.write('# Visual Studio 2008\n')
    elif self.version_num >= 8.0:
        self.file.write('# Visual Studio 2005\n')

    for dspinfo in self.dspfiles_info:
        name = dspinfo['NAME']
        base, suffix = SCons.Util.splitext(name)
        if suffix == '.vcproj':
            name = base
        self.file.write('Project("%s") = "%s", "%s", "%s"\n'
                        % (external_makefile_guid, name, dspinfo['SLN_RELATIVE_FILE_PATH'], dspinfo['GUID']))
        if self.version_num >= 7.1 and self.version_num < 8.0:
            self.file.write('\tProjectSection(ProjectDependencies) = postProject\n'
                            '\tEndProjectSection\n')
        self.file.write('EndProject\n')

    self.file.write('Global\n')

    env = self.env
    if 'MSVS_SCC_PROVIDER' in env:
        # Optional SourceCodeControl section describing how the solution
        # and each project bind to the configured SCC provider.
        scc_number_of_projects = len(self.dspfiles) + 1
        slnguid = self.slnguid
        scc_provider = env.get('MSVS_SCC_PROVIDER', '').replace(' ', r'\u0020')
        scc_project_name = env.get('MSVS_SCC_PROJECT_NAME', '').replace(' ', r'\u0020')
        scc_connection_root = env.get('MSVS_SCC_CONNECTION_ROOT', os.curdir)
        scc_local_path = os.path.relpath(scc_connection_root, self.dsw_folder_path).replace('\\', '\\\\')
        self.file.write('\tGlobalSection(SourceCodeControl) = preSolution\n'
                        '\t\tSccNumberOfProjects = %(scc_number_of_projects)d\n'
                        '\t\tSccProjectName0 = %(scc_project_name)s\n'
                        '\t\tSccLocalPath0 = %(scc_local_path)s\n'
                        '\t\tSccProvider0 = %(scc_provider)s\n'
                        '\t\tCanCheckoutShared = true\n' % locals())
        sln_relative_path_from_scc = os.path.relpath(self.dsw_folder_path, scc_connection_root)
        if sln_relative_path_from_scc != os.curdir:
            self.file.write('\t\tSccProjectFilePathRelativizedFromConnection0 = %s\\\\\n'
                            % sln_relative_path_from_scc.replace('\\', '\\\\'))
        if self.version_num < 8.0:
            # When present, SolutionUniqueID is automatically removed by VS 2005
            # TODO: check for Visual Studio versions newer than 2005
            self.file.write('\t\tSolutionUniqueID = %s\n' % slnguid)
        for dspinfo in self.dspfiles_info:
            i = self.dspfiles_info.index(dspinfo) + 1
            dsp_relative_file_path = dspinfo['SLN_RELATIVE_FILE_PATH'].replace('\\', '\\\\')
            dsp_scc_relative_folder_path = os.path.relpath(dspinfo['FOLDER_PATH'], scc_connection_root).replace('\\', '\\\\')
            self.file.write('\t\tSccProjectUniqueName%(i)s = %(dsp_relative_file_path)s\n'
                            '\t\tSccLocalPath%(i)d = %(scc_local_path)s\n'
                            '\t\tCanCheckoutShared = true\n'
                            '\t\tSccProjectFilePathRelativizedFromConnection%(i)s = %(dsp_scc_relative_folder_path)s\\\\\n'
                            % locals())
        self.file.write('\tEndGlobalSection\n')

    # Solution-level configuration list.
    if self.version_num >= 8.0:
        self.file.write('\tGlobalSection(SolutionConfigurationPlatforms) = preSolution\n')
    else:
        self.file.write('\tGlobalSection(SolutionConfiguration) = preSolution\n')

    confkeys = sorted(self.configs.keys())
    cnt = 0
    for name in confkeys:
        variant = self.configs[name].variant
        platform = self.configs[name].platform
        if self.version_num >= 8.0:
            self.file.write('\t\t%s|%s = %s|%s\n' % (variant, platform, variant, platform))
        else:
            self.file.write('\t\tConfigName.%d = %s\n' % (cnt, variant))
        cnt = cnt + 1
    self.file.write('\tEndGlobalSection\n')
    if self.version_num <= 7.1:
        self.file.write('\tGlobalSection(ProjectDependencies) = postSolution\n'
                        '\tEndGlobalSection\n')

    # Per-project configuration mapping.
    if self.version_num >= 8.0:
        self.file.write('\tGlobalSection(ProjectConfigurationPlatforms) = postSolution\n')
    else:
        self.file.write('\tGlobalSection(ProjectConfiguration) = postSolution\n')

    for name in confkeys:
        variant = self.configs[name].variant
        platform = self.configs[name].platform
        if self.version_num >= 8.0:
            for dspinfo in self.dspfiles_info:
                guid = dspinfo['GUID']
                self.file.write('\t\t%s.%s|%s.ActiveCfg = %s|%s\n'
                                '\t\t%s.%s|%s.Build.0 = %s|%s\n' % (guid,variant,platform,variant,platform,guid,variant,platform,variant,platform))
        else:
            for dspinfo in self.dspfiles_info:
                guid = dspinfo['GUID']
                self.file.write('\t\t%s.%s.ActiveCfg = %s|%s\n'
                                '\t\t%s.%s.Build.0 = %s|%s\n' %(guid,variant,variant,platform,guid,variant,variant,platform))
    self.file.write('\tEndGlobalSection\n')

    if self.version_num >= 8.0:
        self.file.write('\tGlobalSection(SolutionProperties) = preSolution\n'
                        '\t\tHideSolutionNode = FALSE\n'
                        '\tEndGlobalSection\n')
    else:
        self.file.write('\tGlobalSection(ExtensibilityGlobals) = postSolution\n'
                        '\tEndGlobalSection\n'
                        '\tGlobalSection(ExtensibilityAddIns) = postSolution\n'
                        '\tEndGlobalSection\n')
    self.file.write('EndGlobal\n')
    if self.nokeep == 0:
        pdata = pickle.dumps(self.configs,PICKLE_PROTOCOL)
        # Bug fix: base64.encodestring was deprecated and removed in
        # Python 3.9; prefer encodebytes, falling back for old interpreters.
        encode = getattr(base64, 'encodebytes', None) or base64.encodestring
        pdata = encode(pdata).decode()
        self.file.write(pdata)
        self.file.write('\n')
"Writes a solution file"
] |
def PrintWorkspace(self):
    """Writes a Visual Studio 6 workspace (.dsw) file."""

    # V6DSWHeader interpolates %(name)s and %(dspfile)s, so supply both
    # explicitly instead of relying on locals().
    fields = {
        'name': self.name,
        'dspfile': os.path.relpath(self.dspfiles[0], self.dsw_folder_path),
    }
    self.file.write(V6DSWHeader % fields)
" writes a DSW file "
] |
def waiters(self, path=None):
    """Iterate over all waiters.

    Walks the waiter tree depth-first, yielding each registered future or
    callback together with the list of keys/values it matches on.  The
    overall order is unspecified.

    Yields:
        (list, future or callable)
    """

    if path is None:
        path = []

    # Descend to the node addressed by `path`.
    node = self._waiters
    for key in path:
        node = node[key]

    # Waiters registered directly at this node.
    for waiter in node.get(self._LEAF, ()):
        yield (path, waiter)

    # Recurse into child keys (everything except the leaf marker).
    for key in node:
        if key is not self._LEAF:
            yield from self.waiters(path=path + [key])
"Iterate over all waiters.\n\n This method will return the waiters in unspecified order\n including the future or callback object that will be invoked\n and a list containing the keys/value that are being matched.\n\n Yields:\n list, future or callable\n "
] |
def every_match(self, callback, **kwargs):
    """Invoke callback every time a matching message is received.

    The callback can later be removed by passing the returned handle to
    remove_waiter().

    Args:
        callback (callable): Called as callback(message) whenever a
            matching message is received.
        **kwargs: At least one message field/value pair to match on.

    Returns:
        object: An opaque (spec, responder) handle for remove_waiter().

    Raises:
        ArgumentError: If no message fields were specified.
    """

    if not kwargs:
        raise ArgumentError("You must specify at least one message field to wait on")

    spec = MessageSpec(**kwargs)
    responder = self._add_waiter(spec, callback)

    return (spec, responder)
"Invoke callback every time a matching message is received.\n\n The callback will be invoked directly inside process_message so that\n you can guarantee that it has been called by the time process_message\n has returned.\n\n The callback can be removed by a call to remove_waiter(), passing the\n handle object returned by this call to identify it.\n\n Args:\n callback (callable): A callable function that will be called as\n callback(message) whenever a matching message is received.\n\n Returns:\n object: An opaque handle that can be passed to remove_waiter().\n\n This handle is the only way to remove this callback if you no\n longer want it to be called.\n "
] |
def remove_waiter(self, waiter_handle):
    """Remove a message callback previously registered with every_match().

    Args:
        waiter_handle (object): The opaque (spec, waiter) handle returned
            by the earlier every_match() call.
    """

    match_spec, responder = waiter_handle
    self._remove_waiter(match_spec, responder)
"Remove a message callback.\n\n This call will remove a callback previously registered using\n every_match.\n\n Args:\n waiter_handle (object): The opaque handle returned by the\n previous call to every_match().\n "
] |
def clear(self):
    """Clear all waiters.

    Every pending future waiter is failed with asyncio.CancelledError
    before the waiter tree is reset to empty.
    """

    for _path, waiter in self.waiters():
        if isinstance(waiter, asyncio.Future) and not waiter.done():
            waiter.set_exception(asyncio.CancelledError())

    self._waiters = {}
"Clear all waiters.\n\n This method will remove any current scheduled waiter with an\n asyncio.CancelledError exception.\n "
] |
def wait_for(self, timeout=None, **kwargs):
    """Wait for a single message matching the given key/value pairs.

    The first message received after this call whose fields match all of
    the keyword arguments resolves the returned awaitable.  The waiter is
    removed once it fires or the timeout expires (raising
    asyncio.TimeoutError in the latter case).

    Args:
        timeout (float): Optional timeout in seconds; None means no timeout.
        **kwargs: At least one message field/value pair to match on.

    Returns:
        awaitable: Resolves to the matching message.

    Raises:
        ArgumentError: If no message fields were specified.
    """

    if not kwargs:
        raise ArgumentError("You must specify at least one message field to wait on")

    spec = MessageSpec(**kwargs)
    future = self._add_waiter(spec)
    # Clean up the waiter whether it completes, errors or times out.
    future.add_done_callback(lambda _done: self._remove_waiter(spec, future))

    return asyncio.wait_for(future, timeout=timeout)
"Wait for a specific matching message or timeout.\n\n You specify the message by passing name=value keyword arguments to\n this method. The first message received after this function has been\n called that has all of the given keys with the given values will be\n returned when this function is awaited.\n\n If no matching message is received within the specified timeout (if\n given), then asyncio.TimeoutError will be raised.\n\n This function only matches a single message and removes itself once\n the message is seen or the timeout expires.\n\n Args:\n timeout (float): Optional timeout, defaults to None for no timeout.\n **kwargs: Keys to match in the message with their corresponding values.\n You must pass at least one keyword argument so there is something\n to look for.\n\n Returns:\n awaitable: The response\n "
] |
async def process_message(self, message, wait=True):
    """Process a message to see if it wakes any waiters.

    Walks the waiter tree breadth-first, waking every waiter whose
    registered keys all match the message.  Future waiters receive the
    message as their result; callback waiters are invoked (awaited when
    wait=True, otherwise launched on the loop without blocking).

    Args:
        message (dict or object): The message that we should process.
        wait (bool): Whether to block until all callbacks have finished
            or to return once the callbacks have been launched.

    Returns:
        bool: True if at least one waiter matched, otherwise False.
    """

    to_check = deque([self._waiters])
    ignored = True

    while len(to_check) > 0:
        context = to_check.popleft()

        # Wake every waiter registered at this node of the tree.
        waiters = context.get(OperationManager._LEAF, [])
        for waiter in waiters:
            if isinstance(waiter, asyncio.Future):
                waiter.set_result(message)
            else:
                try:
                    await _wait_or_launch(self._loop, waiter, message, wait)
                except: #pylint:disable=bare-except;We can't let a user callback break this routine
                    self._logger.warning("Error calling every_match callback, callback=%s, message=%s",
                                         waiter, message, exc_info=True)

            ignored = False

        # Descend into child nodes whose key/value constraint the message
        # satisfies; keys missing from the message prune that subtree.
        for key in context:
            if key is OperationManager._LEAF:
                continue

            message_val = _get_key(message, key)
            if message_val is _MISSING:
                continue

            next_level = context[key]
            if message_val in next_level:
                to_check.append(next_level[message_val])

    return not ignored
"Process a message to see if it wakes any waiters.\n\n This will check waiters registered to see if they match the given\n message. If so, they are awoken and passed the message. All matching\n waiters will be woken.\n\n This method returns False if the message matched no waiters so it was\n ignored.\n\n Normally you want to use wait=True (the default behavior) to guarantee\n that all callbacks have finished before this method returns. However,\n sometimes that can cause a deadlock if those callbacks would\n themselves invoke behavior that requires whatever is waiting for this\n method to be alive. In that case you can pass wait=False to ensure\n that the caller of this method does not block.\n\n Args:\n message (dict or object): The message that we should process\n wait (bool): Whether to block until all callbacks have finished\n or to return once the callbacks have been launched.\n\n Returns:\n bool: True if at least one waiter matched, otherwise False.\n "
] |
def generate(env):
    """Add Builders and construction variables for zip to an Environment."""

    # Install the Zip builder unless the user already provided one.
    builders = env['BUILDERS']
    if 'Zip' in builders:
        bld = builders['Zip']
    else:
        bld = ZipBuilder
    builders['Zip'] = bld

    env['ZIP'] = 'zip'
    env['ZIPFLAGS'] = SCons.Util.CLVar('')
    env['ZIPCOM'] = zipAction
    env['ZIPCOMPRESSION'] = zipcompression
    env['ZIPSUFFIX'] = '.zip'
    env['ZIPROOT'] = SCons.Util.CLVar('')
"Add Builders and construction variables for zip to an Environment."
] |
def FromBinary(cls, record_data, record_count=1):
    """Create a ClearConfigVariablesRecord from binary record data.

    Args:
        record_data (bytearray): The raw record data to parse, NOT
            including its 8 byte record header.
        record_count (int): The number of records included in record_data.

    Returns:
        ClearConfigVariablesRecord: The decoded record.

    Raises:
        ArgumentError: If the record targets a tile other than the
            controller (address 8).
    """

    parsed = cls._parse_rpc_info(record_data)
    address = parsed[1]

    if address != 8:
        raise ArgumentError("Clear config variables sent to non-controller tile")

    return ClearConfigVariablesRecord()
"Create an UpdateRecord subclass from binary record data.\n\n This should be called with a binary record blob (NOT including the\n record type header) and it will decode it into a ClearConfigVariablesRecord.\n\n Args:\n record_data (bytearray): The raw record data that we wish to parse\n into an UpdateRecord subclass NOT including its 8 byte record header.\n record_count (int): The number of records included in record_data.\n\n Raises:\n ArgumentError: If the record_data is malformed and cannot be parsed.\n\n Returns:\n ClearConfigVariablesRecord: The decoded reflash tile record.\n "
] |
async def send_rpc(self, conn_id, address, rpc_id, payload, timeout):
    """Asynchronously send an RPC to this IOTile device.

    Delegates to the parent adapter, then waits for every emulated device
    to become idle so any background work triggered by the RPC has
    settled before the caller continues.

    Args:
        conn_id (int): A unique identifier that will refer to this connection.
        address (int): The address of the tile that we wish to send the RPC to.
        rpc_id (int): The 16-bit id of the RPC we want to call.
        payload (bytearray): The payload of the command.
        timeout (float): The number of seconds to wait for the RPC to execute.
    """

    try:
        return await super(EmulatedDeviceAdapter, self).send_rpc(conn_id, address, rpc_id, payload, timeout)
    finally:
        # Even if the RPC raised, let the devices finish background work.
        for dev in self.devices.values():
            dev.wait_idle()
"Asynchronously send an RPC to this IOTile device\n\n Args:\n conn_id (int): A unique identifier that will refer to this connection\n address (int): the address of the tile that we wish to send the RPC to\n rpc_id (int): the 16-bit id of the RPC we want to call\n payload (bytearray): the payload of the command\n timeout (float): the number of seconds to wait for the RPC to execute\n "
] |
async def debug(self, conn_id, name, cmd_args):
    """Asynchronously complete a named debug command.

    Args:
        conn_id (int): A unique identifier that refers to this connection.
        name (string): The name of the debug command we want to invoke.
        cmd_args (dict): Any arguments that we want to send with this command.

    Returns:
        object: The command's result, if any (currently only 'dump_state'
        produces one).

    Raises:
        DeviceAdapterError: If the command name is unknown or the command
            raised while executing.
    """

    device = self._get_property(conn_id, 'device')

    retval = None

    try:
        if name == 'dump_state':
            retval = device.dump_state()
        elif name == 'restore_state':
            state = cmd_args['snapshot']
            device.restore_state(state)
        elif name == 'load_scenario':
            scenario = cmd_args['scenario']
            device.load_metascenario(scenario)
        elif name == 'track_changes':
            if cmd_args['enabled']:
                device.state_history.enable()
            else:
                device.state_history.disable()
        elif name == 'dump_changes':
            outpath = cmd_args['path']
            device.state_history.dump(outpath)
        else:
            reason = "Unknown command %s" % name
            raise DeviceAdapterError(conn_id, 'debug {}'.format(name), reason)
    except DeviceAdapterError:
        # Bug fix: previously the unknown-command error above was caught by
        # the generic handler below and re-wrapped, replacing its message
        # with a generic "Exception ... occurred during processing".
        raise
    except Exception as exc:
        self._logger.exception("Error processing debug command %s: args=%s", name, cmd_args)
        reason = "Exception %s occurred during processing" % str(exc)
        raise DeviceAdapterError(conn_id, 'debug {}'.format(name), reason) from exc

    return retval
"Asynchronously complete a named debug command.\n\n The command name and arguments are passed to the underlying device adapter\n and interpreted there.\n\n Args:\n conn_id (int): A unique identifer that will refer to this connection\n name (string): the name of the debug command we want to invoke\n cmd_args (dict): any arguments that we want to send with this command.\n "
] |
def verify(self, obj):
    """Verify that the object is one of the enumerated options.

    Args:
        obj (object): A python object to verify.

    Returns:
        object: The validated object, unchanged.

    Raises:
        ValidationError: If obj is not among self.options; the `reason`
            key indicates why validation failed.
    """

    if obj in self.options:
        return obj

    raise ValidationError("Object is not in list of enumerated options",
                          reason='not in list of enumerated options', object=obj, options=self.options)
"Verify that the object conforms to this verifier's schema.\n\n Args:\n obj (object): A python object to verify\n\n Raises:\n ValidationError: If there is a problem verifying the object, a\n ValidationError is thrown with at least the reason key set indicating\n the reason for the lack of validation.\n "
] |
Please provide a description of the function:def one_line_desc(obj):
logger = logging.getLogger(__name__)
try:
doc = ParsedDocstring(obj.__doc__)
return doc.short_desc
except: # pylint:disable=bare-except; We don't want a misbehaving exception to break the program
logger.warning("Could not parse docstring for %s", obj, exc_info=True)
return "" | [
"Get a one line description of a class."
] |
Please provide a description of the function:def main(argv=None, loop=SharedLoop):
if argv is None:
argv = sys.argv[1:]
list_parser = argparse.ArgumentParser(add_help=False)
list_parser.add_argument('-l', '--list', action='store_true', help="List all known installed interfaces and devices and then exit")
list_parser.add_argument('-v', '--verbose', action="count", default=0, help="Increase logging level (goes error, warn, info, debug)")
parser = argparse.ArgumentParser(description="Serve acess to a virtual IOTile device using a virtual IOTile interface")
parser.add_argument('interface', help="The name of the virtual device interface to use")
parser.add_argument('device', help="The name of the virtual device to create")
parser.add_argument('-c', '--config', help="An optional JSON config file with arguments for the interface and device")
parser.add_argument('-l', '--list', action='store_true', help="List all known installed interfaces and devices and then exit")
parser.add_argument('-n', '--scenario', help="Load a test scenario from the given file")
parser.add_argument('-s', '--state', help="Load a given state into the device before starting to serve it. Only works with emulated devices.")
parser.add_argument('-d', '--dump', help="Dump the device's state when we exit the program. Only works with emulated devices.")
parser.add_argument('-t', '--track', help="Track all changes to the device's state. Only works with emulated devices.")
parser.add_argument('-v', '--verbose', action="count", default=0, help="Increase logging level (goes error, warn, info, debug)")
args, _rest = list_parser.parse_known_args(argv)
if args.list:
configure_logging(args.verbose)
reg = ComponentRegistry()
print("Installed Device Servers:")
for name, _iface in reg.load_extensions('iotile.device_server', class_filter=AbstractDeviceServer):
print('- {}'.format(name))
print("\nInstalled Virtual Devices:")
for name, dev in reg.load_extensions('iotile.virtual_device', class_filter=VirtualIOTileDevice,
product_name="virtual_device"):
print('- {}: {}'.format(name, one_line_desc(dev)))
return 0
args = parser.parse_args(argv)
configure_logging(args.verbose)
config = {}
if args.config is not None:
with open(args.config, "r") as conf_file:
config = json.load(conf_file)
started = False
device = None
stop_immediately = args.interface == 'null'
try:
server = instantiate_interface(args.interface, config, loop)
device = instantiate_device(args.device, config, loop)
if args.state is not None:
print("Loading device state from file %s" % args.state)
device.load_state(args.state)
if args.scenario is not None:
print("Loading scenario from file %s" % args.scenario)
with open(args.scenario, "r") as infile:
scenario = json.load(infile)
# load_metascenario expects a list of scenarios even when there is only one
if isinstance(scenario, dict):
scenario = [scenario]
device.load_metascenario(scenario)
if args.track is not None:
print("Tracking all state changes to device")
device.state_history.enable()
adapter = VirtualDeviceAdapter(devices=[device], loop=loop)
server.adapter = adapter
loop.run_coroutine(adapter.start())
try:
loop.run_coroutine(server.start())
except:
loop.run_coroutine(adapter.stop())
adapter = None
raise
started = True
print("Starting to serve virtual IOTile device")
if stop_immediately:
return 0
# We need to periodically process events that are queued up in the interface
while True:
time.sleep(0.5)
except KeyboardInterrupt:
print("Break received, cleanly exiting...")
finally:
if args.dump is not None and device is not None:
print("Dumping final device state to %s" % args.dump)
device.save_state(args.dump)
if started:
loop.run_coroutine(server.stop())
loop.run_coroutine(adapter.stop())
if args.track is not None and device is not None:
print("Saving state history to file %s" % args.track)
device.state_history.dump(args.track)
return 0 | [
"Serve access to a virtual IOTile device using a virtual iotile interface."
] |
Please provide a description of the function:def instantiate_device(virtual_dev, config, loop):
conf = {}
if 'device' in config:
conf = config['device']
# If we're given a path to a script, try to load and use that rather than search for an installed module
try:
reg = ComponentRegistry()
if virtual_dev.endswith('.py'):
_name, dev = reg.load_extension(virtual_dev, class_filter=VirtualIOTileDevice, unique=True)
else:
_name, dev = reg.load_extensions('iotile.virtual_device', name_filter=virtual_dev,
class_filter=VirtualIOTileDevice,
product_name="virtual_device", unique=True)
return dev(conf)
except ArgumentError as err:
print("ERROR: Could not load virtual device (%s): %s" % (virtual_dev, err.msg))
sys.exit(1) | [
"Find a virtual device by name and instantiate it\n\n Args:\n virtual_dev (string): The name of the pkg_resources entry point corresponding to\n the device. It should be in group iotile.virtual_device. If virtual_dev ends\n in .py, it is interpreted as a python script and loaded directly from the script.\n config (dict): A dictionary with a 'device' key with the config info for configuring\n this virtual device. This is optional.\n\n Returns:\n VirtualIOTileDevice: The instantiated subclass of VirtualIOTileDevice\n "
] |
Please provide a description of the function:def instantiate_interface(virtual_iface, config, loop):
# Allow the null virtual interface for testing
if virtual_iface == 'null':
return StandardDeviceServer(None, {}, loop=loop)
conf = {}
if 'interface' in config:
conf = config['interface']
try:
reg = ComponentRegistry()
if virtual_iface.endswith('.py'):
_name, iface = reg.load_extension(virtual_iface, class_filter=AbstractDeviceServer, unique=True)
else:
_name, iface = reg.load_extensions('iotile.device_server', name_filter=virtual_iface,
class_filter=AbstractDeviceServer, unique=True)
return iface(None, conf, loop=loop)
except ArgumentError as err:
print("ERROR: Could not load device_server (%s): %s" % (virtual_iface, err.msg))
sys.exit(1) | [
"Find a virtual interface by name and instantiate it\n\n Args:\n virtual_iface (string): The name of the pkg_resources entry point corresponding to\n the interface. It should be in group iotile.virtual_interface\n config (dict): A dictionary with a 'interface' key with the config info for configuring\n this virtual interface. This is optional.\n\n Returns:\n VirtualInterface: The instantiated subclass of VirtualInterface\n "
] |
Please provide a description of the function:def generate(env):
try:
bld = env['BUILDERS']['Tar']
except KeyError:
bld = TarBuilder
env['BUILDERS']['Tar'] = bld
env['TAR'] = env.Detect(tars) or 'gtar'
env['TARFLAGS'] = SCons.Util.CLVar('-c')
env['TARCOM'] = '$TAR $TARFLAGS -f $TARGET $SOURCES'
env['TARSUFFIX'] = '.tar' | [
"Add Builders and construction variables for tar to an Environment."
] |
Please provide a description of the function:def register_command(self, name, handler, validator):
self._commands[name] = (handler, validator) | [
"Register a coroutine command handler.\n\n This handler will be called whenever a command message is received\n from the client, whose operation key matches ``name``. The handler\n will be called as::\n\n response_payload = await handler(cmd_payload, context)\n\n If the coroutine returns, it will be assumed to have completed\n correctly and its return value will be sent as the result of the\n command. If the coroutine wishes to signal an error handling the\n command, it must raise a ServerCommandError exception that contains a\n string reason code for the error. This will generate an error\n response to the command.\n\n The cmd_payload is first verified using the SchemaVerifier passed in\n ``validator`` and handler is only called if verification succeeds. If\n verification fails, a failure response to the command is returned\n automatically to the client.\n\n Args:\n name (str): The unique command name that will be used to dispatch\n client command messages to this handler.\n handler (coroutine function): A coroutine function that will be\n called whenever this command is received.\n validator (SchemaVerifier): A validator object for checking the\n command payload before calling this handler.\n "
] |
Please provide a description of the function:async def start(self):
if self._server_task is not None:
self._logger.debug("AsyncValidatingWSServer.start() called twice, ignoring")
return
started_signal = self._loop.create_future()
self._server_task = self._loop.add_task(self._run_server_task(started_signal))
await started_signal
if self.port is None:
self.port = started_signal.result() | [
"Start the websocket server.\n\n When this method returns, the websocket server will be running and\n the port property of this class will have its assigned port number.\n\n This method should be called only once in the lifetime of the server\n and must be paired with a call to stop() to cleanly release the\n server's resources.\n "
] |
Please provide a description of the function:async def _run_server_task(self, started_signal):
try:
server = await websockets.serve(self._manage_connection, self.host, self.port)
port = server.sockets[0].getsockname()[1]
started_signal.set_result(port)
except Exception as err:
self._logger.exception("Error starting server on host %s, port %s", self.host, self.port)
started_signal.set_exception(err)
return
try:
while True:
await asyncio.sleep(1)
except asyncio.CancelledError:
self._logger.info("Stopping server due to stop() command")
finally:
server.close()
await server.wait_closed()
self._logger.debug("Server stopped, exiting task") | [
"Create a BackgroundTask to manage the server.\n\n This allows subclasess to attach their server related tasks as\n subtasks that are properly cleaned up when this parent task is\n stopped and not require them all to overload start() and stop()\n to perform this action.\n "
] |
Please provide a description of the function:async def send_event(self, con, name, payload):
message = dict(type="event", name=name, payload=payload)
encoded = pack(message)
await con.send(encoded) | [
"Send an event to a client connection.\n\n This method will push an event message to the client with the given\n name and payload. You need to have access to the the ``connection``\n object for the client, which is only available once the client has\n connected and passed to self.prepare_conn(connection).\n\n Args:\n con (websockets.Connection): The connection to use to send\n the event.\n name (str): The name of the event to send.\n payload (object): The msgpack-serializable object so send\n as the event's payload.\n "
] |
Please provide a description of the function:def encode(self):
header = struct.pack("<LB3x", len(self.record_contents) + UpdateRecord.HEADER_LENGTH, self.record_type)
return bytearray(header) + self.record_contents | [
"Encode this record into binary, suitable for embedded into an update script.\n\n This function just adds the required record header and copies the raw data\n we were passed in verbatim since we don't know what it means\n\n Returns:\n bytearary: The binary version of the record that could be parsed via\n a call to UpdateRecord.FromBinary()\n "
] |
Please provide a description of the function:def DviPdfPsFunction(XXXDviAction, target = None, source= None, env=None):
try:
abspath = source[0].attributes.path
except AttributeError :
abspath = ''
saved_env = SCons.Scanner.LaTeX.modify_env_var(env, 'TEXPICTS', abspath)
result = XXXDviAction(target, source, env)
if saved_env is _null:
try:
del env['ENV']['TEXPICTS']
except KeyError:
pass # was never set
else:
env['ENV']['TEXPICTS'] = saved_env
return result | [
"A builder for DVI files that sets the TEXPICTS environment\n variable before running dvi2ps or dvipdf."
] |
Please provide a description of the function:def DviPdfStrFunction(target = None, source= None, env=None):
if env.GetOption("no_exec"):
result = env.subst('$DVIPDFCOM',0,target,source)
else:
result = ''
return result | [
"A strfunction for dvipdf that returns the appropriate\n command string for the no_exec options."
] |
Please provide a description of the function:def PDFEmitter(target, source, env):
def strip_suffixes(n):
return not SCons.Util.splitext(str(n))[1] in ['.aux', '.log']
source = [src for src in source if strip_suffixes(src)]
return (target, source) | [
"Strips any .aux or .log files from the input source list.\n These are created by the TeX Builder that in all likelihood was\n used to generate the .dvi file we're using as input, and we only\n care about the .dvi file.\n "
] |
Please provide a description of the function:def generate(env):
global PDFAction
if PDFAction is None:
PDFAction = SCons.Action.Action('$DVIPDFCOM', '$DVIPDFCOMSTR')
global DVIPDFAction
if DVIPDFAction is None:
DVIPDFAction = SCons.Action.Action(DviPdfFunction, strfunction = DviPdfStrFunction)
from . import pdf
pdf.generate(env)
bld = env['BUILDERS']['PDF']
bld.add_action('.dvi', DVIPDFAction)
bld.add_emitter('.dvi', PDFEmitter)
env['DVIPDF'] = 'dvipdf'
env['DVIPDFFLAGS'] = SCons.Util.CLVar('')
env['DVIPDFCOM'] = 'cd ${TARGET.dir} && $DVIPDF $DVIPDFFLAGS ${SOURCE.file} ${TARGET.file}'
# Deprecated synonym.
env['PDFCOM'] = ['$DVIPDFCOM'] | [
"Add Builders and construction variables for dvipdf to an Environment."
] |
Please provide a description of the function:def FromString(cls, desc):
parse_exp = Literal(u'run_time').suppress() + time_interval(u'interval')
try:
data = parse_exp.parseString(desc)
return TimeBasedStopCondition(data[u'interval'][0])
except ParseException:
raise ArgumentError(u"Could not parse time based stop condition") | [
"Parse this stop condition from a string representation.\n\n The string needs to match:\n run_time number [seconds|minutes|hours|days|months|years]\n\n Args:\n desc (str): The description\n\n Returns:\n TimeBasedStopCondition\n "
] |
Please provide a description of the function:def generate(env):
SCons.Tool.createStaticLibBuilder(env)
SCons.Tool.createSharedLibBuilder(env)
SCons.Tool.createProgBuilder(env)
env['AR'] = 'mwld'
env['ARCOM'] = '$AR $ARFLAGS -library -o $TARGET $SOURCES'
env['LIBDIRPREFIX'] = '-L'
env['LIBDIRSUFFIX'] = ''
env['LIBLINKPREFIX'] = '-l'
env['LIBLINKSUFFIX'] = '.lib'
env['LINK'] = 'mwld'
env['LINKCOM'] = '$LINK $LINKFLAGS -o $TARGET $SOURCES $_LIBDIRFLAGS $_LIBFLAGS'
env['SHLINK'] = '$LINK'
env['SHLINKFLAGS'] = '$LINKFLAGS'
env['SHLINKCOM'] = shlib_action
env['SHLIBEMITTER']= shlib_emitter
env['LDMODULEEMITTER']= shlib_emitter | [
"Add Builders and construction variables for lib to an Environment."
] |
Please provide a description of the function:def collectintargz(target, source, env):
# the rpm tool depends on a source package, until this is changed
# this hack needs to be here that tries to pack all sources in.
sources = env.FindSourceFiles()
# filter out the target we are building the source list for.
sources = [s for s in sources if s not in target]
# find the .spec file for rpm and add it since it is not necessarily found
# by the FindSourceFiles function.
sources.extend( [s for s in source if str(s).rfind('.spec')!=-1] )
# sort to keep sources from changing order across builds
sources.sort()
# as the source contains the url of the source package this rpm package
# is built from, we extract the target name
tarball = (str(target[0])+".tar.gz").replace('.rpm', '')
try:
tarball = env['SOURCE_URL'].split('/')[-1]
except KeyError as e:
raise SCons.Errors.UserError( "Missing PackageTag '%s' for RPM packager" % e.args[0] )
tarball = src_targz.package(env, source=sources, target=tarball,
PACKAGEROOT=env['PACKAGEROOT'], )
return (target, tarball) | [
" Puts all source files into a tar.gz file. "
] |
Please provide a description of the function:def build_specfile(target, source, env):
file = open(target[0].get_abspath(), 'w')
try:
file.write( build_specfile_header(env) )
file.write( build_specfile_sections(env) )
file.write( build_specfile_filesection(env, source) )
file.close()
# call a user specified function
if 'CHANGE_SPECFILE' in env:
env['CHANGE_SPECFILE'](target, source)
except KeyError as e:
raise SCons.Errors.UserError( '"%s" package field for RPM is missing.' % e.args[0] ) | [
" Builds a RPM specfile from a dictionary with string metadata and\n by analyzing a tree of nodes.\n "
] |
Please provide a description of the function:def build_specfile_sections(spec):
str = ""
mandatory_sections = {
'DESCRIPTION' : '\n%%description\n%s\n\n', }
str = str + SimpleTagCompiler(mandatory_sections).compile( spec )
optional_sections = {
'DESCRIPTION_' : '%%description -l %s\n%s\n\n',
'CHANGELOG' : '%%changelog\n%s\n\n',
'X_RPM_PREINSTALL' : '%%pre\n%s\n\n',
'X_RPM_POSTINSTALL' : '%%post\n%s\n\n',
'X_RPM_PREUNINSTALL' : '%%preun\n%s\n\n',
'X_RPM_POSTUNINSTALL' : '%%postun\n%s\n\n',
'X_RPM_VERIFY' : '%%verify\n%s\n\n',
# These are for internal use but could possibly be overridden
'X_RPM_PREP' : '%%prep\n%s\n\n',
'X_RPM_BUILD' : '%%build\n%s\n\n',
'X_RPM_INSTALL' : '%%install\n%s\n\n',
'X_RPM_CLEAN' : '%%clean\n%s\n\n',
}
# Default prep, build, install and clean rules
# TODO: optimize those build steps, to not compile the project a second time
if 'X_RPM_PREP' not in spec:
spec['X_RPM_PREP'] = '[ -n "$RPM_BUILD_ROOT" -a "$RPM_BUILD_ROOT" != / ] && rm -rf "$RPM_BUILD_ROOT"' + '\n%setup -q'
if 'X_RPM_BUILD' not in spec:
spec['X_RPM_BUILD'] = '[ ! -e "$RPM_BUILD_ROOT" -a "$RPM_BUILD_ROOT" != / ] && mkdir "$RPM_BUILD_ROOT"'
if 'X_RPM_INSTALL' not in spec:
spec['X_RPM_INSTALL'] = 'scons --install-sandbox="$RPM_BUILD_ROOT" "$RPM_BUILD_ROOT"'
if 'X_RPM_CLEAN' not in spec:
spec['X_RPM_CLEAN'] = '[ -n "$RPM_BUILD_ROOT" -a "$RPM_BUILD_ROOT" != / ] && rm -rf "$RPM_BUILD_ROOT"'
str = str + SimpleTagCompiler(optional_sections, mandatory=0).compile( spec )
return str | [
" Builds the sections of a rpm specfile.\n "
] |
Please provide a description of the function:def build_specfile_header(spec):
str = ""
# first the mandatory sections
mandatory_header_fields = {
'NAME' : '%%define name %s\nName: %%{name}\n',
'VERSION' : '%%define version %s\nVersion: %%{version}\n',
'PACKAGEVERSION' : '%%define release %s\nRelease: %%{release}\n',
'X_RPM_GROUP' : 'Group: %s\n',
'SUMMARY' : 'Summary: %s\n',
'LICENSE' : 'License: %s\n', }
str = str + SimpleTagCompiler(mandatory_header_fields).compile( spec )
# now the optional tags
optional_header_fields = {
'VENDOR' : 'Vendor: %s\n',
'X_RPM_URL' : 'Url: %s\n',
'SOURCE_URL' : 'Source: %s\n',
'SUMMARY_' : 'Summary(%s): %s\n',
'X_RPM_DISTRIBUTION' : 'Distribution: %s\n',
'X_RPM_ICON' : 'Icon: %s\n',
'X_RPM_PACKAGER' : 'Packager: %s\n',
'X_RPM_GROUP_' : 'Group(%s): %s\n',
'X_RPM_REQUIRES' : 'Requires: %s\n',
'X_RPM_PROVIDES' : 'Provides: %s\n',
'X_RPM_CONFLICTS' : 'Conflicts: %s\n',
'X_RPM_BUILDREQUIRES' : 'BuildRequires: %s\n',
'X_RPM_SERIAL' : 'Serial: %s\n',
'X_RPM_EPOCH' : 'Epoch: %s\n',
'X_RPM_AUTOREQPROV' : 'AutoReqProv: %s\n',
'X_RPM_EXCLUDEARCH' : 'ExcludeArch: %s\n',
'X_RPM_EXCLUSIVEARCH' : 'ExclusiveArch: %s\n',
'X_RPM_PREFIX' : 'Prefix: %s\n',
# internal use
'X_RPM_BUILDROOT' : 'BuildRoot: %s\n', }
# fill in default values:
# Adding a BuildRequires renders the .rpm unbuildable under System, which
# are not managed by rpm, since the database to resolve this dependency is
# missing (take Gentoo as an example)
# if not s.has_key('x_rpm_BuildRequires'):
# s['x_rpm_BuildRequires'] = 'scons'
if 'X_RPM_BUILDROOT' not in spec:
spec['X_RPM_BUILDROOT'] = '%{_tmppath}/%{name}-%{version}-%{release}'
str = str + SimpleTagCompiler(optional_header_fields, mandatory=0).compile( spec )
return str | [
" Builds all sections but the %file of a rpm specfile\n "
] |
Please provide a description of the function:def build_specfile_filesection(spec, files):
str = '%files\n'
if 'X_RPM_DEFATTR' not in spec:
spec['X_RPM_DEFATTR'] = '(-,root,root)'
str = str + '%%defattr %s\n' % spec['X_RPM_DEFATTR']
supported_tags = {
'PACKAGING_CONFIG' : '%%config %s',
'PACKAGING_CONFIG_NOREPLACE' : '%%config(noreplace) %s',
'PACKAGING_DOC' : '%%doc %s',
'PACKAGING_UNIX_ATTR' : '%%attr %s',
'PACKAGING_LANG_' : '%%lang(%s) %s',
'PACKAGING_X_RPM_VERIFY' : '%%verify %s',
'PACKAGING_X_RPM_DIR' : '%%dir %s',
'PACKAGING_X_RPM_DOCDIR' : '%%docdir %s',
'PACKAGING_X_RPM_GHOST' : '%%ghost %s', }
for file in files:
# build the tagset
tags = {}
for k in list(supported_tags.keys()):
try:
v = file.GetTag(k)
if v:
tags[k] = v
except AttributeError:
pass
# compile the tagset
str = str + SimpleTagCompiler(supported_tags, mandatory=0).compile( tags )
str = str + ' '
str = str + file.GetTag('PACKAGING_INSTALL_LOCATION')
str = str + '\n\n'
return str | [
" builds the %file section of the specfile\n "
] |
Please provide a description of the function:def compile(self, values):
def is_international(tag):
return tag.endswith('_')
def get_country_code(tag):
return tag[-2:]
def strip_country_code(tag):
return tag[:-2]
replacements = list(self.tagset.items())
str = ""
domestic = [t for t in replacements if not is_international(t[0])]
for key, replacement in domestic:
try:
str = str + replacement % values[key]
except KeyError as e:
if self.mandatory:
raise e
international = [t for t in replacements if is_international(t[0])]
for key, replacement in international:
try:
x = [t for t in values.items() if strip_country_code(t[0]) == key]
int_values_for_key = [(get_country_code(t[0]),t[1]) for t in x]
for v in int_values_for_key:
str = str + replacement % v
except KeyError as e:
if self.mandatory:
raise e
return str | [
" Compiles the tagset and returns a str containing the result\n "
] |
Please provide a description of the function:def generate(env):
fscan = FortranScan("FORTRANPATH")
SCons.Tool.SourceFileScanner.add_scanner('.i', fscan)
SCons.Tool.SourceFileScanner.add_scanner('.i90', fscan)
if 'FORTRANFILESUFFIXES' not in env:
env['FORTRANFILESUFFIXES'] = ['.i']
else:
env['FORTRANFILESUFFIXES'].append('.i')
if 'F90FILESUFFIXES' not in env:
env['F90FILESUFFIXES'] = ['.i90']
else:
env['F90FILESUFFIXES'].append('.i90')
add_all_to_env(env)
env['FORTRAN'] = 'ifl'
env['SHFORTRAN'] = '$FORTRAN'
env['FORTRANCOM'] = '$FORTRAN $FORTRANFLAGS $_FORTRANINCFLAGS /c $SOURCES /Fo$TARGET'
env['FORTRANPPCOM'] = '$FORTRAN $FORTRANFLAGS $CPPFLAGS $_CPPDEFFLAGS $_FORTRANINCFLAGS /c $SOURCES /Fo$TARGET'
env['SHFORTRANCOM'] = '$SHFORTRAN $SHFORTRANFLAGS $_FORTRANINCFLAGS /c $SOURCES /Fo$TARGET'
env['SHFORTRANPPCOM'] = '$SHFORTRAN $SHFORTRANFLAGS $CPPFLAGS $_CPPDEFFLAGS $_FORTRANINCFLAGS /c $SOURCES /Fo$TARGET' | [
"Add Builders and construction variables for ifl to an Environment."
] |
Please provide a description of the function:def generate(env):
findIt('bcc32', env)
static_obj, shared_obj = SCons.Tool.createObjBuilders(env)
for suffix in ['.c', '.cpp']:
static_obj.add_action(suffix, SCons.Defaults.CAction)
shared_obj.add_action(suffix, SCons.Defaults.ShCAction)
static_obj.add_emitter(suffix, SCons.Defaults.StaticObjectEmitter)
shared_obj.add_emitter(suffix, SCons.Defaults.SharedObjectEmitter)
env['CC'] = 'bcc32'
env['CCFLAGS'] = SCons.Util.CLVar('')
env['CFLAGS'] = SCons.Util.CLVar('')
env['CCCOM'] = '$CC -q $CFLAGS $CCFLAGS $CPPFLAGS $_CPPDEFFLAGS $_CPPINCFLAGS -c -o$TARGET $SOURCES'
env['SHCC'] = '$CC'
env['SHCCFLAGS'] = SCons.Util.CLVar('$CCFLAGS')
env['SHCFLAGS'] = SCons.Util.CLVar('$CFLAGS')
env['SHCCCOM'] = '$SHCC -WD $SHCFLAGS $SHCCFLAGS $CPPFLAGS $_CPPDEFFLAGS $_CPPINCFLAGS -c -o$TARGET $SOURCES'
env['CPPDEFPREFIX'] = '-D'
env['CPPDEFSUFFIX'] = ''
env['INCPREFIX'] = '-I'
env['INCSUFFIX'] = ''
env['SHOBJSUFFIX'] = '.dll'
env['STATIC_AND_SHARED_OBJECTS_ARE_THE_SAME'] = 0
env['CFILESUFFIX'] = '.cpp' | [
"Add Builders and construction variables for bcc to an\n Environment."
] |
Please provide a description of the function:def require(builder_name):
reg = ComponentRegistry()
for _name, autobuild_func in reg.load_extensions('iotile.autobuild', name_filter=builder_name):
return autobuild_func
raise BuildError('Cannot find required autobuilder, make sure the distribution providing it is installed',
name=builder_name) | [
"Find an advertised autobuilder and return it\n\n This function searches through all installed distributions to find\n if any advertise an entry point with group 'iotile.autobuild' and\n name equal to builder_name. The first one that is found is returned.\n\n This function raises a BuildError if it cannot find the required\n autobuild function\n\n Args:\n builder_name (string): The name of the builder to find\n\n Returns:\n callable: the autobuilder function found in the search\n "
] |
Please provide a description of the function:def autobuild_onlycopy():
try:
# Build only release information
family = utilities.get_family('module_settings.json')
autobuild_release(family)
Alias('release', os.path.join('build', 'output'))
Default(['release'])
except unit_test.IOTileException as e:
print(e.format())
Exit(1) | [
"Autobuild a project that does not require building firmware, pcb or documentation\n "
] |
Please provide a description of the function:def autobuild_docproject():
try:
#Build only release information
family = utilities.get_family('module_settings.json')
autobuild_release(family)
autobuild_documentation(family.tile)
except unit_test.IOTileException as e:
print(e.format())
Exit(1) | [
"Autobuild a project that only contains documentation"
] |
Please provide a description of the function:def autobuild_release(family=None):
if family is None:
family = utilities.get_family('module_settings.json')
env = Environment(tools=[])
env['TILE'] = family.tile
target = env.Command(['#build/output/module_settings.json'], ['#module_settings.json'],
action=env.Action(create_release_settings_action, "Creating release manifest"))
env.AlwaysBuild(target)
# Copy over release notes if they exist
if os.path.exists('RELEASE.md'):
env.Command(['build/output/RELEASE.md'], ['RELEASE.md'], Copy("$TARGET", "$SOURCE"))
# Now copy across the build products that are not copied automatically
copy_include_dirs(family.tile)
copy_tilebus_definitions(family.tile)
copy_dependency_docs(family.tile)
copy_linker_scripts(family.tile)
# Allow users to specify a hide_dependency_images flag that does not copy over all firmware images
if not family.tile.settings.get('hide_dependency_images', False):
copy_dependency_images(family.tile)
copy_extra_files(family.tile)
build_python_distribution(family.tile) | [
"Copy necessary files into build/output so that this component can be used by others\n\n Args:\n family (ArchitectureGroup): The architecture group that we are targeting. If not\n provided, it is assumed that we are building in the current directory and the\n module_settings.json file is read to create an ArchitectureGroup\n "
] |
Please provide a description of the function:def autobuild_arm_program(elfname, test_dir=os.path.join('firmware', 'test'), patch=True):
try:
#Build for all targets
family = utilities.get_family('module_settings.json')
family.for_all_targets(family.tile.short_name, lambda x: arm.build_program(family.tile, elfname, x, patch=patch))
#Build all unit tests
unit_test.build_units(os.path.join('firmware','test'), family.targets(family.tile.short_name))
Alias('release', os.path.join('build', 'output'))
Alias('test', os.path.join('build', 'test', 'output'))
Default(['release', 'test'])
autobuild_release(family)
if os.path.exists('doc'):
autobuild_documentation(family.tile)
except IOTileException as e:
print(e.format())
sys.exit(1) | [
"\n Build the an ARM module for all targets and build all unit tests. If pcb files are given, also build those.\n "
] |
Please provide a description of the function:def autobuild_doxygen(tile):
iotile = IOTile('.')
doxydir = os.path.join('build', 'doc')
doxyfile = os.path.join(doxydir, 'doxygen.txt')
outfile = os.path.join(doxydir, '%s.timestamp' % tile.unique_id)
env = Environment(ENV=os.environ, tools=[])
env['IOTILE'] = iotile
# There is no /dev/null on Windows
if platform.system() == 'Windows':
action = 'doxygen %s > NUL' % doxyfile
else:
action = 'doxygen %s > /dev/null' % doxyfile
Alias('doxygen', doxydir)
env.Clean(outfile, doxydir)
inputfile = doxygen_source_path()
env.Command(doxyfile, inputfile, action=env.Action(lambda target, source, env: generate_doxygen_file(str(target[0]), iotile), "Creating Doxygen Config File"))
env.Command(outfile, doxyfile, action=env.Action(action, "Building Firmware Documentation")) | [
"Generate documentation for firmware in this module using doxygen"
] |
Please provide a description of the function:def autobuild_documentation(tile):
docdir = os.path.join('#doc')
docfile = os.path.join(docdir, 'conf.py')
outdir = os.path.join('build', 'output', 'doc', tile.unique_id)
outfile = os.path.join(outdir, '%s.timestamp' % tile.unique_id)
env = Environment(ENV=os.environ, tools=[])
# Only build doxygen documentation if we have C firmware to build from
if os.path.exists('firmware'):
autobuild_doxygen(tile)
env.Depends(outfile, 'doxygen')
# There is no /dev/null on Windows
# Also disable color output on Windows since it seems to leave powershell
# in a weird state.
if platform.system() == 'Windows':
action = 'sphinx-build --no-color -b html %s %s > NUL' % (docdir[1:], outdir)
else:
action = 'sphinx-build -b html %s %s > /dev/null' % (docdir[1:], outdir)
env.Command(outfile, docfile, action=env.Action(action, "Building Component Documentation"))
Alias('documentation', outdir)
env.Clean(outfile, outdir) | [
"Generate documentation for this module using a combination of sphinx and breathe"
] |
Please provide a description of the function:def autobuild_trub_script(file_name, slot_assignments=None, os_info=None, sensor_graph=None,
app_info=None, use_safeupdate=False):
build_update_script(file_name, slot_assignments, os_info, sensor_graph, app_info, use_safeupdate) | [
"Build a trub script that loads given firmware into the given slots.\n\n slot_assignments should be a list of tuples in the following form:\n (\"slot X\" or \"controller\", firmware_image_name)\n\n The output of this autobuild action will be a trub script in\n build/output/<file_name> that assigns the given firmware to each slot in\n the order specified in the slot_assignments list.\n\n Args:\n file_name (str): The name of the output file that we should create.\n This file name should end in .trub\n slot_assignments (list of (str, str)): A list of tuples containing\n the slot name and the firmware image that we should use to build\n our update script. Optional\n os_info (tuple(int, str)): A tuple of OS version tag and X.Y version\n number that will be set as part of the OTA script if included. Optional.\n sensor_graph (str): Name of sgf file. Optional.\n app_info (tuple(int, str)): A tuple of App version tag and X.Y version\n number that will be set as part of the OTA script if included. Optional.\n use_safeupdate (bool): If True, Enables safemode before the firmware update records, then\n disables them after the firmware update records.\n "
] |
Please provide a description of the function:def autobuild_bootstrap_file(file_name, image_list):
family = utilities.get_family('module_settings.json')
target = family.platform_independent_target()
resolver = ProductResolver.Create()
env = Environment(tools=[])
output_dir = target.build_dirs()['output']
build_dir = target.build_dirs()['build']
build_output_name = os.path.join(build_dir, file_name)
full_output_name = os.path.join(output_dir, file_name)
processed_input_images = []
for image_name in image_list:
image_info = resolver.find_unique('firmware_image', image_name)
image_path = image_info.full_path
hex_path = arm.ensure_image_is_hex(image_path)
processed_input_images.append(hex_path)
env.Command(build_output_name, processed_input_images,
action=Action(arm.merge_hex_executables,
"Merging %d hex files into $TARGET" % len(processed_input_images)))
env.Command(full_output_name, build_output_name, Copy("$TARGET", "$SOURCE")) | [
"Combine multiple firmware images into a single bootstrap hex file.\n\n The files listed in image_list must be products of either this tile or any\n dependency tile and should correspond exactly with the base name listed on\n the products section of the module_settings.json file of the corresponding\n tile. They must be listed as firmware_image type products.\n\n This function keeps a global map of all of the intermediate files that it\n has had to create so that we don't try to build them multiple times.\n\n Args:\n file_name(str): Full name of the output bootstrap hex file.\n image_list(list of str): List of files that will be combined into a\n single hex file that will be used to flash a chip.\n "
] |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.