Code | Summary
---|---
Please provide a description of the function:def run(self, refresh_interval=0.05):
    try:
        from asciimatics.screen import Screen
    except ImportError:
        raise ExternalError("You must have asciimatics installed to use LinebufferUI",
                            suggestion="pip install iotilecore[ui]")

    Screen.wrapper(self._run_loop, arguments=[refresh_interval]) | [
"Set up the loop, check that the tool is installed"
] |
Please provide a description of the function:def is_host_target_supported(host_target, msvc_version):
    # We assume that any Visual Studio version supports x86 as a target
    if host_target[1] != "x86":
        maj, min = msvc_version_to_maj_min(msvc_version)
        if maj < 8:
            return False

    return True | [
"Return True if the given (host, target) tuple is supported given the\n msvc version.\n\n Parameters\n ----------\n host_target: tuple\n tuple of (canonalized) host-target, e.g. (\"x86\", \"amd64\") for cross\n compilation from 32 bits windows to 64 bits.\n msvc_version: str\n msvc version (major.minor, e.g. 10.0)\n\n Note\n ----\n This only check whether a given version *may* support the given (host,\n target), not that the toolchain is actually present on the machine.\n "
] |
Please provide a description of the function:def find_vc_pdir_vswhere(msvc_version):
    vswhere_path = os.path.join(
        'C:\\',
        'Program Files (x86)',
        'Microsoft Visual Studio',
        'Installer',
        'vswhere.exe'
    )
    vswhere_cmd = [vswhere_path, '-version', msvc_version, '-property', 'installationPath']

    if os.path.exists(vswhere_path):
        sp = subprocess.Popen(vswhere_cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
        vsdir, err = sp.communicate()
        vsdir = vsdir.decode("mbcs")
        vsdir = vsdir.rstrip()
        vc_pdir = os.path.join(vsdir, 'VC')
        return vc_pdir
    else:
        # No vswhere on system, no install info available
        return None | [
"\n Find the MSVC product directory using vswhere.exe .\n Run it asking for specified version and get MSVS install location\n :param msvc_version:\n :return: MSVC install dir\n "
] |
Please provide a description of the function:def find_vc_pdir(msvc_version):
    root = 'Software\\'
    try:
        hkeys = _VCVER_TO_PRODUCT_DIR[msvc_version]
    except KeyError:
        debug("Unknown version of MSVC: %s" % msvc_version)
        raise UnsupportedVersion("Unknown version %s" % msvc_version)

    for hkroot, key in hkeys:
        try:
            comps = None
            if not key:
                comps = find_vc_pdir_vswhere(msvc_version)
                if not comps:
                    debug('find_vc_dir(): no VC found via vswhere for version {}'.format(repr(key)))
                    raise SCons.Util.WinError
            else:
                if common.is_win64():
                    try:
                        # Ordinarily on win64, try the Wow6432Node registry key first.
                        comps = common.read_reg(root + 'Wow6432Node\\' + key, hkroot)
                    except SCons.Util.WinError as e:
                        # For Microsoft Visual Studio for Python 2.7, the value is not in Wow6432Node
                        pass
                if not comps:
                    # Not Win64, or Microsoft Visual Studio for Python 2.7
                    comps = common.read_reg(root + key, hkroot)
        except SCons.Util.WinError as e:
            debug('find_vc_dir(): no VC registry key {}'.format(repr(key)))
        else:
            debug('find_vc_dir(): found VC in registry: {}'.format(comps))
            if os.path.exists(comps):
                return comps
            else:
                debug('find_vc_dir(): reg says dir is {}, but it does not exist. (ignoring)'.format(comps))
                raise MissingConfiguration("registry dir {} not found on the filesystem".format(comps))
    return None | [
"Try to find the product directory for the given\n version.\n\n Note\n ----\n If for some reason the requested version could not be found, an\n exception which inherits from VisualCException will be raised."
] |
Please provide a description of the function:def find_batch_file(env, msvc_version, host_arch, target_arch):
    pdir = find_vc_pdir(msvc_version)
    if pdir is None:
        raise NoVersionFound("No version of Visual Studio found")

    debug('vc.py: find_batch_file() pdir:{}'.format(pdir))

    # Filter out e.g. "Exp" from the version name
    msvc_ver_numeric = ''.join([x for x in msvc_version if x in string_digits + "."])
    vernum = float(msvc_ver_numeric)
    if 7 <= vernum < 8:
        pdir = os.path.join(pdir, os.pardir, "Common7", "Tools")
        batfilename = os.path.join(pdir, "vsvars32.bat")
    elif vernum < 7:
        pdir = os.path.join(pdir, "Bin")
        batfilename = os.path.join(pdir, "vcvars32.bat")
    elif 8 <= vernum <= 14:
        batfilename = os.path.join(pdir, "vcvarsall.bat")
    else:  # vernum >= 14.1, VS2017 and above
        batfilename = os.path.join(pdir, "Auxiliary", "Build", "vcvarsall.bat")

    if not os.path.exists(batfilename):
        debug("Not found: %s" % batfilename)
        batfilename = None

    installed_sdks = get_installed_sdks()
    for _sdk in installed_sdks:
        sdk_bat_file = _sdk.get_sdk_vc_script(host_arch, target_arch)
        if not sdk_bat_file:
            debug("vc.py:find_batch_file() not found:%s" % _sdk)
        else:
            sdk_bat_file_path = os.path.join(pdir, sdk_bat_file)
            if os.path.exists(sdk_bat_file_path):
                debug('vc.py:find_batch_file() sdk_bat_file_path:%s' % sdk_bat_file_path)
                return (batfilename, sdk_bat_file_path)
    return (batfilename, None) | [
"\n Find the location of the batch script which should set up the compiler\n for any TARGET_ARCH whose compilers were installed by Visual Studio/VCExpress\n "
] |
Please provide a description of the function:def compile_sgf(in_path, optimize=True, model=None):
    if model is None:
        model = DeviceModel()

    parser = SensorGraphFileParser()
    parser.parse_file(in_path)
    parser.compile(model)

    if optimize:
        opt = SensorGraphOptimizer()
        opt.optimize(parser.sensor_graph, model=model)

    return parser.sensor_graph | [
"Compile and optionally optimize an SGF file.\n\n Args:\n in_path (str): The input path to the sgf file to compile.\n optimize (bool): Whether to optimize the compiled result,\n defaults to True if not passed.\n model (DeviceModel): Optional device model if we are\n compiling for a nonstandard device. Normally you should\n leave this blank.\n\n Returns:\n SensorGraph: The compiled sensorgraph object\n "
] |
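A minimal usage sketch, assuming this function is importable from the compiled package (the file name app.sgf is invented):

    sg = compile_sgf('app.sgf')                        # compile and optimize
    sg_raw = compile_sgf('app.sgf', optimize=False)    # skip the optimizer pass
    print(len(sg.nodes), "nodes after optimization")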
Please provide a description of the function:def generate(env):
    add_all_to_env(env)
    add_f77_to_env(env)

    fcomp = env.Detect(compilers) or 'g77'
    if env['PLATFORM'] in ['cygwin', 'win32']:
        env['SHFORTRANFLAGS'] = SCons.Util.CLVar('$FORTRANFLAGS')
        env['SHF77FLAGS'] = SCons.Util.CLVar('$F77FLAGS')
    else:
        env['SHFORTRANFLAGS'] = SCons.Util.CLVar('$FORTRANFLAGS -fPIC')
        env['SHF77FLAGS'] = SCons.Util.CLVar('$F77FLAGS -fPIC')

    env['FORTRAN'] = fcomp
    env['SHFORTRAN'] = '$FORTRAN'

    env['F77'] = fcomp
    env['SHF77'] = '$F77'

    env['INCFORTRANPREFIX'] = "-I"
    env['INCFORTRANSUFFIX'] = ""

    env['INCF77PREFIX'] = "-I"
    env['INCF77SUFFIX'] = "" | [
"Add Builders and construction variables for g77 to an Environment."
] |
Please provide a description of the function:def execute(self, sensor_graph, scope_stack):
    parent = scope_stack[-1]
    alloc = parent.allocator

    # The output is unused
    output = alloc.allocate_stream(DataStream.UnbufferedType, attach=True)
    trigger_stream, trigger_cond = parent.trigger_chain()
    streamer_const = alloc.allocate_stream(DataStream.ConstantType, attach=True)

    sensor_graph.add_node(u"({} {} && {} always) => {} using trigger_streamer".format(trigger_stream, trigger_cond, streamer_const, output))
    sensor_graph.add_constant(streamer_const, self.index) | [
"Execute this statement on the sensor_graph given the current scope tree.\n\n This adds a single node to the sensor graph with the trigger_streamer function\n as is processing function.\n\n Args:\n sensor_graph (SensorGraph): The sensor graph that we are building or\n modifying\n scope_stack (list(Scope)): A stack of nested scopes that may influence\n how this statement allocates clocks or other stream resources.\n "
] |
Please provide a description of the function:def get_language():
    global sensor_graph, statement

    if sensor_graph is not None:
        return sensor_graph

    _create_primitives()
    _create_simple_statements()
    _create_block_bnf()

    sensor_graph = ZeroOrMore(statement) + StringEnd()
    sensor_graph.ignore(comment)

    return sensor_graph | [
"Create or retrieve the parse tree for defining a sensor graph."
] |
Please provide a description of the function:def _create_mo_file_builder(env, **kw):
    import SCons.Action
    # FIXME: Which factory should be used for source? Ours or theirs?
    kw['action'] = SCons.Action.Action('$MSGFMTCOM', '$MSGFMTCOMSTR')
    kw['suffix'] = '$MOSUFFIX'
    kw['src_suffix'] = '$POSUFFIX'
    kw['src_builder'] = '_POUpdateBuilder'
    kw['single_source'] = True
    return _MOFileBuilder(**kw) | [
" Create builder object for `MOFiles` builder "
] |
Please provide a description of the function:def generate(env, **kw):
    import SCons.Util
    from SCons.Tool.GettextCommon import _detect_msgfmt
    try:
        env['MSGFMT'] = _detect_msgfmt(env)
    except:
        env['MSGFMT'] = 'msgfmt'
    env.SetDefault(
        MSGFMTFLAGS=[SCons.Util.CLVar('-c')],
        MSGFMTCOM='$MSGFMT $MSGFMTFLAGS -o $TARGET $SOURCE',
        MSGFMTCOMSTR='',
        MOSUFFIX=['.mo'],
        POSUFFIX=['.po']
    )
    env.Append(BUILDERS={'MOFiles': _create_mo_file_builder(env)}) | [
" Generate `msgfmt` tool "
] |
Please provide a description of the function:def RCScan():
    # Raw-string prefixes added to the continuation lines so the \s and \w
    # escapes are not interpreted as (invalid) string escapes.
    res_re = r'^(?:\s*#\s*(?:include)|' \
             r'.*?\s+(?:ICON|BITMAP|CURSOR|HTML|FONT|MESSAGETABLE|TYPELIB|REGISTRY|D3DFX)' \
             r'\s*.*?)' \
             r'\s*(<|"| )([^>"\s]+)(?:[>"\s])*$'
    resScanner = SCons.Scanner.ClassicCPP("ResourceScanner",
                                          "$RCSUFFIXES",
                                          "CPPPATH",
                                          res_re,
                                          recursive=no_tlb)
    return resScanner | [
"Return a prototype Scanner instance for scanning RC source files"
] |
Please provide a description of the function:def _read_linguas_from_files(env, linguas_files=None):
    import SCons.Util
    import SCons.Environment
    global _re_comment
    global _re_lang
    if not SCons.Util.is_List(linguas_files) \
            and not SCons.Util.is_String(linguas_files) \
            and not isinstance(linguas_files, SCons.Node.FS.Base) \
            and linguas_files:
        # If linguas_files == True or similar, then read the 'LINGUAS' file.
        linguas_files = ['LINGUAS']
    if linguas_files is None:
        return []
    fnodes = env.arg2nodes(linguas_files)
    linguas = []
    for fnode in fnodes:
        contents = _re_comment.sub("", fnode.get_text_contents())
        ls = [l for l in _re_lang.findall(contents) if l]
        linguas.extend(ls)
    return linguas | [
" Parse `LINGUAS` file and return list of extracted languages "
] |
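For context, the LINGUAS file parsed above is just a whitespace-separated list of language codes with # comments; an invented example:

    # Languages shipped with this package
    en pl
    de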
Please provide a description of the function:def _translate(env, target=None, source=SCons.Environment._null, *args, **kw):
    if target is None:
        target = []
    pot = env.POTUpdate(None, source, *args, **kw)
    po = env.POUpdate(target, pot, *args, **kw)
    return po | [
" Function for `Translate()` pseudo-builder "
] |
Please provide a description of the function:def _init_po_files(target, source, env):
    nop = lambda target, source, env: 0
    if 'POAUTOINIT' in env:
        autoinit = env['POAUTOINIT']
    else:
        autoinit = False
    # Well, if everything outside works well, this loop should do a single
    # iteration. Otherwise we are rebuilding all the targets even if just
    # one has changed (but is this our fault?).
    for tgt in target:
        if not tgt.exists():
            if autoinit:
                action = SCons.Action.Action('$MSGINITCOM', '$MSGINITCOMSTR')
            else:
                msg = 'File ' + repr(str(tgt)) + ' does not exist. ' \
                      + 'If you are a translator, you can create it through: \n' \
                      + '$MSGINITCOM'
                action = SCons.Action.Action(nop, msg)
            status = action([tgt], source, env)
            if status:
                return status
    return 0 | [
" Action function for `POInit` builder. "
] |
Please provide a description of the function:def _detect_xgettext(env):
    if 'XGETTEXT' in env:
        return env['XGETTEXT']
    xgettext = env.Detect('xgettext')
    if xgettext:
        return xgettext
    raise SCons.Errors.StopError(XgettextNotFound, "Could not detect xgettext") | [
" Detects *xgettext(1)* binary "
] |
Please provide a description of the function:def _detect_msginit(env):
    if 'MSGINIT' in env:
        return env['MSGINIT']
    msginit = env.Detect('msginit')
    if msginit:
        return msginit
    raise SCons.Errors.StopError(MsginitNotFound, "Could not detect msginit") | [
" Detects *msginit(1)* program. "
] |
Please provide a description of the function:def _detect_msgmerge(env):
    if 'MSGMERGE' in env:
        return env['MSGMERGE']
    msgmerge = env.Detect('msgmerge')
    if msgmerge:
        return msgmerge
    raise SCons.Errors.StopError(MsgmergeNotFound, "Could not detect msgmerge") | [
" Detects *msgmerge(1)* program. "
] |
Please provide a description of the function:def _detect_msgfmt(env):
    if 'MSGFMT' in env:
        return env['MSGFMT']
    msgfmt = env.Detect('msgfmt')
    if msgfmt:
        return msgfmt
    raise SCons.Errors.StopError(MsgfmtNotFound, "Could not detect msgfmt") | [
" Detects *msgmfmt(1)* program. "
] |
Please provide a description of the function:def _create_node(self, name, factory, directory=None, create=1):
    import SCons.Util
    node = factory(name, directory, create)
    node.set_noclean(self.noclean)
    node.set_precious(self.precious)
    if self.nodefault:
        self.env.Ignore('.', node)
    if self.alias:
        self.env.AlwaysBuild(self.env.Alias(self.alias, node))
    return node | [
" Create node, and set it up to factory settings. "
] |
Please provide a description of the function:def Entry(self, name, directory=None, create=1):
    return self._create_node(name, self.env.fs.Entry, directory, create) | [
" Create `SCons.Node.FS.Entry` "
] |
Please provide a description of the function:def File(self, name, directory=None, create=1):
    return self._create_node(name, self.env.fs.File, directory, create) | [
" Create `SCons.Node.FS.File` "
] |
Please provide a description of the function:def _execute(self, env, target, source, *args, **kw):
    import SCons.Util
    import SCons.Node
    linguas_files = None
    if 'LINGUAS_FILE' in env and env['LINGUAS_FILE']:
        linguas_files = env['LINGUAS_FILE']
        # This prevents an endless recursion loop (we'll be invoked once for
        # each target appended here, so we must not extend the list again).
        env['LINGUAS_FILE'] = None
        linguas = _read_linguas_from_files(env, linguas_files)
        if SCons.Util.is_List(target):
            target.extend(linguas)
        elif target is not None:
            target = [target] + linguas
        else:
            target = linguas
    if not target:
        # Let SCons.BuilderBase handle this pathological situation
        return BuilderBase._execute(self, env, target, source, *args, **kw)
    # The rest is ours
    if not SCons.Util.is_List(target):
        target = [target]
    result = []
    for tgt in target:
        r = BuilderBase._execute(self, env, [tgt], source, *args, **kw)
        result.extend(r)
    if linguas_files is not None:
        env['LINGUAS_FILE'] = linguas_files
    return SCons.Node.NodeList(result) | [
" Execute builder's actions.\n \n Here we append to `target` the languages read from `$LINGUAS_FILE` and \n apply `SCons.Builder.BuilderBase._execute()` separatelly to each target.\n The arguments and return value are same as for\n `SCons.Builder.BuilderBase._execute()`. \n "
] |
Please provide a description of the function:def allocate_stream(self, stream_type, stream_id=None, previous=None, attach=False):
    if stream_type not in DataStream.TypeToString:
        raise ArgumentError("Unknown stream type in allocate_stream", stream_type=stream_type)

    if stream_id is not None and stream_id >= StreamAllocator.StartingID:
        raise ArgumentError("Attempted to explicitly allocate a stream id in the internally managed id range", stream_id=stream_id, started_id=StreamAllocator.StartingID)

    # If the stream id is not explicitly given, we need to manage and track it
    # from our autoallocate range
    if stream_id is None:
        if stream_type not in self._next_id:
            self._next_id[stream_type] = StreamAllocator.StartingID

        stream_id = self._next_id[stream_type]
        self._next_id[stream_type] += 1

    # Keep track of how many downstream nodes are attached to this stream so
    # that we know when we need to split it into two.
    stream = DataStream(stream_type, stream_id)
    if stream not in self._allocated_streams:
        self._allocated_streams[stream] = (stream, 0, previous)

    if attach:
        stream = self.attach_stream(stream)

    return stream | [
"Allocate a new stream of the given type.\n\n The stream is allocated with an incremental ID starting at\n StreamAllocator.StartingID. The returned data stream can always\n be used to to attach a NodeInput to this stream, however the\n attach_stream() function should always be called first since this\n stream's output may need to be split and a logically equivalent\n stream used instead to satisfy a device specific constraint on the\n maximum number of outputs attached to a given stream.\n\n You can call allocate_stream on the same stream multiple times without\n issue. Subsequent calls to allocate_stream are noops.\n\n Args:\n stream_type (int): A stream type specified in the DataStream class\n like DataStream.ConstantType\n stream_id (int): The ID we would like to use for this stream, if\n this is not specified, an ID is automatically allocated.\n previous (DataStream): If this stream was automatically derived from\n another stream, this parameter should be a link to the old\n stream.\n attach (bool): Call attach_stream immediately before returning. Convenience\n routine for streams that should immediately be attached to something.\n\n Returns:\n DataStream: The allocated data stream.\n "
] |
Please provide a description of the function:def attach_stream(self, stream):
    curr_stream, count, prev = self._allocated_streams[stream]

    # Check if we need to split this stream and allocate a new one
    if count == (self.model.get(u'max_node_outputs') - 1):
        new_stream = self.allocate_stream(curr_stream.stream_type, previous=curr_stream)

        copy_desc = u"({} always) => {} using copy_all_a".format(curr_stream, new_stream)
        self.sensor_graph.add_node(copy_desc)
        self._allocated_streams[stream] = (new_stream, 1, curr_stream)

        # If we are splitting a constant stream, make sure we also duplicate the initialization value
        # FIXME: If there is no default value for the stream, that is probably a warning since all constant
        # streams should be initialized with a value.
        if curr_stream.stream_type == DataStream.ConstantType and curr_stream in self.sensor_graph.constant_database:
            self.sensor_graph.add_constant(new_stream, self.sensor_graph.constant_database[curr_stream])

        return new_stream

    self._allocated_streams[stream] = (curr_stream, count + 1, prev)
    return curr_stream | [
"Notify that we would like to attach a node input to this stream.\n\n The return value from this function is the DataStream that should be attached\n to since this function may internally allocate a new SGNode that copies the\n stream if there is no space in the output list to hold another input.\n\n This function should be called once for every node input before allocated a new\n sensor graph node that attaches to a stream that is managed by the StreamAllocator.\n\n Args:\n stream (DataStream): The stream (originally returned from allocate_stream)\n that we want to attach to.\n\n Returns:\n Datastream: A data stream, possible the same as stream, that should be attached\n to a node input.\n "
] |
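A hedged sketch of the allocate/attach protocol; the constructor signature and surrounding objects are assumed, only the two calls follow the code above:

    alloc = StreamAllocator(sensor_graph, model=model)
    const = alloc.allocate_stream(DataStream.ConstantType)   # allocate once
    for _ in range(5):
        # Always attach before wiring a node input; the allocator may hand
        # back a split copy once the original stream's outputs are full.
        inp = alloc.attach_stream(const)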
Please provide a description of the function:def _find_v1_settings(self, settings):
    if 'module_name' in settings:
        modname = settings['module_name']

    if 'modules' not in settings or len(settings['modules']) == 0:
        raise DataError("No modules defined in module_settings.json file")
    elif len(settings['modules']) > 1:
        raise DataError("Multiple modules defined in module_settings.json file",
                        modules=[x for x in settings['modules']])
    else:
        modname = list(settings['modules'])[0]

    if modname not in settings['modules']:
        raise DataError("Module name does not correspond with an entry in the modules directory",
                        name=modname, modules=[x for x in settings['modules']])

    release_info = self._load_release_info(settings)
    modsettings = settings['modules'][modname]
    architectures = settings.get('architectures', {})

    target_defs = settings.get('module_targets', {})
    targets = target_defs.get(modname, [])

    return TileInfo(modname, modsettings, architectures, targets, release_info) | [
"Parse a v1 module_settings.json file.\n\n V1 is the older file format that requires a modules dictionary with a\n module_name and modules key that could in theory hold information on\n multiple modules in a single directory.\n "
] |
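For reference, a v1 module_settings.json of the shape this parser expects might look like this (all values invented):

    {
        "module_name": "demo_tile",
        "modules": {
            "demo_tile": { "version": "1.0.0" }
        },
        "architectures": {},
        "module_targets": { "demo_tile": [] }
    }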
Please provide a description of the function:def _load_settings(self, info):
    modname, modsettings, architectures, targets, release_info = info
    self.settings = modsettings

    # Name is converted to all lowercase to canonicalize it
    prepend = ''
    if 'domain' in modsettings:
        prepend = modsettings['domain'].lower() + '/'

    key = prepend + modname.lower()

    # Copy over some key properties that we want easy access to
    self.name = key
    self.unique_id = key.replace('/', '_')
    self.short_name = modname
    self.targets = targets

    self.full_name = "Undefined"
    if "full_name" in self.settings:
        self.full_name = self.settings['full_name']

    # FIXME: make sure this is a list
    self.authors = []
    if "authors" in self.settings:
        self.authors = self.settings['authors']

    self.version = "0.0.0"
    if "version" in self.settings:
        self.version = self.settings['version']

    self.parsed_version = SemanticVersion.FromString(self.version)

    # Load all of the build products that can be created by this IOTile
    self.products = modsettings.get('products', {})

    # Load in the release information telling us how to release this component
    release_steps = modsettings.get('release_steps', [])
    self.release_steps = []
    self.can_release = False

    for step in release_steps:
        if 'provider' not in step:
            raise DataError("Invalid release step that did not have a provider key", step=step)

        parsed_step = ReleaseStep(provider=step['provider'], args=step.get('args', {}))
        self.release_steps.append(parsed_step)

    if len(self.release_steps) > 0:
        self.can_release = True

    self.dependency_versions = {}

    # If this is a release IOTile component, check for release information
    if release_info is not None:
        self.release = True
        self.release_date = release_info.release_date
        self.output_folder = self.folder
        self.dependency_versions = release_info.dependency_versions
    else:
        self.release = False
        self.output_folder = os.path.join(self.folder, 'build', 'output')

        # If this tile is a development tile and it has been built at least once, add in a release date
        # from the last time it was built
        if os.path.exists(os.path.join(self.output_folder, 'module_settings.json')):
            release_settings = os.path.join(self.output_folder, 'module_settings.json')

            with open(release_settings, 'r') as infile:
                release_dict = json.load(infile)

            import dateutil.parser
            self.release_date = dateutil.parser.parse(release_dict['release_date'])
        else:
            self.release_date = None

    # Find all of the things that this module could possibly depend on.
    # Dependencies include those defined in the module itself as well as
    # those defined in architectures that are present in the module_settings.json
    # file.
    self.dependencies = []

    archs_with_deps = [y['depends'].items() for _x, y in architectures.items() if 'depends' in y]
    if 'depends' in self.settings:
        if not isinstance(self.settings['depends'], dict):
            raise DataError("module must have a depends key that is a dictionary",
                            found=str(self.settings['depends']))

        archs_with_deps.append(self.settings['depends'].items())

    # Find all python packages needed
    self.support_wheel_depends = []

    if 'python_depends' in self.settings:
        if not isinstance(self.settings['python_depends'], list):
            raise DataError("module must have a python_depends key that is a list of strings",
                            found=str(self.settings['python_depends']))

        for python_depend in self.settings['python_depends']:
            if not isinstance(python_depend, str):
                raise DataError("module must have a python_depends key that is a list of strings",
                                found=str(self.settings['python_depends']))

            self.support_wheel_depends.append(python_depend)

    # Also search through overlays to architectures that are defined in this module_settings.json file
    # and see if those overlays contain dependencies.
    for overlay_arch in self.settings.get('overlays', {}).values():
        if 'depends' in overlay_arch:
            archs_with_deps.append(overlay_arch['depends'].items())

    found_deps = set()
    for dep, _ in itertools.chain(*archs_with_deps):
        name, _, version = dep.partition(',')
        unique_id = name.lower().replace('/', '_')

        version = version.strip()
        if version == '':
            version = "*"

        ver_range = SemanticVersionRange.FromString(version)

        depdict = {
            'name': name,
            'unique_id': unique_id,
            'required_version': ver_range,
            'required_version_string': version
        }

        if name not in found_deps:
            self.dependencies.append(depdict)
            found_deps.add(name)

    # Store any architectures that we find in this json file for future reference
    self.architectures = architectures

    # Set up our support package information
    self.support_distribution = "iotile_support_{0}_{1}".format(self.short_name, self.parsed_version.major)

    if 'python_universal' in self.settings:
        py_version = "py2.py3"
    elif sys.version_info[0] >= 3:
        py_version = "py3"
    else:
        py_version = "py2"

    self.support_wheel = "{0}-{1}-{2}-none-any.whl".format(self.support_distribution,
                                                           self.parsed_version.pep440_string(),
                                                           py_version)
    self.has_wheel = self._check_has_wheel() | [
"Load settings for a module."
] |
Please provide a description of the function:def _ensure_product_string(cls, product):
    if isinstance(product, str):
        return product

    if isinstance(product, list):
        return os.path.join(*product)

    raise DataError("Unknown object (not str or list) specified as a component product", product=product) | [
"Ensure that all product locations are strings.\n\n Older components specify paths as lists of path components. Join\n those paths into a normal path string.\n "
] |
Please provide a description of the function:def find_products(self, product_type):
    if self.filter_prods and product_type in self.LIST_PRODUCTS and product_type not in self.desired_prods:
        return []

    if product_type in self.LIST_PRODUCTS:
        found_products = self.products.get(product_type, [])
    else:
        found_products = [x[0] for x in self.products.items()
                          if x[1] == product_type and (not self.filter_prods or x[0] in self.desired_prods)]

    found_products = [self._ensure_product_string(x) for x in found_products]

    declaration = self.PATH_PRODUCTS.get(product_type)
    if declaration is not None:
        found_products = [self._process_product_path(x, declaration) for x in found_products]

    return found_products | [
"Search for products of a given type.\n\n Search through the products declared by this IOTile component and\n return only those matching the given type. If the product is described\n by the path to a file, a complete normalized path will be returned.\n The path could be different depending on whether this IOTile component\n is in development or release mode.\n\n The behavior of this function when filter_products has been called is\n slightly different based on whether product_type is in LIST_PRODUCTS\n or not. If product type is in LIST_PRODUCTS, then all matching\n products are returned if product_type itself was passed. So to get\n all tilebus_definitions you would call\n ``filter_products('tilebus_definitions')``\n\n By contrast, other products are filtered product-by-product. So there\n is no way to filter and get **all libraries**. Instead you pass the\n specific product names of the libraries that you want to\n ``filter_products`` and those specific libraries are returned.\n Passing the literal string ``library`` to ``filter_products`` will not\n return only the libraries, it will return nothing since no library is\n named ``library``.\n\n Args:\n product_type (str): The type of product that we wish to return.\n\n Returns:\n list of str: The list of all products of the given type.\n\n If no such products are found, an empty list will be returned.\n If filter_products() has been called and the filter does not include\n this product type, an empty list will be returned.\n "
] |
Please provide a description of the function:def library_directories(self):
    libs = self.find_products('library')

    if len(libs) > 0:
        return [os.path.join(self.output_folder)]

    return [] | [
"Return a list of directories containing any static libraries built by this IOTile."
] |
Please provide a description of the function:def filter_products(self, desired_prods):
    self.filter_prods = True
    self.desired_prods = set(desired_prods) | [
"When asked for a product, filter only those on this list."
] |
Please provide a description of the function:def format_ascii(sensor_graph):
    cmdfile = CommandFile("Sensor Graph", "1.0")

    # Clear any old sensor graph
    cmdfile.add("set_online", False)
    cmdfile.add("clear")
    cmdfile.add("reset")

    # Load in the nodes
    for node in sensor_graph.dump_nodes():
        cmdfile.add('add_node', node)

    # Load in the streamers
    for streamer in sensor_graph.streamers:
        other = 0xFF
        if streamer.with_other is not None:
            other = streamer.with_other

        args = [streamer.selector, streamer.dest, streamer.automatic, streamer.format, streamer.report_type, other]
        cmdfile.add('add_streamer', *args)

    # Load all the constants
    for stream, value in sorted(sensor_graph.constant_database.items(), key=lambda x: x[0].encode()):
        cmdfile.add("push_reading", stream, value)

    # Persist the sensor graph
    cmdfile.add("persist")
    cmdfile.add("set_online", True)

    return cmdfile.dump() | [
"Format this sensor graph as a loadable ascii file format.\n\n This includes commands to reset and clear previously stored\n sensor graphs.\n\n NB. This format does not include any required configuration\n variables that were specified in this sensor graph, so you\n should also output tha information separately in, e.g.\n the config format.\n\n Args:\n sensor_graph (SensorGraph): the sensor graph that we want to format\n\n Returns:\n str: The ascii output lines concatenated as a single string\n "
] |
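A short usage sketch, reusing compile_sgf from earlier in this section (the output path is invented):

    sg = compile_sgf('app.sgf')
    with open('app_loadable.txt', 'w') as outfile:
        outfile.write(format_ascii(sg))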
Please provide a description of the function:def clear(self):
    self.roots = []
    self.nodes = []
    self.streamers = []

    self.constant_database = {}
    self.metadata_database = {}
    self.config_database = {} | [
"Clear all nodes from this sensor_graph.\n\n This function is equivalent to just creating a new SensorGraph() object\n from scratch. It does not clear any data from the SensorLog, however.\n "
] |
Please provide a description of the function:def add_node(self, node_descriptor):
    if self._max_nodes is not None and len(self.nodes) >= self._max_nodes:
        raise ResourceUsageError("Maximum number of nodes exceeded", max_nodes=self._max_nodes)

    node, inputs, processor = parse_node_descriptor(node_descriptor, self.model)

    in_root = False

    for i, input_data in enumerate(inputs):
        selector, trigger = input_data

        walker = self.sensor_log.create_walker(selector)

        # Constant walkers begin life initialized to 0 so they always read correctly
        if walker.selector.inexhaustible:
            walker.reading = IOTileReading(0xFFFFFFFF, walker.selector.as_stream(), 0)

        node.connect_input(i, walker, trigger)

        if selector.input and not in_root:
            self.roots.append(node)
            in_root = True  # Make sure we only add to the root list once
        else:
            found = False
            for other in self.nodes:
                if selector.matches(other.stream):
                    other.connect_output(node)
                    found = True

            if not found and selector.buffered:
                raise NodeConnectionError("Node has input that refers to another node that has not been created yet", node_descriptor=node_descriptor, input_selector=str(selector), input_index=i)

    # Also make sure we add this node's output to any other existing node's inputs.
    # This is important for constant nodes that may be written from multiple places.
    # FIXME: Make sure when we emit nodes, they are topologically sorted
    for other_node in self.nodes:
        for selector, trigger in other_node.inputs:
            if selector.matches(node.stream):
                node.connect_output(other_node)

    # Find and load the processing function for this node
    func = self.find_processing_function(processor)
    if func is None:
        raise ProcessingFunctionError("Could not find processing function in installed packages", func_name=processor)

    node.set_func(processor, func)
    self.nodes.append(node) | [
"Add a node to the sensor graph based on the description given.\n\n The node_descriptor must follow the sensor graph DSL and describe\n a node whose input nodes already exist.\n\n Args:\n node_descriptor (str): A description of the node to be added\n including its inputs, triggering conditions, processing function\n and output stream.\n "
] |
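For context, a node descriptor is a single DSL string; a hedged example (the stream numbers are invented, copy_latest_a is a processor name that appears in the iotile sensor graph DSL):

    sg.add_node(u"(input 1 always) => unbuffered 2 using copy_latest_a")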
Please provide a description of the function:def add_config(self, slot, config_id, config_type, value):
    if slot not in self.config_database:
        self.config_database[slot] = {}

    self.config_database[slot][config_id] = (config_type, value) | [
"Add a config variable assignment to this sensor graph.\n\n Args:\n slot (SlotIdentifier): The slot identifier that this config\n variable is assigned to.\n config_id (int): The 16-bit id of this config_id\n config_type (str): The type of the config variable, currently\n supported are fixed width integer types, strings and binary\n blobs.\n value (str|int|bytes): The value to assign to the config variable.\n "
] |
Please provide a description of the function:def add_streamer(self, streamer):
    if self._max_streamers is not None and len(self.streamers) >= self._max_streamers:
        raise ResourceUsageError("Maximum number of streamers exceeded", max_streamers=self._max_streamers)

    streamer.link_to_storage(self.sensor_log)
    streamer.index = len(self.streamers)

    self.streamers.append(streamer) | [
"Add a streamer to this sensor graph.\n\n Args:\n streamer (DataStreamer): The streamer we want to add\n "
] |
Please provide a description of the function:def add_constant(self, stream, value):
    if stream in self.constant_database:
        raise ArgumentError("Attempted to set the same constant twice", stream=stream, old_value=self.constant_database[stream], new_value=value)

    self.constant_database[stream] = value | [
"Store a constant value for use in this sensor graph.\n\n Constant assignments occur after all sensor graph nodes have been\n allocated since they must be propogated to all appropriate virtual\n stream walkers.\n\n Args:\n stream (DataStream): The constant stream to assign the value to\n value (int): The value to assign.\n "
] |
Please provide a description of the function:def add_metadata(self, name, value):
    if name in self.metadata_database:
        raise ArgumentError("Attempted to set the same metadata value twice", name=name, old_value=self.metadata_database[name], new_value=value)

    self.metadata_database[name] = value | [
"Attach a piece of metadata to this sensorgraph.\n\n Metadata is not used during the simulation of a sensorgraph but allows\n it to convey additional context that may be used during code\n generation. For example, associating an `app_tag` with a sensorgraph\n allows the snippet code generator to set that app_tag on a device when\n programming the sensorgraph.\n\n Arg:\n name (str): The name of the metadata that we wish to associate with this\n sensorgraph.\n value (object): The value we wish to store.\n "
] |
Please provide a description of the function:def initialize_remaining_constants(self, value=0):
    remaining = []

    for node, _inputs, _outputs in self.iterate_bfs():
        streams = node.input_streams() + [node.stream]

        for stream in streams:
            if stream.stream_type is not DataStream.ConstantType:
                continue

            if stream not in self.constant_database:
                self.add_constant(stream, value)
                remaining.append(stream)

    return remaining | [
"Ensure that all constant streams referenced in the sensor graph have a value.\n\n Constant streams that are automatically created by the compiler are initialized\n as part of the compilation process but it's possible that the user references\n other constant streams but never assigns them an explicit initial value. This\n function will initialize them all to a default value (0 if not passed) and\n return the streams that were so initialized.\n\n Args:\n value (int): Optional value to use to initialize all uninitialized constants.\n Defaults to 0 if not passed.\n\n Returns:\n list(DataStream): A list of all of the constant streams that were not previously\n initialized and were initialized to the given value in this function.\n "
] |
Please provide a description of the function:def load_constants(self):
    for stream, value in self.constant_database.items():
        self.sensor_log.push(stream, IOTileReading(0, stream.encode(), value)) | [
"Load all constants into their respective streams.\n\n All previous calls to add_constant stored a constant value that\n should be associated with virtual stream walkers. This function\n actually calls push_stream in order to push all of the constant\n values to their walkers.\n "
] |
Please provide a description of the function:def get_config(self, slot, config_id):
    if slot not in self.config_database:
        raise ArgumentError("No config variables have been set on specified slot", slot=slot)

    if config_id not in self.config_database[slot]:
        raise ArgumentError("Config variable has not been set on specified slot", slot=slot, config_id=config_id)

    return self.config_database[slot][config_id] | [
"Get a config variable assignment previously set on this sensor graph.\n\n Args:\n slot (SlotIdentifier): The slot that we are setting this config variable\n on.\n config_id (int): The 16-bit config variable identifier.\n\n Returns:\n (str, str|int): Returns a tuple with the type of the config variable and\n the value that is being set.\n\n Raises:\n ArgumentError: If the config variable is not currently set on the specified\n slot.\n "
] |
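A hedged example of the add_config/get_config round trip; the slot string follows the SlotIdentifier syntax used elsewhere in this section, while the config id and value are invented:

    slot = SlotIdentifier.FromString('controller')
    sg.add_config(slot, 0x2000, 'uint32_t', 2)
    config_type, value = sg.get_config(slot, 0x2000)   # -> ('uint32_t', 2)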
Please provide a description of the function:def is_output(self, stream):
    for streamer in self.streamers:
        if streamer.selector.matches(stream):
            return True

    return False | [
"Check if a stream is a sensor graph output.\n\n Return:\n bool\n "
] |
Please provide a description of the function:def get_tick(self, name):
    name_map = {
        'fast': config_fast_tick_secs,
        'user1': config_tick1_secs,
        'user2': config_tick2_secs
    }

    config = name_map.get(name)
    if config is None:
        raise ArgumentError("Unknown tick requested", name=name)

    slot = SlotIdentifier.FromString('controller')

    try:
        var = self.get_config(slot, config)
        return var[1]
    except ArgumentError:
        return 0 | [
"Check the config variables to see if there is a configurable tick.\n\n Sensor Graph has a built-in 10 second tick that is sent every 10\n seconds to allow for triggering timed events. There is a second\n 'user' tick that is generated internally by the sensorgraph compiler\n and used for fast operations and finally there are several field\n configurable ticks that can be used for setting up configurable\n timers.\n\n This is done by setting a config variable on the controller with the\n desired tick interval, which is then interpreted by this function.\n\n The appropriate config_id to use is listed in `known_constants.py`\n\n Returns:\n int: 0 if the tick is disabled, otherwise the number of seconds\n between each tick\n "
] |
Please provide a description of the function:def process_input(self, stream, value, rpc_executor):
    self.sensor_log.push(stream, value)

    # FIXME: This should be specified in our device model
    if stream.important:
        associated_output = stream.associated_stream()
        self.sensor_log.push(associated_output, value)

    to_check = deque([x for x in self.roots])

    while len(to_check) > 0:
        node = to_check.popleft()
        if node.triggered():
            try:
                results = node.process(rpc_executor, self.mark_streamer)
                for result in results:
                    result.raw_time = value.raw_time
                    self.sensor_log.push(node.stream, result)
            except:
                self._logger.exception("Unhandled exception in graph node processing function for node %s", str(node))
                results = []  # Guard against results being unbound below

            # If we generated any outputs, notify our downstream nodes
            # so that they are also checked to see if they should run.
            if len(results) > 0:
                to_check.extend(node.outputs) | [
"Process an input through this sensor graph.\n\n The tick information in value should be correct and is transfered\n to all results produced by nodes acting on this tick.\n\n Args:\n stream (DataStream): The stream the input is part of\n value (IOTileReading): The value to process\n rpc_executor (RPCExecutor): An object capable of executing RPCs\n in case we need to do that.\n "
] |
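A hypothetical simulation step that feeds one reading into the graph, following the IOTileReading construction used in load_constants above:

    stream = DataStream.FromString('input 1')
    reading = IOTileReading(0, stream.encode(), 42)
    sg.process_input(stream, reading, rpc_executor=None)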
Please provide a description of the function:def mark_streamer(self, index):
    self._logger.debug("Marking streamer %d manually", index)

    if index >= len(self.streamers):
        raise ArgumentError("Invalid streamer index", index=index, num_streamers=len(self.streamers))

    self._manually_triggered_streamers.add(index) | [
"Manually mark a streamer that should trigger.\n\n The next time check_streamers is called, the given streamer will be\n manually marked that it should trigger, which will cause it to trigger\n unless it has no data.\n\n Args:\n index (int): The index of the streamer that we should mark as\n manually triggered.\n\n Raises:\n ArgumentError: If the streamer index is invalid.\n "
] |
Please provide a description of the function:def check_streamers(self, blacklist=None):
    ready = []
    selected = set()

    for i, streamer in enumerate(self.streamers):
        if blacklist is not None and i in blacklist:
            continue

        if i in selected:
            continue

        marked = False
        if i in self._manually_triggered_streamers:
            marked = True
            self._manually_triggered_streamers.remove(i)

        if streamer.triggered(marked):
            self._logger.debug("Streamer %d triggered, manual=%s", i, marked)
            ready.append(streamer)
            selected.add(i)

            # Handle streamers triggered with another
            # (start=i keeps j an absolute index into self.streamers)
            for j, streamer2 in enumerate(self.streamers[i:], start=i):
                if streamer2.with_other == i and j not in selected and streamer2.triggered(True):
                    self._logger.debug("Streamer %d triggered due to with-other on %d", j, i)
                    ready.append(streamer2)
                    selected.add(j)

    return ready | [
"Check if any streamers are ready to produce a report.\n\n You can limit what streamers are checked by passing a set-like\n object into blacklist.\n\n This method is the primary way to see when you should poll a given\n streamer for its next report.\n\n Note, this function is not idempotent. If a streamer is marked as\n manual and it is triggered from a node rule inside the sensor_graph,\n that trigger will only last as long as the next call to\n check_streamers() so you need to explicitly build a report on all\n ready streamers before calling check_streamers again.\n\n Args:\n blacklist (set): Optional set of streamer indices that should\n not be checked right now.\n\n Returns:\n list of DataStreamer: A list of the ready streamers.\n "
] |
Please provide a description of the function:def iterate_bfs(self):
    working_set = deque(self.roots)
    seen = []

    while len(working_set) > 0:
        curr = working_set.popleft()

        # Now build input and output node lists for this node
        inputs = []
        for walker, _ in curr.inputs:
            for other in seen:
                if walker.matches(other.stream) and other not in inputs:
                    inputs.append(other)

        outputs = [x for x in curr.outputs]

        yield curr, inputs, outputs

        working_set.extend(curr.outputs)
        seen.append(curr) | [
"Generator that yields node, [inputs], [outputs] in breadth first order.\n\n This generator will iterate over all nodes in the sensor graph, yielding\n a 3 tuple for each node with a list of all of the nodes connected to its\n inputs and all of the nodes connected to its output.\n\n Returns:\n (SGNode, list(SGNode), list(SGNode)): A tuple for each node in the graph\n "
] |
Please provide a description of the function:def sort_nodes(self):
    node_map = {id(node): i for i, node in enumerate(self.nodes)}

    node_deps = {}
    for node, inputs, _outputs in self.iterate_bfs():
        node_index = node_map[id(node)]
        deps = {node_map[id(x)] for x in inputs}
        node_deps[node_index] = deps

    # Now that we have our dependency tree properly built, topologically
    # sort the nodes and reorder them.
    node_order = toposort_flatten(node_deps)
    self.nodes = [self.nodes[x] for x in node_order]

    # Check that root nodes were all topologically sorted to the beginning
    for root in self.roots:
        if root not in self.nodes[0:len(self.roots)]:
            raise NodeConnectionError("Inputs not sorted in the beginning", node=str(root), node_position=self.nodes.index(root)) | [
"Topologically sort all of our nodes.\n\n Topologically sorting our nodes makes nodes that are inputs to other\n nodes come first in the list of nodes. This is important to do before\n programming a sensorgraph into an embedded device whose engine assumes\n a topologically sorted graph.\n\n The sorting is done in place on self.nodes\n "
] |
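For reference, the third-party toposort package used above consumes a mapping from node index to the set of indices it depends on:

    from toposort import toposort_flatten

    deps = {2: {0, 1}, 1: {0}, 0: set()}
    print(toposort_flatten(deps))   # [0, 1, 2]: inputs come before consumers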
Please provide a description of the function:def generate(env):
    try:
        bld = env['BUILDERS']['Ipkg']
    except KeyError:
        bld = SCons.Builder.Builder(action='$IPKGCOM',
                                    suffix='$IPKGSUFFIX',
                                    source_scanner=None,
                                    target_scanner=None)
        env['BUILDERS']['Ipkg'] = bld

    env['IPKG'] = 'ipkg-build'
    env['IPKGCOM'] = '$IPKG $IPKGFLAGS ${SOURCE}'
    if env.WhereIs('id'):
        env['IPKGUSER'] = os.popen('id -un').read().strip()
        env['IPKGGROUP'] = os.popen('id -gn').read().strip()
    env['IPKGFLAGS'] = SCons.Util.CLVar('-o $IPKGUSER -g $IPKGGROUP')
    env['IPKGSUFFIX'] = '.ipk' | [
"Add Builders and construction variables for ipkg to an Environment."
] |
Please provide a description of the function:def format_trigger(self, stream):
    src = u'value'
    if self.use_count:
        src = u'count'

    return u"{}({}) {} {}".format(src, stream, self.comp_string, self.reference) | [
"Create a user understandable string like count(stream) >= X.\n\n Args:\n stream (DataStream): The stream to use to format ourselves.\n\n Returns:\n str: The formatted string\n "
] |
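A hedged illustration of the resulting string; the trigger construction is assumed, only the output format is taken from the code above:

    # Suppose a trigger exists with use_count=False, comp_string=u'>=' and reference=10.
    trigger.format_trigger(DataStream.FromString('input 1'))
    # -> u"value(input 1) >= 10"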
Please provide a description of the function:def triggered(self, walker):
    if self.use_count:
        comp_value = walker.count()
    else:
        if walker.count() == 0:
            return False

        comp_value = walker.peek().value

    return self.comp_function(comp_value, self.reference) | [
"Check if this input is triggered on the given stream walker.\n\n Args:\n walker (StreamWalker): The walker to check\n\n Returns:\n bool: Whether this trigger is triggered or not\n "
] |
Please provide a description of the function:def connect_input(self, index, walker, trigger=None):
    if trigger is None:
        trigger = TrueTrigger()

    if index >= len(self.inputs):
        raise TooManyInputsError("Input index exceeded max number of inputs", index=index, max_inputs=len(self.inputs), stream=self.stream)

    self.inputs[index] = (walker, trigger) | [
"Connect an input to a stream walker.\n\n If the input is already connected to something an exception is thrown.\n Otherwise the walker is used to read inputs for that input.\n\n A triggering condition can optionally be passed that will determine\n when this input will be considered as triggered.\n\n Args:\n index (int): The index of the input that we want to connect\n walker (StreamWalker): The stream walker to use for the input\n trigger (InputTrigger): The trigger to use for the input. If\n no trigger is specified, the input is considered to always be\n triggered (so TrueTrigger is used)\n "
] |
Please provide a description of the function:def input_streams(self):
    streams = []

    for walker, _trigger in self.inputs:
        if walker.selector is None or not walker.selector.singular:
            continue

        streams.append(walker.selector.as_stream())

    return streams | [
"Return a list of DataStream objects for all singular input streams.\n\n This function only returns individual streams, not the streams that would\n be selected from a selector like 'all outputs' for example.\n\n Returns:\n list(DataStream): A list of all of the individual DataStreams that are inputs\n of the node. Input selectors that select multiple streams are not included\n "
] |
Please provide a description of the function:def find_input(self, stream):
    for i, input_x in enumerate(self.inputs):
        if input_x[0].matches(stream):
            return i | [
"Find the input that responds to this stream.\n\n Args:\n stream (DataStream): The stream to find\n\n Returns:\n (index, None): The index if found or None\n "
] |
Please provide a description of the function:def num_inputs(self):
    num = 0

    for walker, _ in self.inputs:
        if not isinstance(walker, InvalidStreamWalker):
            num += 1

    return num | [
"Return the number of connected inputs.\n\n Returns:\n int: The number of connected inputs\n "
] |
Please provide a description of the function:def connect_output(self, node):
    if len(self.outputs) == self.max_outputs:
        raise TooManyOutputsError("Attempted to connect too many nodes to the output of a node", max_outputs=self.max_outputs, stream=self.stream)

    self.outputs.append(node) | [
"Connect another node to our output.\n\n This downstream node will automatically be triggered when we update\n our output.\n\n Args:\n node (SGNode): The node that should receive our output\n "
] |
Please provide a description of the function:def triggered(self):
    trigs = [x[1].triggered(x[0]) for x in self.inputs]

    if self.trigger_combiner == self.OrTriggerCombiner:
        return True in trigs

    return False not in trigs | [
"Test if we should trigger our operation.\n\n We test the trigger condition on each of our inputs and then\n combine those triggers using our configured trigger combiner\n to get an overall result for whether this node is triggered.\n\n Returns:\n bool: True if we should trigger and False otherwise\n "
] |
Please provide a description of the function:def set_func(self, name, func):
    self.func_name = name
    self.func = func | [
"Set the processing function to use for this node.\n\n Args:\n name (str): The name of the function to use. This is\n just stored for reference in case we need to serialize\n the node later.\n func (callable): A function that is called to process inputs\n for this node. It should have the following signature:\n callable(input1_walker, input2_walker, ...)\n It should return a list of IOTileReadings that are then pushed into\n the node's output stream\n "
] |
Please provide a description of the function:def process(self, rpc_executor, mark_streamer=None):
    if self.func is None:
        raise ProcessingFunctionError('No processing function set for node', stream=self.stream)

    results = self.func(*[x[0] for x in self.inputs], rpc_executor=rpc_executor, mark_streamer=mark_streamer)
    if results is None:
        results = []

    return results | [
"Run this node's processing function.\n\n Args:\n rpc_executor (RPCExecutor): An object capable of executing RPCs\n in case we need to do that.\n mark_streamer (callable): Function that can be called to manually\n mark a streamer as triggered by index.\n\n Returns:\n list(IOTileReading): A list of IOTileReadings with the results of\n the processing function or an empty list if no results were\n produced\n "
] |
Please provide a description of the function:def main(argv=None):
    if argv is None:
        argv = sys.argv[1:]

    parser = build_args()
    args = parser.parse_args(args=argv)

    verbosity = args.verbose

    root = logging.getLogger()
    # The `if verbosity > 0:` guard was missing in the flattened source; it is
    # restored here so that the dangling else branch below has a matching if.
    if verbosity > 0:
        formatter = logging.Formatter('%(levelname).6s %(name)s %(message)s')
        handler = logging.StreamHandler()
        handler.setFormatter(formatter)

        loglevels = [logging.ERROR, logging.WARNING, logging.INFO, logging.DEBUG]
        if verbosity >= len(loglevels):
            verbosity = len(loglevels) - 1

        level = loglevels[verbosity]

        root.setLevel(level)
        root.addHandler(handler)
    else:
        root.addHandler(logging.NullHandler())

    try:
        with open(args.script, "rb") as infile:
            binary_script = infile.read()
    except IOError as exc:
        print("ERROR: Unable to read script file: %s" % str(exc))
        return 1

    try:
        script = UpdateScript.FromBinary(binary_script, allow_unknown=args.allow_unknown, show_rpcs=args.show_rpcs)
    except ArgumentError as err:
        print("ERROR: ArgumentError: could not parse script")
        print(str(err))
        return 3
    except DataError as err:
        print("ERROR: DataError: could not parse script")
        print(str(err))
        return 4

    if args.format != 'text':
        print("ERROR: only text format is currently supported")
        return 2

    if args.format == 'text':
        print("\nUpdate Script")
        print("-------------")
        print("Path: %s" % args.script)
        print("Record Count: %d" % len(script.records))
        print("Total length: %d bytes" % len(binary_script))
        print("\nActions")
        print("-------")

        for i, record in enumerate(script.records):
            print("%02d: %s" % (i + 1, str(record)))

        print("")

    return 0 | [
"Main script entry point.\n\n Args:\n argv (list): The command line arguments, defaults to sys.argv if not passed.\n\n Returns:\n int: The return value of the script.\n "
] |
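A hypothetical invocation; flag spellings depend on build_args(), which is not shown, and the script path is invented:

    exit_code = main(['update.trub'])          # text summary of the script
    exit_code = main(['-vv', 'update.trub'])   # same, with INFO-level logging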
Please provide a description of the function:def FortranScan(path_variable="FORTRANPATH"):
# The USE statement regex matches the following:
#
# USE module_name
# USE :: module_name
# USE, INTRINSIC :: module_name
# USE, NON_INTRINSIC :: module_name
#
# Limitations
#
# -- While the regex can handle multiple USE statements on one line,
# it cannot properly handle them if they are commented out.
# In either of the following cases:
#
# ! USE mod_a ; USE mod_b [entire line is commented out]
# USE mod_a ! ; USE mod_b [in-line comment of second USE statement]
#
# the second module name (mod_b) will be picked up as a dependency
# even though it should be ignored. The only way I can see
# to rectify this would be to modify the scanner to eliminate
# the call to re.findall, read in the contents of the file,
# treating the comment character as an end-of-line character
# in addition to the normal linefeed, loop over each line,
# weeding out the comments, and looking for the USE statements.
# One advantage to this is that the regex passed to the scanner
# would no longer need to match a semicolon.
#
# -- I question whether or not we need to detect dependencies to
# INTRINSIC modules because these are built-in to the compiler.
# If we consider them a dependency, will SCons look for them, not
# find them, and kill the build? Or will we there be standard
# compiler-specific directories we will need to point to so the
# compiler and SCons can locate the proper object and mod files?
# Here is a breakdown of the regex:
#
# (?i) : regex is case insensitive
# ^ : start of line
# (?: : group a collection of regex symbols without saving the match as a "group"
# ^|; : matches either the start of the line or a semicolon - semicolon
# ) : end the unsaved grouping
# \s* : any amount of white space
# USE : match the string USE, case insensitive
# (?: : group a collection of regex symbols without saving the match as a "group"
# \s+| : match one or more whitespace OR .... (the next entire grouped set of regex symbols)
# (?: : group a collection of regex symbols without saving the match as a "group"
# (?: : establish another unsaved grouping of regex symbols
# \s* : any amount of white space
# , : match a comma
# \s* : any amount of white space
# (?:NON_)? : optionally match the prefix NON_, case insensitive
# INTRINSIC : match the string INTRINSIC, case insensitive
# )? : optionally match the ", INTRINSIC/NON_INTRINSIC" grouped expression
# \s* : any amount of white space
# :: : match a double colon that must appear after the INTRINSIC/NON_INTRINSIC attribute
# ) : end the unsaved grouping
# ) : end the unsaved grouping
# \s* : match any amount of white space
# (\w+) : match the module name that is being USE'd
#
#
use_regex = "(?i)(?:^|;)\s*USE(?:\s+|(?:(?:\s*,\s*(?:NON_)?INTRINSIC)?\s*::))\s*(\w+)"
# The INCLUDE statement regex matches the following:
#
# INCLUDE 'some_Text'
# INCLUDE "some_Text"
# INCLUDE "some_Text" ; INCLUDE "some_Text"
# INCLUDE kind_"some_Text"
# INCLUDE kind_'some_Text"
#
# where some_Text can include any alphanumeric and/or special character
# as defined by the Fortran 2003 standard.
#
# Limitations:
#
# -- The Fortran standard dictates that a " or ' in the INCLUDE'd
# string must be represented as a "" or '', if the quotes that wrap
# the entire string are either a ' or ", respectively. While the
# regular expression below can detect the ' or " characters just fine,
# the scanning logic, presently is unable to detect them and reduce
# them to a single instance. This probably isn't an issue since,
# in practice, ' or " are not generally used in filenames.
#
# -- This regex will not properly deal with multiple INCLUDE statements
# when the entire line has been commented out, ala
#
# ! INCLUDE 'some_file' ; INCLUDE 'some_file'
#
# In such cases, it will properly ignore the first INCLUDE file,
# but will actually still pick up the second. Interestingly enough,
# the regex will properly deal with these cases:
#
# INCLUDE 'some_file'
# INCLUDE 'some_file' !; INCLUDE 'some_file'
#
# To get around the above limitation, the FORTRAN programmer could
# simply comment each INCLUDE statement separately, like this
#
# ! INCLUDE 'some_file' !; INCLUDE 'some_file'
#
# The way I see it, the only way to get around this limitation would
# be to modify the scanning logic to replace the calls to re.findall
# with a custom loop that processes each line separately, throwing
# away fully commented out lines before attempting to match against
# the INCLUDE syntax.
#
# Here is a breakdown of the regex:
#
# (?i) : regex is case insensitive
# (?: : begin a non-saving group that matches the following:
# ^ : either the start of the line
# | : or
# ['">]\s*; : a semicolon that follows a single quote,
# double quote or greater than symbol (with any
# amount of whitespace in between). This will
# allow the regex to match multiple INCLUDE
# statements per line (although it also requires
# the positive lookahead assertion that is
# used below). It will even properly deal with
# (i.e. ignore) cases in which the additional
# INCLUDES are part of an in-line comment, ala
# " INCLUDE 'someFile' ! ; INCLUDE 'someFile2' "
# ) : end of non-saving group
# \s* : any amount of white space
# INCLUDE : match the string INCLUDE, case insensitive
# \s+ : match one or more white space characters
# (?\w+_)? : match the optional "kind-param _" prefix allowed by the standard
# [<"'] : match the include delimiter - an apostrophe, double quote, or less than symbol
# (.+?) : match one or more characters that make up
# the included path and file name and save it
# in a group. The Fortran standard allows for
# any non-control character to be used. The dot
# operator will pick up any character, including
# control codes, but I can't conceive of anyone
# putting control codes in their file names.
# The question mark indicates it is non-greedy so
#                  that the regex will match only up to the next apostrophe,
#                  double quote, or greater than symbol
# (?=["'>]) : positive lookahead assertion to match the include
# delimiter - an apostrophe, double quote, or
# greater than symbol. This level of complexity
# is required so that the include delimiter is
# not consumed by the match, thus allowing the
# sub-regex discussed above to uniquely match a
# set of semicolon-separated INCLUDE statements
# (as allowed by the F2003 standard)
    include_regex = """(?i)(?:^|['">]\s*;)\s*INCLUDE\s+(?:\w+_)?[<"'](.+?)(?=["'>])"""
# The MODULE statement regex finds module definitions by matching
# the following:
#
# MODULE module_name
#
# but *not* the following:
#
# MODULE PROCEDURE procedure_name
#
# Here is a breakdown of the regex:
#
# (?i) : regex is case insensitive
# ^\s* : any amount of white space
# MODULE : match the string MODULE, case insensitive
# \s+ : match one or more white space characters
# (?!PROCEDURE) : but *don't* match if the next word matches
# PROCEDURE (negative lookahead assertion),
# case insensitive
# (\w+) : match one or more alphanumeric characters
# that make up the defined module name and
# save it in a group
    def_regex = """(?i)^\s*MODULE\s+(?!PROCEDURE)(\w+)"""
scanner = F90Scanner("FortranScan",
"$FORTRANSUFFIXES",
path_variable,
use_regex,
include_regex,
def_regex)
return scanner | [
"Return a prototype Scanner instance for scanning source files\n for Fortran USE & INCLUDE statements",
"(?i)(?:^|['\">]\\s*;)\\s*INCLUDE\\s+(?:\\w+_)?[<\"'](.+?)(?=[\"'>])",
"(?i)^\\s*MODULE\\s+(?!PROCEDURE)(\\w+)"
] |
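For illustration, here is a minimal demonstration of the three patterns assembled above (this snippet is not part of the SCons source; the Fortran lines and names are invented):

import re

use_regex = r"(?i)(?:^|;)\s*USE(?:\s+|(?:(?:\s*,\s*(?:NON_)?INTRINSIC)?\s*::))\s*(\w+)"
include_regex = r"""(?i)(?:^|['">]\s*;)\s*INCLUDE\s+(?:\w+_)?[<"'](.+?)(?=["'>])"""
def_regex = r"(?i)^\s*MODULE\s+(?!PROCEDURE)(\w+)"

lines = [
    "module heat_solver",                 # def_regex captures 'heat_solver'
    "  use mpi",                          # use_regex captures 'mpi'
    "  use, intrinsic :: iso_c_binding",  # the ", INTRINSIC ::" form also matches
    "  include 'constants.inc'",          # include_regex captures 'constants.inc'
    "  module procedure solve",           # rejected by the negative lookahead
]
for line in lines:
    for label, pattern in (("USE", use_regex), ("INCLUDE", include_regex), ("MODULE", def_regex)):
        for name in re.findall(pattern, line):
            print("%-8s -> %s" % (label, name))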
Please provide a description of the function:def generate(env):
fortran.generate(env)
env['FORTRAN'] = 'f90'
env['FORTRANCOM'] = '$FORTRAN $FORTRANFLAGS $_FORTRANMODFLAG $_FORTRANINCFLAGS /compile_only ${SOURCES.windows} /object:${TARGET.windows}'
env['FORTRANPPCOM'] = '$FORTRAN $FORTRANFLAGS $CPPFLAGS $_CPPDEFFLAGS $_FORTRANMODFLAG $_FORTRANINCFLAGS /compile_only ${SOURCES.windows} /object:${TARGET.windows}'
env['SHFORTRANCOM'] = '$SHFORTRAN $SHFORTRANFLAGS $_FORTRANMODFLAG $_FORTRANINCFLAGS /compile_only ${SOURCES.windows} /object:${TARGET.windows}'
env['SHFORTRANPPCOM'] = '$SHFORTRAN $SHFORTRANFLAGS $CPPFLAGS $_CPPDEFFLAGS $_FORTRANMODFLAG $_FORTRANINCFLAGS /compile_only ${SOURCES.windows} /object:${TARGET.windows}'
env['OBJSUFFIX'] = '.obj'
env['FORTRANMODDIR'] = '${TARGET.dir}'
env['FORTRANMODDIRPREFIX'] = '/module:'
env['FORTRANMODDIRSUFFIX'] = '' | [
"Add Builders and construction variables for compaq visual fortran to an Environment."
] |
Please provide a description of the function:def read(self):
self.build()
if not hasattr(self, 'built_value'):
self.built_value = self.value
return self.built_value | [
"Return the value. If necessary, the value is built."
] |
Please provide a description of the function:def get_text_contents(self):
###TODO: something reasonable about universal newlines
contents = str(self.value)
for kid in self.children(None):
contents = contents + kid.get_contents().decode()
return contents | [
"By the assumption that the node.built_value is a\n deterministic product of the sources, the contents of a Value\n are the concatenation of all the contents of its sources. As\n the value need not be built when get_contents() is called, we\n cannot use the actual node.built_value."
] |
Please provide a description of the function:def get_csig(self, calc=None):
try:
return self.ninfo.csig
except AttributeError:
pass
contents = self.get_contents()
self.get_ninfo().csig = contents
return contents | [
"Because we're a Python value node and don't have a real\n timestamp, we get to ignore the calculator and just use the\n value contents."
] |
Please provide a description of the function:def restore(self, state):
own_properties = set(self.get_properties())
state_properties = set(state)
to_restore = own_properties.intersection(state_properties)
for name in to_restore:
value = state.get(name)
if name in self._complex_properties:
value = self._complex_properties[name][1](value)
setattr(self, name, value) | [
"Restore this state from the output of a previous call to dump().\n\n Only those properties in this object and listed in state will be\n updated. Other properties will not be modified and state may contain\n keys that do not correspond with properties in this object.\n\n Args:\n state (dict): A serialized representation of this object.\n "
] |
Please provide a description of the function:def mark_complex(self, name, serializer, deserializer):
self._complex_properties[name] = (serializer, deserializer) | [
"Mark a property as complex with serializer and deserializer functions.\n\n Args:\n name (str): The name of the complex property.\n serializer (callable): The function to call to serialize the property's\n value to something that can be saved in a json.\n deserializer (callable): The function to call to unserialize the property\n from a dict loaded by a json back to the original value.\n "
] |
Please provide a description of the function:def mark_typed_list(self, name, type_object):
if not hasattr(type_object, 'dump'):
raise ArgumentError("The passed type object %s is missing required method: dump()" % type_object)
if not hasattr(type_object, 'Restore'):
raise ArgumentError("The passed type object %s is missing required method: Restore()" % type_object)
def _dump_list(obj):
if obj is None:
return None
if not isinstance(obj, list):
raise DataError("Property %s marked as list was not a list: %s" % (name, repr(obj)))
return [x.dump() for x in obj]
def _restore_list(obj):
if obj is None:
return obj
return [type_object.Restore(x) for x in obj]
self.mark_complex(name, _dump_list, _restore_list) | [
"Mark a property as containing serializable objects of a given type.\n\n This convenience method allows you to avoid having to call\n ``mark_complex()`` whenever you need to serialize a list of objects.\n This method requires that all members of the given list be of a single\n class that contains a dump() method and a Restore() class method where\n type_object.Restore(x.dump()) == x.\n\n Args:\n name (str): The name of the complex property.\n type_object: The class object that will be contained inside\n this list.\n "
] |
Please provide a description of the function:def mark_typed_map(self, name, type_object):
if not hasattr(type_object, 'dump'):
raise ArgumentError("The passed type object %s is missing required method: dump()" % type_object)
if not hasattr(type_object, 'Restore'):
raise ArgumentError("The passed type object %s is missing required method: Restore()" % type_object)
def _dump_map(obj):
if obj is None:
return None
if not isinstance(obj, dict):
raise DataError("Property %s marked as list was not a dict: %s" % (name, repr(obj)))
return {key: val.dump() for key, val in obj.items()}
def _restore_map(obj):
if obj is None:
return obj
return {key: type_object.Restore(val) for key, val in obj.items()}
self.mark_complex(name, _dump_map, _restore_map) | [
"Mark a property as containing a map str to serializable object.\n\n This convenience method allows you to avoid having to call\n ``mark_complex()`` whenever you need to serialize a dict of objects.\n This method requires that all members of the given dict be of a single\n class that contains a dump() method and a Restore() class method where\n type_object.Restore(x.dump()) == x.\n\n Args:\n name (str): The name of the complex property.\n type_object: The class object that will be contained inside\n this dict.\n "
] |
Please provide a description of the function:def mark_typed_object(self, name, type_object):
if not hasattr(type_object, 'dump'):
raise ArgumentError("The passed type object %s is missing required method: dump()" % type_object)
if not hasattr(type_object, 'Restore'):
raise ArgumentError("The passed type object %s is missing required method: Restore()" % type_object)
def _dump_obj(obj):
if obj is None:
return None
return obj.dump()
def _restore_obj(obj):
if obj is None:
return obj
return type_object.Restore(obj)
self.mark_complex(name, _dump_obj, _restore_obj) | [
"Mark a property as containing a serializable object.\n\n This convenience method allows you to avoid having to call\n ``mark_complex()`` whenever you need to serialize a complex object.\n This method requires that property ``name`` be a single class that\n contains a dump() method and a Restore() class method where\n type_object.Restore(x.dump()) == x.\n\n Args:\n name (str): The name of the complex property.\n type_object: The class object that will be contained inside\n this property.\n "
] |
Please provide a description of the function:def dump_property(self, name):
if not hasattr(self, name):
raise ArgumentError("Unknown property %s" % name)
value = getattr(self, name)
if name in self._complex_properties:
value = self._complex_properties[name][0](value)
return value | [
"Serialize a property of this class by name.\n\n Args:\n name (str): The name of the property to dump.\n\n Returns:\n object: The serialized value of the property.\n "
] |
Please provide a description of the function:def get_properties(self):
names = inspect.getmembers(self, predicate=lambda x: not inspect.ismethod(x))
return [x[0] for x in names if not x[0].startswith("_") and x[0] not in self._ignored_properties] | [
"Get a list of all of the public data properties of this class.\n\n Returns:\n list of str: A list of all of the public properties in this class.\n "
] |
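Taken together, these methods form a serialization mixin. A hypothetical round-trip sketch, assuming the methods above live on a base class (called SerializableState here) and treating dump() as mapping get_properties() through dump_property():

class Reading:
    """Toy member type satisfying the dump()/Restore() contract."""
    def __init__(self, value):
        self.value = value
    def dump(self):
        return {'value': self.value}
    @classmethod
    def Restore(cls, state):
        return cls(state['value'])

class SensorState(SerializableState):      # base class name is an assumption
    def __init__(self):
        super(SensorState, self).__init__()
        self.name = 'sensor'
        self.readings = []
        self.mark_typed_list('readings', Reading)

state = SensorState()
state.readings.append(Reading(42))
snapshot = {p: state.dump_property(p) for p in state.get_properties()}

restored = SensorState()
restored.restore(snapshot)
assert restored.readings[0].value == 42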
Please provide a description of the function:def get_default_version(env):
if 'MSVS' not in env or not SCons.Util.is_Dict(env['MSVS']):
# get all versions, and remember them for speed later
versions = [vs.version for vs in get_installed_visual_studios()]
env['MSVS'] = {'VERSIONS' : versions}
else:
versions = env['MSVS'].get('VERSIONS', [])
if 'MSVS_VERSION' not in env:
if versions:
env['MSVS_VERSION'] = versions[0] #use highest version by default
else:
debug('get_default_version: WARNING: no installed versions found, '
'using first in SupportedVSList (%s)'%SupportedVSList[0].version)
env['MSVS_VERSION'] = SupportedVSList[0].version
env['MSVS']['VERSION'] = env['MSVS_VERSION']
return env['MSVS_VERSION'] | [
"Returns the default version string to use for MSVS.\n\n If no version was requested by the user through the MSVS environment\n variable, query all the available visual studios through\n get_installed_visual_studios, and take the highest one.\n\n Return\n ------\n version: str\n the default version.\n "
] |
Please provide a description of the function:def get_default_arch(env):
arch = env.get('MSVS_ARCH', 'x86')
msvs = InstalledVSMap.get(env['MSVS_VERSION'])
if not msvs:
arch = 'x86'
    elif arch not in msvs.get_supported_arch():
fmt = "Visual Studio version %s does not support architecture %s"
raise SCons.Errors.UserError(fmt % (env['MSVS_VERSION'], arch))
return arch | [
"Return the default arch to use for MSVS\n\n if no version was requested by the user through the MSVS_ARCH environment\n variable, select x86\n\n Return\n ------\n arch: str\n "
] |
Please provide a description of the function:def encrypt_report(self, device_id, root, data, **kwargs):
for _priority, provider in self.providers:
try:
return provider.encrypt_report(device_id, root, data, **kwargs)
except NotFoundError:
pass
raise NotFoundError("encrypt_report method is not implemented in any sub_providers") | [
"Encrypt a buffer of report data on behalf of a device.\n\n Args:\n device_id (int): The id of the device that we should encrypt for\n root (int): The root key type that should be used to generate the report\n data (bytearray): The data that we should encrypt.\n **kwargs: There are additional specific keyword args that are required\n depending on the root key used. Typically, you must specify\n - report_id (int): The report id\n - sent_timestamp (int): The sent timestamp of the report\n\n These two bits of information are used to construct the per report\n signing and encryption key from the specific root key type.\n\n Returns:\n dict: The encrypted data and any associated metadata about the data.\n The data itself must always be a bytearray stored under the 'data'\n key, however additional keys may be present depending on the encryption method\n used.\n\n Raises:\n NotFoundError: If the auth provider is not able to encrypt the data.\n "
] |
Please provide a description of the function:def verify_report(self, device_id, root, data, signature, **kwargs):
for _priority, provider in self.providers:
try:
return provider.verify_report(device_id, root, data, signature, **kwargs)
except NotFoundError:
pass
raise NotFoundError("verify_report method is not implemented in any sub_providers") | [
"Verify a buffer of report data on behalf of a device.\n\n Args:\n device_id (int): The id of the device that we should encrypt for\n root (int): The root key type that should be used to generate the report\n data (bytearray): The data that we should verify\n signature (bytearray): The signature attached to data that we should verify\n **kwargs: There are additional specific keyword args that are required\n depending on the root key used. Typically, you must specify\n - report_id (int): The report id\n - sent_timestamp (int): The sent timestamp of the report\n\n These two bits of information are used to construct the per report\n signing and encryption key from the specific root key type.\n\n Returns:\n dict: The result of the verification process must always be a bool under the\n 'verified' key, however additional keys may be present depending on the\n signature method used.\n\n Raises:\n NotFoundError: If the auth provider is not able to verify the data due to\n an error. If the data is simply not valid, then the function returns\n normally.\n "
] |
Please provide a description of the function:def format_rpc(self, address, rpc_id, payload):
addr_word = (rpc_id | (address << 16) | ((1 << 1) << 24))
send_length = len(payload)
if len(payload) < 20:
payload = payload + b'\0'*(20 - len(payload))
payload_words = struct.unpack("<5L", payload)
return self.base_address + self.RPC_TLS_OFFSET + 8, ([addr_word, send_length, 0] + [x for x in payload_words]) | [
"Create a formated word list that encodes this rpc."
] |
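A short worked example of the packing (address, rpc_id, and payload chosen arbitrarily):

import struct

address, rpc_id = 8, 0x8000
addr_word = rpc_id | (address << 16) | ((1 << 1) << 24)
print(hex(addr_word))                       # 0x2088000

payload = b'\x01\x02'
padded = payload + b'\0' * (20 - len(payload))
print([hex(w) for w in struct.unpack("<5L", padded)])
# ['0x201', '0x0', '0x0', '0x0', '0x0']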
Please provide a description of the function:def format_response(self, response_data):
_addr, length = self.response_info()
if len(response_data) != length:
raise HardwareError("Invalid response read length, should be the same as what response_info() returns", expected=length, actual=len(response_data))
resp, flags, received_length, payload = struct.unpack("<HxBL4x20s", response_data)
resp = resp & 0xFF
if flags & (1 << 3):
raise HardwareError("Could not grab external gate")
if received_length > 20:
raise HardwareError("Invalid received payload length > 20 bytes", received_length=received_length)
payload = payload[:received_length]
return {
'status': resp,
'payload': payload
} | [
"Format an RPC response."
] |
Please provide a description of the function:def ProgramScanner(**kw):
kw['path_function'] = SCons.Scanner.FindPathDirs('LIBPATH')
ps = SCons.Scanner.Base(scan, "ProgramScanner", **kw)
return ps | [
"Return a prototype Scanner instance for scanning executable\n files for static-lib dependencies"
] |
Please provide a description of the function:def _subst_libs(env, libs):
if SCons.Util.is_String(libs):
libs = env.subst(libs)
if SCons.Util.is_String(libs):
libs = libs.split()
elif SCons.Util.is_Sequence(libs):
_libs = []
for l in libs:
_libs += _subst_libs(env, l)
libs = _libs
else:
# libs is an object (Node, for example)
libs = [libs]
return libs | [
"\n Substitute environment variables and split into list.\n "
] |
Please provide a description of the function:def scan(node, env, libpath = ()):
try:
libs = env['LIBS']
except KeyError:
# There are no LIBS in this environment, so just return a null list:
return []
libs = _subst_libs(env, libs)
try:
prefix = env['LIBPREFIXES']
if not SCons.Util.is_List(prefix):
prefix = [ prefix ]
except KeyError:
prefix = [ '' ]
try:
suffix = env['LIBSUFFIXES']
if not SCons.Util.is_List(suffix):
suffix = [ suffix ]
except KeyError:
suffix = [ '' ]
pairs = []
for suf in map(env.subst, suffix):
for pref in map(env.subst, prefix):
pairs.append((pref, suf))
result = []
if callable(libpath):
libpath = libpath()
find_file = SCons.Node.FS.find_file
adjustixes = SCons.Util.adjustixes
for lib in libs:
if SCons.Util.is_String(lib):
for pref, suf in pairs:
l = adjustixes(lib, pref, suf)
l = find_file(l, libpath, verbose=print_find_libs)
if l:
result.append(l)
else:
result.append(lib)
return result | [
"\n This scanner scans program files for static-library\n dependencies. It will search the LIBPATH environment variable\n for libraries specified in the LIBS variable, returning any\n files it finds as dependencies.\n "
] |
Please provide a description of the function:def ListVariable(key, help, default, names, map={}):
names_str = 'allowed names: %s' % ' '.join(names)
if SCons.Util.is_List(default):
default = ','.join(default)
help = '\n '.join(
(help, '(all|none|comma-separated list of names)', names_str))
return (key, help, default,
None, #_validator,
lambda val: _converter(val, names, map)) | [
"\n The input parameters describe a 'package list' option, thus they\n are returned with the correct converter and validator appended. The\n result is usable for input to opts.Add() .\n\n A 'package list' option may either be 'all', 'none' or a list of\n package names (separated by space).\n "
] |
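Typical usage in an SConstruct looks like the following sketch (the option name and library names are invented):

from SCons.Script import Variables, ListVariable, Environment

opts = Variables()
opts.Add(ListVariable('shared', 'libraries to build shared', 'all',
                      names=['x11', 'gl', 'qt']))
env = Environment(variables=opts)
for lib in env['shared']:       # e.g. "scons shared=x11,gl" selects a subset
    print('building %s as a shared library' % lib)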
Please provide a description of the function:def clear_to_reset(self, config_vars):
super(RemoteBridgeState, self).clear_to_reset(config_vars)
self.status = BRIDGE_STATUS.IDLE
self.error = 0 | [
"Clear the RemoteBridge subsystem to its reset state."
] |
Please provide a description of the function:def begin_script(self):
if self.remote_bridge.status in (BRIDGE_STATUS.RECEIVED, BRIDGE_STATUS.VALIDATED, BRIDGE_STATUS.EXECUTING):
return [1] #FIXME: Return correct error here
self.remote_bridge.status = BRIDGE_STATUS.WAITING
self.remote_bridge.error = 0
self.remote_bridge.script_error = None
self.remote_bridge.parsed_script = None
self._device.script = bytearray()
return [0] | [
"Indicate we are going to start loading a script."
] |
Please provide a description of the function:def end_script(self):
if self.remote_bridge.status not in (BRIDGE_STATUS.RECEIVED, BRIDGE_STATUS.WAITING):
return [1] #FIXME: State change
self.remote_bridge.status = BRIDGE_STATUS.RECEIVED
return [0] | [
"Indicate that we have finished receiving a script."
] |
Please provide a description of the function:def trigger_script(self):
if self.remote_bridge.status not in (BRIDGE_STATUS.RECEIVED,):
return [1] #FIXME: State change
# This is asynchronous in real life so just cache the error
try:
self.remote_bridge.parsed_script = UpdateScript.FromBinary(self._device.script)
#FIXME: Actually run the script
self.remote_bridge.status = BRIDGE_STATUS.IDLE
except Exception as exc:
self._logger.exception("Error parsing script streamed to device")
self.remote_bridge.script_error = exc
self.remote_bridge.error = 1 # FIXME: Error code
return [0] | [
"Actually process a script."
] |
Please provide a description of the function:def reset_script(self):
self.remote_bridge.status = BRIDGE_STATUS.IDLE
self.remote_bridge.error = 0
self.remote_bridge.parsed_script = None
self._device.script = bytearray()
return [0] | [
"Clear any partially received script."
] |
Please provide a description of the function:def render_template_inplace(template_path, info, dry_run=False, extra_filters=None, resolver=None):
filters = {}
if resolver is not None:
filters['find_product'] = _create_resolver_filter(resolver)
if extra_filters is not None:
filters.update(extra_filters)
basedir = os.path.dirname(template_path)
template_name = os.path.basename(template_path)
if not template_name.endswith('.tpl'):
raise ArgumentError("You must specify a filename that ends in .tpl", filepath=template_path)
out_path = os.path.join(basedir, template_name[:-4])
if basedir == '':
basedir = '.'
env = Environment(loader=FileSystemLoader(basedir),
trim_blocks=True, lstrip_blocks=True)
# Load any filters the user wants us to use
for name, func in filters.items():
env.filters[name] = func
template = env.get_template(template_name)
result = template.render(info)
if not dry_run:
with open(out_path, 'wb') as outfile:
outfile.write(result.encode('utf-8'))
return out_path | [
"Render a template file in place.\n\n This function expects template path to be a path to a file\n that ends in .tpl. It will be rendered to a file in the\n same directory with the .tpl suffix removed.\n\n Args:\n template_path (str): The path to the template file\n that we want to render in place.\n info (dict): A dictionary of variables passed into the template to\n perform substitutions.\n dry_run (bool): Whether to actually render the output file or just return\n the file path that would be generated.\n extra_filters (dict of str -> callable): An optional group of filters that\n will be made available to the template. The dict key will be the\n name at which callable is made available.\n resolver (ProductResolver): The specific ProductResolver class to use in the\n find_product filter.\n\n Returns:\n str: The path to the output file generated.\n "
] |
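For example (path and variables invented), rendering a template next to itself:

out = render_template_inplace('firmware/config.json.tpl', {'version': '1.0.2'})
print(out)    # firmware/config.json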
Please provide a description of the function:def render_template(template_name, info, out_path=None):
env = Environment(loader=PackageLoader('iotile.build', 'config/templates'),
trim_blocks=True, lstrip_blocks=True)
template = env.get_template(template_name)
result = template.render(info)
if out_path is not None:
with open(out_path, 'wb') as outfile:
outfile.write(result.encode('utf-8'))
return result | [
"Render a template using the variables in info.\n\n You can optionally render to a file by passing out_path.\n\n Args:\n template_name (str): The name of the template to load. This must\n be a file in config/templates inside this package\n out_path (str): An optional path of where to save the output\n file, otherwise it is just returned as a string.\n info (dict): A dictionary of variables passed into the template to\n perform substitutions.\n\n Returns:\n string: The rendered template data.\n "
] |
Please provide a description of the function:def render_recursive_template(template_folder, info, out_folder, preserve=None, dry_run=False):
if isinstance(preserve, str):
raise ArgumentError("You must pass a list of strings to preserve, not a string", preserve=preserve)
if preserve is None:
preserve = []
preserve = set(preserve)
template_dir = resource_path('templates', expect='folder')
indir = os.path.join(template_dir, template_folder)
if not os.path.exists(indir):
raise ArgumentError("Input template folder for recursive template not found",
template_folder=template_folder, absolute_path=indir)
elif not os.path.isdir(indir):
raise ArgumentError("Input template folder is not a directory",
template_folder=template_folder, absolute_path=indir)
create_dirs = []
file_map = {}
# Walk over all input files
for dirpath, dirs, files in os.walk(indir):
for file in files:
in_abspath = os.path.abspath(os.path.join(dirpath, file))
in_path = os.path.relpath(os.path.join(dirpath, file), start=indir)
if file.endswith(".tpl") and not in_path in preserve:
out_path = in_path[:-4]
else:
out_path = in_path
file_map[out_path] = (in_path, in_abspath)
for folder in dirs:
dir_path = os.path.relpath(os.path.join(dirpath, folder), start=indir)
create_dirs.append(dir_path)
# Actually render / copy all files if we are not doing a dry run
if not dry_run:
for folder in create_dirs:
out_path = os.path.join(out_folder, folder)
if not os.path.isdir(out_path):
os.makedirs(out_path)
for out_rel, (in_path, in_abspath) in file_map.items():
out_path = os.path.join(out_folder, out_rel)
if in_path in preserve or not in_path.endswith(".tpl"):
shutil.copyfile(in_abspath, out_path)
else:
# jinja needs to have unix path separators regardless of the platform and a relative path
# from the templates base directory
in_template_path = os.path.join(template_folder, in_path).replace(os.path.sep, '/')
render_template(in_template_path, info, out_path=out_path)
return file_map, create_dirs | [
"Copy a directory tree rendering all templates found within.\n\n This function inspects all of the files in template_folder recursively. If\n any file ends .tpl, it is rendered using render_template and the .tpl\n suffix is removed. All other files are copied without modification.\n\n out_folder is not cleaned before rendering so you must delete its contents\n yourself if you want that behavior.\n\n If you just want to see all of the file paths that would be generated,\n call with dry_run=True. This will not render anything but just inspect\n what would be generated.\n\n Args:\n template_folder (str): A relative path from config/templates with the\n folder that should be rendered recursively.\n info (dict): A dictionary of variables to be substituted into any\n templates found in template_folder.\n out_folder (str): The path to the output folder where the template will\n be generated.\n dry_run (bool): Whether to actually render output files or just return\n the files that would be generated.\n preserve (list of str): A list of file names relative to the start of the\n template folder that we are rendering that end in .tpl but should not\n be rendered and should not have their .tpl suffix removed. This allows\n you to partially render a template so that you can render a specific\n file later.\n\n Returns:\n dict, list: The dict is map of output file path (relative to\n out_folder) to the absolute path of the input file that it depends\n on. This result is suitable for using in a dependency graph like\n SCons. The list is a list of all of the directories that would need\n to be created to hold these files (not including out_folder).\n "
] |
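A dry run reports the mapping without touching the filesystem (the template folder name here is invented):

file_map, dirs = render_recursive_template('component', {'name': 'demo'},
                                           'build/output', dry_run=True)
for out_rel, (in_path, _abspath) in file_map.items():
    print('%s <- %s' % (out_rel, in_path))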
Please provide a description of the function:def generate(env):
cplusplus.generate(env)
if acc:
env['CXX'] = acc or 'aCC'
env['SHCXXFLAGS'] = SCons.Util.CLVar('$CXXFLAGS +Z')
# determine version of aCC
line = os.popen(acc + ' -V 2>&1').readline().rstrip()
if line.find('aCC: HP ANSI C++') == 0:
env['CXXVERSION'] = line.split()[-1]
if env['PLATFORM'] == 'cygwin':
env['SHCXXFLAGS'] = SCons.Util.CLVar('$CXXFLAGS')
else:
env['SHCXXFLAGS'] = SCons.Util.CLVar('$CXXFLAGS +Z') | [
"Add Builders and construction variables for g++ to an Environment."
] |
Please provide a description of the function:def _find_monitor(monitors, handle):
found_devs = set()
found_events = set()
for conn_string, device in monitors.items():
for event, handles in device.items():
if handle in handles:
found_events.add(event)
found_devs.add(conn_string)
return found_devs, found_events | [
"Find all devices and events with a given monitor installed."
] |
Please provide a description of the function:def _add_monitor(monitors, handle, callback, devices, events):
for conn_string in devices:
data = monitors.get(conn_string)
if data is None:
data = dict()
monitors[conn_string] = data
for event in events:
event_dict = data.get(event)
if event_dict is None:
event_dict = dict()
data[event] = event_dict
event_dict[handle] = callback | [
"Add the given monitor to the listed devices and events."
] |
Please provide a description of the function:def _remove_monitor(monitors, handle, devices, events):
empty_devices = []
for conn_string in devices:
data = monitors.get(conn_string)
if data is None:
continue
for event in events:
event_dict = data.get(event)
if event_dict is None:
continue
if handle in event_dict:
del event_dict[handle]
if len(event_dict) == 0:
del data[event]
if len(data) == 0:
empty_devices.append(conn_string)
return empty_devices | [
"Remove the given monitor from the listed devices and events."
] |
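These three helpers manage a nested {conn_string: {event: {handle: callback}}} map; a quick round trip (device and event names invented):

monitors = {}
_add_monitor(monitors, 'h1', print, ['dev/1'], ['report'])
devs, events = _find_monitor(monitors, 'h1')     # ({'dev/1'}, {'report'})
empty = _remove_monitor(monitors, 'h1', devs, events)
print(empty)    # ['dev/1'] - the caller should now drop this device entry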
Please provide a description of the function:def register_monitor(self, devices, events, callback):
# Ensure we don't exhaust any iterables
events = list(events)
devices = list(devices)
for event in events:
if event not in self.SUPPORTED_EVENTS:
raise ArgumentError("Unknown event type {} specified".format(event), events=events)
monitor_id = str(uuid.uuid4())
action = (monitor_id, "add", devices, events)
self._callbacks[monitor_id] = callback
if self._currently_notifying:
self._deferred_adjustments.append(action)
else:
self._adjust_monitor_internal(*action)
return monitor_id | [
"Register a callback when events happen.\n\n If this method is called, it is guaranteed to take effect before the\n next call to ``_notify_event`` after this method returns. This method\n is safe to call from within a callback that is itself called by\n ``notify_event``.\n\n See :meth:`AbstractDeviceAdapter.register_monitor`.\n "
] |
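A hypothetical registration (the adapter object, connection string, and event name are placeholders; the callback signature mirrors what _notify_event_internal invokes below):

def on_event(conn_string, conn_id, name, event):
    print('%s on %s (conn %s): %r' % (name, conn_string, conn_id, event))

handle = adapter.register_monitor(['device/0x1234'], ['report'], on_event)
# later, rescope or remove it:
# adapter.adjust_monitor(handle, 'add', ['device/0x5678'], ['report'])
# adapter.remove_monitor(handle)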
Please provide a description of the function:def iter_monitors(self):
for conn_string, events in self._monitors.items():
for event, handlers in events.items():
for handler in handlers:
yield (conn_string, event, handler) | [
"Iterate over all defined (conn_string, event, monitor) tuples."
] |
Please provide a description of the function:def adjust_monitor(self, handle, action, devices, events):
events = list(events)
devices = list(devices)
for event in events:
if event not in self.SUPPORTED_EVENTS:
raise ArgumentError("Unknown event type {} specified".format(event), events=events)
if action not in self.SUPPORTED_ADJUSTMENTS:
raise ArgumentError("Unknown adjustment {} specified".format(action))
action = (handle, action, devices, events)
if self._currently_notifying:
self._deferred_adjustments.append(action)
else:
self._adjust_monitor_internal(*action) | [
"Adjust a previously registered callback.\n\n See :meth:`AbstractDeviceAdapter.adjust_monitor`.\n "
] |
Please provide a description of the function:def remove_monitor(self, handle):
action = (handle, "delete", None, None)
if self._currently_notifying:
self._deferred_adjustments.append(action)
else:
self._adjust_monitor_internal(*action) | [
"Remove a previously registered monitor.\n\n See :meth:`AbstractDeviceAdapter.adjust_monitor`.\n "
] |
Please provide a description of the function:async def _notify_event_internal(self, conn_string, name, event):
try:
self._currently_notifying = True
conn_id = self._get_conn_id(conn_string)
event_maps = self._monitors.get(conn_string, {})
wildcard_maps = self._monitors.get(None, {})
wildcard_handlers = wildcard_maps.get(name, {})
event_handlers = event_maps.get(name, {})
for handler, func in itertools.chain(event_handlers.items(), wildcard_handlers.items()):
try:
result = func(conn_string, conn_id, name, event)
if inspect.isawaitable(result):
await result
except: #pylint:disable=bare-except;This is a background function and we are logging exceptions
self._logger.warning("Error calling notification callback id=%s, func=%s", handler, func, exc_info=True)
finally:
for action in self._deferred_adjustments:
self._adjust_monitor_internal(*action)
self._deferred_adjustments = []
self._currently_notifying = False | [
"Notify that an event has occured.\n\n This method will send a notification and ensure that all callbacks\n registered for it have completed by the time it returns. In\n particular, if the callbacks are awaitable, this method will await\n them before returning. The order in which the callbacks are called\n is undefined.\n\n This is a low level method that is not intended to be called directly.\n You should use the high level public notify_* methods for each of the\n types of events to ensure consistency in how the event objects are\n created.\n\n Args:\n conn_string (str): The connection string for the device that the\n event is associated with.\n name (str): The name of the event. Must be in SUPPORTED_EVENTS.\n event (object): The event object. The type of this object will\n depend on what is being notified.\n "
] |
Please provide a description of the function:def notify_event(self, conn_string, name, event):
return self._loop.launch_coroutine(self._notify_event_internal, conn_string, name, event) | [
"Notify an event.\n\n This method will launch a coroutine that runs all callbacks (and\n awaits all coroutines) attached to the given event that was just\n raised. Internally it uses\n :meth:`BackgroundEventLoop.launch_coroutine` which retains an\n awaitable object when called from within an event loop and a\n concurrent Future object when called outside of the event loop.\n\n Calling this method from outside of the BackgroundEventLoop is\n considered experimental and not stable behavior that can be depended\n on.\n\n Args:\n conn_string (str): The connection string for the device that the\n event is associated with.\n name (str): The name of the event. Must be in SUPPORTED_EVENTS.\n event (object): The event object. The type of this object will\n depend on what is being notified.\n\n Returns:\n awaitable: An awaitable object that can be used to wait for all callbacks.\n "
] |