Code
stringlengths 103
85.9k
| Summary
sequencelengths 0
94
|
---|---|
Please provide a description of the function:def send_rpc(self, name, rpc_id, payload, timeout=1.0):
return self._loop.run_coroutine(self._client.send_rpc(name, rpc_id, payload, timeout)) | [
"Send an RPC to a service and synchronously wait for the response.\n\n Args:\n name (str): The short name of the service to send the RPC to\n rpc_id (int): The id of the RPC we want to call\n payload (bytes): Any binary arguments that we want to send\n timeout (float): The number of seconds to wait for the RPC to finish\n before timing out and returning\n\n Returns:\n dict: A response dictionary with 1 or 2 keys set\n 'result': one of 'success', 'service_not_found',\n or 'rpc_not_found', 'timeout'\n 'response': the binary response object if the RPC was successful\n "
] |
Please provide a description of the function:def register_service(self, short_name, long_name, allow_duplicate=True):
self._loop.run_coroutine(self._client.register_service(short_name, long_name, allow_duplicate)) | [
"Register a new service with the service manager.\n\n Args:\n short_name (string): A unique short name for this service that functions\n as an id\n long_name (string): A user facing name for this service\n allow_duplicate (boolean): Don't throw an error if this service is already\n registered. This is important if the service is preregistered for example.\n Raises:\n ArgumentError: if the short_name is already taken\n "
] |
Please provide a description of the function:def register_agent(self, short_name):
self._loop.run_coroutine(self._client.register_agent(short_name)) | [
"Register to act as the RPC agent for this service.\n\n After this cal succeeds, all requests to send RPCs to this service will be routed\n through this agent.\n\n Args:\n short_name (str): A unique short name for this service that functions\n as an id\n "
] |
Please provide a description of the function:def execute_before(self, sensor_graph, scope_stack):
parent = scope_stack[-1]
new_scope = TriggerScope(sensor_graph, scope_stack, parent.clock(self.interval, basis=self.basis))
scope_stack.append(new_scope) | [
"Execute statement before children are executed.\n\n Args:\n sensor_graph (SensorGraph): The sensor graph that we are building or\n modifying\n scope_stack (list(Scope)): A stack of nested scopes that may influence\n how this statement allocates clocks or other stream resources.\n "
] |
Please provide a description of the function:def execute(self, sensor_graph, scope_stack):
if self.subtract_stream.stream_type != DataStream.ConstantType:
raise SensorGraphSemanticError("You can only subtract a constant value currently", stream=self.subtract_stream)
parent = scope_stack[-1]
alloc = parent.allocator
trigger_stream, trigger_cond = parent.trigger_chain()
sensor_graph.add_node(u"({} always && {} {}) => {} using {}".format(self.subtract_stream, trigger_stream, trigger_cond, self.stream, 'subtract_afromb'))
value = self.default
if value is None:
value = 0
if self.default is not None and self.subtract_stream in sensor_graph.constant_database:
raise SensorGraphSemanticError("Attempted to set the same constant stream twice", stream=self.subtract_stream, new_value=self.default)
elif self.default is None and self.subtract_stream in sensor_graph.constant_database:
return
sensor_graph.add_constant(self.subtract_stream, value) | [
"Execute this statement on the sensor_graph given the current scope tree.\n\n This adds a single node to the sensor graph with subtract as the function\n so that the current scope's trigger stream has the subtract_stream's value\n subtracted from it.\n\n Args:\n sensor_graph (SensorGraph): The sensor graph that we are building or\n modifying\n scope_stack (list(Scope)): A stack of nested scopes that may influence\n how this statement allocates clocks or other stream resources.\n "
] |
Please provide a description of the function:def _decode_datetime(obj):
if '__datetime__' in obj:
obj = datetime.datetime.strptime(obj['as_str'].decode(), "%Y%m%dT%H:%M:%S.%f")
return obj | [
"Decode a msgpack'ed datetime."
] |
Please provide a description of the function:def _encode_datetime(obj):
if isinstance(obj, datetime.datetime):
obj = {'__datetime__': True, 'as_str': obj.strftime("%Y%m%dT%H:%M:%S.%f").encode()}
return obj | [
"Encode a msgpck'ed datetime."
] |
Please provide a description of the function:def _versioned_lib_suffix(env, suffix, version):
Verbose = False
if Verbose:
print("_versioned_lib_suffix: suffix= ", suffix)
print("_versioned_lib_suffix: version= ", version)
cygversion = re.sub('\.', '-', version)
if not suffix.startswith('-' + cygversion):
suffix = '-' + cygversion + suffix
if Verbose:
print("_versioned_lib_suffix: return suffix= ", suffix)
return suffix | [
"Generate versioned shared library suffix from a unversioned one.\n If suffix='.dll', and version='0.1.2', then it returns '-0-1-2.dll'"
] |
Please provide a description of the function:def _versioned_implib_symlinks(env, libnode, version, prefix, suffix, **kw):
Verbose = False
if Verbose:
print("_versioned_implib_symlinks: libnode=%r" % libnode.get_path())
print("_versioned_implib_symlinks: version=%r" % version)
try: libtype = kw['libtype']
except KeyError: libtype = 'ShLib'
linkdir = os.path.dirname(libnode.get_path())
if Verbose:
print("_versioned_implib_symlinks: linkdir=%r" % linkdir)
name = SCons.Tool.ImpLibNameGenerator(env, libnode,
implib_libtype=libtype,
generator_libtype=libtype+'ImpLib')
if Verbose:
print("_versioned_implib_symlinks: name=%r" % name)
major = version.split('.')[0]
link0 = env.fs.File(os.path.join(linkdir, name))
symlinks = [(link0, libnode)]
if Verbose:
print("_versioned_implib_symlinks: return symlinks=%r" % SCons.Tool.StringizeLibSymlinks(symlinks))
return symlinks | [
"Generate link names that should be created for a versioned shared library.\n Returns a list in the form [ (link, linktarget), ... ]\n "
] |
Please provide a description of the function:def generate(env):
gnulink.generate(env)
env['LINKFLAGS'] = SCons.Util.CLVar('-Wl,-no-undefined')
env['SHLINKCOM'] = shlib_action
env['LDMODULECOM'] = ldmod_action
env.Append(SHLIBEMITTER = [shlib_emitter])
env.Append(LDMODULEEMITTER = [ldmod_emitter])
env['SHLIBPREFIX'] = 'cyg'
env['SHLIBSUFFIX'] = '.dll'
env['IMPLIBPREFIX'] = 'lib'
env['IMPLIBSUFFIX'] = '.dll.a'
# Variables used by versioned shared libraries
env['_SHLIBVERSIONFLAGS'] = '$SHLIBVERSIONFLAGS'
env['_LDMODULEVERSIONFLAGS'] = '$LDMODULEVERSIONFLAGS'
# SHLIBVERSIONFLAGS and LDMODULEVERSIONFLAGS are same as in gnulink...
# LINKCALLBACKS are NOT inherited from gnulink
env['LINKCALLBACKS'] = {
'VersionedShLibSuffix' : _versioned_lib_suffix,
'VersionedLdModSuffix' : _versioned_lib_suffix,
'VersionedImpLibSuffix' : _versioned_lib_suffix,
'VersionedShLibName' : link._versioned_shlib_name,
'VersionedLdModName' : link._versioned_ldmod_name,
'VersionedShLibImpLibName' : lambda *args: _versioned_implib_name(*args, libtype='ShLib'),
'VersionedLdModImpLibName' : lambda *args: _versioned_implib_name(*args, libtype='LdMod'),
'VersionedShLibImpLibSymlinks' : lambda *args: _versioned_implib_symlinks(*args, libtype='ShLib'),
'VersionedLdModImpLibSymlinks' : lambda *args: _versioned_implib_symlinks(*args, libtype='LdMod'),
}
# these variables were set by gnulink but are not used in cyglink
try: del env['_SHLIBSONAME']
except KeyError: pass
try: del env['_LDMODULESONAME']
except KeyError: pass | [
"Add Builders and construction variables for cyglink to an Environment."
] |
Please provide a description of the function:def generate(env):
path, _cc, version = get_xlc(env)
if path and _cc:
_cc = os.path.join(path, _cc)
if 'CC' not in env:
env['CC'] = _cc
cc.generate(env)
if version:
env['CCVERSION'] = version | [
"Add Builders and construction variables for xlc / Visual Age\n suite to an Environment."
] |
Please provide a description of the function:def dispatch(self, message):
for validator, callback in self.validators:
if not validator.matches(message):
continue
callback(message)
return
raise ArgumentError("No handler was registered for message", message=message) | [
"Dispatch a message to a callback based on its schema.\n\n Args:\n message (dict): The message to dispatch\n "
] |
Please provide a description of the function:def build_parser():
parser = argparse.ArgumentParser(description=DESCRIPTION, formatter_class=argparse.RawDescriptionHelpFormatter)
parser.add_argument('firmware_image', nargs="?", help="The firmware image that you wish to load into the emulator")
parser.add_argument('--gdb', '-g', type=int, help="Start a GDB server on the given port and wait for a connection")
return parser | [
"Create command line argument parser."
] |
Please provide a description of the function:def main(raw_args=None):
if raw_args is None:
raw_args = sys.argv[1:]
parser = build_parser()
args = parser.parse_args(raw_args)
if args.firmware_image is None and args.gdb is None:
print("You must specify either a firmware image or attach a debugger with --gdb <PORT>")
return 1
test_args = ['qemu-system-gnuarmeclipse', '-verbose', '-verbose', '-board', 'STM32F0-Discovery',
'-nographic', '-monitor', 'null', '-serial', 'null', '--semihosting-config',
'enable=on,target=native', '-d', 'unimp,guest_errors']
if args.firmware_image:
test_args += ['-image', args.firmware_image]
if args.gdb:
test_args += ['--gdb', 'tcp::%d' % args.gdb]
proc = subprocess.Popen(test_args, stdout=sys.stdout, stderr=sys.stderr)
try:
proc.communicate()
except KeyboardInterrupt:
proc.terminate()
return 0 | [
"Run the iotile-emulate script.\n\n Args:\n raw_args (list): Optional list of commmand line arguments. If not\n passed these are pulled from sys.argv.\n "
] |
Please provide a description of the function:def _detect(env):
QTDIR = None
if not QTDIR:
QTDIR = env.get('QTDIR',None)
if not QTDIR:
QTDIR = os.environ.get('QTDIR',None)
if not QTDIR:
moc = env.WhereIs('moc')
if moc:
QTDIR = os.path.dirname(os.path.dirname(moc))
SCons.Warnings.warn(
QtdirNotFound,
"Could not detect qt, using moc executable as a hint (QTDIR=%s)" % QTDIR)
else:
QTDIR = None
SCons.Warnings.warn(
QtdirNotFound,
"Could not detect qt, using empty QTDIR")
return QTDIR | [
"Not really safe, but fast method to detect the QT library"
] |
Please provide a description of the function:def generate(env):
CLVar = SCons.Util.CLVar
Action = SCons.Action.Action
Builder = SCons.Builder.Builder
env.SetDefault(QTDIR = _detect(env),
QT_BINPATH = os.path.join('$QTDIR', 'bin'),
QT_CPPPATH = os.path.join('$QTDIR', 'include'),
QT_LIBPATH = os.path.join('$QTDIR', 'lib'),
QT_MOC = os.path.join('$QT_BINPATH','moc'),
QT_UIC = os.path.join('$QT_BINPATH','uic'),
QT_LIB = 'qt', # may be set to qt-mt
QT_AUTOSCAN = 1, # scan for moc'able sources
# Some QT specific flags. I don't expect someone wants to
# manipulate those ...
QT_UICIMPLFLAGS = CLVar(''),
QT_UICDECLFLAGS = CLVar(''),
QT_MOCFROMHFLAGS = CLVar(''),
QT_MOCFROMCXXFLAGS = CLVar('-i'),
# suffixes/prefixes for the headers / sources to generate
QT_UICDECLPREFIX = '',
QT_UICDECLSUFFIX = '.h',
QT_UICIMPLPREFIX = 'uic_',
QT_UICIMPLSUFFIX = '$CXXFILESUFFIX',
QT_MOCHPREFIX = 'moc_',
QT_MOCHSUFFIX = '$CXXFILESUFFIX',
QT_MOCCXXPREFIX = '',
QT_MOCCXXSUFFIX = '.moc',
QT_UISUFFIX = '.ui',
# Commands for the qt support ...
# command to generate header, implementation and moc-file
# from a .ui file
QT_UICCOM = [
CLVar('$QT_UIC $QT_UICDECLFLAGS -o ${TARGETS[0]} $SOURCE'),
CLVar('$QT_UIC $QT_UICIMPLFLAGS -impl ${TARGETS[0].file} '
'-o ${TARGETS[1]} $SOURCE'),
CLVar('$QT_MOC $QT_MOCFROMHFLAGS -o ${TARGETS[2]} ${TARGETS[0]}')],
# command to generate meta object information for a class
# declarated in a header
QT_MOCFROMHCOM = (
'$QT_MOC $QT_MOCFROMHFLAGS -o ${TARGETS[0]} $SOURCE'),
# command to generate meta object information for a class
# declarated in a cpp file
QT_MOCFROMCXXCOM = [
CLVar('$QT_MOC $QT_MOCFROMCXXFLAGS -o ${TARGETS[0]} $SOURCE'),
Action(checkMocIncluded,None)])
# ... and the corresponding builders
uicBld = Builder(action=SCons.Action.Action('$QT_UICCOM', '$QT_UICCOMSTR'),
emitter=uicEmitter,
src_suffix='$QT_UISUFFIX',
suffix='$QT_UICDECLSUFFIX',
prefix='$QT_UICDECLPREFIX',
source_scanner=uicScanner)
mocBld = Builder(action={}, prefix={}, suffix={})
for h in header_extensions:
act = SCons.Action.Action('$QT_MOCFROMHCOM', '$QT_MOCFROMHCOMSTR')
mocBld.add_action(h, act)
mocBld.prefix[h] = '$QT_MOCHPREFIX'
mocBld.suffix[h] = '$QT_MOCHSUFFIX'
for cxx in cxx_suffixes:
act = SCons.Action.Action('$QT_MOCFROMCXXCOM', '$QT_MOCFROMCXXCOMSTR')
mocBld.add_action(cxx, act)
mocBld.prefix[cxx] = '$QT_MOCCXXPREFIX'
mocBld.suffix[cxx] = '$QT_MOCCXXSUFFIX'
# register the builders
env['BUILDERS']['Uic'] = uicBld
env['BUILDERS']['Moc'] = mocBld
static_obj, shared_obj = SCons.Tool.createObjBuilders(env)
static_obj.add_src_builder('Uic')
shared_obj.add_src_builder('Uic')
# We use the emitters of Program / StaticLibrary / SharedLibrary
# to scan for moc'able files
# We can't refer to the builders directly, we have to fetch them
# as Environment attributes because that sets them up to be called
# correctly later by our emitter.
env.AppendUnique(PROGEMITTER =[AutomocStatic],
SHLIBEMITTER=[AutomocShared],
LDMODULEEMITTER=[AutomocShared],
LIBEMITTER =[AutomocStatic],
# Of course, we need to link against the qt libraries
CPPPATH=["$QT_CPPPATH"],
LIBPATH=["$QT_LIBPATH"],
LIBS=['$QT_LIB']) | [
"Add Builders and construction variables for qt to an Environment."
] |
Please provide a description of the function:def CPP_to_Python(s):
s = CPP_to_Python_Ops_Expression.sub(CPP_to_Python_Ops_Sub, s)
for expr, repl in CPP_to_Python_Eval_List:
s = expr.sub(repl, s)
return s | [
"\n Converts a C pre-processor expression into an equivalent\n Python expression that can be evaluated.\n "
] |
Please provide a description of the function:def tupleize(self, contents):
global CPP_Expression, Table
contents = line_continuations.sub('', contents)
cpp_tuples = CPP_Expression.findall(contents)
return [(m[0],) + Table[m[0]].match(m[1]).groups() for m in cpp_tuples] | [
"\n Turns the contents of a file into a list of easily-processed\n tuples describing the CPP lines in the file.\n\n The first element of each tuple is the line's preprocessor\n directive (#if, #include, #define, etc., minus the initial '#').\n The remaining elements are specific to the type of directive, as\n pulled apart by the regular expression.\n "
] |
Please provide a description of the function:def process_contents(self, contents, fname=None):
self.stack = []
self.dispatch_table = self.default_table.copy()
self.current_file = fname
self.tuples = self.tupleize(contents)
self.initialize_result(fname)
while self.tuples:
t = self.tuples.pop(0)
# Uncomment to see the list of tuples being processed (e.g.,
# to validate the CPP lines are being translated correctly).
#print(t)
self.dispatch_table[t[0]](t)
return self.finalize_result(fname) | [
"\n Pre-processes a file contents.\n\n This is the main internal entry point.\n "
] |
Please provide a description of the function:def save(self):
self.stack.append(self.dispatch_table)
self.dispatch_table = self.default_table.copy() | [
"\n Pushes the current dispatch table on the stack and re-initializes\n the current dispatch table to the default.\n "
] |
Please provide a description of the function:def eval_expression(self, t):
t = CPP_to_Python(' '.join(t[1:]))
try: return eval(t, self.cpp_namespace)
except (NameError, TypeError): return 0 | [
"\n Evaluates a C preprocessor expression.\n\n This is done by converting it to a Python equivalent and\n eval()ing it in the C preprocessor namespace we use to\n track #define values.\n "
] |
Please provide a description of the function:def find_include_file(self, t):
fname = t[2]
for d in self.searchpath[t[1]]:
if d == os.curdir:
f = fname
else:
f = os.path.join(d, fname)
if os.path.isfile(f):
return f
return None | [
"\n Finds the #include file for a given preprocessor tuple.\n "
] |
Please provide a description of the function:def start_handling_includes(self, t=None):
d = self.dispatch_table
p = self.stack[-1] if self.stack else self.default_table
for k in ('import', 'include', 'include_next'):
d[k] = p[k] | [
"\n Causes the PreProcessor object to start processing #import,\n #include and #include_next lines.\n\n This method will be called when a #if, #ifdef, #ifndef or #elif\n evaluates True, or when we reach the #else in a #if, #ifdef,\n #ifndef or #elif block where a condition already evaluated\n False.\n\n "
] |
Please provide a description of the function:def stop_handling_includes(self, t=None):
d = self.dispatch_table
d['import'] = self.do_nothing
d['include'] = self.do_nothing
d['include_next'] = self.do_nothing | [
"\n Causes the PreProcessor object to stop processing #import,\n #include and #include_next lines.\n\n This method will be called when a #if, #ifdef, #ifndef or #elif\n evaluates False, or when we reach the #else in a #if, #ifdef,\n #ifndef or #elif block where a condition already evaluated True.\n "
] |
Please provide a description of the function:def _do_if_else_condition(self, condition):
self.save()
d = self.dispatch_table
if condition:
self.start_handling_includes()
d['elif'] = self.stop_handling_includes
d['else'] = self.stop_handling_includes
else:
self.stop_handling_includes()
d['elif'] = self.do_elif
d['else'] = self.start_handling_includes | [
"\n Common logic for evaluating the conditions on #if, #ifdef and\n #ifndef lines.\n "
] |
Please provide a description of the function:def do_elif(self, t):
d = self.dispatch_table
if self.eval_expression(t):
self.start_handling_includes()
d['elif'] = self.stop_handling_includes
d['else'] = self.stop_handling_includes | [
"\n Default handling of a #elif line.\n "
] |
Please provide a description of the function:def do_define(self, t):
_, name, args, expansion = t
try:
expansion = int(expansion)
except (TypeError, ValueError):
pass
if args:
evaluator = FunctionEvaluator(name, args[1:-1], expansion)
self.cpp_namespace[name] = evaluator
else:
self.cpp_namespace[name] = expansion | [
"\n Default handling of a #define line.\n "
] |
Please provide a description of the function:def do_include(self, t):
t = self.resolve_include(t)
include_file = self.find_include_file(t)
if include_file:
#print("include_file =", include_file)
self.result.append(include_file)
contents = self.read_file(include_file)
new_tuples = [('scons_current_file', include_file)] + \
self.tupleize(contents) + \
[('scons_current_file', self.current_file)]
self.tuples[:] = new_tuples + self.tuples | [
"\n Default handling of a #include line.\n "
] |
Please provide a description of the function:def resolve_include(self, t):
s = t[1]
while not s[0] in '<"':
#print("s =", s)
try:
s = self.cpp_namespace[s]
except KeyError:
m = function_name.search(s)
s = self.cpp_namespace[m.group(1)]
if callable(s):
args = function_arg_separator.split(m.group(2))
s = s(*args)
if not s:
return None
return (t[0], s[0], s[1:-1]) | [
"Resolve a tuple-ized #include line.\n\n This handles recursive expansion of values without \"\" or <>\n surrounding the name until an initial \" or < is found, to handle\n\n #include FILE\n\n where FILE is a #define somewhere else."
] |
Please provide a description of the function:def emit_rmic_classes(target, source, env):
class_suffix = env.get('JAVACLASSSUFFIX', '.class')
classdir = env.get('JAVACLASSDIR')
if not classdir:
try:
s = source[0]
except IndexError:
classdir = '.'
else:
try:
classdir = s.attributes.java_classdir
except AttributeError:
classdir = '.'
classdir = env.Dir(classdir).rdir()
if str(classdir) == '.':
c_ = None
else:
c_ = str(classdir) + os.sep
slist = []
for src in source:
try:
classname = src.attributes.java_classname
except AttributeError:
classname = str(src)
if c_ and classname[:len(c_)] == c_:
classname = classname[len(c_):]
if class_suffix and classname[:-len(class_suffix)] == class_suffix:
classname = classname[-len(class_suffix):]
s = src.rfile()
s.attributes.java_classdir = classdir
s.attributes.java_classname = classname
slist.append(s)
stub_suffixes = ['_Stub']
if env.get('JAVAVERSION') == '1.4':
stub_suffixes.append('_Skel')
tlist = []
for s in source:
for suff in stub_suffixes:
fname = s.attributes.java_classname.replace('.', os.sep) + \
suff + class_suffix
t = target[0].File(fname)
t.attributes.java_lookupdir = target[0]
tlist.append(t)
return tlist, source | [
"Create and return lists of Java RMI stub and skeleton\n class files to be created from a set of class files.\n "
] |
Please provide a description of the function:def generate(env):
env['BUILDERS']['RMIC'] = RMICBuilder
env['RMIC'] = 'rmic'
env['RMICFLAGS'] = SCons.Util.CLVar('')
env['RMICCOM'] = '$RMIC $RMICFLAGS -d ${TARGET.attributes.java_lookupdir} -classpath ${SOURCE.attributes.java_classdir} ${SOURCES.attributes.java_classname}'
env['JAVACLASSSUFFIX'] = '.class' | [
"Add Builders and construction variables for rmic to an Environment."
] |
Please provide a description of the function:def _set_scan_parameters(self, interval=2100, window=2100, active=False):
active_num = 0
if bool(active):
active_num = 1
interval_num = int(interval*1000/625)
window_num = int(window*1000/625)
payload = struct.pack("<HHB", interval_num, window_num, active_num)
try:
response = self._send_command(6, 7, payload)
if response.payload[0] != 0:
return False, {'reason': "Could not set scanning parameters", 'error': response.payload[0]}
except InternalTimeoutError:
return False, {'reason': 'Timeout waiting for response'}
return True, None | [
"\n Set the scan interval and window in units of ms and set whether active scanning is performed\n "
] |
Please provide a description of the function:def _query_systemstate(self):
def status_filter_func(event):
if event.command_class == 3 and event.command == 0:
return True
return False
try:
response = self._send_command(0, 6, [])
maxconn, = unpack("<B", response.payload)
except InternalTimeoutError:
return False, {'reason': 'Timeout waiting for command response'}
events = self._wait_process_events(0.5, status_filter_func, lambda x: False)
conns = []
for event in events:
handle, flags, addr, addr_type, interval, timeout, lat, bond = unpack("<BB6sBHHHB", event.payload)
if flags != 0:
conns.append(handle)
return True, {'max_connections': maxconn, 'active_connections': conns} | [
"Query the maximum number of connections supported by this adapter\n "
] |
Please provide a description of the function:def _start_scan(self, active):
success, retval = self._set_scan_parameters(active=active)
if not success:
return success, retval
try:
response = self._send_command(6, 2, [2])
if response.payload[0] != 0:
self._logger.error('Error starting scan for devices, error=%d', response.payload[0])
return False, {'reason': "Could not initiate scan for ble devices, error_code=%d, response=%s" % (response.payload[0], response)}
except InternalTimeoutError:
return False, {'reason': "Timeout waiting for response"}
return True, None | [
"Begin scanning forever\n "
] |
Please provide a description of the function:def _stop_scan(self):
try:
response = self._send_command(6, 4, [])
if response.payload[0] != 0:
# Error code 129 means we just were not currently scanning
if response.payload[0] != 129:
self._logger.error('Error stopping scan for devices, error=%d', response.payload[0])
return False, {'reason': "Could not stop scan for ble devices"}
except InternalTimeoutError:
return False, {'reason': "Timeout waiting for response"}
except DeviceNotConfiguredError:
return True, {'reason': "Device not connected (did you disconnect the dongle?"}
return True, None | [
"Stop scanning for BLE devices\n "
] |
Please provide a description of the function:def _probe_services(self, handle):
code = 0x2800
def event_filter_func(event):
if (event.command_class == 4 and event.command == 2):
event_handle, = unpack("B", event.payload[0:1])
return event_handle == handle
return False
def end_filter_func(event):
if (event.command_class == 4 and event.command == 1):
event_handle, = unpack("B", event.payload[0:1])
return event_handle == handle
return False
payload = struct.pack('<BHHBH', handle, 1, 0xFFFF, 2, code)
try:
response = self._send_command(4, 1, payload)
except InternalTimeoutError:
return False, {'reason': 'Timeout waiting for command response'}
handle, result = unpack("<BH", response.payload)
if result != 0:
return False, None
events = self._wait_process_events(0.5, event_filter_func, end_filter_func)
gatt_events = [x for x in events if event_filter_func(x)]
end_events = [x for x in events if end_filter_func(x)]
if len(end_events) == 0:
return False, None
#Make sure we successfully probed the gatt table
end_event = end_events[0]
_, result, _ = unpack("<BHH", end_event.payload)
if result != 0:
self._logger.warn("Error enumerating GATT table, protocol error code = %d (0x%X)" % (result, result))
return False, None
services = {}
for event in gatt_events:
process_gatt_service(services, event)
return True, {'services': services} | [
"Probe for all primary services and characteristics in those services\n\n Args:\n handle (int): the connection handle to probe\n "
] |
Please provide a description of the function:def _probe_characteristics(self, conn, services, timeout=5.0):
for service in services.values():
success, result = self._enumerate_handles(conn, service['start_handle'],
service['end_handle'])
if not success:
return False, None
attributes = result['attributes']
service['characteristics'] = {}
last_char = None
for handle, attribute in attributes.items():
if attribute['uuid'].hex[-4:] == '0328':
success, result = self._read_handle(conn, handle, timeout)
if not success:
return False, None
value = result['data']
char = parse_characteristic_declaration(value)
service['characteristics'][char['uuid']] = char
last_char = char
elif attribute['uuid'].hex[-4:] == '0229':
if last_char is None:
return False, None
success, result = self._read_handle(conn, handle, timeout)
if not success:
return False, None
value = result['data']
assert len(value) == 2
value, = unpack("<H", value)
last_char['client_configuration'] = {'handle': handle, 'value': value}
return True, {'services': services} | [
"Probe gatt services for all associated characteristics in a BLE device\n\n Args:\n conn (int): the connection handle to probe\n services (dict): a dictionary of services produced by probe_services()\n timeout (float): the maximum number of seconds to spend in any single task\n "
] |
Please provide a description of the function:def _enable_rpcs(self, conn, services, timeout=1.0):
#FIXME: Check for characteristic existence in a try/catch and return failure if not found
success, result = self._set_notification(conn, services[TileBusService]['characteristics'][TileBusReceiveHeaderCharacteristic], True, timeout)
if not success:
return success, result
return self._set_notification(conn, services[TileBusService]['characteristics'][TileBusReceivePayloadCharacteristic], True, timeout) | [
"Prepare this device to receive RPCs\n "
] |
Please provide a description of the function:def _disable_rpcs(self, conn, services, timeout=1.0):
success, result = self._set_notification(conn, services[TileBusService]['characteristics'][TileBusReceiveHeaderCharacteristic], False, timeout)
if not success:
return success, result
return self._set_notification(conn, services[TileBusService]['characteristics'][TileBusReceivePayloadCharacteristic], False, timeout) | [
"Prevent this device from receiving more RPCs\n "
] |
Please provide a description of the function:def _write_handle(self, conn, handle, ack, value, timeout=1.0):
conn_handle = conn
char_handle = handle
def write_handle_acked(event):
if event.command_class == 4 and event.command == 1:
conn, _, char = unpack("<BHH", event.payload)
return conn_handle == conn and char_handle == char
data_len = len(value)
if data_len > 20:
return False, {'reason': 'Data too long to write'}
payload = struct.pack("<BHB%ds" % data_len, conn_handle, char_handle, data_len, value)
try:
if ack:
response = self._send_command(4, 5, payload)
else:
response = self._send_command(4, 6, payload)
except InternalTimeoutError:
return False, {'reason': 'Timeout waiting for response to command in _write_handle'}
_, result = unpack("<BH", response.payload)
if result != 0:
return False, {'reason': 'Error writing to handle', 'error_code': result}
if ack:
events = self._wait_process_events(timeout, lambda x: False, write_handle_acked)
if len(events) == 0:
return False, {'reason': 'Timeout waiting for acknowledge on write'}
_, result, _ = unpack("<BHH", events[0].payload)
if result != 0:
return False, {'reason': 'Error received during write to handle', 'error_code': result}
return True, None | [
"Write to a BLE device characteristic by its handle\n\n Args:\n conn (int): The connection handle for the device we should read from\n handle (int): The characteristics handle we should read\n ack (bool): Should this be an acknowledges write or unacknowledged\n timeout (float): How long to wait before failing\n value (bytearray): The value that we should write\n "
] |
Please provide a description of the function:def _set_advertising_data(self, packet_type, data):
payload = struct.pack("<BB%ss" % (len(data)), packet_type, len(data), bytes(data))
response = self._send_command(6, 9, payload)
result, = unpack("<H", response.payload)
if result != 0:
return False, {'reason': 'Error code from BLED112 setting advertising data', 'code': result}
return True, None | [
"Set the advertising data for advertisements sent out by this bled112\n\n Args:\n packet_type (int): 0 for advertisement, 1 for scan response\n data (bytearray): the data to set\n "
] |
Please provide a description of the function:def _set_mode(self, discover_mode, connect_mode):
payload = struct.pack("<BB", discover_mode, connect_mode)
response = self._send_command(6, 1, payload)
result, = unpack("<H", response.payload)
if result != 0:
return False, {'reason': 'Error code from BLED112 setting mode', 'code': result}
return True, None | [
"Set the mode of the BLED112, used to enable and disable advertising\n\n To enable advertising, use 4, 2.\n To disable advertising use 0, 0.\n\n Args:\n discover_mode (int): The discoverability mode, 0 for off, 4 for on (user data)\n connect_mode (int): The connectability mode, 0 for of, 2 for undirected connectable\n "
] |
Please provide a description of the function:def _send_notification(self, handle, value):
value_len = len(value)
value = bytes(value)
payload = struct.pack("<BHB%ds" % value_len, 0xFF, handle, value_len, value)
response = self._send_command(2, 5, payload)
result, = unpack("<H", response.payload)
if result != 0:
return False, {'reason': 'Error code from BLED112 notifying a value', 'code': result, 'handle': handle, 'value': value}
return True, None | [
"Send a notification to all connected clients on a characteristic\n\n Args:\n handle (int): The handle we wish to notify on\n value (bytearray): The value we wish to send\n "
] |
Please provide a description of the function:def _set_notification(self, conn, char, enabled, timeout=1.0):
if 'client_configuration' not in char:
return False, {'reason': 'Cannot enable notification without a client configuration attribute for characteristic'}
props = char['properties']
if not props.notify:
return False, {'reason': 'Cannot enable notification on a characteristic that does not support it'}
value = char['client_configuration']['value']
#Check if we don't have to do anything
current_state = bool(value & (1 << 0))
if current_state == enabled:
return
if enabled:
value |= 1 << 0
else:
value &= ~(1 << 0)
char['client_configuration']['value'] = value
valarray = struct.pack("<H", value)
return self._write_handle(conn, char['client_configuration']['handle'], True, valarray, timeout) | [
"Enable/disable notifications on a GATT characteristic\n\n Args:\n conn (int): The connection handle for the device we should interact with\n char (dict): The characteristic we should modify\n enabled (bool): Should we enable or disable notifications\n timeout (float): How long to wait before failing\n "
] |
Please provide a description of the function:def _connect(self, address):
latency = 0
conn_interval_min = 6
conn_interval_max = 100
timeout = 1.0
try:
#Allow passing either a binary address or a hex string
if isinstance(address, str) and len(address) > 6:
address = address.replace(':', '')
address = bytes(bytearray.fromhex(address)[::-1])
except ValueError:
return False, None
#Allow simple determination of whether a device has a public or private address
#This is not foolproof
private_bits = bytearray(address)[-1] >> 6
if private_bits == 0b11:
address_type = 1
else:
address_type = 0
payload = struct.pack("<6sBHHHH", address, address_type, conn_interval_min,
conn_interval_max, int(timeout*100.0), latency)
response = self._send_command(6, 3, payload)
result, handle = unpack("<HB", response.payload)
if result != 0:
return False, None
#Now wait for the connection event that says we connected or kill the attempt after timeout
def conn_succeeded(event):
if event.command_class == 3 and event.command == 0:
event_handle, = unpack("B", event.payload[0:1])
return event_handle == handle
#FIXME Hardcoded timeout
events = self._wait_process_events(4.0, lambda x: False, conn_succeeded)
if len(events) != 1:
self._stop_scan()
return False, None
handle, _, addr, _, interval, timeout, latency, _ = unpack("<BB6sBHHHB", events[0].payload)
formatted_addr = ":".join(["%02X" % x for x in bytearray(addr)])
self._logger.info('Connected to device %s with interval=%d, timeout=%d, latency=%d',
formatted_addr, interval, timeout, latency)
connection = {"handle": handle}
return True, connection | [
"Connect to a device given its uuid\n "
] |
Please provide a description of the function:def _disconnect(self, handle):
payload = struct.pack('<B', handle)
response = self._send_command(3, 0, payload)
conn_handle, result = unpack("<BH", response.payload)
if result != 0:
self._logger.info("Disconnection failed result=%d", result)
return False, None
assert conn_handle == handle
def disconnect_succeeded(event):
if event.command_class == 3 and event.command == 4:
event_handle, = unpack("B", event.payload[0:1])
return event_handle == handle
return False
#FIXME Hardcoded timeout
events = self._wait_process_events(3.0, lambda x: False, disconnect_succeeded)
if len(events) != 1:
return False, None
return True, {'handle': handle} | [
"Disconnect from a device that we have previously connected to\n "
] |
Please provide a description of the function:def _send_command(self, cmd_class, command, payload, timeout=3.0):
if len(payload) > 60:
return ValueError("Attempting to send a BGAPI packet with length > 60 is not allowed", actual_length=len(payload), command=command, command_class=cmd_class)
header = bytearray(4)
header[0] = 0
header[1] = len(payload)
header[2] = cmd_class
header[3] = command
packet = header + bytearray(payload)
self._stream.write(bytes(packet))
#Every command has a response so wait for the response here
response = self._receive_packet(timeout)
return response | [
"\n Send a BGAPI packet to the dongle and return the response\n "
] |
Please provide a description of the function:def _receive_packet(self, timeout=3.0):
while True:
response_data = self._stream.read_packet(timeout=timeout)
response = BGAPIPacket(is_event=(response_data[0] == 0x80), command_class=response_data[2], command=response_data[3], payload=response_data[4:])
if response.is_event:
if self.event_handler is not None:
self.event_handler(response)
continue
return response | [
"\n Receive a response packet to a command\n "
] |
Please provide a description of the function:def _wait_process_events(self, total_time, return_filter, end_filter):
acc = []
delta = 0.01
start_time = time.time()
end_time = start_time + total_time
while time.time() < end_time:
events = self._process_events(lambda x: return_filter(x) or end_filter(x), max_events=1)
acc += events
for event in events:
if end_filter(event):
return acc
if len(events) == 0:
time.sleep(delta)
return acc | [
"Synchronously process events until a specific event is found or we timeout\n\n Args:\n total_time (float): The aproximate maximum number of seconds we should wait for the end event\n return_filter (callable): A function that returns True for events we should return and not process\n normally via callbacks to the IOLoop\n end_filter (callable): A function that returns True for the end event that we are looking for to\n stop processing.\n\n Returns:\n list: A list of events that matched return_filter or end_filter\n "
] |
Please provide a description of the function:def async_rpc(address, rpc_id, arg_format, resp_format=None):
if rpc_id < 0 or rpc_id > 0xFFFF:
raise RPCInvalidIDError("Invalid RPC ID: {}".format(rpc_id))
def _rpc_wrapper(func):
async def _rpc_executor(self, payload):
try:
args = unpack_rpc_payload(arg_format, payload)
except struct.error as exc:
raise RPCInvalidArgumentsError(str(exc), arg_format=arg_format, payload=binascii.hexlify(payload))
resp = await func(self, *args)
if resp is None:
resp = []
if resp_format is not None:
try:
return pack_rpc_payload(resp_format, resp)
except struct.error as exc:
raise RPCInvalidReturnValueError(str(exc), resp_format=resp_format, resp=repr(resp))
return resp
_rpc_executor.rpc_id = rpc_id
_rpc_executor.rpc_addr = address
_rpc_executor.is_rpc = True
return _rpc_executor
return _rpc_wrapper | [
"Decorator to denote that a function implements an RPC with the given ID and address.\n\n The underlying function should be a member function that will take\n individual parameters after the RPC payload has been decoded according\n to arg_format.\n\n Arguments to the function are decoded from the 20 byte RPC argument payload according\n to arg_format, which should be a format string that can be passed to struct.unpack.\n\n Similarly, the function being decorated should return an iterable of results that\n will be encoded into a 20 byte response buffer by struct.pack using resp_format as\n the format string.\n\n The RPC will respond as if it were implemented by a tile at address ``address`` and\n the 16-bit RPC id ``rpc_id``.\n\n Args:\n address (int): The address of the mock tile this RPC is for\n rpc_id (int): The number of the RPC\n arg_format (string): a struct format code (without the <) for the\n parameter format for this RPC. This format code may include the final\n character V, which means that it expects a variable length bytearray.\n resp_format (string): an optional format code (without the <) for\n the response format for this RPC. This format code may include the final\n character V, which means that it expects a variable length bytearray.\n "
] |
Please provide a description of the function:def connect(self, client_id):
if self.client is not None:
raise InternalError("Connect called on an alreaded connected MQTT client")
client = AWSIoTPythonSDK.MQTTLib.AWSIoTMQTTClient(client_id, useWebsocket=self.websockets)
if self.websockets:
client.configureEndpoint(self.endpoint, 443)
client.configureCredentials(self.root)
if self.iam_session is None:
client.configureIAMCredentials(self.iam_key, self.iam_secret)
else:
client.configureIAMCredentials(self.iam_key, self.iam_secret, self.iam_session)
else:
client.configureEndpoint(self.endpoint, 8883)
client.configureCredentials(self.root, self.key, self.cert)
client.configureOfflinePublishQueueing(0)
try:
client.connect()
self.client = client
except operationError as exc:
raise InternalError("Could not connect to AWS IOT", message=exc.message)
self.sequencer.reset() | [
"Connect to AWS IOT with the given client_id\n\n Args:\n client_id (string): The client ID passed to the MQTT message broker\n "
] |
Please provide a description of the function:def disconnect(self):
if self.client is None:
return
try:
self.client.disconnect()
except operationError as exc:
raise InternalError("Could not disconnect from AWS IOT", message=exc.message) | [
"Disconnect from AWS IOT message broker\n "
] |
Please provide a description of the function:def publish(self, topic, message):
seq = self.sequencer.next_id(topic)
packet = {
'sequence': seq,
'message': message
}
# Need to encode bytes types for json.dumps
if 'key' in packet['message']:
packet['message']['key'] = packet['message']['key'].decode('utf8')
if 'payload' in packet['message']:
packet['message']['payload'] = packet['message']['payload'].decode('utf8')
if 'script' in packet['message']:
packet['message']['script'] = packet['message']['script'].decode('utf8')
if 'trace' in packet['message']:
packet['message']['trace'] = packet['message']['trace'].decode('utf8')
if 'report' in packet['message']:
packet['message']['report'] = packet['message']['report'].decode('utf8')
if 'received_time' in packet['message']:
packet['message']['received_time'] = packet['message']['received_time'].decode('utf8')
serialized_packet = json.dumps(packet)
try:
# Limit how much we log in case the message is very long
self._logger.debug("Publishing %s on topic %s", serialized_packet[:256], topic)
self.client.publish(topic, serialized_packet, 1)
except operationError as exc:
raise InternalError("Could not publish message", topic=topic, message=exc.message) | [
"Publish a json message to a topic with a type and a sequence number\n\n The actual message will be published as a JSON object:\n {\n \"sequence\": <incrementing id>,\n \"message\": message\n }\n\n Args:\n topic (string): The MQTT topic to publish in\n message (string, dict): The message to publish\n "
] |
Please provide a description of the function:def subscribe(self, topic, callback, ordered=True):
if '+' in topic or '#' in topic:
regex = re.compile(topic.replace('+', '[^/]+').replace('#', '.*'))
self.wildcard_queues.append((topic, regex, callback, ordered))
else:
self.queues[topic] = PacketQueue(0, callback, ordered)
try:
self.client.subscribe(topic, 1, self._on_receive)
except operationError as exc:
raise InternalError("Could not subscribe to topic", topic=topic, message=exc.message) | [
"Subscribe to future messages in the given topic\n\n The contents of topic should be in the format created by self.publish with a\n sequence number of message type encoded as a json string.\n\n Wildcard topics containing + and # are allowed and\n\n Args:\n topic (string): The MQTT topic to subscribe to\n callback (callable): The callback to call when a new mesage is received\n The signature of callback should be callback(sequence, topic, type, message)\n ordered (bool): Whether messages on this topic have a sequence number that must\n be checked and queued to ensure that packets are received in order\n "
] |
Please provide a description of the function:def reset_sequence(self, topic):
if topic in self.queues:
self.queues[topic].reset() | [
"Reset the expected sequence number for a topic\n\n If the topic is unknown, this does nothing. This behaviour is\n useful when you have wildcard topics that only create queues\n once they receive the first message matching the topic.\n\n Args:\n topic (string): The topic to reset the packet queue on\n "
] |
Please provide a description of the function:def unsubscribe(self, topic):
del self.queues[topic]
try:
self.client.unsubscribe(topic)
except operationError as exc:
raise InternalError("Could not unsubscribe from topic", topic=topic, message=exc.message) | [
"Unsubscribe from messages on a given topic\n\n Args:\n topic (string): The MQTT topic to unsubscribe from\n "
] |
Please provide a description of the function:def _on_receive(self, client, userdata, message):
topic = message.topic
encoded = message.payload
try:
packet = json.loads(encoded)
except ValueError:
self._logger.warn("Could not decode json packet: %s", encoded)
return
try:
seq = packet['sequence']
message_data = packet['message']
except KeyError:
self._logger.warn("Message received did not have required sequence and message keys: %s", packet)
return
# If we received a packet that does not fit into a queue, check our wildcard
# queues
if topic not in self.queues:
found = False
for _, regex, callback, ordered in self.wildcard_queues:
if regex.match(topic):
self.queues[topic] = PacketQueue(0, callback, ordered)
found = True
break
if not found:
self._logger.warn("Received message for unknown topic: %s", topic)
return
self.queues[topic].receive(seq, [seq, topic, message_data]) | [
"Callback called whenever we receive a message on a subscribed topic\n\n Args:\n client (string): The client id of the client receiving the message\n userdata (string): Any user data set with the underlying MQTT client\n message (object): The mesage with a topic and payload.\n "
] |
Please provide a description of the function:def run(self, resources):
hwman = resources['connection']
con = hwman.hwman.controller()
test_interface = con.test_interface()
try:
test_interface.synchronize_clock()
print('Time currently set at %s' % test_interface.current_time_str())
except:
raise ArgumentError('Error setting RTC time, check if controller actually has RTC or if iotile-support-lib-controller-3 is updated') | [
"Sets the RTC timestamp to UTC.\n\n Args:\n resources (dict): A dictionary containing the required resources that\n we needed access to in order to perform this step.\n "
] |
Please provide a description of the function:def add(self, command, *args):
cmd = Command(command, args)
self.commands.append(cmd) | [
"Add a command to this command file.\n\n Args:\n command (str): The command to add\n *args (str): The parameters to call the command with\n "
] |
Please provide a description of the function:def save(self, outpath):
with open(outpath, "w") as outfile:
outfile.write(self.dump()) | [
"Save this command file as an ascii file.\n\n Agrs:\n outpath (str): The output path to save.\n "
] |
Please provide a description of the function:def dump(self):
out = []
out.append(self.filetype)
out.append("Format: {}".format(self.version))
out.append("Type: ASCII")
out.append("")
for cmd in self.commands:
out.append(self.encode(cmd))
return "\n".join(out) + "\n" | [
"Dump all commands in this object to a string.\n\n Returns:\n str: An encoded list of commands separated by\n \\n characters suitable for saving to a file.\n "
] |
Please provide a description of the function:def FromString(cls, indata):
lines = [x.strip() for x in indata.split("\n") if not x.startswith('#') and not x.strip() == ""]
if len(lines) < 3:
raise DataError("Invalid CommandFile string that did not contain 3 header lines", lines=lines)
fmt_line, version_line, ascii_line = lines[:3]
if not version_line.startswith("Format: "):
raise DataError("Invalid format version that did not start with 'Format: '", line=version_line)
version = version_line[8:]
if ascii_line != "Type: ASCII":
raise DataError("Unknown file type line (expected Type: ASCII)", line=ascii_line)
cmds = [cls.decode(x) for x in lines[3:]]
return CommandFile(fmt_line, version, cmds) | [
"Load a CommandFile from a string.\n\n The string should be produced from a previous call to\n encode.\n\n Args:\n indata (str): The encoded input data.\n\n Returns:\n CommandFile: The decoded CommandFile object.\n "
] |
Please provide a description of the function:def FromFile(cls, inpath):
with open(inpath, "r") as infile:
indata = infile.read()
return cls.FromString(indata) | [
"Load a CommandFile from a path.\n\n Args:\n inpath (str): The path to the file to load\n\n Returns:\n CommandFile: The decoded CommandFile object.\n "
] |
Please provide a description of the function:def encode(cls, command):
args = []
for arg in command.args:
if not isinstance(arg, str):
arg = str(arg)
if "," in arg or arg.startswith(" ") or arg.endswith(" ") or arg.startswith("hex:"):
arg = "hex:{}".format(hexlify(arg.encode('utf-8')).decode('utf-8'))
args.append(arg)
argstr = ""
if len(args) > 0:
argstr = " {" + ",".join(args) + "}"
return command.name + argstr | [
"Encode a command as an unambiguous string.\n\n Args:\n command (Command): The command to encode.\n\n Returns:\n str: The encoded command\n "
] |
Please provide a description of the function:def decode(cls, command_str):
name, _, arg = command_str.partition(" ")
args = []
if len(arg) > 0:
if arg[0] != '{' or arg[-1] != '}':
raise DataError("Invalid command, argument is not contained in { and }", arg=arg, cmd=name)
arg = arg[1:-1]
args = arg.split(",")
proc = []
for arg in args:
if arg.startswith("hex:"):
arg = unhexlify(arg[4:]).decode('utf-8')
proc.append(arg)
return Command(name, proc) | [
"Decode a string encoded command back into a Command object.\n\n Args:\n command_str (str): The encoded command string output from a\n previous call to encode.\n\n Returns:\n Command: The decoded Command object.\n "
] |
Please provide a description of the function:def receive(self, sequence, args):
# If we are told to ignore sequence numbers, just pass the packet on
if not self._reorder:
self._callback(*args)
return
# If this packet is in the past, drop it
if self._next_expected is not None and sequence < self._next_expected:
print("Dropping out of order packet, seq=%d" % sequence)
return
self._out_of_order.append((sequence, args))
self._out_of_order.sort(key=lambda x: x[0])
# If we have received packets, attempt to process them in order
while len(self._out_of_order) > 0:
seq, args = self._out_of_order[0]
if self._next_expected is not None and seq != self._next_expected:
return
self._callback(*args)
self._out_of_order.pop(0)
self._next_expected = seq+1 | [
"Receive one packet\n\n If the sequence number is one we've already seen before, it is dropped.\n\n If it is not the next expected sequence number, it is put into the\n _out_of_order queue to be processed once the holes in sequence number\n are filled in.\n\n Args:\n sequence (int): The sequence number of the received packet\n args (list): The list of packet contents that will be passed to callback\n as callback(*args)\n "
] |
Please provide a description of the function:def set_vars(env):
desired = env.get('MWCW_VERSION', '')
# return right away if the variables are already set
if isinstance(desired, MWVersion):
return 1
elif desired is None:
return 0
versions = find_versions()
version = None
if desired:
for v in versions:
if str(v) == desired:
version = v
elif versions:
version = versions[-1]
env['MWCW_VERSIONS'] = versions
env['MWCW_VERSION'] = version
if version is None:
return 0
env.PrependENVPath('PATH', version.clpath)
env.PrependENVPath('PATH', version.dllpath)
ENV = env['ENV']
ENV['CWFolder'] = version.path
ENV['LM_LICENSE_FILE'] = version.license
plus = lambda x: '+%s' % x
ENV['MWCIncludes'] = os.pathsep.join(map(plus, version.includes))
ENV['MWLibraries'] = os.pathsep.join(map(plus, version.libs))
return 1 | [
"Set MWCW_VERSION, MWCW_VERSIONS, and some codewarrior environment vars\n\n MWCW_VERSIONS is set to a list of objects representing installed versions\n\n MWCW_VERSION is set to the version object that will be used for building.\n MWCW_VERSION can be set to a string during Environment\n construction to influence which version is chosen, otherwise\n the latest one from MWCW_VERSIONS is used.\n\n Returns true if at least one version is found, false otherwise\n "
] |
Please provide a description of the function:def find_versions():
versions = []
### This function finds CodeWarrior by reading from the registry on
### Windows. Some other method needs to be implemented for other
### platforms, maybe something that calls env.WhereIs('mwcc')
if SCons.Util.can_read_reg:
try:
HLM = SCons.Util.HKEY_LOCAL_MACHINE
product = 'SOFTWARE\\Metrowerks\\CodeWarrior\\Product Versions'
product_key = SCons.Util.RegOpenKeyEx(HLM, product)
i = 0
while True:
name = product + '\\' + SCons.Util.RegEnumKey(product_key, i)
name_key = SCons.Util.RegOpenKeyEx(HLM, name)
try:
version = SCons.Util.RegQueryValueEx(name_key, 'VERSION')
path = SCons.Util.RegQueryValueEx(name_key, 'PATH')
mwv = MWVersion(version[0], path[0], 'Win32-X86')
versions.append(mwv)
except SCons.Util.RegError:
pass
i = i + 1
except SCons.Util.RegError:
pass
return versions | [
"Return a list of MWVersion objects representing installed versions"
] |
Please provide a description of the function:def generate(env):
import SCons.Defaults
import SCons.Tool
set_vars(env)
static_obj, shared_obj = SCons.Tool.createObjBuilders(env)
for suffix in CSuffixes:
static_obj.add_action(suffix, SCons.Defaults.CAction)
shared_obj.add_action(suffix, SCons.Defaults.ShCAction)
for suffix in CXXSuffixes:
static_obj.add_action(suffix, SCons.Defaults.CXXAction)
shared_obj.add_action(suffix, SCons.Defaults.ShCXXAction)
env['CCCOMFLAGS'] = '$CPPFLAGS $_CPPDEFFLAGS $_CPPINCFLAGS -nolink -o $TARGET $SOURCES'
env['CC'] = 'mwcc'
env['CCCOM'] = '$CC $CFLAGS $CCFLAGS $CCCOMFLAGS'
env['CXX'] = 'mwcc'
env['CXXCOM'] = '$CXX $CXXFLAGS $CCCOMFLAGS'
env['SHCC'] = '$CC'
env['SHCCFLAGS'] = '$CCFLAGS'
env['SHCFLAGS'] = '$CFLAGS'
env['SHCCCOM'] = '$SHCC $SHCFLAGS $SHCCFLAGS $CCCOMFLAGS'
env['SHCXX'] = '$CXX'
env['SHCXXFLAGS'] = '$CXXFLAGS'
env['SHCXXCOM'] = '$SHCXX $SHCXXFLAGS $CCCOMFLAGS'
env['CFILESUFFIX'] = '.c'
env['CXXFILESUFFIX'] = '.cpp'
env['CPPDEFPREFIX'] = '-D'
env['CPPDEFSUFFIX'] = ''
env['INCPREFIX'] = '-I'
env['INCSUFFIX'] = '' | [
"Add Builders and construction variables for the mwcc to an Environment."
] |
Please provide a description of the function:def run(self, resources):
if not resources['connection']._port.startswith('jlink'):
raise ArgumentError("FlashBoardStep is currently only possible through jlink", invalid_port=args['port'])
hwman = resources['connection']
debug = hwman.hwman.debug(self._debug_string)
debug.flash(self._file) | [
"Runs the flash step\n\n Args:\n resources (dict): A dictionary containing the required resources that\n we needed access to in order to perform this step.\n "
] |
Please provide a description of the function:def copyto_emitter(target, source, env):
n_target = []
for t in target:
n_target = n_target + [t.File( str( s ) ) for s in source]
return (n_target, source) | [
" changes the path of the source to be under the target (which\n are assumed to be directories.\n "
] |
Please provide a description of the function:def getPharLapPath():
if not SCons.Util.can_read_reg:
raise SCons.Errors.InternalError("No Windows registry module was found")
try:
k=SCons.Util.RegOpenKeyEx(SCons.Util.HKEY_LOCAL_MACHINE,
'SOFTWARE\\Pharlap\\ETS')
val, type = SCons.Util.RegQueryValueEx(k, 'BaseDir')
# The following is a hack...there is (not surprisingly)
# an odd issue in the Phar Lap plug in that inserts
# a bunch of junk data after the phar lap path in the
# registry. We must trim it.
idx=val.find('\0')
if idx >= 0:
val = val[:idx]
return os.path.normpath(val)
except SCons.Util.RegError:
raise SCons.Errors.UserError("Cannot find Phar Lap ETS path in the registry. Is it installed properly?") | [
"Reads the registry to find the installed path of the Phar Lap ETS\n development kit.\n\n Raises UserError if no installed version of Phar Lap can\n be found."
] |
Please provide a description of the function:def getPharLapVersion():
include_path = os.path.join(getPharLapPath(), os.path.normpath("include/embkern.h"))
if not os.path.exists(include_path):
raise SCons.Errors.UserError("Cannot find embkern.h in ETS include directory.\nIs Phar Lap ETS installed properly?")
mo = REGEX_ETS_VER.search(open(include_path, 'r').read())
if mo:
return int(mo.group(1))
# Default return for Phar Lap 9.1
return 910 | [
"Returns the version of the installed ETS Tool Suite as a\n decimal number. This version comes from the ETS_VER #define in\n the embkern.h header. For example, '#define ETS_VER 1010' (which\n is what Phar Lap 10.1 defines) would cause this method to return\n 1010. Phar Lap 9.1 does not have such a #define, but this method\n will return 910 as a default.\n\n Raises UserError if no installed version of Phar Lap can\n be found."
] |
Please provide a description of the function:def addPharLapPaths(env):
ph_path = getPharLapPath()
try:
env_dict = env['ENV']
except KeyError:
env_dict = {}
env['ENV'] = env_dict
SCons.Util.AddPathIfNotExists(env_dict, 'PATH',
os.path.join(ph_path, 'bin'))
SCons.Util.AddPathIfNotExists(env_dict, 'INCLUDE',
os.path.join(ph_path, 'include'))
SCons.Util.AddPathIfNotExists(env_dict, 'LIB',
os.path.join(ph_path, 'lib'))
SCons.Util.AddPathIfNotExists(env_dict, 'LIB',
os.path.join(ph_path, os.path.normpath('lib/vclib')))
env['PHARLAP_PATH'] = getPharLapPath()
env['PHARLAP_VERSION'] = str(getPharLapVersion()) | [
"This function adds the path to the Phar Lap binaries, includes,\n and libraries, if they are not already there."
] |
Please provide a description of the function:def _update_or_init_po_files(target, source, env):
import SCons.Action
from SCons.Tool.GettextCommon import _init_po_files
for tgt in target:
if tgt.rexists():
action = SCons.Action.Action('$MSGMERGECOM', '$MSGMERGECOMSTR')
else:
action = _init_po_files
status = action([tgt], source, env)
if status : return status
return 0 | [
" Action function for `POUpdate` builder "
] |
Please provide a description of the function:def _POUpdateBuilder(env, **kw):
import SCons.Action
from SCons.Tool.GettextCommon import _POFileBuilder
action = SCons.Action.Action(_update_or_init_po_files, None)
return _POFileBuilder(env, action=action, target_alias='$POUPDATE_ALIAS') | [
" Create an object of `POUpdate` builder "
] |
Please provide a description of the function:def _POUpdateBuilderWrapper(env, target=None, source=_null, **kw):
if source is _null:
if 'POTDOMAIN' in kw:
domain = kw['POTDOMAIN']
elif 'POTDOMAIN' in env and env['POTDOMAIN']:
domain = env['POTDOMAIN']
else:
domain = 'messages'
source = [ domain ] # NOTE: Suffix shall be appended automatically
return env._POUpdateBuilder(target, source, **kw) | [
" Wrapper for `POUpdate` builder - make user's life easier "
] |
Please provide a description of the function:def generate(env,**kw):
from SCons.Tool.GettextCommon import _detect_msgmerge
try:
env['MSGMERGE'] = _detect_msgmerge(env)
except:
env['MSGMERGE'] = 'msgmerge'
env.SetDefault(
POTSUFFIX = ['.pot'],
POSUFFIX = ['.po'],
MSGMERGECOM = '$MSGMERGE $MSGMERGEFLAGS --update $TARGET $SOURCE',
MSGMERGECOMSTR = '',
MSGMERGEFLAGS = [ ],
POUPDATE_ALIAS = 'po-update'
)
env.Append(BUILDERS = { '_POUpdateBuilder':_POUpdateBuilder(env) })
env.AddMethod(_POUpdateBuilderWrapper, 'POUpdate')
env.AlwaysBuild(env.Alias('$POUPDATE_ALIAS')) | [
" Generate the `xgettext` tool "
] |
Please provide a description of the function:def _create_filter(self):
self._product_filter = {}
for chip in itertools.chain(iter(self._family.targets(self._tile.short_name)),
iter([self._family.platform_independent_target()])):
for key, prods in chip.property('depends', {}).items():
name, _, _ = key.partition(',')
for prod in prods:
if prod not in self._product_filter:
self._product_filter[prod] = set()
self._product_filter[prod].add(name) | [
"Create a filter of all of the dependency products that we have selected."
] |
Please provide a description of the function:def _create_product_map(self):
self._product_map = {}
for dep in self._tile.dependencies:
try:
dep_tile = IOTile(os.path.join('build', 'deps', dep['unique_id']))
except (ArgumentError, EnvironmentError):
raise BuildError("Could not find required dependency", name=dep['name'])
self._add_products(dep_tile)
self._add_products(self._tile, show_all=True) | [
"Create a map of all products produced by this or a dependency."
] |
Please provide a description of the function:def _add_products(self, tile, show_all=False):
products = tile.products
unique_id = tile.unique_id
base_path = tile.output_folder
for prod_path, prod_type in products.items():
# We need to handle include_directories and tilebus_definitions
# specially since those are stored reversed in module_settings.json
# for historical reasons. Currently we don't support resolving
# tilebus_definitions or include_directories in ProductResolver
if prod_path == 'tilebus_definitions' or prod_path == 'include_directories':
continue
if prod_type in self.IGNORED_PRODUCTS:
continue
prod_base = os.path.basename(prod_path)
if prod_type not in self._product_map:
self._product_map[prod_type] = {}
prod_map = self._product_map[prod_type]
if prod_base not in prod_map:
prod_map[prod_base] = []
full_path = os.path.normpath(os.path.join(base_path, prod_path))
info = ProductInfo(prod_base, full_path, unique_id, not show_all and prod_base not in self._product_filter)
prod_map[prod_base].append(info) | [
"Add all products from a tile into our product map."
] |
Please provide a description of the function:def find_all(self, product_type, short_name, include_hidden=False):
all_prods = []
# If product_type is not return products of all types
if product_type is None:
for prod_dict in self._product_map.values():
all_prods.extend([prod for prod in prod_dict.get(short_name, []) if include_hidden or not prod.hidden])
return all_prods
all_prods = self._product_map.get(product_type, {})
return [prod for prod in all_prods.get(short_name, []) if include_hidden or not prod.hidden] | [
"Find all providers of a given product by its short name.\n\n This function will return all providers of a given product. If you\n want to ensure that a product's name is unique among all dependencies,\n you should use find_unique.\n\n Args:\n product_type (str): The type of product that we are looking for, like\n firmware_image, library etc.\n short_name (str): The short name of the product that we wish to find,\n usually its os.path.basename()\n include_hidden (bool): Return products that are hidden and not selected\n as visible in the depends section of this tile's module settings.\n This defaults to False.\n\n Returns:\n list of ProductInfo: A list of all of the matching products. If no matching\n products are found, an empty list is returned. If you want to raise\n a BuildError in that case use find_unique.\n "
] |
Please provide a description of the function:def find_unique(self, product_type, short_name, include_hidden=False):
prods = self.find_all(product_type, short_name, include_hidden)
if len(prods) == 0:
raise BuildError("Could not find product by name in find_unique", name=short_name, type=product_type)
if len(prods) > 1:
raise BuildError("Multiple providers of the same product in find_unique", name=short_name, type=product_type, products=prods)
if self._tracking:
self._resolved_products.append(prods[0])
return prods[0] | [
"Find the unique provider of a given product by its short name.\n\n This function will ensure that the product is only provided by exactly\n one tile (either this tile or one of its dependencies and raise a\n BuildError if not.\n\n Args:\n product_type (str): The type of product that we are looking for, like\n firmware_image, library etc.\n short_name (str): The short name of the product that we wish to find,\n usually its os.path.basename()\n include_hidden (bool): Return products that are hidden and not selected\n as visible in the depends section of this tile's module settings.\n This defaults to False.\n\n Returns:\n ProductInfo: The information of the one unique provider of this product.\n "
] |
Please provide a description of the function:def build_parser():
parser = argparse.ArgumentParser(description=DESCRIPTION, formatter_class=argparse.RawDescriptionHelpFormatter)
parser.add_argument('-o', '--output', help="The output file to save. If multiple files are generated "
"this is the output prefix for them all.")
parser.add_argument('-f', '--format', default="json", choices=['c_files', 'command_map_c', 'command_map_h',
'config_map_c', 'config_map_h', 'json'],
type=str, help=u"the output format for the compiled result.")
parser.add_argument('bus_definition', nargs="+", help="One or more tilebus definition files to compile")
return parser | [
"Create command line argument parser."
] |
def main(raw_args=None):
    """Run the iotile-tbcompile script.

    Args:
        raw_args (list): Optional list of command line arguments. If not
            passed these are pulled from sys.argv.

    Returns:
        int: 0 on success, 1 on error.
    """
    multifile_choices = frozenset(['c_files'])

    if raw_args is None:
        raw_args = sys.argv[1:]

    parser = build_parser()
    args = parser.parse_args(raw_args)

    if args.output is None and args.format in multifile_choices:
        print("You must specify an output file with -o, --output when "
              "using a format that produces multiple files (-f %s)" % args.format)
        return 1

    desc = TBDescriptor(args.bus_definition)

    if args.format == 'json':
        print("JSON output is not yet supported")
        return 1

    block = desc.get_block()

    # Map each single-file output format to its template.
    template_map = {
        'command_map_c': 'command_map_c.c.tpl',
        'command_map_h': 'command_map_c.h.tpl',
        'config_map_c': 'config_variables_c.c.tpl',
        'config_map_h': 'config_variables_c.h.tpl'
    }

    template_name = template_map.get(args.format)
    if template_name is None:
        # Guard against formats with no single template (e.g. c_files);
        # previously None was passed straight into render_template().
        print("Output format '%s' is not supported by this command" % args.format)
        return 1

    data = block.render_template(template_name)
    print(data)
    return 0
def generate(env):
    """Add RPCGEN Builders and construction variables for an Environment."""
    # One builder per rpcgen output mode; all consume .x interface files.
    client = Builder(action=rpcgen_client, suffix='_clnt.c', src_suffix='.x')
    header = Builder(action=rpcgen_header, suffix='.h', src_suffix='.x')
    service = Builder(action=rpcgen_service, suffix='_svc.c', src_suffix='.x')
    xdr = Builder(action=rpcgen_xdr, suffix='_xdr.c', src_suffix='.x')
    env.Append(BUILDERS={'RPCGenClient': client,
                         'RPCGenHeader': header,
                         'RPCGenService': service,
                         'RPCGenXDR': xdr})
    # Default tool invocation and per-mode flag variables.
    env['RPCGEN'] = 'rpcgen'
    env['RPCGENFLAGS'] = SCons.Util.CLVar('')
    env['RPCGENCLIENTFLAGS'] = SCons.Util.CLVar('')
    env['RPCGENHEADERFLAGS'] = SCons.Util.CLVar('')
    env['RPCGENSERVICEFLAGS'] = SCons.Util.CLVar('')
    env['RPCGENXDRFLAGS'] = SCons.Util.CLVar('')
def build_parser():
    """Build the argument parser for the release script."""
    parser = argparse.ArgumentParser("Release packages to pypi")
    parser.add_argument('--check', '-c', action="store_true", help="Do a dry run without uploading")
    parser.add_argument('component', help="The component to release as component-version")
    return parser
def send_slack_message(message):
    """Send a message to the slack channel #coretools.

    Args:
        message (str): The message text to post.

    Raises:
        EnvironmentError: If SLACK_WEB_HOOK is not set in the environment.
        RuntimeError: If the post to the webhook does not return HTTP 200.
    """
    if 'SLACK_WEB_HOOK' not in os.environ:
        raise EnvironmentError("Could not find SLACK_WEB_HOOK environment variable")

    webhook = os.environ['SLACK_WEB_HOOK']

    resp = requests.post(webhook, json={'text': message, 'username': 'Release Bot'})
    if resp.status_code != 200:
        raise RuntimeError("Could not post message to slack channel")
def get_release_component(comp):
    """Split the command line argument into a component name and expected version.

    The argument has the form <name>-<version>; the split happens at the last
    dash so component names that themselves contain dashes still parse.

    Raises:
        EnvironmentError: If the name is not a known release component.
    """
    # rsplit (not split) so e.g. "iotile-transport-foo-1.0.0" parses correctly.
    name, vers = comp.rsplit("-", 1)

    if name not in comp_names:
        print("Known components:")
        for known_name in comp_names:  # renamed: previously shadowed the parameter
            print("- %s" % known_name)

        raise EnvironmentError("Unknown release component name '%s'" % name)

    return name, vers
def check_compatibility(name):
    """Verify if we can release this component on the running interpreter.

    All components are released from python 2.7 by default unless they specify
    that they are python 3 only, in which case they are released from python 3.6.
    """
    comp = comp_names[name]

    python3_only = (comp.compat == "python3")
    running_python3 = (sys.version_info.major >= 3)

    # Release python3-only components from python 3 and all others from python 2.
    return python3_only == running_python3
def check_version(component, expected_version):
    """Make sure the package version in setuptools matches what we expect it to be.

    Args:
        component (str): Short name of the component to check.
        expected_version (str): The version string the component should declare.

    Raises:
        EnvironmentError: If the version declared in the component's version.py
            does not match expected_version.
    """
    comp = comp_names[component]
    compath = os.path.realpath(os.path.abspath(comp.path))

    sys.path.insert(0, compath)
    try:
        import version

        if version.version != expected_version:
            raise EnvironmentError("Version mismatch during release, expected={}, found={}"
                                   .format(expected_version, version.version))
    finally:
        # Undo the path hack and drop the cached module so a later call for a
        # different component does not see this component's version.py.
        sys.path.remove(compath)
        sys.modules.pop('version', None)
def build_component(component):
    """Create an sdist and a wheel for the desired component."""
    comp = comp_names[component]

    curr = os.getcwd()
    os.chdir(comp.path)

    args = ['-q', 'clean', 'sdist', 'bdist_wheel']
    if comp.compat == 'universal':
        args.append('--universal')

    try:
        setuptools.sandbox.run_setup('setup.py', args)
    finally:
        # Always restore the working directory, even if the build fails.
        os.chdir(curr)
def upload_component(component):
    """Upload a given component to pypi.

    The pypi username and password must either be specified in a ~/.pypirc
    file or in environment variables PYPI_USER and PYPI_PASS.
    """
    if 'PYPI_USER' in os.environ and 'PYPI_PASS' in os.environ:
        pypi_user = os.environ['PYPI_USER']
        pypi_pass = os.environ['PYPI_PASS']
    else:
        pypi_user = None
        pypi_pass = None
        print("No PYPI user information in environment")

    comp = comp_names[component]
    distpath = os.path.join(comp.path, 'dist', '*')
    distpath = os.path.realpath(os.path.abspath(distpath))
    dists = glob.glob(distpath)

    # Invoke upload() directly since a subprocess call of the twine CLI has
    # cross platform issues.  (The never-used twine argument list that was
    # built here before was dead code and has been removed.)
    upload(dists, 'pypi', False, None, pypi_user, pypi_pass, None, None,
           '~/.pypirc', False, None, None, None)
def uuid_to_slug(uuid):
    """Return an IOTile Cloud compatible device slug.

    Args:
        uuid (int): The device id.

    Returns:
        str: A slug in the form of d--0000-0000-0000-0001.

    Raises:
        ArgumentError: If uuid is not an integer or is outside [0, 0x7fffffff].
    """
    if not isinstance(uuid, int):
        raise ArgumentError("Invalid id that is not an integer", id=uuid)

    if uuid < 0 or uuid > 0x7fffffff:
        # For now, limit support to a signed integer (which on some platforms
        # can be 32 bits).
        raise ArgumentError("Integer should be a positive number and smaller than 0x7fffffff", id=uuid)

    return '--'.join(['d', int64gid(uuid)])
def package(env, target, source, PACKAGEROOT, NAME, VERSION, DESCRIPTION,
            SUMMARY, X_IPK_PRIORITY, X_IPK_SECTION, SOURCE_URL,
            X_IPK_MAINTAINER, X_IPK_DEPENDS, **kw):
    """Prepare the packageroot directory for packaging with the ipkg builder."""
    SCons.Tool.Tool('ipkg').generate(env)

    # Setup the Ipkg builder.
    bld = env['BUILDERS']['Ipkg']
    target, source = stripinstallbuilder(target, source, env)
    target, source = putintopackageroot(target, source, env, PACKAGEROOT)

    # This should be overrideable from the construction environment,
    # which it is by using ARCHITECTURE=.
    # Guessing based on what os.uname() returns at least allows it
    # to work for both i386 and x86_64 Linux systems.
    archmap = {
        'i686': 'i386',
        'i586': 'i386',
        'i486': 'i386',
    }

    buildarchitecture = os.uname()[4]
    buildarchitecture = archmap.get(buildarchitecture, buildarchitecture)

    if 'ARCHITECTURE' in kw:
        buildarchitecture = kw['ARCHITECTURE']

    # Setup kw to contain the mandatory arguments to this function.
    # Do this before calling any builder or setup function.
    loc = locals()
    del loc['kw']
    kw.update(loc)
    del kw['source'], kw['target'], kw['env']

    # Generate the specfile directory.
    specfile = gen_ipk_dir(PACKAGEROOT, source, env, kw)

    # Override the default target name with the conventional ipk file name.
    if str(target[0]) == "%s-%s" % (NAME, VERSION):
        target = ["%s_%s_%s.ipk" % (NAME, VERSION, buildarchitecture)]

    # Now apply the Ipkg builder.
    return bld(env, target, specfile, **kw)
def build_specfiles(source, target, env):
    """Filter the targets for the needed files and use the variables in env
    to create the specfile.
    """
    #
    # At first we care for the CONTROL/control file, which is the main file
    # for ipk.
    #
    # For this we need to open multiple files in random order, so we store
    # them in a dict so they can be easily accessed.
    #
    opened_files = {}

    def open_file(needle, haystack):
        """Return (opening on first use) the writable file whose path contains needle."""
        try:
            return opened_files[needle]
        except KeyError:
            # Python 3 filter() is not subscriptable; use a comprehension.
            node = [x for x in haystack if x.get_path().rfind(needle) != -1][0]
            opened_files[needle] = open(node.get_abspath(), 'w')
            return opened_files[needle]

    control_file = open_file('control', target)

    if 'X_IPK_DESCRIPTION' not in env:
        env['X_IPK_DESCRIPTION'] = "%s\n %s" % (env['SUMMARY'],
                                                env['DESCRIPTION'].replace('\n', '\n '))

    content = """
Package: $NAME
Version: $VERSION
Priority: $X_IPK_PRIORITY
Section: $X_IPK_SECTION
Source: $SOURCE_URL
Architecture: $ARCHITECTURE
Maintainer: $X_IPK_MAINTAINER
Depends: $X_IPK_DEPENDS
Description: $X_IPK_DESCRIPTION
"""

    control_file.write(env.subst(content))

    #
    # Now handle the various other files, whose purpose is to set post-/pre-
    # scripts and mark files as config files.
    #
    # We do so by filtering the source files for files which are marked with
    # the "config" tag and afterwards we do the same for x_ipk_postrm,
    # x_ipk_prerm, x_ipk_postinst and x_ipk_preinst tags.
    #
    # The first one will write the name of the file into the file
    # CONTROL/conffiles, the latter add the content of the x_ipk_* variable
    # into the same named file.
    #
    for f in [x for x in source if 'PACKAGING_CONFIG' in dir(x)]:
        config = open_file('conffiles', target)  # was missing the haystack argument
        config.write(f.PACKAGING_INSTALL_LOCATION)
        config.write('\n')

    for tag in 'POSTRM PRERM POSTINST PREINST'.split():  # renamed from 'str' (shadowed builtin)
        name = "PACKAGING_X_IPK_%s" % tag
        for f in [x for x in source if name in dir(x)]:
            script_file = open_file(name, target)  # was missing the haystack argument
            script_file.write(env[tag])

    #
    # Close all opened files.
    for f in list(opened_files.values()):
        f.close()

    # Call a user specified hook.  Previously its return value was appended
    # to `content` after the control file had already been written, which
    # had no effect; the return value is intentionally ignored.
    if 'CHANGE_SPECFILE' in env:
        env['CHANGE_SPECFILE'](target)

    return 0
def generate(env):
    """Add Builders and construction variables for javah to an Environment."""
    java_javah = SCons.Tool.CreateJavaHBuilder(env)
    java_javah.emitter = emit_java_headers

    # Generators that expand to the output flag and class path at build time.
    env['_JAVAHOUTFLAG'] = JavaHOutFlagGenerator
    env['JAVAH'] = 'javah'
    env['JAVAHFLAGS'] = SCons.Util.CLVar('')
    env['_JAVAHCLASSPATH'] = getJavaHClassPath
    env['JAVAHCOM'] = '$JAVAH $JAVAHFLAGS $_JAVAHOUTFLAG $_JAVAHCLASSPATH ${SOURCES.attributes.java_classname}'
    env['JAVACLASSSUFFIX'] = '.class'
def dump(self):
    """Serialize the state of this InMemoryStorageEngine to a dict.

    Returns:
        dict: The serialized data.
    """
    return {
        u'storage_data': [reading.asdict() for reading in self.storage_data],
        u'streaming_data': [reading.asdict() for reading in self.streaming_data]
    }
def restore(self, state):
    """Restore the state of this InMemoryStorageEngine from a dict.

    Args:
        state (dict): A previously serialized state as produced by dump().

    Raises:
        ArgumentError: If the serialized state contains more readings than
            this engine was configured to hold.
    """
    storage_data = state.get(u'storage_data', [])
    streaming_data = state.get(u'streaming_data', [])

    if len(storage_data) > self.storage_length or len(streaming_data) > self.streaming_length:
        raise ArgumentError("Cannot restore InMemoryStorageEngine, too many readings",
                            storage_size=len(storage_data), storage_max=self.storage_length,
                            streaming_size=len(streaming_data), streaming_max=self.streaming_length)

    self.storage_data = [IOTileReading.FromDict(x) for x in storage_data]
    self.streaming_data = [IOTileReading.FromDict(x) for x in streaming_data]
def count_matching(self, selector, offset=0):
    """Count the number of readings matching selector.

    Args:
        selector (DataStreamSelector): The selector that we want to
            count matching readings for.
        offset (int): The starting offset that we should begin counting at.

    Returns:
        int: The number of matching readings.

    Raises:
        ArgumentError: If the selector is neither an output nor a buffered
            storage selector.
    """
    if selector.output:
        data = self.streaming_data
    elif selector.buffered:
        data = self.storage_data
    else:
        raise ArgumentError("You can only pass a buffered selector to count_matching", selector=selector)

    count = 0
    for reading in data[offset:]:
        stream = DataStream.FromEncoded(reading.stream)
        if selector.matches(stream):
            count += 1

    return count
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.