Please provide a description of the function:def fromdict(self, dikt):
    s = dikt.copy()
    start_addr = s.get('start_addr')
    if start_addr is not None:
        del s['start_addr']
    for k in dict_keys_g(s):
        if type(k) not in IntTypes or k < 0:
            raise ValueError('Source dictionary should have only int keys')
    self._buf.update(s)
    if start_addr is not None:
        self.start_addr = start_addr | [
"Load data from dictionary. Dictionary should contain int keys\n representing addresses. Values should be the data to be stored in\n those addresses in unsigned char form (i.e. not strings).\n The dictionary may contain the key, ``start_addr``\n to indicate the starting address of the data as described in README.\n\n The contents of the dict will be merged with this object and will\n overwrite any conflicts. This function is not necessary if the\n object was initialized with source specified.\n "
] |
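A minimal usage sketch for fromdict(), assuming the usual import path for the intelhex package:

from intelhex import IntelHex

ih = IntelHex()
# int keys are addresses; values are unsigned bytes (0-255);
# the optional 'start_addr' key carries the start address record
ih.fromdict({0x0000: 0x12, 0x0001: 0x34, 'start_addr': {'EIP': 0x08000000}})
assert ih[0x0000] == 0x12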
Please provide a description of the function:def frombytes(self, bytes, offset=0):
    for b in bytes:
        self._buf[offset] = b
        offset += 1 | [
"Load data from array or list of bytes.\n Similar to loadbin() method but works directly with iterable bytes.\n "
] |
Please provide a description of the function:def _get_start_end(self, start=None, end=None, size=None):
    if (start, end) == (None, None) and self._buf == {}:
        raise EmptyIntelHexError
    if size is not None:
        if None not in (start, end):
            raise ValueError("tobinarray: you can't use start,end and size"
                             " arguments in the same time")
        if (start, end) == (None, None):
            start = self.minaddr()
        if start is not None:
            end = start + size - 1
        else:
            start = end - size + 1
            if start < 0:
                raise ValueError("tobinarray: invalid size (%d) "
                                 "for given end address (%d)" % (size, end))
    else:
        if start is None:
            start = self.minaddr()
        if end is None:
            end = self.maxaddr()
        if start > end:
            start, end = end, start
    return start, end | [
"Return default values for start and end if they are None.\n If this IntelHex object is empty then it's error to\n invoke this method with both start and end as None.\n "
] |
Please provide a description of the function:def tobinarray(self, start=None, end=None, pad=_DEPRECATED, size=None):
    ''' Convert this object to binary form as array. If start and end
    unspecified, they will be inferred from the data.
    @param  start   start address of output bytes.
    @param  end     end address of output bytes (inclusive).
    @param  pad     [DEPRECATED PARAMETER, please use self.padding instead]
                    fill empty spaces with this value
                    (if pad is None then this method uses self.padding).
    @param  size    size of the block, used with start or end parameter.
    @return         array of unsigned char data.
    '''
    if not isinstance(pad, _DeprecatedParam):
        print("IntelHex.tobinarray: 'pad' parameter is deprecated.")
        if pad is not None:
            print("Please, use IntelHex.padding attribute instead.")
        else:
            print("Please, don't pass it explicitly.")
            print("Use syntax like this: ih.tobinarray(start=xxx, end=yyy, size=zzz)")
    else:
        pad = None
    return self._tobinarray_really(start, end, pad, size) | []
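A sketch of the two non-deprecated calling styles, assuming ih is a populated IntelHex object:

block = ih.tobinarray(start=0x100, end=0x1FF)   # explicit window; end is inclusive
block = ih.tobinarray(start=0x100, size=256)    # same window expressed as start + size
# holes in the address space are filled with ih.padding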
Please provide a description of the function:def _tobinarray_really(self, start, end, pad, size):
    if pad is None:
        pad = self.padding
    bin = array('B')
    if self._buf == {} and None in (start, end):
        return bin
    if size is not None and size <= 0:
        raise ValueError("tobinarray: wrong value for size")
    start, end = self._get_start_end(start, end, size)
    for i in range_g(start, end+1):
        bin.append(self._buf.get(i, pad))
    return bin | [
"Return binary array."
] |
Please provide a description of the function:def tobinstr(self, start=None, end=None, pad=_DEPRECATED, size=None):
    ''' Convert to binary form and return as binary string.
    @param  start   start address of output bytes.
    @param  end     end address of output bytes (inclusive).
    @param  pad     [DEPRECATED PARAMETER, please use self.padding instead]
                    fill empty spaces with this value
                    (if pad is None then this method uses self.padding).
    @param  size    size of the block, used with start or end parameter.
    @return         bytes string of binary data.
    '''
    if not isinstance(pad, _DeprecatedParam):
        print("IntelHex.tobinstr: 'pad' parameter is deprecated.")
        if pad is not None:
            print("Please, use IntelHex.padding attribute instead.")
        else:
            print("Please, don't pass it explicitly.")
            print("Use syntax like this: ih.tobinstr(start=xxx, end=yyy, size=zzz)")
    else:
        pad = None
    return self._tobinstr_really(start, end, pad, size) | []
Please provide a description of the function:def tobinfile(self, fobj, start=None, end=None, pad=_DEPRECATED, size=None):
    '''Convert to binary and write to file.
    @param  fobj    file name or file object for writing output bytes.
    @param  start   start address of output bytes.
    @param  end     end address of output bytes (inclusive).
    @param  pad     [DEPRECATED PARAMETER, please use self.padding instead]
                    fill empty spaces with this value
                    (if pad is None then this method uses self.padding).
    @param  size    size of the block, used with start or end parameter.
    '''
    if not isinstance(pad, _DeprecatedParam):
        print("IntelHex.tobinfile: 'pad' parameter is deprecated.")
        if pad is not None:
            print("Please, use IntelHex.padding attribute instead.")
        else:
            print("Please, don't pass it explicitly.")
            print("Use syntax like this: ih.tobinfile(start=xxx, end=yyy, size=zzz)")
    else:
        pad = None
    if getattr(fobj, "write", None) is None:
        fobj = open(fobj, "wb")
        close_fd = True
    else:
        close_fd = False
    fobj.write(self._tobinstr_really(start, end, pad, size))
    if close_fd:
        fobj.close() | []
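A usage sketch; 'firmware.bin' is a hypothetical output path:

ih.tobinfile('firmware.bin')        # a file name is opened and closed for you
with open('firmware.bin', 'wb') as f:
    ih.tobinfile(f)                 # a passed-in file object is left open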
Please provide a description of the function:def todict(self):
    '''Convert to python dictionary.
    @return         dict suitable for initializing another IntelHex object.
    '''
    r = {}
    r.update(self._buf)
    if self.start_addr:
        r['start_addr'] = self.start_addr
    return r | []
Please provide a description of the function:def write_hex_file(self, f, write_start_addr=True, eolstyle='native', byte_count=16):
    if byte_count > 255 or byte_count < 1:
        raise ValueError("wrong byte_count value: %s" % byte_count)
    fwrite = getattr(f, "write", None)
    if fwrite:
        fobj = f
        fclose = None
    else:
        fobj = open(f, 'w')
        fwrite = fobj.write
        fclose = fobj.close
    eol = IntelHex._get_eol_textfile(eolstyle, sys.platform)
    # Translation table for uppercasing hex ascii string.
    # timeit shows that using hexstr.translate(table)
    # is faster than hexstr.upper():
    # 0.452ms vs. 0.652ms (translate vs. upper)
    if sys.version_info[0] >= 3:
        # Python 3
        table = bytes(range_l(256)).upper()
    else:
        # Python 2
        table = ''.join(chr(i).upper() for i in range_g(256))
    # start address record if any
    if self.start_addr and write_start_addr:
        keys = dict_keys(self.start_addr)
        keys.sort()
        bin = array('B', asbytes('\0'*9))
        if keys == ['CS', 'IP']:
            # Start Segment Address Record
            bin[0] = 4      # reclen
            bin[1] = 0      # offset msb
            bin[2] = 0      # offset lsb
            bin[3] = 3      # rectyp
            cs = self.start_addr['CS']
            bin[4] = (cs >> 8) & 0x0FF
            bin[5] = cs & 0x0FF
            ip = self.start_addr['IP']
            bin[6] = (ip >> 8) & 0x0FF
            bin[7] = ip & 0x0FF
            bin[8] = (-sum(bin)) & 0x0FF    # chksum
            fwrite(':' +
                   asstr(hexlify(array_tobytes(bin)).translate(table)) +
                   eol)
        elif keys == ['EIP']:
            # Start Linear Address Record
            bin[0] = 4      # reclen
            bin[1] = 0      # offset msb
            bin[2] = 0      # offset lsb
            bin[3] = 5      # rectyp
            eip = self.start_addr['EIP']
            bin[4] = (eip >> 24) & 0x0FF
            bin[5] = (eip >> 16) & 0x0FF
            bin[6] = (eip >> 8) & 0x0FF
            bin[7] = eip & 0x0FF
            bin[8] = (-sum(bin)) & 0x0FF    # chksum
            fwrite(':' +
                   asstr(hexlify(array_tobytes(bin)).translate(table)) +
                   eol)
        else:
            if fclose:
                fclose()
            raise InvalidStartAddressValueError(start_addr=self.start_addr)
    # data
    addresses = dict_keys(self._buf)
    addresses.sort()
    addr_len = len(addresses)
    if addr_len:
        minaddr = addresses[0]
        maxaddr = addresses[-1]
        if maxaddr > 65535:
            need_offset_record = True
        else:
            need_offset_record = False
        high_ofs = 0
        cur_addr = minaddr
        cur_ix = 0
        while cur_addr <= maxaddr:
            if need_offset_record:
                bin = array('B', asbytes('\0'*7))
                bin[0] = 2      # reclen
                bin[1] = 0      # offset msb
                bin[2] = 0      # offset lsb
                bin[3] = 4      # rectyp
                high_ofs = int(cur_addr >> 16)
                b = divmod(high_ofs, 256)
                bin[4] = b[0]   # msb of high_ofs
                bin[5] = b[1]   # lsb of high_ofs
                bin[6] = (-sum(bin)) & 0x0FF    # chksum
                fwrite(':' +
                       asstr(hexlify(array_tobytes(bin)).translate(table)) +
                       eol)
            while True:
                # produce one record
                low_addr = cur_addr & 0x0FFFF
                # chain_len off by 1
                chain_len = min(byte_count-1, 65535-low_addr, maxaddr-cur_addr)
                # search continuous chain
                stop_addr = cur_addr + chain_len
                if chain_len:
                    ix = bisect_right(addresses, stop_addr,
                                      cur_ix,
                                      min(cur_ix+chain_len+1, addr_len))
                    chain_len = ix - cur_ix     # real chain_len
                    # there could be small holes in the chain
                    # but we will catch them by try-except later
                    # so for big continuous files we will work
                    # at maximum possible speed
                else:
                    chain_len = 1   # real chain_len
                bin = array('B', asbytes('\0'*(5+chain_len)))
                b = divmod(low_addr, 256)
                bin[1] = b[0]   # msb of low_addr
                bin[2] = b[1]   # lsb of low_addr
                bin[3] = 0      # rectype
                try:    # if there is small holes we'll catch them
                    for i in range_g(chain_len):
                        bin[4+i] = self._buf[cur_addr+i]
                except KeyError:
                    # we catch a hole so we should shrink the chain
                    chain_len = i
                    bin = bin[:5+i]
                bin[0] = chain_len
                bin[4+chain_len] = (-sum(bin)) & 0x0FF  # chksum
                fwrite(':' +
                       asstr(hexlify(array_tobytes(bin)).translate(table)) +
                       eol)
                # adjust cur_addr/cur_ix
                cur_ix += chain_len
                if cur_ix < addr_len:
                    cur_addr = addresses[cur_ix]
                else:
                    cur_addr = maxaddr + 1
                    break
                high_addr = int(cur_addr >> 16)
                if high_addr > high_ofs:
                    break
    # end-of-file record
    fwrite(":00000001FF" + eol)
    if fclose:
        fclose() | [
"Write data to file f in HEX format.\n\n @param f filename or file-like object for writing\n @param write_start_addr enable or disable writing start address\n record to file (enabled by default).\n If there is no start address in obj, nothing\n will be written regardless of this setting.\n @param eolstyle can be used to force CRLF line-endings\n for output file on different platforms.\n Supported eol styles: 'native', 'CRLF'.\n @param byte_count number of bytes in the data field\n "
] |
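The `(-sum(bin)) & 0x0FF` expressions above compute the Intel HEX record checksum: the two's complement of the low byte of the sum of all preceding record bytes. A worked sketch using the body of an Extended Linear Address record like the one built above:

record = [0x02, 0x00, 0x00, 0x04, 0x00, 0x08]   # reclen, offset, rectyp=04, upper address 0x0008
checksum = (-sum(record)) & 0xFF                # 0xF2
assert (sum(record) + checksum) & 0xFF == 0     # all bytes plus checksum sum to 0 mod 256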
Please provide a description of the function:def tofile(self, fobj, format):
    if format == 'hex':
        self.write_hex_file(fobj)
    elif format == 'bin':
        self.tobinfile(fobj)
    else:
        raise ValueError('format should be either "hex" or "bin";'
                         ' got %r instead' % format) | [
"Write data to hex or bin file. Preferred method over tobin or tohex.\n\n @param fobj file name or file-like object\n @param format file format (\"hex\" or \"bin\")\n "
] |
Please provide a description of the function:def gets(self, addr, length):
    a = array('B', asbytes('\0'*length))
    try:
        for i in range_g(length):
            a[i] = self._buf[addr+i]
    except KeyError:
        raise NotEnoughDataError(address=addr, length=length)
    return array_tobytes(a) | [
"Get string of bytes from given address. If any entries are blank\n from addr through addr+length, a NotEnoughDataError exception will\n be raised. Padding is not used.\n "
] |
Please provide a description of the function:def puts(self, addr, s):
    a = array('B', asbytes(s))
    for i in range_g(len(a)):
        self._buf[addr+i] = a[i] | [
"Put string of bytes at given address. Will overwrite any previous\n entries.\n "
] |
Please provide a description of the function:def getsz(self, addr):
    i = 0
    try:
        while True:
            if self._buf[addr+i] == 0:
                break
            i += 1
    except KeyError:
        raise NotEnoughDataError(msg=('Bad access at 0x%X: '
            'not enough data to read zero-terminated string') % addr)
    return self.gets(addr, i) | [
"Get zero-terminated bytes string from given address. Will raise\n NotEnoughDataError exception if a hole is encountered before a 0.\n "
] |
Please provide a description of the function:def putsz(self, addr, s):
    self.puts(addr, s)
    self._buf[addr+len(s)] = 0 | [
"Put bytes string in object at addr and append terminating zero at end."
] |
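A combined sketch of the four byte-string accessors above, assuming Python 3 bytes objects:

ih = IntelHex()
ih.puts(0x20, b'hello')             # write five bytes starting at 0x20
assert ih.gets(0x20, 5) == b'hello'
ih.putsz(0x40, b'abc')              # writes b'abc' plus a 0 terminator at 0x43
assert ih.getsz(0x40) == b'abc'     # the terminator is not included in the result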
Please provide a description of the function:def dump(self, tofile=None, width=16, withpadding=False):
    if not isinstance(width, int) or width < 1:
        raise ValueError('width must be a positive integer.')
    # The integer can be of float type - does not work with bit operations
    width = int(width)
    if tofile is None:
        tofile = sys.stdout
    # start addr possibly
    if self.start_addr is not None:
        cs = self.start_addr.get('CS')
        ip = self.start_addr.get('IP')
        eip = self.start_addr.get('EIP')
        if eip is not None and cs is None and ip is None:
            tofile.write('EIP = 0x%08X\n' % eip)
        elif eip is None and cs is not None and ip is not None:
            tofile.write('CS = 0x%04X, IP = 0x%04X\n' % (cs, ip))
        else:
            tofile.write('start_addr = %r\n' % self.start_addr)
    # actual data
    addresses = dict_keys(self._buf)
    if addresses:
        addresses.sort()
        minaddr = addresses[0]
        maxaddr = addresses[-1]
        startaddr = (minaddr // width) * width
        endaddr = ((maxaddr // width) + 1) * width
        maxdigits = max(len(hex(endaddr)) - 2, 4)   # Less 2 to exclude '0x'
        templa = '%%0%dX' % maxdigits
        rangewidth = range_l(width)
        if withpadding:
            pad = self.padding
        else:
            pad = None
        for i in range_g(startaddr, endaddr, width):
            tofile.write(templa % i)
            tofile.write(' ')
            s = []
            for j in rangewidth:
                x = self._buf.get(i+j, pad)
                if x is not None:
                    tofile.write(' %02X' % x)
                    if 32 <= x < 127:
                        # GNU less does not like 0x7F (127 decimal) so we'd better show it as dot
                        s.append(chr(x))
                    else:
                        s.append('.')
                else:
                    tofile.write(' --')
                    s.append(' ')
            tofile.write('  |' + ''.join(s) + '|\n') | [
"Dump object content to specified file object or to stdout if None.\n Format is a hexdump with some header information at the beginning,\n addresses on the left, and data on right.\n\n @param tofile file-like object to dump to\n @param width number of bytes per line (i.e. columns)\n @param withpadding print padding character instead of '--'\n @raise ValueError if width is not a positive integer\n "
] |
Please provide a description of the function:def merge(self, other, overlap='error'):
    # check args
    if not isinstance(other, IntelHex):
        raise TypeError('other should be IntelHex object')
    if other is self:
        raise ValueError("Can't merge itself")
    if overlap not in ('error', 'ignore', 'replace'):
        raise ValueError("overlap argument should be either "
                         "'error', 'ignore' or 'replace'")
    # merge data
    this_buf = self._buf
    other_buf = other._buf
    for i in other_buf:
        if i in this_buf:
            if overlap == 'error':
                raise AddressOverlapError(
                    'Data overlapped at address 0x%X' % i)
            elif overlap == 'ignore':
                continue
        this_buf[i] = other_buf[i]
    # merge start_addr
    if self.start_addr != other.start_addr:
        if self.start_addr is None:     # set start addr from other
            self.start_addr = other.start_addr
        elif other.start_addr is None:  # keep existing start addr
            pass
        else:                           # conflict
            if overlap == 'error':
                raise AddressOverlapError(
                    'Starting addresses are different')
            elif overlap == 'replace':
                self.start_addr = other.start_addr | [
"Merge content of other IntelHex object into current object (self).\n @param other other IntelHex object.\n @param overlap action on overlap of data or starting addr:\n - error: raising OverlapError;\n - ignore: ignore other data and keep current data\n in overlapping region;\n - replace: replace data with other data\n in overlapping region.\n\n @raise TypeError if other is not instance of IntelHex\n @raise ValueError if other is the same object as self\n (it can't merge itself)\n @raise ValueError if overlap argument has incorrect value\n @raise AddressOverlapError on overlapped data\n "
] |
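A sketch of the overlap policies, using fromdict() to build two small objects:

a = IntelHex(); a.fromdict({0x00: 0x01})
b = IntelHex(); b.fromdict({0x00: 0xFF, 0x10: 0x02})
a.merge(b, overlap='replace')       # b wins at the overlapping address
assert a[0x00] == 0xFF and a[0x10] == 0x02
# overlap='error' would raise AddressOverlapError; 'ignore' would keep a's 0x01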
Please provide a description of the function:def segments(self):
    addresses = self.addresses()
    if not addresses:
        return []
    elif len(addresses) == 1:
        return [(addresses[0], addresses[0]+1)]
    adjacent_differences = [(b - a) for (a, b) in zip(addresses[:-1], addresses[1:])]
    breaks = [i for (i, x) in enumerate(adjacent_differences) if x > 1]
    endings = [addresses[b] for b in breaks]
    endings.append(addresses[-1])
    beginnings = [addresses[b+1] for b in breaks]
    beginnings.insert(0, addresses[0])
    return [(a, b+1) for (a, b) in zip(beginnings, endings)] | [
"Return a list of ordered tuple objects, representing contiguous occupied data addresses.\n Each tuple has a length of two and follows the semantics of the range and xrange objects.\n The second entry of the tuple is always an integer greater than the first entry.\n "
] |
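A sketch of the half-open (start, stop) tuples that segments() returns:

ih = IntelHex()
ih.fromdict({0: 0xAA, 1: 0xBB, 10: 0xCC})
assert ih.segments() == [(0, 2), (10, 11)]   # two contiguous runs, range() semantics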
Please provide a description of the function:def get_memory_size(self):
    n = sys.getsizeof(self)
    n += sys.getsizeof(self.padding)
    n += total_size(self.start_addr)
    n += total_size(self._buf)
    n += sys.getsizeof(self._offset)
    return n | [
return n | [
"Returns the approximate memory footprint for data."
] |
Please provide a description of the function:def tobinarray(self, start=None, end=None, size=None):
    '''Convert this object to binary form as array (of 2-bytes word data).
    If start and end unspecified, they will be inferred from the data.
    @param  start   start address of output data.
    @param  end     end address of output data (inclusive).
    @param  size    size of the block (number of words),
                    used with start or end parameter.
    @return         array of unsigned short (uint16_t) data.
    '''
    bin = array('H')
    if self._buf == {} and None in (start, end):
        return bin
    if size is not None and size <= 0:
        raise ValueError("tobinarray: wrong value for size")
    start, end = self._get_start_end(start, end, size)
    for addr in range_g(start, end+1):
        bin.append(self[addr])
    return bin | []
Please provide a description of the function:def _from_bytes(bytes):
    assert len(bytes) >= 4
    # calculate checksum
    s = (-sum(bytes)) & 0x0FF
    bin = array('B', bytes + [s])
    return ':' + asstr(hexlify(array_tobytes(bin))).upper() | [
"Takes a list of bytes, computes the checksum, and outputs the entire\n record as a string. bytes should be the hex record without the colon\n or final checksum.\n\n @param bytes list of byte values so far to pack into record.\n @return String representation of one HEX record\n "
] |
Please provide a description of the function:def data(offset, bytes):
    assert 0 <= offset < 65536
    assert 0 < len(bytes) < 256
    b = [len(bytes), (offset >> 8) & 0x0FF, offset & 0x0FF, 0x00] + bytes
    return Record._from_bytes(b) | [
"Return Data record. This constructs the full record, including\n the length information, the record type (0x00), the\n checksum, and the offset.\n\n @param offset load offset of first byte.\n @param bytes list of byte values to pack into record.\n\n @return String representation of one HEX record\n "
] |
Please provide a description of the function:def start_segment_address(cs, ip):
    b = [4, 0, 0, 0x03, (cs >> 8) & 0x0FF, cs & 0x0FF,
         (ip >> 8) & 0x0FF, ip & 0x0FF]
    return Record._from_bytes(b) | [
"Return Start Segment Address Record.\n @param cs 16-bit value for CS register.\n @param ip 16-bit value for IP register.\n\n @return String representation of Intel Hex SSA record.\n "
] |
Please provide a description of the function:def start_linear_address(eip):
    b = [4, 0, 0, 0x05, (eip >> 24) & 0x0FF, (eip >> 16) & 0x0FF,
         (eip >> 8) & 0x0FF, eip & 0x0FF]
    return Record._from_bytes(b) | [
"Return Start Linear Address Record.\n @param eip 32-bit linear address for the EIP register.\n\n @return String representation of Intel Hex SLA record.\n "
] |
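A sketch of composing records with the static helpers above; each returns the full ASCII line including the leading colon and trailing checksum:

rec = Record.data(0x0000, [0x01, 0x02])
assert rec == ':020000000102FB'     # reclen=02, offset=0000, rectyp=00, data, chksum=FB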
Please provide a description of the function:def create_release_settings_action(target, source, env):
    with open(str(source[0]), "r") as fileobj:
        settings = json.load(fileobj)
    settings['release'] = True
    settings['release_date'] = datetime.datetime.utcnow().isoformat()
    settings['dependency_versions'] = {}
    # Also insert the versions of every dependency that we used to build this component
    for dep in env['TILE'].dependencies:
        tile = IOTile(os.path.join('build', 'deps', dep['unique_id']))
        settings['dependency_versions'][dep['unique_id']] = str(tile.parsed_version)
    with open(str(target[0]), "w") as fileobj:
        json.dump(settings, fileobj, indent=4) | [
"Copy module_settings.json and add release and build information\n "
] |
Please provide a description of the function:def copy_include_dirs(tile):
    if 'products' not in tile.settings:
        return
    incdirs = tile.settings['products'].get('include_directories', [])
    incdirs = map(lambda x: os.path.normpath(utilities.join_path(x)), incdirs)
    incdirs = sorted(incdirs, key=lambda x: len(x))
    seen_dirs = pygtrie.PrefixSet(factory=lambda: pygtrie.StringTrie(separator=os.path.sep))
    env = Environment(tools=[])
    # all include directories are relative to the firmware/src directory
    outputbase = os.path.join('build', 'output', 'include')
    inputbase = os.path.join('firmware', 'src')
    for inc in incdirs:
        if inc in seen_dirs:
            continue
        relinput = os.path.join(inputbase, inc)
        finaldir = os.path.join(outputbase, inc)
        for folder, subdirs, filenames in os.walk(relinput):
            relfolder = os.path.relpath(folder, relinput)
            for filename in filenames:
                if filename.endswith(".h"):
                    infile = os.path.join(folder, filename)
                    outfile = os.path.join(finaldir, relfolder, filename)
                    env.Command([outfile], [infile], Copy("$TARGET", "$SOURCE"))
        seen_dirs.add(inc) | [
"Copy all include directories that this tile defines as products in build/output/include\n "
] |
Please provide a description of the function:def copy_extra_files(tile):
    env = Environment(tools=[])
    outputbase = os.path.join('build', 'output')
    for src, dest in tile.settings.get('copy_files', {}).items():
        outputfile = os.path.join(outputbase, dest)
        env.Command([outputfile], [src], Copy("$TARGET", "$SOURCE"))
    resolver = ProductResolver.Create()
    for src, dest in tile.settings.get('copy_products', {}).items():
        prod = resolver.find_unique(None, src)
        outputfile = os.path.join(outputbase, dest)
        env.Command([outputfile], [prod.full_path], Copy("$TARGET", "$SOURCE")) | [
"Copy all files listed in a copy_files and copy_products section.\n\n Files listed in copy_files will be copied from the specified location\n in the current component to the specified path under the output\n folder.\n\n Files listed in copy_products will be looked up with a ProductResolver\n and copied copied to the specified path in the output folder. There\n is not currently a way to specify what type of product is being resolved.\n The `short_name` given must be unique across all products from this\n component and its direct dependencies.\n "
] |
Please provide a description of the function:def copy_dependency_docs(tile):
    env = Environment(tools=[])
    outputbase = os.path.join('build', 'output', 'doc')
    depbase = os.path.join('build', 'deps')
    for dep in tile.dependencies:
        depdir = os.path.join(depbase, dep['unique_id'], 'doc', dep['unique_id'])
        outputdir = os.path.join(outputbase, dep['unique_id'])
        if os.path.exists(depdir):
            env.Command([outputdir], [depdir], Copy("$TARGET", "$SOURCE")) | [
"Copy all documentation from dependencies into build/output/doc folder"
] |
Please provide a description of the function:def copy_dependency_images(tile):
    env = Environment(tools=[])
    outputbase = os.path.join('build', 'output')
    depbase = os.path.join('build', 'deps')
    for dep in tile.dependencies:
        depdir = os.path.join(depbase, dep['unique_id'])
        outputdir = os.path.join(outputbase)
        deptile = IOTile(depdir)
        for image in deptile.find_products('firmware_image'):
            name = os.path.basename(image)
            input_path = os.path.join(depdir, name)
            output_path = os.path.join(outputdir, name)
            env.Command([output_path], [input_path], Copy("$TARGET", "$SOURCE")) | [
"Copy all documentation from dependencies into build/output/doc folder"
] |
Please provide a description of the function:def generate(env):
    static_obj, shared_obj = SCons.Tool.createObjBuilders(env)
    for suffix in ASSuffixes:
        static_obj.add_action(suffix, SCons.Defaults.ASAction)
        shared_obj.add_action(suffix, SCons.Defaults.ASAction)
        static_obj.add_emitter(suffix, SCons.Defaults.StaticObjectEmitter)
        shared_obj.add_emitter(suffix, SCons.Defaults.SharedObjectEmitter)
    for suffix in ASPPSuffixes:
        static_obj.add_action(suffix, SCons.Defaults.ASPPAction)
        shared_obj.add_action(suffix, SCons.Defaults.ASPPAction)
        static_obj.add_emitter(suffix, SCons.Defaults.StaticObjectEmitter)
        shared_obj.add_emitter(suffix, SCons.Defaults.SharedObjectEmitter)
    env['AS'] = 'ml'
    env['ASFLAGS'] = SCons.Util.CLVar('/nologo')
    env['ASPPFLAGS'] = '$ASFLAGS'
    env['ASCOM'] = '$AS $ASFLAGS /c /Fo$TARGET $SOURCES'
    env['ASPPCOM'] = '$CC $ASPPFLAGS $CPPFLAGS $_CPPDEFFLAGS $_CPPINCFLAGS /c /Fo$TARGET $SOURCES'
    env['STATIC_AND_SHARED_OBJECTS_ARE_THE_SAME'] = 1 | [
"Add Builders and construction variables for masm to an Environment."
] |
Please provide a description of the function:def median(values):
    values.sort()   # note: sorts the caller's list in place
    n = int(len(values) / 2)
    return values[n] | [
"Return median value for the list of values.\n @param values: list of values for processing.\n @return: median value.\n "
] |
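Note that for even-length input this helper returns the upper of the two middle values rather than their mean:

assert median([3, 1, 2]) == 2       # middle of sorted [1, 2, 3]
assert median([4, 1, 3, 2]) == 3    # upper middle of [1, 2, 3, 4], not 2.5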
Please provide a description of the function:def time_coef(tc, nc, tb, nb):
    tc = float(tc)
    nc = float(nc)
    tb = float(tb)
    nb = float(nb)
    q = (tc * nb) / (tb * nc)
    return q | [
return q | [
"Return time coefficient relative to base numbers.\n @param tc: current test time\n @param nc: current test data size\n @param tb: base test time\n @param nb: base test data size\n @return: time coef.\n "
] |
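For example, if the current test took tc = 2.0 s on nc = 200 KiB while the base took tb = 1.0 s on nb = 100 KiB, the coefficient is (2.0 * 100) / (1.0 * 200) = 1.0, i.e. the per-byte speed is unchanged:

assert time_coef(2.0, 200, 1.0, 100) == 1.0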
Please provide a description of the function:def main(argv=None):
    import getopt
    # default values
    test_read = None
    test_write = None
    n = 3   # number of repeat
    if argv is None:
        argv = sys.argv[1:]
    try:
        opts, args = getopt.getopt(argv, 'hn:rw', [])
        for o, a in opts:
            if o == '-h':
                print(HELP)
                return 0
            elif o == '-n':
                n = int(a)
            elif o == '-r':
                test_read = True
            elif o == '-w':
                test_write = True
        if args:
            raise getopt.GetoptError('Arguments are not used.')
    except getopt.GetoptError:
        msg = sys.exc_info()[1]     # current exception
        txt = str(msg)
        print(txt)
        return 1
    if (test_read, test_write) == (None, None):
        test_read = test_write = True
    m = Measure(n, test_read, test_write)
    m.measure_all()
    m.print_report()
    return 0 | [
"Main function to run benchmarks.\n @param argv: command-line arguments.\n @return: exit code (0 is OK).\n "
] |
Please provide a description of the function:def measure_one(self, data):
    _unused, hexstr, ih = data
    tread, twrite = 0.0, 0.0
    if self.read:
        tread = run_readtest_N_times(intelhex.IntelHex, hexstr, self.n)[0]
    if self.write:
        twrite = run_writetest_N_times(ih.write_hex_file, self.n)[0]
    return tread, twrite | [
"Do measuring of read and write operations.\n @param data: 3-tuple from get_test_data\n @return: (time readhex, time writehex)\n "
] |
Please provide a description of the function:def _get_key(cls, device_id):
var_name = "USER_KEY_{0:08X}".format(device_id)
if var_name not in os.environ:
raise NotFoundError("No user key could be found for devices", device_id=device_id,
expected_variable_name=var_name)
key_var = os.environ[var_name]
if len(key_var) != 64:
raise NotFoundError("User key in variable is not the correct length, should be 64 hex characters",
device_id=device_id, key_value=key_var)
try:
key = binascii.unhexlify(key_var)
except ValueError:
raise NotFoundError("User key in variable could not be decoded from hex", device_id=device_id,
key_value=key_var)
if len(key) != 32:
raise NotFoundError("User key in variable is not the correct length, should be 64 hex characters",
device_id=device_id, key_value=key_var)
return key | [
"Attempt to get a user key from an environment variable\n "
] |
Please provide a description of the function:def sign_report(self, device_id, root, data, **kwargs):
    report_key = self._verify_derive_key(device_id, root, **kwargs)
    # We sign the SHA256 hash of the message
    message_hash = hashlib.sha256(data).digest()
    hmac_calc = hmac.new(report_key, message_hash, hashlib.sha256)
    result = bytearray(hmac_calc.digest())
    return {'signature': result, 'root_key': root} | [
"Sign a buffer of report data on behalf of a device.\n\n Args:\n device_id (int): The id of the device that we should encrypt for\n root (int): The root key type that should be used to generate the report\n data (bytearray): The data that we should sign\n **kwargs: There are additional specific keyword args that are required\n depending on the root key used. Typically, you must specify\n - report_id (int): The report id\n - sent_timestamp (int): The sent timestamp of the report\n\n These two bits of information are used to construct the per report\n signing and encryption key from the specific root key type.\n\n Returns:\n dict: The signature and any associated metadata about the signature.\n The signature itself must always be a bytearray stored under the\n 'signature' key, however additional keys may be present depending\n on the signature method used.\n\n Raises:\n NotFoundError: If the auth provider is not able to sign the data.\n "
] |
Please provide a description of the function:def verify_report(self, device_id, root, data, signature, **kwargs):
    report_key = self._verify_derive_key(device_id, root, **kwargs)
    message_hash = hashlib.sha256(data).digest()
    hmac_calc = hmac.new(report_key, message_hash, hashlib.sha256)
    result = bytearray(hmac_calc.digest())
    if len(signature) == 0:
        verified = False
    elif len(signature) > len(result):
        verified = False
    elif len(signature) < len(result):
        trunc_result = result[:len(signature)]
        verified = hmac.compare_digest(signature, trunc_result)
    else:
        verified = hmac.compare_digest(signature, result)
    return {'verified': verified, 'bit_length': 8*len(signature)} | [
"Verify a buffer of report data on behalf of a device.\n\n Args:\n device_id (int): The id of the device that we should encrypt for\n root (int): The root key type that should be used to generate the report\n data (bytearray): The data that we should verify\n signature (bytearray): The signature attached to data that we should verify\n **kwargs: There are additional specific keyword args that are required\n depending on the root key used. Typically, you must specify\n - report_id (int): The report id\n - sent_timestamp (int): The sent timestamp of the report\n\n These two bits of information are used to construct the per report\n signing and encryption key from the specific root key type.\n\n Returns:\n dict: The result of the verification process must always be a bool under the\n 'verified' key, however additional keys may be present depending on the\n signature method used.\n\n Raises:\n NotFoundError: If the auth provider is not able to verify the data due to\n an error. If the data is simply not valid, then the function returns\n normally.\n "
] |
Please provide a description of the function:def decrypt_report(self, device_id, root, data, **kwargs):
    report_key = self._verify_derive_key(device_id, root, **kwargs)
    try:
        from Crypto.Cipher import AES
        import Crypto.Util.Counter
    except ImportError:
        raise NotFoundError
    ctr = Crypto.Util.Counter.new(128)
    # We use AES-128 for encryption
    encryptor = AES.new(bytes(report_key[:16]), AES.MODE_CTR, counter=ctr)
    decrypted = encryptor.decrypt(bytes(data))
    return {'data': decrypted} | [
"Decrypt a buffer of report data on behalf of a device.\n\n Args:\n device_id (int): The id of the device that we should encrypt for\n root (int): The root key type that should be used to generate the report\n data (bytearray): The data that we should decrypt\n **kwargs: There are additional specific keyword args that are required\n depending on the root key used. Typically, you must specify\n - report_id (int): The report id\n - sent_timestamp (int): The sent timestamp of the report\n\n These two bits of information are used to construct the per report\n signing and encryption key from the specific root key type.\n\n Returns:\n dict: The decrypted data and any associated metadata about the data.\n The data itself must always be a bytearray stored under the 'data'\n key, however additional keys may be present depending on the encryption method\n used.\n\n Raises:\n NotFoundError: If the auth provider is not able to decrypt the data.\n "
] |
Please provide a description of the function:def encrypt_report(self, device_id, root, data, **kwargs):
    report_key = self._verify_derive_key(device_id, root, **kwargs)
    try:
        from Crypto.Cipher import AES
        import Crypto.Util.Counter
    except ImportError:
        raise NotFoundError
    # We use AES-128 for encryption
    ctr = Crypto.Util.Counter.new(128)
    encryptor = AES.new(bytes(report_key[:16]), AES.MODE_CTR, counter=ctr)
    encrypted = encryptor.encrypt(bytes(data))
    return {'data': encrypted} | [
"Encrypt a buffer of report data on behalf of a device.\n\n Args:\n device_id (int): The id of the device that we should encrypt for\n root (int): The root key type that should be used to generate the report\n data (bytearray): The data that we should decrypt\n **kwargs: There are additional specific keyword args that are required\n depending on the root key used. Typically, you must specify\n - report_id (int): The report id\n - sent_timestamp (int): The sent timestamp of the report\n\n These two bits of information are used to construct the per report\n signing and encryption key from the specific root key type.\n\n Returns:\n dict: The encrypted data and any associated metadata about the data.\n The data itself must always be a bytearray stored under the 'data'\n key, however additional keys may be present depending on the encryption method\n used.\n\n Raises:\n NotFoundError: If the auth provider is not able to decrypt the data.\n "
] |
Please provide a description of the function:def join_path(path):
    if isinstance(path, str):
        return path
    return os.path.join(*path) | [
"If given a string, return it, otherwise combine a list into a string using os.path.join"
] |
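Behavior sketch:

assert join_path('a/b') == 'a/b'                        # strings pass through untouched
assert join_path(['a', 'b']) == os.path.join('a', 'b')  # lists are joined platform-style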
Please provide a description of the function:def build_defines(defines):
return ['-D"%s=%s"' % (x, str(y)) for x, y in defines.items() if y is not None] | [
"Build a list of `-D` directives to pass to the compiler.\n\n This will drop any definitions whose value is None so that\n you can get rid of a define from another architecture by\n setting its value to null in the `module_settings.json`.\n "
] |
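A sketch of the drop-on-None behavior:

assert build_defines({'FOO': 1, 'BAR': None}) == ['-D"FOO=1"']   # BAR is suppressed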
Please provide a description of the function:def connect_async(self, connection_id, connection_string, callback):
    topics = MQTTTopicValidator(self.prefix + 'devices/{}'.format(connection_string))
    key = self._generate_key()
    name = self.name
    conn_message = {'type': 'command', 'operation': 'connect', 'key': key, 'client': name}
    context = {'key': key, 'slug': connection_string, 'topics': topics}
    self.conns.begin_connection(connection_id, connection_string, callback, context,
                                self.get_config('default_timeout'))
    self._bind_topics(topics)
    try:
        self.client.publish(topics.connect, conn_message)
    except IOTileException:
        self._unbind_topics(topics)
        self.conns.finish_connection(connection_id, False, 'Failed to send connection message') | [
"Connect to a device by its connection_string\n\n This function looks for the device on AWS IOT using the preconfigured\n topic prefix and looking for:\n <prefix>/devices/connection_string\n\n It then attempts to lock that device for exclusive access and\n returns a callback if successful.\n\n Args:\n connection_id (int): A unique integer set by the caller for referring to this connection\n once created\n connection_string (string): A device id of the form d--XXXX-YYYY-ZZZZ-WWWW\n callback (callable): A callback function called when the connection has succeeded or\n failed\n "
] |
Please provide a description of the function:def disconnect_async(self, conn_id, callback):
    try:
        context = self.conns.get_context(conn_id)
    except ArgumentError:
        callback(conn_id, self.id, False, "Could not find connection information")
        return
    self.conns.begin_disconnection(conn_id, callback, self.get_config('default_timeout'))
    topics = context['topics']
    disconn_message = {'key': context['key'], 'client': self.name, 'type': 'command',
                       'operation': 'disconnect'}
    self.client.publish(topics.action, disconn_message) | [
"Asynchronously disconnect from a device that has previously been connected\n\n Args:\n conn_id (int): a unique identifier for this connection on the DeviceManager\n that owns this adapter.\n callback (callable): A function called as callback(conn_id, adapter_id, success, failure_reason)\n when the disconnection finishes. Disconnection can only either succeed or timeout.\n "
] |
Please provide a description of the function:def send_script_async(self, conn_id, data, progress_callback, callback):
    try:
        context = self.conns.get_context(conn_id)
    except ArgumentError:
        callback(conn_id, self.id, False, "Could not find connection information")
        return
    topics = context['topics']
    context['progress_callback'] = progress_callback
    self.conns.begin_operation(conn_id, 'script', callback, 60.0)
    chunks = 1
    if len(data) > self.mtu:
        chunks = len(data) // self.mtu
        if len(data) % self.mtu != 0:
            chunks += 1
    # Send the script out possibly in multiple chunks if it's larger than our maximum transmit unit
    for i in range(0, chunks):
        start = i*self.mtu
        chunk = data[start:start + self.mtu]
        encoded = base64.standard_b64encode(chunk)
        script_message = {'key': context['key'], 'client': self.name, 'type': 'command',
                          'operation': 'send_script', 'script': encoded,
                          'fragment_count': chunks, 'fragment_index': i}
        self.client.publish(topics.action, script_message) | [
"Asynchronously send a a script to this IOTile device\n\n Args:\n conn_id (int): A unique identifer that will refer to this connection\n data (string): the script to send to the device\n progress_callback (callable): A function to be called with status on our progress, called as:\n progress_callback(done_count, total_count)\n callback (callable): A callback for when we have finished sending the script. The callback will be called as\n callback(connection_id, adapter_id, success, failure_reason)\n 'connection_id': the connection id\n 'adapter_id': this adapter's id\n 'success': a bool indicating whether we received a response to our attempted RPC\n 'failure_reason': a string with the reason for the failure if success == False\n "
] |
Please provide a description of the function:def send_rpc_async(self, conn_id, address, rpc_id, payload, timeout, callback):
    try:
        context = self.conns.get_context(conn_id)
    except ArgumentError:
        callback(conn_id, self.id, False, "Could not find connection information", 0xFF, bytearray())
        return
    self.conns.begin_operation(conn_id, 'rpc', callback, timeout)
    topics = context['topics']
    encoded_payload = binascii.hexlify(payload)
    rpc_message = {'key': context['key'], 'client': self.name, 'type': 'command', 'operation': 'rpc',
                   'address': address, 'rpc_id': rpc_id, 'payload': encoded_payload,
                   'timeout': timeout}
    self.client.publish(topics.action, rpc_message) | [
"Asynchronously send an RPC to this IOTile device\n\n Args:\n conn_id (int): A unique identifier that will refer to this connection\n address (int): the address of the tile that we wish to send the RPC to\n rpc_id (int): the 16-bit id of the RPC we want to call\n payload (bytearray): the payload of the command\n timeout (float): the number of seconds to wait for the RPC to execute\n callback (callable): A callback for when we have finished the RPC. The callback will be called as\"\n callback(connection_id, adapter_id, success, failure_reason, status, payload)\n 'connection_id': the connection id\n 'adapter_id': this adapter's id\n 'success': a bool indicating whether we received a response to our attempted RPC\n 'failure_reason': a string with the reason for the failure if success == False\n 'status': the one byte status code returned for the RPC if success == True else None\n 'payload': a bytearray with the payload returned by RPC if success == True else None\n "
] |
Please provide a description of the function:def _open_interface(self, conn_id, iface, callback):
    try:
        context = self.conns.get_context(conn_id)
    except ArgumentError:
        callback(conn_id, self.id, False, "Could not find connection information")
        return
    self.conns.begin_operation(conn_id, 'open_interface', callback, self.get_config('default_timeout'))
    topics = context['topics']
    open_iface_message = {'key': context['key'], 'type': 'command', 'operation': 'open_interface',
                          'client': self.name, 'interface': iface}
    self.client.publish(topics.action, open_iface_message) | [
"Open an interface on this device\n\n Args:\n conn_id (int): the unique identifier for the connection\n iface (string): the interface name to open\n callback (callback): Callback to be called when this command finishes\n callback(conn_id, adapter_id, success, failure_reason)\n "
] |
Please provide a description of the function:def stop_sync(self):
    conn_ids = self.conns.get_connections()
    # If we have any open connections, try to close them here before shutting down
    for conn in list(conn_ids):
        try:
            self.disconnect_sync(conn)
        except HardwareError:
            pass
    self.client.disconnect()
    self.conns.stop() | [
"Synchronously stop this adapter\n "
] |
Please provide a description of the function:def probe_async(self, callback):
    topics = MQTTTopicValidator(self.prefix)
    self.client.publish(topics.probe, {'type': 'command', 'operation': 'probe', 'client': self.name})
    callback(self.id, True, None) | [
"Probe for visible devices connected to this DeviceAdapter.\n\n Args:\n callback (callable): A callback for when the probe operation has completed.\n callback should have signature callback(adapter_id, success, failure_reason) where:\n success: bool\n failure_reason: None if success is True, otherwise a reason for why we could not probe\n "
] |
Please provide a description of the function:def periodic_callback(self):
    while True:
        try:
            action = self._deferred.get(False)
            action()
        except queue.Empty:
            break
        except Exception:
            self._logger.exception('Exception in periodic callback') | [
"Periodically help maintain adapter internal state\n "
] |
Please provide a description of the function:def _bind_topics(self, topics):
    # FIXME: Allow for these subscriptions to fail and clean up the previous ones
    # so that this function is atomic
    self.client.subscribe(topics.status, self._on_status_message)
    self.client.subscribe(topics.tracing, self._on_trace)
    self.client.subscribe(topics.streaming, self._on_report)
    self.client.subscribe(topics.response, self._on_response_message) | [
"Subscribe to all the topics we need to communication with this device\n\n Args:\n topics (MQTTTopicValidator): The topic validator for this device that\n we are connecting to.\n "
] |
Please provide a description of the function:def _unbind_topics(self, topics):
    self.client.unsubscribe(topics.status)
    self.client.unsubscribe(topics.tracing)
    self.client.unsubscribe(topics.streaming)
    self.client.unsubscribe(topics.response) | [
"Unsubscribe to all of the topics we needed for communication with device\n\n Args:\n topics (MQTTTopicValidator): The topic validator for this device that\n we have connected to.\n "
] |
Please provide a description of the function:def _find_connection(self, topic):
    parts = topic.split('/')
    if len(parts) < 3:
        return None
    slug = parts[-3]
    return slug | [
"Attempt to find a connection id corresponding with a topic\n\n The device is found by assuming the topic ends in <slug>/[control|data]/channel\n\n Args:\n topic (string): The topic we received a message on\n\n Returns:\n int: The internal connect id (device slug) associated with this topic\n "
] |
Please provide a description of the function:def _on_report(self, sequence, topic, message):
    try:
        conn_key = self._find_connection(topic)
        conn_id = self.conns.get_connection_id(conn_key)
    except ArgumentError:
        self._logger.warn("Dropping report message that does not correspond with a known connection, topic=%s", topic)
        return
    try:
        rep_msg = messages.ReportNotification.verify(message)
        serialized_report = {}
        serialized_report['report_format'] = rep_msg['report_format']
        serialized_report['encoded_report'] = rep_msg['report']
        serialized_report['received_time'] = datetime.datetime.strptime(rep_msg['received_time'].encode().decode(), "%Y%m%dT%H:%M:%S.%fZ")
        report = self.report_parser.deserialize_report(serialized_report)
        self._trigger_callback('on_report', conn_id, report)
    except Exception:
        self._logger.exception("Error processing report conn_id=%d", conn_id) | [
"Process a report received from a device.\n\n Args:\n sequence (int): The sequence number of the packet received\n topic (string): The topic this message was received on\n message (dict): The message itself\n "
] |
Please provide a description of the function:def _on_trace(self, sequence, topic, message):
    try:
        conn_key = self._find_connection(topic)
        conn_id = self.conns.get_connection_id(conn_key)
    except ArgumentError:
        self._logger.warn("Dropping trace message that does not correspond with a known connection, topic=%s", topic)
        return
    try:
        tracing = messages.TracingNotification.verify(message)
        self._trigger_callback('on_trace', conn_id, tracing['trace'])
    except Exception:
        self._logger.exception("Error processing trace conn_id=%d", conn_id) | [
"Process a trace received from a device.\n\n Args:\n sequence (int): The sequence number of the packet received\n topic (string): The topic this message was received on\n message (dict): The message itself\n "
] |
Please provide a description of the function:def _on_status_message(self, sequence, topic, message):
self._logger.debug("Received message on (topic=%s): %s" % (topic, message))
try:
conn_key = self._find_connection(topic)
except ArgumentError:
self._logger.warn("Dropping message that does not correspond with a known connection, message=%s", message)
return
if messages.ConnectionResponse.matches(message):
if self.name != message['client']:
self._logger.debug("Connection response received for a different client, client=%s, name=%s", message['client'], self.name)
return
self.conns.finish_connection(conn_key, message['success'], message.get('failure_reason', None))
else:
self._logger.warn("Dropping message that did not correspond with a known schema, message=%s", message) | [
"Process a status message received\n\n Args:\n sequence (int): The sequence number of the packet received\n topic (string): The topic this message was received on\n message (dict): The message itself\n "
] |
Please provide a description of the function:def _on_response_message(self, sequence, topic, message):
    try:
        conn_key = self._find_connection(topic)
        context = self.conns.get_context(conn_key)
    except ArgumentError:
        self._logger.warn("Dropping message that does not correspond with a known connection, message=%s", message)
        return
    if 'client' in message and message['client'] != self.name:
        self._logger.debug("Dropping message that is for another client %s, we are %s", message['client'], self.name)
    if messages.DisconnectionResponse.matches(message):
        self.conns.finish_disconnection(conn_key, message['success'], message.get('failure_reason', None))
    elif messages.OpenInterfaceResponse.matches(message):
        self.conns.finish_operation(conn_key, message['success'], message.get('failure_reason', None))
    elif messages.RPCResponse.matches(message):
        rpc_message = messages.RPCResponse.verify(message)
        self.conns.finish_operation(conn_key, rpc_message['success'], rpc_message.get('failure_reason', None),
                                    rpc_message.get('status', None), rpc_message.get('payload', None))
    elif messages.ProgressNotification.matches(message):
        progress_callback = context.get('progress_callback', None)
        if progress_callback is not None:
            progress_callback(message['done_count'], message['total_count'])
    elif messages.ScriptResponse.matches(message):
        if 'progress_callback' in context:
            del context['progress_callback']
        self.conns.finish_operation(conn_key, message['success'], message.get('failure_reason', None))
    elif messages.DisconnectionNotification.matches(message):
        try:
            conn_key = self._find_connection(topic)
            conn_id = self.conns.get_connection_id(conn_key)
        except ArgumentError:
            self._logger.warn("Dropping disconnect notification that does not correspond with a known connection, topic=%s", topic)
            return
        self.conns.unexpected_disconnect(conn_key)
        self._trigger_callback('on_disconnect', self.id, conn_id)
    else:
        self._logger.warn("Invalid response message received, message=%s", message) | [
"Process a response message received\n\n Args:\n sequence (int): The sequence number of the packet received\n topic (string): The topic this message was received on\n message (dict): The message itself\n "
] |
Please provide a description of the function:def build_args():
    parser = argparse.ArgumentParser(description=u'Compile a sensor graph.')
    parser.add_argument(u'sensor_graph', type=str, help=u"the sensor graph file to load and run.")
    parser.add_argument(u'-f', u'--format', default=u"nodes",
                        choices=[u'nodes', u'ast', u'snippet', u'ascii', u'config', u'script'],
                        type=str, help=u"the output format for the compiled result.")
    parser.add_argument(u'-o', u'--output', type=str,
                        help=u"the output file to save the results (defaults to stdout)")
    parser.add_argument(u'--disable-optimizer', action="store_true",
                        help=u"disable the sensor graph optimizer completely")
    return parser | [
"Create command line argument parser."
] |
Please provide a description of the function:def write_output(output, text=True, output_path=None):
    if output_path is None and text is False:
        print("ERROR: You must specify an output file using -o/--output for binary output formats")
        sys.exit(1)
    if output_path is not None:
        if text:
            outfile = open(output_path, "w", encoding="utf-8")
        else:
            outfile = open(output_path, "wb")
    else:
        outfile = sys.stdout
    try:
        if text and isinstance(output, bytes):
            output = output.decode('utf-8')
        outfile.write(output)
    finally:
        if outfile is not sys.stdout:
            outfile.close() | [
"Write binary or text output to a file or stdout."
] |
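Usage sketch; 'out.bin' is a hypothetical path:

write_output(u'node list\n')                                   # text defaults to stdout
write_output(b'\x00\x01', text=False, output_path='out.bin')   # binary output requires a path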
Please provide a description of the function:def main():
    arg_parser = build_args()
    args = arg_parser.parse_args()
    model = DeviceModel()
    parser = SensorGraphFileParser()
    parser.parse_file(args.sensor_graph)
    if args.format == u'ast':
        write_output(parser.dump_tree(), True, args.output)
        sys.exit(0)
    parser.compile(model)
    if not args.disable_optimizer:
        opt = SensorGraphOptimizer()
        opt.optimize(parser.sensor_graph, model=model)
    if args.format == u'nodes':
        output = u'\n'.join(parser.sensor_graph.dump_nodes()) + u'\n'
        write_output(output, True, args.output)
    else:
        if args.format not in KNOWN_FORMATS:
            print("Unknown output format: {}".format(args.format))
            sys.exit(1)
        output_format = KNOWN_FORMATS[args.format]
        output = output_format.format(parser.sensor_graph)
        write_output(output, output_format.text, args.output) | [
"Main entry point for iotile-sgcompile."
] |
Please provide a description of the function:def load_external_components(typesys):
    # Find all of the registered IOTile components and see if we need to add any type libraries for them
    from iotile.core.dev.registry import ComponentRegistry
    reg = ComponentRegistry()
    modules = reg.list_components()
    typelibs = reduce(lambda x, y: x+y,
                      [reg.find_component(x).find_products('type_package') for x in modules], [])
    for lib in typelibs:
        if lib.endswith('.py'):
            lib = lib[:-3]
        typesys.load_external_types(lib) | [
"Load all external types defined by iotile plugins.\n\n This allows plugins to register their own types for type annotations and\n allows all registered iotile components that have associated type libraries to\n add themselves to the global type system.\n "
] |
Please provide a description of the function:def release(component=".", cloud=False):
    comp = IOTile(component)
    providers = _find_release_providers()
    # If we were given a dev mode component that has been built, get its release mode version
    if not comp.release and comp.release_date is not None:
        comp = IOTile(comp.output_folder)
    if not comp.release:
        raise ArgumentError("Attempting to release a dev mode IOTile component that has not been built.",
                            suggestion='Use iotile build to build the component before releasing',
                            component=comp)
    if not comp.can_release:
        raise ArgumentError("Attempting to release an IOTile component that does not "
                            "specify release_steps and hence is not releasable",
                            suggestion="Update module_settings.json to include release_steps",
                            component=comp)
    # A component can specify that it should only be releasable in a clean continuous
    # integration/continuous deployment server. If that's the case then do not allow
    # `iotile release` to work unless the cloud parameter is set to indicate that we're
    # in such a setting.
    if comp.settings.get('cloud_release', False) and not cloud:
        raise ArgumentError("Attempting to release an IOTile component locally when it specifies that it "
                            "can only be released using a clean CI/CD server",
                            suggestion="Use iotile release --cloud if you are running in a CI/CD server")
    configured_provs = []
    for step in comp.release_steps:
        if step.provider not in providers:
            raise DataError("Release step for component required unknown ReleaseProvider",
                            provider=step.provider, known_providers=providers.keys())
        prov = providers[step.provider](comp, step.args)
        configured_provs.append(prov)
    # Attempt to stage releases for each provider and then release them all,
    # rolling back if there is an error
    for i, prov in enumerate(configured_provs):
        try:
            prov.stage()
        except IOTileException as exc:
            try:
                # There was an error, roll back
                for j in range(0, i):
                    configured_provs[j].unstage()
            except Exception as unstage_exc:
                raise DirtyReleaseFailureError("Error staging release (COULD NOT ROLL BACK)",
                                               failed_step=i, original_exception=exc, operation='staging',
                                               failed_unstage=j, unstage_exception=unstage_exc)
            raise CleanReleaseFailureError("Error staging release (cleanly rolled back)",
                                           failed_step=i, original_exception=exc, operation='staging')
        except Exception as exc:
            raise DirtyReleaseFailureError("Error staging release due to unknown exception type "
                                           "(DID NOT ATTEMPT ROLL BACK)",
                                           failed_step=i, original_exception=exc, operation='staging')
    # Stage was successful, attempt to release
    for i, prov in enumerate(configured_provs):
        try:
            prov.release()
        except IOTileException as exc:
            j = None
            try:
                # There was an error, roll back
                for j in range(0, i):
                    configured_provs[j].unrelease()
            except Exception as unstage_exc:
                raise DirtyReleaseFailureError("Error performing release (COULD NOT ROLL BACK)",
                                               failed_step=i, original_exception=exc, operation='release',
                                               failed_unrelease=j, unrelease_exception=unstage_exc)
            raise CleanReleaseFailureError("Error performing release (cleanly rolled back)", failed_step=i,
                                           original_exception=exc, operation='release')
        except Exception as exc:
            raise DirtyReleaseFailureError("Error performing release due to unknown exception type "
                                           "(DID NOT ATTEMPT ROLL BACK)", failed_step=i,
                                           original_exception=exc, operation='release') | [
"Release an IOTile component using release providers.\n\n Releasing an IOTile component means packaging up the products of its build process and storing\n them somewhere. The module_settings.json file of the IOTile component should have a\n \"release_steps\" key that lists the release providers that will be used to release the various\n build products. There are usually multiple release providers to, for example, send firmware\n images somewhere for future download, post the documentation and upload python support wheels\n to a PyPI index.\n "
] |
Please provide a description of the function:def verify(self, obj):
    if len(self._options) == 0:
        raise ValidationError("No options", reason='no options given in options verifier, matching not possible',
                              object=obj)
    exceptions = {}
    for i, option in enumerate(self._options):
        try:
            obj = option.verify(obj)
            return obj
        except ValidationError as exc:
            exceptions['option_%d' % (i+1)] = exc.params['reason']
    raise ValidationError("Object did not match any of a set of options",
                          reason="object did not match any given option (first failure = '%s')"
                                 % exceptions['option_1'],
                          **exceptions) | [
"Verify that the object conforms to this verifier's schema\n\n Args:\n obj (object): A python object to verify\n\n Raises:\n ValidationError: If there is a problem verifying the dictionary, a\n ValidationError is thrown with at least the reason key set indicating\n the reason for the lack of validation.\n "
] |
Please provide a description of the function:def add_recipe_folder(self, recipe_folder, whitelist=None):
    if whitelist is not None:
        whitelist = set(whitelist)
    if recipe_folder == '':
        recipe_folder = '.'
    for yaml_file in [x for x in os.listdir(recipe_folder) if x.endswith('.yaml')]:
        if whitelist is not None and yaml_file not in whitelist:
            continue
        recipe = RecipeObject.FromFile(os.path.join(recipe_folder, yaml_file),
                                       self._recipe_actions, self._recipe_resources)
        self._recipes[recipe.name] = recipe
    for ship_file in [x for x in os.listdir(recipe_folder) if x.endswith('.ship')]:
        if whitelist is not None and ship_file not in whitelist:
            continue
        recipe = RecipeObject.FromArchive(os.path.join(recipe_folder, ship_file),
                                          self._recipe_actions, self._recipe_resources)
        self._recipes[recipe.name] = recipe | [
"Add all recipes inside a folder to this RecipeManager with an optional whitelist.\n\n Args:\n recipe_folder (str): The path to the folder of recipes to add.\n whitelist (list): Only include files whose os.basename() matches something\n on the whitelist\n "
] |
Please provide a description of the function:def add_recipe_actions(self, recipe_actions):
    for action_name, action in recipe_actions:
        self._recipe_actions[action_name] = action | [
"Add additional valid recipe actions to RecipeManager\n\n args:\n recipe_actions (list): List of tuples. First value of tuple is the classname,\n second value of tuple is RecipeAction Object\n\n "
] |
Please provide a description of the function:def get_recipe(self, recipe_name):
if recipe_name.endswith('.yaml'):
recipe = self._recipes.get(RecipeObject.FromFile(recipe_name, self._recipe_actions, self._recipe_resources).name)
else:
recipe = self._recipes.get(recipe_name)
if recipe is None:
raise RecipeNotFoundError("Could not find recipe", recipe_name=recipe_name, known_recipes=[x for x in self._recipes.keys()])
return recipe | [
"Get a recipe by name.\n\n Args:\n recipe_name (str): The name of the recipe to fetch. Can be either the\n yaml file name or the name of the recipe.\n "
] |
Please provide a description of the function:def _check_time_backwards(self):
now = time.time()
if now < self.start:
self.start = now
self.end = self.start + self.length | [
"Make sure a clock reset didn't cause time to go backwards\n "
] |
Please provide a description of the function:def expired(self):
if self._expired_latch:
return True
self._check_time_backwards()
if time.time() > self.end:
self._expired_latch = True
return True
return False | [
"Boolean property if this timeout has expired\n "
] |
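A self-contained sketch of the latching-timeout pattern shown in _check_time_backwards() and expired(); the Timeout class name is hypothetical:

import time

class Timeout:
    def __init__(self, length):
        self.length = length
        self.start = time.time()
        self.end = self.start + length
        self._expired_latch = False

    def expired(self):
        if self._expired_latch:
            return True
        now = time.time()
        if now < self.start:            # a clock reset moved time backwards; restart the window
            self.start = now
            self.end = now + self.length
        if time.time() > self.end:
            self._expired_latch = True  # once expired, stay expired even if the clock resets
            return True
        return False

t = Timeout(0.01)
time.sleep(0.02)
print(t.expired())  # True, and remains True on every later call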
Please provide a description of the function:def command(self, cmd_name, callback, *args):
cmd = JLinkCommand(cmd_name, args, callback)
self._commands.put(cmd) | [
"Run an asynchronous command.\n\n Args:\n cmd_name (int): The unique code for the command to execute.\n callback (callable): The optional callback to run when the command finishes.\n The signature should be callback(cmd_name, result, exception)\n *args: Any arguments that are passed to the underlying command handler\n "
] |
Please provide a description of the function:def _send_rpc(self, device_info, control_info, address, rpc_id, payload, poll_interval, timeout):
write_address, write_data = control_info.format_rpc(address, rpc_id, payload)
self._jlink.memory_write32(write_address, write_data)
self._trigger_rpc(device_info)
start = monotonic()
now = start
poll_address, poll_mask = control_info.poll_info()
while (now - start) < timeout:
time.sleep(poll_interval)
value, = self._jlink.memory_read8(poll_address, 1)
if value & poll_mask:
break
now = monotonic()
if (now - start) >= timeout:
raise HardwareError("Timeout waiting for RPC response", timeout=timeout, poll_interval=poll_interval)
read_address, read_length = control_info.response_info()
read_data = self._read_memory(read_address, read_length, join=True)
return control_info.format_response(read_data) | [
"Write and trigger an RPC."
] |
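A standalone sketch of the poll-until-flag-or-timeout loop inside _send_rpc(); the hardware read is replaced by a fake register so the example runs anywhere:

import time

def poll_for_flag(read_byte, poll_mask, poll_interval, timeout):
    # Sleep between polls and give up once the timeout window has elapsed.
    start = time.monotonic()
    while (time.monotonic() - start) < timeout:
        time.sleep(poll_interval)
        if read_byte() & poll_mask:
            return True
    return False

register = {'value': 0}

def fake_read():
    register['value'] = 0x01   # pretend the device has completed the RPC
    return register['value']

print(poll_for_flag(fake_read, 0x01, 0.001, 1.0))  # True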
Please provide a description of the function:def _send_script(self, device_info, control_info, script, progress_callback):
for i in range(0, len(script), 20):
chunk = script[i:i+20]
self._send_rpc(device_info, control_info, 8, 0x2101, chunk, 0.001, 1.0)
if progress_callback is not None:
progress_callback(i + len(chunk), len(script)) | [
"Send a script by repeatedly sending it as a bunch of RPCs.\n\n This function doesn't do anything special, it just sends a bunch of RPCs\n with each chunk of the script until it's finished.\n "
] |
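A sketch of the 20-byte chunking arithmetic used by _send_script(); the RPC call is replaced by a print so the example is self-contained:

def send_in_chunks(script, chunk_size=20, progress_callback=None):
    for i in range(0, len(script), chunk_size):
        chunk = script[i:i + chunk_size]          # the last chunk may be short
        print("sending %d bytes at offset %d" % (len(chunk), i))
        if progress_callback is not None:
            progress_callback(i + len(chunk), len(script))

send_in_chunks(bytes(45), progress_callback=lambda done, total: print("%d/%d" % (done, total)))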
Please provide a description of the function:def _trigger_rpc(self, device_info):
method = device_info.rpc_trigger
if isinstance(method, devices.RPCTriggerViaSWI):
self._jlink.memory_write32(method.register, [1 << method.bit])
else:
raise HardwareError("Unknown RPC trigger method", method=method) | [
"Trigger an RPC in a device specific way."
] |
Please provide a description of the function:def _find_control_structure(self, start_address, search_length):
words = self._read_memory(start_address, search_length, chunk_size=4, join=False)
found_offset = None
for i, word in enumerate(words):
if word == ControlStructure.CONTROL_MAGIC_1:
if (len(words) - i) < 4:
continue
if words[i + 1] == ControlStructure.CONTROL_MAGIC_2 and words[i + 2] == ControlStructure.CONTROL_MAGIC_3 and words[i + 3] == ControlStructure.CONTROL_MAGIC_4:
found_offset = i
break
if found_offset is None:
raise HardwareError("Could not find control structure magic value in search area")
struct_info = words[found_offset + 4]
_version, _flags, length = struct.unpack("<BBH", struct.pack("<L", struct_info))
if length % 4 != 0:
raise HardwareError("Invalid control structure length that was not a multiple of 4", length=length)
word_length = length // 4
control_data = struct.pack("<%dL" % word_length, *words[found_offset:found_offset + word_length])
logger.info("Found control stucture at address 0x%08X, word_length=%d", start_address + 4*found_offset, word_length)
return ControlStructure(start_address + 4*found_offset, control_data) | [
"Find the control structure in RAM for this device.\n\n Returns:\n ControlStructure: The decoded contents of the shared memory control structure\n used for communication with this IOTile device.\n "
] |
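A worked example of the header decode above: the struct_info word is split into version (u8), flags (u8) and length (u16), all little-endian; the sample value is made up:

import struct

struct_info = 0x00300102       # hypothetical word read after the four magic values
version, flags, length = struct.unpack("<BBH", struct.pack("<L", struct_info))
print(version, flags, length)  # 2 1 48
assert length % 4 == 0         # the control structure must be a whole number of 32-bit words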
Please provide a description of the function:def _verify_control_structure(self, device_info, control_info=None):
if control_info is None:
control_info = self._find_control_structure(device_info.ram_start, device_info.ram_size)
#FIXME: Actually reread the memory here to verify that the control structure is still valid
return control_info | [
"Verify that a control structure is still valid or find one.\n\n Returns:\n ControlStructure: The verified or discovered control structure.\n "
] |
Please provide a description of the function:def save(self, out_path):
out = {
'selectors': [str(x) for x in self.selectors],
'trace': [{'stream': str(DataStream.FromEncoded(x.stream)), 'time': x.raw_time, 'value': x.value, 'reading_id': x.reading_id} for x in self]
}
with open(out_path, "wb") as outfile:
json.dump(out, outfile, indent=4) | [
"Save an ascii representation of this simulation trace.\n\n Args:\n out_path (str): The output path to save this simulation trace.\n "
] |
Please provide a description of the function:def FromFile(cls, in_path):
with open(in_path, "rb") as infile:
in_data = json.load(infile)
        if 'trace' not in in_data or 'selectors' not in in_data:
raise ArgumentError("Invalid trace file format", keys=in_data.keys(), expected=('trace', 'selectors'))
selectors = [DataStreamSelector.FromString(x) for x in in_data['selectors']]
readings = [IOTileReading(x['time'], DataStream.FromString(x['stream']).encode(), x['value'], reading_id=x['reading_id']) for x in in_data['trace']]
return SimulationTrace(readings, selectors=selectors) | [
"Load a previously saved ascii representation of this simulation trace.\n\n Args:\n in_path (str): The path of the input file that we should load.\n\n Returns:\n SimulationTrace: The loaded trace object.\n "
] |
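For reference, a hand-written example of the JSON layout that save() and FromFile() round-trip; the field values here are made up for illustration:

example = {
    "selectors": ["output 1"],
    "trace": [
        {"stream": "output 1", "time": 10, "value": 42, "reading_id": 1}
    ]
}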
Please provide a description of the function:def _on_scan(_loop, adapter, _adapter_id, info, expiration_time):
info['validity_period'] = expiration_time
adapter.notify_event_nowait(info.get('connection_string'), 'device_seen', info) | [
"Callback when a new device is seen."
] |
Please provide a description of the function:def _on_report(_loop, adapter, conn_id, report):
conn_string = None
if conn_id is not None:
conn_string = adapter._get_property(conn_id, 'connection_string')
if isinstance(report, BroadcastReport):
adapter.notify_event_nowait(conn_string, 'broadcast', report)
elif conn_string is not None:
adapter.notify_event_nowait(conn_string, 'report', report)
else:
adapter._logger.debug("Dropping report with unknown conn_id=%s", conn_id) | [
"Callback when a report is received."
] |
Please provide a description of the function:def _on_trace(_loop, adapter, conn_id, trace):
conn_string = adapter._get_property(conn_id, 'connection_string')
if conn_string is None:
adapter._logger.debug("Dropping trace data with unknown conn_id=%s", conn_id)
return
adapter.notify_event_nowait(conn_string, 'trace', trace) | [
"Callback when tracing data is received."
] |
Please provide a description of the function:def _on_disconnect(_loop, adapter, _adapter_id, conn_id):
conn_string = adapter._get_property(conn_id, 'connection_string')
if conn_string is None:
adapter._logger.debug("Dropping disconnect notification with unknown conn_id=%s", conn_id)
return
adapter._teardown_connection(conn_id, force=True)
event = dict(reason='no reason passed from legacy adapter', expected=False)
adapter.notify_event_nowait(conn_string, 'disconnection', event) | [
"Callback when a device disconnects unexpectedly."
] |
Please provide a description of the function:def _on_progress(adapter, operation, conn_id, done, total):
conn_string = adapter._get_property(conn_id, 'connection_string')
if conn_string is None:
return
adapter.notify_progress(conn_string, operation, done, total) | [
"Callback when progress is reported."
] |
Please provide a description of the function:def get_config(self, name, default=_MISSING):
value = self._adapter.get_config(name, default)
if value is _MISSING:
raise ArgumentError("Config value did not exist", name=name)
return value | [
"Get a config value from this adapter by name\n\n Args:\n name (string): The name of the config variable\n default (object): The default value to return if config is not found\n\n Returns:\n object: the value associated with the name\n\n Raises:\n ArgumentError: if the name is not found and no default is supplied\n "
] |
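A standalone sketch of the sentinel-default pattern used by get_config(): a unique object distinguishes "caller supplied no default" from "the default is None":

_MISSING = object()

def get_config(store, name, default=_MISSING):
    value = store.get(name, default)
    if value is _MISSING:
        raise KeyError("Config value did not exist: %s" % name)
    return value

store = {'timeout': None}
print(get_config(store, 'timeout'))      # None is a legitimate stored value
print(get_config(store, 'retries', 3))   # 3, the supplied default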
Please provide a description of the function:async def start(self):
self._loop.add_task(self._periodic_loop, name="periodic task for %s" % self._adapter.__class__.__name__,
parent=self._task)
self._adapter.add_callback('on_scan', functools.partial(_on_scan, self._loop, self))
self._adapter.add_callback('on_report', functools.partial(_on_report, self._loop, self))
self._adapter.add_callback('on_trace', functools.partial(_on_trace, self._loop, self))
self._adapter.add_callback('on_disconnect', functools.partial(_on_disconnect, self._loop, self)) | [
"Start the device adapter.\n\n See :meth:`AbstractDeviceAdapter.start`.\n "
] |
Please provide a description of the function:async def stop(self, _task=None):
self._logger.info("Stopping adapter wrapper")
if self._task.stopped:
return
for task in self._task.subtasks:
await task.stop()
self._logger.debug("Stopping underlying adapter %s", self._adapter.__class__.__name__)
await self._execute(self._adapter.stop_sync) | [
"Stop the device adapter.\n\n See :meth:`AbstractDeviceAdapter.stop`.\n "
] |
Please provide a description of the function:async def connect(self, conn_id, connection_string):
self._logger.info("Inside connect, conn_id=%d, conn_string=%s", conn_id, connection_string)
try:
self._setup_connection(conn_id, connection_string)
resp = await self._execute(self._adapter.connect_sync, conn_id, connection_string)
_raise_error(conn_id, 'connect', resp)
        except BaseException:  # clean up the half-made connection before re-raising
self._teardown_connection(conn_id, force=True)
raise | [
"Connect to a device.\n\n See :meth:`AbstractDeviceAdapter.connect`.\n "
] |
Please provide a description of the function:async def disconnect(self, conn_id):
resp = await self._execute(self._adapter.disconnect_sync, conn_id)
_raise_error(conn_id, 'disconnect', resp)
self._teardown_connection(conn_id, force=True) | [
"Disconnect from a connected device.\n\n See :meth:`AbstractDeviceAdapter.disconnect`.\n "
] |
Please provide a description of the function:async def open_interface(self, conn_id, interface):
resp = await self._execute(self._adapter.open_interface_sync, conn_id, interface)
_raise_error(conn_id, 'open_interface', resp) | [
"Open an interface on an IOTile device.\n\n See :meth:`AbstractDeviceAdapter.open_interface`.\n "
] |
Please provide a description of the function:async def close_interface(self, conn_id, interface):
resp = await self._execute(self._adapter.close_interface_sync, conn_id, interface)
_raise_error(conn_id, 'close_interface', resp) | [
"Close an interface on this IOTile device.\n\n See :meth:`AbstractDeviceAdapter.close_interface`.\n "
] |
Please provide a description of the function:async def probe(self):
resp = await self._execute(self._adapter.probe_sync)
_raise_error(None, 'probe', resp) | [
"Probe for devices connected to this adapter.\n\n See :meth:`AbstractDeviceAdapter.probe`.\n "
] |
Please provide a description of the function:async def send_rpc(self, conn_id, address, rpc_id, payload, timeout):
resp = await self._execute(self._adapter.send_rpc_sync, conn_id, address, rpc_id, payload, timeout)
_raise_error(conn_id, 'send_rpc', resp)
status = resp.get('status')
payload = resp.get('payload')
# This will raise an exception if needed based on status
return unpack_rpc_response(status, payload, rpc_id, address) | [
"Send an RPC to a device.\n\n See :meth:`AbstractDeviceAdapter.send_rpc`.\n "
] |
Please provide a description of the function:async def debug(self, conn_id, name, cmd_args):
progress_callback = functools.partial(_on_progress, self, 'debug', conn_id)
resp = await self._execute(self._adapter.debug_sync, conn_id, name, cmd_args, progress_callback)
        _raise_error(conn_id, 'debug', resp)
return resp.get('return_value') | [
"Send a debug command to a device.\n\n See :meth:`AbstractDeviceAdapter.debug`.\n "
] |
Please provide a description of the function:async def send_script(self, conn_id, data):
progress_callback = functools.partial(_on_progress, self, 'script', conn_id)
resp = await self._execute(self._adapter.send_script_sync, conn_id, data, progress_callback)
_raise_error(conn_id, 'send_rpc', resp) | [
"Send a a script to a device.\n\n See :meth:`AbstractDeviceAdapter.send_script`.\n "
] |
Please provide a description of the function:def autobuild_shiparchive(src_file):
if not src_file.endswith('.tpl'):
raise BuildError("You must pass a .tpl file to autobuild_shiparchive", src_file=src_file)
env = Environment(tools=[])
family = ArchitectureGroup('module_settings.json')
target = family.platform_independent_target()
resolver = ProductResolver.Create()
    #Parse through build_step products to see what needs to be imported
custom_steps = []
for build_step in family.tile.find_products('build_step'):
full_file_name = build_step.split(":")[0]
basename = os.path.splitext(os.path.basename(full_file_name))[0]
folder = os.path.dirname(full_file_name)
fileobj, pathname, description = imp.find_module(basename, [folder])
mod = imp.load_module(basename, fileobj, pathname, description)
full_file_name, class_name = build_step.split(":")
custom_steps.append((class_name, getattr(mod, class_name)))
env['CUSTOM_STEPS'] = custom_steps
env["RESOLVER"] = resolver
base_name, tpl_name = _find_basename(src_file)
yaml_name = tpl_name[:-4]
ship_name = yaml_name[:-5] + ".ship"
output_dir = target.build_dirs()['output']
build_dir = os.path.join(target.build_dirs()['build'], base_name)
tpl_path = os.path.join(build_dir, tpl_name)
yaml_path = os.path.join(build_dir, yaml_name)
ship_path = os.path.join(build_dir, ship_name)
output_path = os.path.join(output_dir, ship_name)
# We want to build up all related files in
# <build_dir>/<ship archive_folder>/
# - First copy the template yaml over
# - Then render the template yaml
# - Then find all products referenced in the template yaml and copy them
# - over
# - Then build a .ship archive
# - Then copy that archive into output_dir
ship_deps = [yaml_path]
env.Command([tpl_path], [src_file], Copy("$TARGET", "$SOURCE"))
prod_deps = _find_product_dependencies(src_file, resolver)
env.Command([yaml_path], [tpl_path], action=Action(template_shipfile_action, "Rendering $TARGET"))
for prod in prod_deps:
dest_file = os.path.join(build_dir, prod.short_name)
ship_deps.append(dest_file)
env.Command([dest_file], [prod.full_path], Copy("$TARGET", "$SOURCE"))
env.Command([ship_path], [ship_deps], action=Action(create_shipfile, "Archiving Ship Recipe $TARGET"))
env.Command([output_path], [ship_path], Copy("$TARGET", "$SOURCE")) | [
"Create a ship file archive containing a yaml_file and its dependencies.\n\n If yaml_file depends on any build products as external files, it must\n be a jinja2 template that references the file using the find_product\n filter so that we can figure out where those build products are going\n and create the right dependency graph.\n\n Args:\n src_file (str): The path to the input yaml file template. This\n file path must end .yaml.tpl and is rendered into a .yaml\n file and then packaged into a .ship file along with any\n products that are referenced in it.\n "
] |
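A worked example of the "path/to/module.py:ClassName" convention parsed in the build_step loop above; the path and class name are hypothetical:

import os

build_step = "steps/custom_build.py:CustomBuildStep"
full_file_name, class_name = build_step.split(":")
basename = os.path.splitext(os.path.basename(full_file_name))[0]
folder = os.path.dirname(full_file_name)
print(basename, folder, class_name)  # custom_build steps CustomBuildStep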
Please provide a description of the function:def create_shipfile(target, source, env):
source_dir = os.path.dirname(str(source[0]))
recipe_name = os.path.basename(str(source[0]))[:-5]
resman = RecipeManager()
resman.add_recipe_actions(env['CUSTOM_STEPS'])
resman.add_recipe_folder(source_dir, whitelist=[os.path.basename(str(source[0]))])
recipe = resman.get_recipe(recipe_name)
recipe.archive(str(target[0])) | [
"Create a .ship file with all dependencies."
] |
Please provide a description of the function:def record_trace(self, selectors=None):
if selectors is None:
selectors = [x.selector for x in self.sensor_graph.streamers]
self.trace = SimulationTrace(selectors=selectors)
for sel in selectors:
self.sensor_graph.sensor_log.watch(sel, self._on_trace_callback) | [
"Record a trace of readings produced by this simulator.\n\n This causes the property `self.trace` to be populated with a\n SimulationTrace object that contains all of the readings that\n are produced during the course of the simulation. Only readings\n that respond to specific selectors are given.\n\n You can pass whatever selectors you want in the optional selectors\n argument. If you pass None, then the default behavior to trace\n all of the output streams of the sensor graph, which are defined\n as the streams that are selected by a DataStreamer object in the\n sensor graph. This is typically what is meant by sensor graph\n output.\n\n You can inspect the current trace by looking at the trace\n property. It will initially be an empty list and will be updated\n with each call to step() or run() that results in new readings\n responsive to the selectors you pick (or the graph streamers if\n you did not explicitly pass a list of DataStreamSelector objects).\n\n Args:\n selectors (list of DataStreamSelector): The selectors to add watch\n statements on to produce this trace. This is optional.\n If it is not specified, a the streamers of the sensor\n graph are used.\n "
] |
Please provide a description of the function:def step(self, input_stream, value):
        reading = IOTileReading(self.tick_count, input_stream.encode(), value)  # (raw_time, stream, value), matching the other call sites
self.sensor_graph.process_input(input_stream, reading, self.rpc_executor) | [
"Step the sensor graph through one since input.\n\n The internal tick count is not advanced so this function may\n be called as many times as desired to input specific conditions\n without simulation time passing.\n\n Args:\n input_stream (DataStream): The input stream to push the\n value into\n value (int): The reading value to push as an integer\n "
] |
Please provide a description of the function:def run(self, include_reset=True, accelerated=True):
self._start_tick = self.tick_count
if self._check_stop_conditions(self.sensor_graph):
return
if include_reset:
pass # TODO: include a reset event here
# Process all stimuli that occur at the start of the simulation
i = None
for i, stim in enumerate(self.stimuli):
if stim.time != 0:
break
reading = IOTileReading(self.tick_count, stim.stream.encode(), stim.value)
self.sensor_graph.process_input(stim.stream, reading, self.rpc_executor)
if i is not None and i > 0:
self.stimuli = self.stimuli[i:]
while not self._check_stop_conditions(self.sensor_graph):
# Process one more one second tick
now = monotonic()
next_tick = now + 1.0
# To match what is done in actual hardware, we increment tick count so the first tick
# is 1.
self.tick_count += 1
# Process all stimuli that occur at this tick of the simulation
i = None
for i, stim in enumerate(self.stimuli):
if stim.time != self.tick_count:
break
reading = IOTileReading(self.tick_count, stim.stream.encode(), stim.value)
self.sensor_graph.process_input(stim.stream, reading, self.rpc_executor)
if i is not None and i > 0:
self.stimuli = self.stimuli[i:]
self._check_additional_ticks(self.tick_count)
if (self.tick_count % 10) == 0:
reading = IOTileReading(self.tick_count, system_tick.encode(), self.tick_count)
self.sensor_graph.process_input(system_tick, reading, self.rpc_executor)
# Every 10 seconds the battery voltage is reported in 16.16 fixed point format in volts
reading = IOTileReading(self.tick_count, battery_voltage.encode(), int(self.voltage * 65536))
self.sensor_graph.process_input(battery_voltage, reading, self.rpc_executor)
now = monotonic()
# If we are trying to execute this sensor graph in realtime, wait for
# the remaining slice of this tick.
if (not accelerated) and (now < next_tick):
time.sleep(next_tick - now) | [
"Run this sensor graph until a stop condition is hit.\n\n Multiple calls to this function are useful only if\n there has been some change in the stop conditions that would\n cause the second call to not exit immediately.\n\n Args:\n include_reset (bool): Start the sensor graph run with\n a reset event to match what would happen when an\n actual device powers on.\n accelerated (bool): Whether to run this sensor graph as\n fast as possible or to delay tick events to simulate\n the actual passage of wall clock time.\n "
] |
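A worked example of the 16.16 fixed-point encoding used for the battery voltage reading above: the integer volts live in the high 16 bits, the fraction in the low 16:

voltage = 3.3
encoded = int(voltage * 65536)
print(hex(encoded))        # 0x34ccc
print(encoded / 65536.0)   # 3.2999... (quantized to 1/65536 V)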
Please provide a description of the function:def _check_stop_conditions(self, sensor_graph):
for stop in self.stop_conditions:
if stop.should_stop(self.tick_count, self.tick_count - self._start_tick, sensor_graph):
return True
return False | [
"Check if any of our stop conditions are met.\n\n Args:\n sensor_graph (SensorGraph): The sensor graph we are currently simulating\n\n Returns:\n bool: True if we should stop the simulation\n "
] |
Please provide a description of the function:def stimulus(self, stimulus):
if not isinstance(stimulus, SimulationStimulus):
stimulus = SimulationStimulus.FromString(stimulus)
self.stimuli.append(stimulus)
self.stimuli.sort(key=lambda x:x.time) | [
"Add a simulation stimulus at a given time.\n\n A stimulus is a specific input given to the graph at a specific\n time to a specific input stream. The format for specifying a\n stimulus is:\n [time: ][system ]input X = Y\n where X and Y are integers.\n\n This will cause the simulator to inject this input at the given time.\n If you specify a time of 0 seconds, it will happen before the simulation\n starts. Similarly, if you specify a time of 1 second it will also happen\n before anything else since the simulations start with a tick value of 1.\n\n The stimulus is injected before any other things happen at each new tick.\n\n Args:\n stimulus (str or SimulationStimulus): A prebuilt stimulus object or\n a string description of the stimulus of the format:\n [time: ][system ]input X = Y\n where time is optional and defaults to 0 seconds if not specified.\n\n Examples:\n sim.stimulus('system input 10 = 15')\n sim.stimulus('1 minute: input 1 = 5')\n "
] |
Please provide a description of the function:def stop_condition(self, condition):
# Try to parse this into a stop condition with each of our registered
# condition types
for cond_format in self._known_conditions:
try:
cond = cond_format.FromString(condition)
self.stop_conditions.append(cond)
return
except ArgumentError:
continue
raise ArgumentError("Stop condition could not be processed by any known StopCondition type", condition=condition, suggestion="It may be mistyped or otherwise invalid.") | [
"Add a stop condition to this simulation.\n\n Stop conditions are specified as strings and parsed into\n the appropriate internal structures.\n\n Args:\n condition (str): a string description of the stop condition\n "
] |
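A sketch of the "try each registered parser" chain used by stop_condition(); the condition classes and ValueError here stand in for the library's condition types and ArgumentError:

class TimeCondition:
    @classmethod
    def FromString(cls, desc):
        if not desc.startswith('run_time'):
            raise ValueError("not a time condition")
        return cls()

class ReadingCondition:
    @classmethod
    def FromString(cls, desc):
        if not desc.startswith('stop when'):
            raise ValueError("not a reading condition")
        return cls()

def parse_condition(desc, known=(TimeCondition, ReadingCondition)):
    for cond_format in known:
        try:
            return cond_format.FromString(desc)
        except ValueError:
            continue
    raise ValueError("No known StopCondition type matched %r" % desc)

print(parse_condition('run_time 10 seconds'))  # a TimeCondition instance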
Please provide a description of the function:def dump(self):
walker = self.dump_walker
if walker is not None:
walker = walker.dump()
state = {
'storage': self.storage.dump(),
'dump_walker': walker,
'next_id': self.next_id
}
return state | [
"Serialize the state of this subsystem into a dict.\n\n Returns:\n dict: The serialized state\n "
] |
Please provide a description of the function:def restore(self, state):
self.storage.restore(state.get('storage'))
dump_walker = state.get('dump_walker')
if dump_walker is not None:
dump_walker = self.storage.restore_walker(dump_walker)
self.dump_walker = dump_walker
self.next_id = state.get('next_id', 1) | [
"Restore the state of this subsystem from a prior call to dump().\n\n Calling restore must be properly sequenced with calls to other\n subsystems that include stream walkers so that their walkers are\n properly restored.\n\n Args:\n state (dict): The results of a prior call to dump().\n "
] |
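A minimal sketch of the dump()/restore() contract above: the walker slot may legitimately be None, and next_id defaults to 1 when absent from older saved states:

def dump(storage_state, walker_state, next_id):
    return {'storage': storage_state, 'dump_walker': walker_state, 'next_id': next_id}

def restore(state):
    walker = state.get('dump_walker')            # None means no dump walker was active
    return state.get('storage'), walker, state.get('next_id', 1)

print(restore(dump({'entries': []}, None, 5)))   # ({'entries': []}, None, 5)
print(restore({'storage': {}}))                  # ({}, None, 1) for a legacy state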