Dataset columns: function (string, 11 to 56k characters), repo_name (string, 5 to 60 characters), features (sequence)
def __init__(self, partition, prefix='Rte', include=None, mode_switch=True, os_enable=True):
    self.partition = partition
    self.includes = []  # array of tuples, first element is the name of include header, second element is True if this is a sysinclude
    self.prefix = prefix
    self.com_component = None
    self.header_file_name = None
    self.data_elements = []
    self.extra_static_vars = {}
    self.extra_public_functions = {}
    self.extra_rte_start = C.sequence()
    self.mode_switch_enable = mode_switch
    self.os_enable = os_enable
    #self.com_access = {'receive': {}, 'send': {}}
    if include is not None:
        for elem in include:
            if isinstance(elem, (str, tuple)):
                self.includes.append(elem)
            else:
                raise ValueError("include items must be of type str or tuple(str, boolean)")
    for component in partition.components:
        if isinstance(component.inner, autosar.bsw.com.ComComponent):
            if self.com_component is None:
                self.com_component = component
            else:
                raise RuntimeError("More than one Com component allowed in a partition")
cogu/autosar
[ 233, 130, 233, 19, 1469005526 ]
def generate(self, dest_dir='.'):
    if self.os_enable:
        self.extra_static_vars.update(self.partition.static_vars)
    self._generate_header(dest_dir, 'RteApi.h')
    self._generate_source(dest_dir, 'RteApi.c')
cogu/autosar
[ 233, 130, 233, 19, 1469005526 ]
def _generate_header(self, dest_dir='.', file_name=None):
    if file_name is None:
        file_name = 'RteApi.h'
    self.includes.append((file_name, False))
    file_path = os.path.join(dest_dir, file_name)
    with io.open(file_path, 'w', newline='\n') as fp:
        header = C.hfile(file_path)
        self._write_header_includes(header.code)
        self._write_header_public_func(header.code)
        for line in header.lines():
            fp.write(line)
            fp.write('\n')
        fp.write('\n')
cogu/autosar
[ 233, 130, 233, 19, 1469005526 ]
def _generate_source(self, dest_dir='.', file_name=None):
    if file_name is None:
        file_name = 'RteApi.c'
    file_path = os.path.join(dest_dir, file_name)
    with io.open(file_path, 'w', newline='\n') as fp:
        self._write_includes(fp)
        self._write_constants_and_typedefs(fp)
        self._write_local_vars(fp)
        self._write_public_funcs(fp)
cogu/autosar
[ 233, 130, 233, 19, 1469005526 ]
def _write_includes(self, fp):
    lines = _genCommentHeader('Includes')
    fp.write('\n'.join(lines) + '\n')
    code = C.sequence()
    for include in self.includes:
        code.append(C.include(*include))
    if self.com_component is not None:
        code.append(C.include(self.com_component.name + '.h'))
    if self.os_enable:
        code.append(C.include('os.h'))
    fp.write('\n'.join(code.lines()) + '\n\n')
cogu/autosar
[ 233, 130, 233, 19, 1469005526 ]
def _write_constants_and_typedefs(self, fp):
    fp.write('\n'.join(_genCommentHeader('Constants and Types')) + '\n\n')
cogu/autosar
[ 233, 130, 233, 19, 1469005526 ]
def _write_local_vars(self, fp):
    fp.write('\n'.join(_genCommentHeader('Local Variables')) + '\n')
    code = C.sequence()
    for data_element in sorted(self.partition.data_element_map.values(), key=lambda x: x.symbol):
        var = C.variable(data_element.symbol, data_element.dataType.name, True)
        code.append(C.statement(var))
    for key in sorted(self.extra_static_vars.keys()):
        code.append(C.statement(self.extra_static_vars[key]))
    fp.write('\n'.join(code.lines()) + '\n\n')
cogu/autosar
[ 233, 130, 233, 19, 1469005526 ]
def _write_public_funcs(self, fp):
    fp.write('\n'.join(_genCommentHeader('Public Functions')) + '\n')
    self._write_rte_start(fp)
    if len(self.partition.upperLayerAPI.read) > 0:
        self._genRead(fp, sorted(self.partition.upperLayerAPI.final['read'], key=lambda x: x.shortname))
    if len(self.partition.upperLayerAPI.write) > 0:
        self._genWrite(fp, sorted(self.partition.upperLayerAPI.final['write'], key=lambda x: x.shortname))
    if len(self.partition.upperLayerAPI.receive) > 0:
        self._genReceive(fp, sorted(self.partition.upperLayerAPI.final['receive'], key=lambda x: x.shortname))
    if len(self.partition.upperLayerAPI.send) > 0:
        self._genSend(fp, sorted(self.partition.upperLayerAPI.final['send'], key=lambda x: x.shortname))
    #if len(self.partition.upperLayerAPI.call) > 0:
    #    self._genCall(fp, sorted(self.partition.upperLayerAPI.final['call'], key=lambda x: x.shortname))
    if len(self.partition.upperLayerAPI.get) > 0:
        self._genGet(fp, sorted(self.partition.upperLayerAPI.final['get'], key=lambda x: x.shortname))
    if len(self.partition.upperLayerAPI.setReadData) > 0:
        self._genFunctionBodies(fp, sorted(self.partition.upperLayerAPI.final['setReadData'], key=lambda x: x.shortname))
    if len(self.partition.upperLayerAPI.setReadResult) > 0:
        self._genFunctionBodies(fp, sorted(self.partition.upperLayerAPI.setReadResult.values(), key=lambda x: x.shortname))
    if self.mode_switch_enable and len(self.partition.mode_switch_functions) > 0:
        self._genFunctionBodies(fp, [self.partition.mode_switch_functions[key] for key in sorted(self.partition.mode_switch_functions.keys())])
    if len(self.extra_public_functions) > 0:
        self._genFunctionBodies(fp, [self.extra_public_functions[key] for key in sorted(self.extra_public_functions.keys())])
cogu/autosar
[ 233, 130, 233, 19, 1469005526 ]
def _write_rte_start(self, fp):
    func = C.function(self.prefix + '_Start', 'void')
    body = C.block(innerIndent=innerIndentDefault)
    self._write_init_values(body)
    if len(self.extra_rte_start) > 0:
        body.extend(self.extra_rte_start)
    fp.write(str(func) + '\n')
    fp.write('\n'.join(body.lines()) + '\n\n')
cogu/autosar
[ 233, 130, 233, 19, 1469005526 ]
def _write_init_values(self, body):
    for data_element in sorted(self.partition.data_element_map.values(), key=lambda x: x.symbol):
        if data_element.initValue is not None:
            init_str = autosar.constant.initializer_string(data_element.initValue)
            body.code.append(C.statement('%s = %s' % (data_element.symbol, init_str)))
cogu/autosar
[ 233, 130, 233, 19, 1469005526 ]
def _genRead(self, fp, prototypes):
    """Generates all Rte_Read functions"""
    for port_func in prototypes:
        body = C.block(innerIndent=innerIndentDefault)
        if port_func.data_element.com_access['Receive'] is not None:
            com_func = port_func.data_element.com_access['Receive']
            body.code.append(C.statement('return ' + str(C.fcall(com_func.name, params=[port_func.proto.args[0].name]))))
        else:
            body.code.append(C.statement('*%s = %s' % (port_func.proto.args[0].name, port_func.data_element.symbol)))
            if port_func.data_element.result_var is not None:
                body.code.append(C.statement('return %s' % port_func.data_element.result_var.name))
            else:
                body.code.append(C.statement('return RTE_E_OK'))
        # write prototype and body, same pattern as the other Rte_* generators below
        fp.write(str(port_func.proto) + '\n')
        fp.write('\n'.join(body.lines()) + '\n\n')
cogu/autosar
[ 233, 130, 233, 19, 1469005526 ]
def _genWrite(self, fp, prototypes):
    for port_func in prototypes:
        hasComSignal = False
        body = C.block(innerIndent=innerIndentDefault)
        if port_func.data_element.symbol is not None:
            body.code.append(C.statement('%s = %s' % (port_func.data_element.symbol, port_func.proto.args[0].name)))
        if port_func.data_element.com_access['Send'] is not None:
            com_func = port_func.data_element.com_access['Send']
            body.code.append(C.statement('return ' + str(C.fcall(com_func.name, params=[port_func.proto.args[0].name]))))
        else:
            if port_func.data_element.result_var is not None:
                body.code.append(C.statement('return %s' % port_func.data_element.result_var.name))
            else:
                body.code.append(C.statement('return RTE_E_OK'))
        fp.write(str(port_func.proto) + '\n')
        fp.write('\n'.join(body.lines()) + '\n\n')
cogu/autosar
[ 233, 130, 233, 19, 1469005526 ]
def _genReceive(self, fp, prototypes):
    for proto in prototypes:
        body = C.block(innerIndent=innerIndentDefault)
        body.code.append(C.statement('return RTE_E_OK'))
        fp.write(str(proto.func) + '\n')
        fp.write('\n'.join(body.lines()) + '\n\n')
cogu/autosar
[ 233, 130, 233, 19, 1469005526 ]
def _genSend(self, fp, prototypes):
    for proto in prototypes:
        body = C.block(innerIndent=innerIndentDefault)
        body.code.append(C.statement('return RTE_E_OK'))
        fp.write(str(proto.func) + '\n')
        fp.write('\n'.join(body.lines()) + '\n\n')
cogu/autosar
[ 233, 130, 233, 19, 1469005526 ]
def _genCall(self, fp, prototypes):
    for proto in prototypes:
        body = C.block(innerIndent=innerIndentDefault)
        body.code.append(C.statement('return RTE_E_OK'))
        fp.write(str(proto.func) + '\n')
        fp.write('\n'.join(body.lines()) + '\n\n')
cogu/autosar
[ 233, 130, 233, 19, 1469005526 ]
def _genGet(self, fp, prototypes):
    for port_func in prototypes:
        body = C.block(innerIndent=innerIndentDefault)
        prefix = '&' if port_func.data_element.dataType.isComplexType else ''
        suffix = '[0]' if isinstance(port_func.data_element.dataType, autosar.datatype.ArrayDataType) else ''
        body.code.append(C.statement('return %s%s%s' % (prefix, port_func.data_element.symbol, suffix)))
        fp.write(str(port_func.proto) + '\n')
        fp.write('\n'.join(body.lines()) + '\n\n')
cogu/autosar
[ 233, 130, 233, 19, 1469005526 ]
def _genFunctionBodies(self, fp, prototypes):
    for func in prototypes:
        fp.write(str(func.proto) + '\n')
        fp.write('\n'.join(func.body.lines()) + '\n\n')
cogu/autosar
[ 233, 130, 233, 19, 1469005526 ]
def _write_header_includes(self, code):
    code.extend(_genCommentHeader2("INCLUDES"))
    code.append(C.include('Rte_Type.h'))
    code.append(C.include('Rte.h'))
cogu/autosar
[ 233, 130, 233, 19, 1469005526 ]
def _write_header_public_func(self, code):
    code.append('')
    code.extend(_genCommentHeader2("PUBLIC FUNCTION PROTOTYPES"))
    code.append(C.statement(C.function('Rte_Start', 'void')))
    if self.mode_switch_enable and len(self.partition.mode_switch_functions) > 0:
        for func in [self.partition.mode_switch_functions[key] for key in sorted(self.partition.mode_switch_functions.keys())]:
            code.append(C.statement(func.proto))
cogu/autosar
[ 233, 130, 233, 19, 1469005526 ]
def __init__(self, partition):
    self.partition = partition
    self.useMockedAPI = False
cogu/autosar
[ 233, 130, 233, 19, 1469005526 ]
def generate(self, destdir, mocked=None):
    if mocked is not None:
        self.useMockedAPI = bool(mocked)
    for component in self.partition.components:
        if not isinstance(component.inner, autosar.bsw.com.ComComponent):
            with io.open(os.path.join(destdir, 'Rte_%s.h' % component.inner.name), 'w', newline='\n') as fp:
                self._genComponentHeader(fp, component)
cogu/autosar
[ 233, 130, 233, 19, 1469005526 ]
def _genComponentHeader(self, fp, component):
    ws = component.inner.rootWS()
    assert(ws is not None)
    hfile = C.hfile(None, guard='RTE_%s_H' % (component.inner.name.upper()))
    hfile.code.append(C.include('Rte.h'))
    hfile.code.append(C.include('Rte_Type.h'))
    hfile.code.append(C.blank())
    lines = self._genInitValues(ws, component.inner.requirePorts + component.inner.providePorts)
    if len(lines) > 0:
        hfile.code.extend([C.line(x) for x in _genCommentHeader('Init Values')])
        hfile.code.extend(lines)
cogu/autosar
[ 233, 130, 233, 19, 1469005526 ]
def _genInitValues(self, ws, ports):
    ports = sorted(ports, key=lambda port: port.name)
    code = C.sequence()
    for port in ports:
        for comspec in port.comspec:
            if isinstance(comspec, autosar.port.DataElementComSpec):
                if comspec.initValueRef is not None:
                    initValue = ws.find(comspec.initValueRef)
                    if isinstance(initValue, autosar.constant.Constant):
                        # in case the ref is pointing to a Constant (the parent), grab the child instance using .value
                        initValue = initValue.value
                    if initValue is not None:
                        dataType = ws.find(initValue.typeRef)
                        if dataType is not None:
                            prefix = 'Rte_InitValue_%s_%s' % (port.name, comspec.name)
                            code.extend(self._getInitValue(ws, prefix, initValue, dataType))
    return code
cogu/autosar
[ 233, 130, 233, 19, 1469005526 ]
def _getInitValue(self, ws, def_name, value, dataType):
    """returns a list or sequence"""
    code = C.sequence()
    if isinstance(value, autosar.constant.IntegerValue):
        if dataType.minVal >= 0:
            suffix = 'u'
        else:
            suffix = ''
        code.append(C.define(def_name, '((%s)%s%s)' % (dataType.name, value.value, suffix)))
    elif isinstance(value, autosar.constant.StringValue):
        code.append(C.define(def_name, '"%s"' % (value.value)))
    elif isinstance(value, autosar.constant.BooleanValue):
        if value.value:
            text = '((boolean) TRUE)'
        else:
            text = '((boolean) FALSE)'
        code.append(C.define(def_name, text))
    elif isinstance(value, autosar.constant.RecordValue):
        for element in value.elements:
            prefix = '%s_%s' % (def_name, element.name)
            dataType = ws.find(element.typeRef)
            if dataType is not None:
                code.extend(self._getInitValue(ws, prefix, element, dataType))
    elif isinstance(value, autosar.constant.ArrayValue):
        pass
    else:
        raise NotImplementedError(type(value))
    return code
cogu/autosar
[ 233, 130, 233, 19, 1469005526 ]
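As a side note, here is a minimal sketch of the define-name scheme that _genInitValues and _getInitValue build together (the 'Rte_InitValue_<port>_<element>' prefix, plus one extra '_<field>' level per record member), written with plain string formatting instead of the cfile objects; the port, element, and value strings are made-up examples.

# Illustrative only: hypothetical names, plain strings instead of C.define objects.
def init_value_defines(port_name, element_name, init_value):
    prefix = 'Rte_InitValue_%s_%s' % (port_name, element_name)
    if isinstance(init_value, dict):
        # record-style value: one define per field, mirroring the RecordValue branch above
        return ['#define %s_%s %s' % (prefix, field, val) for field, val in init_value.items()]
    return ['#define %s %s' % (prefix, init_value)]

print(init_value_defines('EngineSpeed', 'EngineSpeed', '((uint16)65535u)'))
# ['#define Rte_InitValue_EngineSpeed_EngineSpeed ((uint16)65535u)']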
def _writeRunnableProto(self, runnable):
    lines = []
    lines.extend([C.line(x) for x in _genCommentHeader('Runnable %s' % runnable.name)])
    lines.append(C.statement(runnable.prototype))
    lines.append(C.blank())
    return lines
cogu/autosar
[ 233, 130, 233, 19, 1469005526 ]
def __init__(self, partition, api_prefix='Rte', file_prefix='MockRte', include=None, mode_switch=False, os_enable=False):
    super().__init__(partition, api_prefix, include, mode_switch)
    self.includes.append((file_prefix + '.h', False))
    self.api_prefix = api_prefix
    self.file_prefix = file_prefix
    self.os_enable = os_enable
    self.typedefs = {}
    for port in partition.unconnectedPorts():
        if isinstance(port, autosar.rte.base.ProvidePort):
            self._create_port_getter_api(port)
        else:
            if len(port.data_elements) > 0 or len(port.operations) > 0:
                self._create_port_setter_api(port)
    self.partition.upperLayerAPI.finalize()
cogu/autosar
[ 233, 130, 233, 19, 1469005526 ]
def generate(self, dest_dir):
    self._generateHeader(dest_dir)
    super()._generate_source(dest_dir, self.file_prefix + '.c')
cogu/autosar
[ 233, 130, 233, 19, 1469005526 ]
def _create_port_getter_api(self, port):
    component = port.parent
    for data_element in port.data_elements:
        if "%s/%s" % (port.name, data_element.name) in component.data_element_port_access:
            self._create_data_element_getter(component, port, data_element)
cogu/autosar
[ 233, 130, 233, 19, 1469005526 ]
def _create_data_element_getter(self, component, port, data_element):
    data_type = data_element.dataType
    func_name = '%s_GetWriteData_%s_%s_%s' % (self.prefix, component.name, port.name, data_element.name)
    short_name = '%s_GetWriteData_%s_%s' % (self.prefix, port.name, data_element.name)
    suffix = '*' if data_type.isComplexType else ''
    proto = C.function(func_name, data_type.name + suffix)
    rte_func = autosar.rte.base.DataElementFunction(proto, port, data_element)
    #self._createPortVariable(component, port, data_element)
    var_name = self._createDataElementVariable(component, port, data_element)
    self.partition.upperLayerAPI.get[short_name] = autosar.rte.base.GetPortFunction(short_name, proto, data_element)
cogu/autosar
[ 233, 130, 233, 19, 1469005526 ]
def _create_port_setter_api(self, port):
    component = port.parent
    for data_element in port.data_elements:
        if "%s/%s" % (port.name, data_element.name) in component.data_element_port_access:
            self._create_data_element_setter(component, port, data_element)
    for operation in port.operations:
        key = "%s/%s" % (port.name, operation.name)
        if key in component.operation_port_access:
            self._create_operation_setter(component, port, operation, component.operation_port_access[key])
cogu/autosar
[ 233, 130, 233, 19, 1469005526 ]
def _create_data_element_setter(self, component, port, data_element):
    var_name = self._createDataElementVariable(component, port, data_element)
    port_func = autosar.rte.base.SetReadDataFunction(self.prefix, component, port, data_element, var_name)
    self.partition.upperLayerAPI.setReadData[port_func.shortname] = port_func
    port_func = autosar.rte.base.SetReadResultFunction(self.prefix, component, port, data_element)
    self.partition.upperLayerAPI.setReadResult[port_func.shortname] = port_func
    self.extra_static_vars[port_func.static_var.name] = port_func.static_var
    self.extra_rte_start.append(C.statement('%s = RTE_E_OK' % (data_element.result_var.name)))
cogu/autosar
[ 233, 130, 233, 19, 1469005526 ]
def _create_operation_setter(self, component, port, operation, port_access):
    func_name = '%s_SetCallHandler_%s_%s_%s' % (self.prefix, component.name, port.name, operation.name)
    short_name = '%s_SetCallHandler_%s_%s' % (self.prefix, port.name, operation.name)
cogu/autosar
[ 233, 130, 233, 19, 1469005526 ]
def _createMockServerCallFunction(self, proto, var_name):
    # Builds a C function body that forwards the call to a user-registered handler
    # (stored in var_name) when one is set, otherwise returns RTE_E_OK.
    body = C.block(innerIndent=innerIndentDefault)
    body.append(C.line('if (%s != 0)' % (var_name)))
    inner = C.block(innerIndent=innerIndentDefault)
    fcall = C.fcall(var_name)
    for arg in proto.args:
        fcall.add_param(arg.name)
    if proto.typename != 'void':
        inner.append(C.statement('return %s' % str(fcall)))
    else:
        inner.append(C.statement(fcall))
    body.append(inner)
    if proto.typename != 'void':
        body.append(C.statement('return RTE_E_OK'))
    return body
cogu/autosar
[ 233, 130, 233, 19, 1469005526 ]
def _createDataElementVariable(self, component, port, data_element):
    # Variables are named '<component>_<port>_<dataElement>' and registered once
    # in the partition-wide data_element_map.
    data_element_map = self.partition.data_element_map
    variable_name = '_'.join([component.name, port.name, data_element.name])
    if variable_name not in data_element_map:
        data_element.symbol = variable_name
        data_element_map[variable_name] = data_element
    return variable_name
cogu/autosar
[ 233, 130, 233, 19, 1469005526 ]
def _generateHeader(self, dest_dir):
    filepath = os.path.join(dest_dir, self.file_prefix + '.h')
    with io.open(filepath, 'w', newline='\n') as fp:
        for line in self._createHeaderLines(filepath):
            fp.write(line)
            fp.write('\n')
        fp.write('\n')
cogu/autosar
[ 233, 130, 233, 19, 1469005526 ]
def _createHeaderLines(self, filepath):
    hfile = C.hfile(filepath)
    code = hfile.code
    code.extend([C.line(x) for x in _genCommentHeader('Includes')])
    code.append(C.include("Std_Types.h"))
    code.append(C.include("Rte_Type.h"))
    code.append(C.include("Rte.h"))
    code.append(C.blank())
    code.extend(_genCommentHeader('Constants and Types'))
    for key in sorted(self.typedefs.keys()):
        code.append(C.statement(self.typedefs[key]))
    code.append(C.blank())
    code.extend([C.line(x) for x in _genCommentHeader('Public Function Declarations')])
    code.append(C.blank())
    # return the rendered lines so _generateHeader can iterate over them
    return hfile.lines()
cogu/autosar
[ 233, 130, 233, 19, 1469005526 ]
def __init__(self, partition, os_cfg, prefix='RteTask', include=None):
    self.partition = partition
    self.prefix = prefix
    self.os_cfg = os_cfg
    # array of tuples, first element is the name of include header, second element is True if this is a sysinclude
    self.includes = [
        ('stdio.h', True),
        ('Rte.h', False),
        ('Rte_Type.h', False),
        #('%s.h' % self.prefix, False),
        ('os.h', False),
    ]
    for component in self.partition.components:
        if not isinstance(component.inner, autosar.bsw.com.ComComponent):
            self.includes.append(('Rte_%s.h' % component.name, False))
    if include is not None:
        for elem in list(include):
            if isinstance(elem, str):
                self.includes.append((elem, False))
            elif isinstance(elem, tuple):
                self.includes.append(elem)
            else:
                raise ValueError("elem: expected string or tuple, got " + str(type(elem)))
cogu/autosar
[ 233, 130, 233, 19, 1469005526 ]
def generate(self, dest_dir='.'):
    #self._generate_header(dest_dir)
    self._generate_source(dest_dir)
cogu/autosar
[ 233, 130, 233, 19, 1469005526 ]
def _generate_source(self, dest_dir):
    file_name = self.prefix + '.c'
    file_path = os.path.join(dest_dir, file_name)
cogu/autosar
[ 233, 130, 233, 19, 1469005526 ]
def _write_source_includes(self):
    code = C.sequence()
    code.extend(_genCommentHeader2('INCLUDES'))
    code.append(C.blank())
    for include in self.includes:
        code.append(C.include(*include))
    code.append(C.blank())
    return code
cogu/autosar
[ 233, 130, 233, 19, 1469005526 ]
def _write_source_constants_and_typedefs(self):
    code = C.sequence()
    code.extend(_genCommentHeader2('CONSTANTS AND DATA TYPES'))
    code.append(C.blank())
    return code
cogu/autosar
[ 233, 130, 233, 19, 1469005526 ]
def _write_source_local_funcs(self):
    code = C.sequence()
    code.extend(_genCommentHeader2('LOCAL FUNCTION PROTOTYPES'))
    code.append(C.blank())
    return code
cogu/autosar
[ 233, 130, 233, 19, 1469005526 ]
def _write_source_global_funcs(self):
    code = C.sequence()
    code.extend(_genCommentHeader2('GLOBAL FUNCTIONS'))
    code.append(C.blank())
    for task in sorted(self.os_cfg.tasks, key=lambda x: x.name):
        code.append(C.line('OS_TASK_HANDLER({0.name}, arg)'.format(task)))
        code.append(self._generate_task_body(task))
        code.append(C.blank(2))
    return code
cogu/autosar
[ 233, 130, 233, 19, 1469005526 ]
def _generate_task_body(self, task):
    code = C.block(innerIndent=innerIndentDefault)
    isRunning = C.variable('isRunning', 'boolean')
    code.append(C.statement('{0} = TRUE'.format(str(isRunning))))
    code.append(C.statement('os_task_t *self = (os_task_t*)arg'))
    code.append('')
    code.append(C.line('if (self == 0)'))
    body = C.block(innerIndent=innerIndentDefault)
    body.append(C.statement('THREAD_RETURN(1)'))
    code.append(body)
    code.append('')
    code.append(C.line('while (isRunning == TRUE)'))
cogu/autosar
[ 233, 130, 233, 19, 1469005526 ]
def _generate_event_mask_triggers(self, task):
    code = C.sequence()
    for runnable in task.runnables:
        if runnable.processed:
            continue
        matching_runnables = self._find_compatible_runnables(task, runnable)
        self._generate_runnable_calls(code, matching_runnables)
        for matching in matching_runnables:
            matching.processed = True
    return code
cogu/autosar
[ 233, 130, 233, 19, 1469005526 ]
def _find_compatible_runnables(self, task, current):
    result = [current]
    for other in task.runnables:
        if (other is not current) and (not other.processed):
            if len(current.event_triggers) == len(other.event_triggers):
                is_compatible = True
                for current_event in current.event_triggers:
                    found = False
                    for other_event in other.event_triggers:
                        if current_event.symbol == other_event.symbol:
                            found = True
                            break
                    if not found:
                        is_compatible = False
                        break
                if is_compatible:
                    result.append(other)
    return result
cogu/autosar
[ 233, 130, 233, 19, 1469005526 ]
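For readability, here is a stand-alone sketch of the same grouping idea as _find_compatible_runnables above: collect runnables whose event triggers expose identical symbol sets. The runnable and trigger objects with .symbol and .event_triggers attributes are hypothetical stand-ins, and the set-based key is an approximation of the pairwise comparison used in the original.

# Rough, self-contained approximation; objects and attribute names are hypothetical.
from collections import defaultdict

def group_runnables_by_triggers(runnables):
    groups = defaultdict(list)
    for runnable in runnables:
        # runnables that share the exact same set of trigger symbols land in one group
        key = frozenset(trigger.symbol for trigger in runnable.event_triggers)
        groups[key].append(runnable)
    return list(groups.values())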
def _generate_runnable_calls(self, code, matching_runnables):
    events = matching_runnables[0].event_triggers
    if len(events) == 1:
        event = events[0]
        if not isinstance(event, autosar.rte.base.OperationInvokedEvent):
            code.append(C.line('if (eventMask & %s)' % event.symbol))
            block = C.block(innerIndent=innerIndentDefault)
            for runnable in matching_runnables:
                block.append(C.statement(C.fcall(runnable.symbol)))
            code.append(block)
    elif len(events) > 1:
        raise NotImplementedError('multiple events')
cogu/autosar
[ 233, 130, 233, 19, 1469005526 ]
def _generate_header(self, dest_dir):
    file_name = self.prefix + '.h'
    file_path = os.path.join(dest_dir, file_name)
    with io.open(file_path, 'w', newline='\n') as fp:
        print("#ifndef RTE_TASK_H", file=fp)
        print("#define RTE_TASK_H", file=fp)
        self._write_header_includes(fp)
        self._write_header_constants_and_typedefs(fp)
        self._write_header_global_var(fp)
        self._write_header_global_proto(fp)
        print("#endif //RTE_TASK_H", file=fp)
cogu/autosar
[ 233, 130, 233, 19, 1469005526 ]
def _write_header_includes(self, fp):
    lines = _genCommentHeader('INCLUDES')
    lines.append('#ifdef _MSC_VER')
    lines.append('#include <Windows.h>')
    lines.append('#else')
    lines.append('#include <pthread.h>')
    lines.append('#endif //_MSC_VER')
    lines.append('#include "osmacro.h"')
    fp.write('\n'.join(lines) + '\n\n')
cogu/autosar
[ 233, 130, 233, 19, 1469005526 ]
def _write_header_constants_and_typedefs(self, fp):
    lines = _genCommentHeader('CONSTANTS AND DATA TYPES')
    fp.write('\n'.join(lines) + '\n\n')
cogu/autosar
[ 233, 130, 233, 19, 1469005526 ]
def _write_header_global_var(self, fp):
    lines = _genCommentHeader('GLOBAL VARIABLES')
    fp.write('\n'.join(lines) + '\n\n')
cogu/autosar
[ 233, 130, 233, 19, 1469005526 ]
def __init__(self, editwin):
    self.editwin = editwin
ArcherSys/ArcherSys
[ 3, 2, 3, 16, 1412356452 ]
def format_paragraph_event(self, event, limit=None):
    """Formats paragraph to a max width specified in idleConf.

    If text is selected, format_paragraph_event will start breaking lines
    at the max width, starting from the beginning selection.

    If no text is selected, format_paragraph_event uses the current
    cursor location to determine the paragraph (lines of text surrounded
    by blank lines) and formats it.

    The length limit parameter is for testing with a known value.
    """
    if limit is None:
        # The default length limit is that defined by pep8
        limit = idleConf.GetOption(
            'extensions', 'FormatParagraph', 'max-width',
            type='int', default=72)
    text = self.editwin.text
    first, last = self.editwin.get_selection_indices()
    if first and last:
        data = text.get(first, last)
        comment_header = get_comment_header(data)
    else:
        first, last, comment_header, data = \
            find_paragraph(text, text.index("insert"))
    if comment_header:
        newdata = reformat_comment(data, limit, comment_header)
    else:
        newdata = reformat_paragraph(data, limit)
    text.tag_remove("sel", "1.0", "end")
    if newdata != data:
        text.mark_set("insert", first)
        text.undo_block_start()
        text.delete(first, last)
        text.insert(first, newdata)
        text.undo_block_stop()
    else:
        text.mark_set("insert", last)
    text.see("insert")
    return "break"
ArcherSys/ArcherSys
[ 3, 2, 3, 16, 1412356452 ]
def reformat_paragraph(data, limit):
    """Return data reformatted to specified width (limit)."""
    lines = data.split("\n")
    i = 0
    n = len(lines)
    while i < n and is_all_white(lines[i]):
        i = i + 1
    if i >= n:
        return data
    indent1 = get_indent(lines[i])
    if i + 1 < n and not is_all_white(lines[i + 1]):
        indent2 = get_indent(lines[i + 1])
    else:
        indent2 = indent1
    new = lines[:i]
    partial = indent1
    while i < n and not is_all_white(lines[i]):
        # XXX Should take double space after period (etc.) into account
        words = re.split(r"(\s+)", lines[i])
        for j in range(0, len(words), 2):
            word = words[j]
            if not word:
                continue  # Can happen when line ends in whitespace
            if len((partial + word).expandtabs()) > limit and \
                    partial != indent1:
                new.append(partial.rstrip())
                partial = indent2
            partial = partial + word + " "
            if j + 1 < len(words) and words[j + 1] != " ":
                partial = partial + " "
        i = i + 1
    new.append(partial.rstrip())
    # XXX Should reformat remaining paragraphs as well
    new.extend(lines[i:])
    return "\n".join(new)
ArcherSys/ArcherSys
[ 3, 2, 3, 16, 1412356452 ]
def is_all_white(line):
    """Return True if line is empty or all whitespace."""
    return re.match(r"^\s*$", line) is not None
ArcherSys/ArcherSys
[ 3, 2, 3, 16, 1412356452 ]
def get_comment_header(line):
    """Return string with leading whitespace and '#' from line or ''.

    A null return indicates that the line is not a comment line. A non-
    null return, such as '    #', will be used to find the other lines of
    a comment block with the same indent.
    """
    m = re.match(r"^([ \t]*#*)", line)
    if m is None:
        return ""
    return m.group(1)
ArcherSys/ArcherSys
[ 3, 2, 3, 16, 1412356452 ]
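A quick usage check for the two helpers above, assuming is_all_white and get_comment_header are in scope; the sample strings are arbitrary.

import re  # required by both helpers

print(get_comment_header("    # part of a comment block"))  # '    #'
print(get_comment_header("x = 1  # trailing comment"))      # '' (not a comment line)
print(is_all_white("   \t  "))                              # True
print(is_all_white("text"))                                 # False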
def tearDown(self):
    try:
        os.unlink(support.TESTFN)
    except:
        pass
ArcherSys/ArcherSys
[ 3, 2, 3, 16, 1412356452 ]
def test_create(self):
    pl = self._create()
    self.assertEqual(pl["aString"], "Doodah")
    self.assertEqual(pl["aDict"]["aFalseValue"], False)
ArcherSys/ArcherSys
[ 3, 2, 3, 16, 1412356452 ]
def test_invalid_type(self):
    pl = [object()]
    for fmt in ALL_FORMATS:
        with self.subTest(fmt=fmt):
            self.assertRaises(TypeError, plistlib.dumps, pl, fmt=fmt)
ArcherSys/ArcherSys
[ 3, 2, 3, 16, 1412356452 ]
def test_bytes(self):
    pl = self._create()
    data = plistlib.dumps(pl)
    pl2 = plistlib.loads(data)
    self.assertNotIsInstance(pl, plistlib._InternalDict)
    self.assertEqual(dict(pl), dict(pl2))
    data2 = plistlib.dumps(pl2)
    self.assertEqual(data, data2)
ArcherSys/ArcherSys
[ 3, 2, 3, 16, 1412356452 ]
def test_indentation_dict(self):
    data = {'1': {'2': {'3': {'4': {'5': {'6': {'7': {'8': {'9': b'aaaaaa'}}}}}}}}}
    self.assertEqual(plistlib.loads(plistlib.dumps(data)), data)
ArcherSys/ArcherSys
[ 3, 2, 3, 16, 1412356452 ]
def test_appleformatting(self):
    for use_builtin_types in (True, False):
        for fmt in ALL_FORMATS:
            with self.subTest(fmt=fmt, use_builtin_types=use_builtin_types):
                pl = plistlib.loads(TESTDATA[fmt],
                                    use_builtin_types=use_builtin_types)
                data = plistlib.dumps(pl, fmt=fmt)
                self.assertEqual(data, TESTDATA[fmt],
                                 "generated data was not identical to Apple's output")
ArcherSys/ArcherSys
[ 3, 2, 3, 16, 1412356452 ]
def test_bytesio(self):
    for fmt in ALL_FORMATS:
        with self.subTest(fmt=fmt):
            b = BytesIO()
            pl = self._create(fmt=fmt)
            plistlib.dump(pl, b, fmt=fmt)
            pl2 = plistlib.load(BytesIO(b.getvalue()), fmt=fmt)
            self.assertEqual(dict(pl), dict(pl2))
            pl2 = plistlib.load(BytesIO(b.getvalue()))
            self.assertEqual(dict(pl), dict(pl2))
ArcherSys/ArcherSys
[ 3, 2, 3, 16, 1412356452 ]
def test_keysort(self):
    pl = collections.OrderedDict()
    pl['b'] = 1
    pl['a'] = 2
    pl['c'] = 3
    for fmt in ALL_FORMATS:
        for sort_keys in (False, True):
            with self.subTest(fmt=fmt, sort_keys=sort_keys):
                data = plistlib.dumps(pl, fmt=fmt, sort_keys=sort_keys)
                pl2 = plistlib.loads(data, dict_type=collections.OrderedDict)
                self.assertEqual(dict(pl), dict(pl2))
                if sort_keys:
                    self.assertEqual(list(pl2.keys()), ['a', 'b', 'c'])
                else:
                    self.assertEqual(list(pl2.keys()), ['b', 'a', 'c'])
ArcherSys/ArcherSys
[ 3, 2, 3, 16, 1412356452 ]
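The sort_keys behaviour exercised by test_keysort can be seen with a small stand-alone round trip; plistlib.dumps sorts dictionary keys by default, and sort_keys=False preserves insertion order.

import collections
import plistlib

pl = collections.OrderedDict([('b', 1), ('a', 2), ('c', 3)])
unsorted_xml = plistlib.dumps(pl, sort_keys=False)
sorted_xml = plistlib.dumps(pl)  # sort_keys defaults to True

print(list(plistlib.loads(unsorted_xml).keys()))  # ['b', 'a', 'c']
print(list(plistlib.loads(sorted_xml).keys()))    # ['a', 'b', 'c']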
def test_skipkeys(self):
    pl = {
        42: 'aNumber',
        'snake': 'aWord',
    }
    for fmt in ALL_FORMATS:
        with self.subTest(fmt=fmt):
            data = plistlib.dumps(
                pl, fmt=fmt, skipkeys=True, sort_keys=False)
            pl2 = plistlib.loads(data)
            self.assertEqual(pl2, {'snake': 'aWord'})

            fp = BytesIO()
            plistlib.dump(
                pl, fp, fmt=fmt, skipkeys=True, sort_keys=False)
            data = fp.getvalue()
            pl2 = plistlib.loads(fp.getvalue())
            self.assertEqual(pl2, {'snake': 'aWord'})
ArcherSys/ArcherSys
[ 3, 2, 3, 16, 1412356452 ]
def test_list_members(self):
    pl = {
        'first': [1, 2],
        'second': [1, 2],
        'third': [3, 4],
    }
    for fmt in ALL_FORMATS:
        with self.subTest(fmt=fmt):
            data = plistlib.dumps(pl, fmt=fmt)
            pl2 = plistlib.loads(data)
            self.assertEqual(pl2, {
                'first': [1, 2],
                'second': [1, 2],
                'third': [3, 4],
            })
            self.assertIsNot(pl2['first'], pl2['second'])
ArcherSys/ArcherSys
[ 3, 2, 3, 16, 1412356452 ]
def test_controlcharacters(self):
    for i in range(128):
        c = chr(i)
        testString = "string containing %s" % c
        if i >= 32 or c in "\r\n\t":
            # \r, \n and \t are the only legal control chars in XML
            plistlib.dumps(testString, fmt=plistlib.FMT_XML)
        else:
            self.assertRaises(ValueError,
                              plistlib.dumps,
                              testString)
ArcherSys/ArcherSys
[ 3, 2, 3, 16, 1412356452 ]
def test_invalidarray(self):
    for i in ["<key>key inside an array</key>",
              "<key>key inside an array2</key><real>3</real>",
              "<true/><key>key inside an array3</key>"]:
        self.assertRaises(ValueError, plistlib.loads,
                          ("<plist><array>%s</array></plist>" % i).encode())
ArcherSys/ArcherSys
[ 3, 2, 3, 16, 1412356452 ]
def test_invalidinteger(self):
    self.assertRaises(ValueError, plistlib.loads,
                      b"<plist><integer>not integer</integer></plist>")
ArcherSys/ArcherSys
[ 3, 2, 3, 16, 1412356452 ]
def test_xml_encodings(self):
    base = TESTDATA[plistlib.FMT_XML]
    for xml_encoding, encoding, bom in [
                (b'utf-8', 'utf-8', codecs.BOM_UTF8),
                (b'utf-16', 'utf-16-le', codecs.BOM_UTF16_LE),
                (b'utf-16', 'utf-16-be', codecs.BOM_UTF16_BE),
                # Expat does not support UTF-32
                #(b'utf-32', 'utf-32-le', codecs.BOM_UTF32_LE),
                #(b'utf-32', 'utf-32-be', codecs.BOM_UTF32_BE),
            ]:
        pl = self._create(fmt=plistlib.FMT_XML)
        with self.subTest(encoding=encoding):
            data = base.replace(b'UTF-8', xml_encoding)
            data = bom + data.decode('utf-8').encode(encoding)
            pl2 = plistlib.loads(data)
            self.assertEqual(dict(pl), dict(pl2))
ArcherSys/ArcherSys
[ 3, 2, 3, 16, 1412356452 ]
def test_io_deprecated(self):
    pl_in = {
        'key': 42,
        'sub': {
            'key': 9,
            'alt': 'value',
            'data': b'buffer',
        }
    }
    pl_out = plistlib._InternalDict({
        'key': 42,
        'sub': plistlib._InternalDict({
            'key': 9,
            'alt': 'value',
            'data': plistlib.Data(b'buffer'),
        })
    })

    self.addCleanup(support.unlink, support.TESTFN)
    with self.assertWarns(DeprecationWarning):
        plistlib.writePlist(pl_in, support.TESTFN)
    with self.assertWarns(DeprecationWarning):
        pl2 = plistlib.readPlist(support.TESTFN)
    self.assertEqual(pl_out, pl2)

    os.unlink(support.TESTFN)

    with open(support.TESTFN, 'wb') as fp:
        with self.assertWarns(DeprecationWarning):
            plistlib.writePlist(pl_in, fp)
    with open(support.TESTFN, 'rb') as fp:
        with self.assertWarns(DeprecationWarning):
            pl2 = plistlib.readPlist(fp)
    self.assertEqual(pl_out, pl2)
ArcherSys/ArcherSys
[ 3, 2, 3, 16, 1412356452 ]
def test_dataobject_deprecated(self):
    in_data = {'key': plistlib.Data(b'hello')}
    out_data = {'key': b'hello'}
    buf = plistlib.dumps(in_data)

    cur = plistlib.loads(buf)
    self.assertEqual(cur, out_data)
    self.assertNotEqual(cur, in_data)

    cur = plistlib.loads(buf, use_builtin_types=False)
    self.assertNotEqual(cur, out_data)
    self.assertEqual(cur, in_data)

    with self.assertWarns(DeprecationWarning):
        cur = plistlib.readPlistFromBytes(buf)
    self.assertNotEqual(cur, out_data)
    self.assertEqual(cur, in_data)
ArcherSys/ArcherSys
[ 3, 2, 3, 16, 1412356452 ]