Columns:
  _id               string (lengths 2 to 7)
  title             string (lengths 1 to 88)
  partition         string (3 values)
  text              string (lengths 75 to 19.8k)
  language          string (1 value)
  meta_information  dict
q3500
Asm.argval
train
def argval(self):
    """ Solve args values or raise errors if not defined yet
    """
    if gl.has_errors:
        return [None]

    if self.asm in ('DEFB', 'DEFS', 'DEFW'):
        return tuple([x.eval() if isinstance(x, Expr) else x for x in self.arg])

    self.arg = tuple([x if not isinstance(x, Expr) else x.eval() for x in self.arg])
    if gl.has_errors:
        return [None]

    if self.asm.split(' ')[0] in ('JR', 'DJNZ'):  # A relative jump?
        if self.arg[0] < -128 or self.arg[0] > 127:
            error(self.lineno, 'Relative jump out of range')
            return [None]

    return super(Asm, self).argval()
python
{ "resource": "" }
q3501
Expr.eval
train
def eval(self):
    """ Recursively evals the node. Exits with an error if not resolved.
    """
    Expr.ignore = False
    result = self.try_eval()
    Expr.ignore = True

    return result
python
{ "resource": "" }
q3502
Expr.try_eval
train
def try_eval(self): """ Recursively evals the node. Returns None if it is still unresolved. """ item = self.symbol.item if isinstance(item, int): return item if isinstance(item, Label): if item.defined: if isinstance(item.value, Expr): return item.value.try_eval() else: return item.value else: if Expr.ignore: return None # Try to resolve into the global namespace error(self.symbol.lineno, "Undefined label '%s'" % item.name) return None try: if isinstance(item, tuple): return tuple([x.try_eval() for x in item]) if isinstance(item, list): return [x.try_eval() for x in item] if item == '-' and len(self.children) == 1: return -self.left.try_eval() try: return self.funct[item](self.left.try_eval(), self.right.try_eval()) except ZeroDivisionError: error(self.symbol.lineno, 'Division by 0') except KeyError: pass except TypeError: pass return None
python
{ "resource": "" }
q3503
Label.define
train
def define(self, value, lineno, namespace=None):
    """ Defines label value. It can be anything. Even an AST
    """
    if self.defined:
        error(lineno, "label '%s' already defined at line %i" % (self.name, self.lineno))

    self.value = value
    self.lineno = lineno
    self.namespace = NAMESPACE if namespace is None else namespace
python
{ "resource": "" }
q3504
Memory.set_org
train
def set_org(self, value, lineno):
    """ Sets a new ORG value
    """
    if value < 0 or value > MAX_MEM:
        error(lineno, "Memory ORG out of range [0 .. 65535]. Current value: %i" % value)

    self.index = self.ORG = value
python
{ "resource": "" }
q3505
Memory.id_name
train
def id_name(label, namespace=None):
    """ Given a name and a namespace, returns the name resolved as
    namespace + '.' + name. If namespace is None, the current NAMESPACE
    is used.
    """
    if not label.startswith(DOT):
        if namespace is None:
            namespace = NAMESPACE
        ex_label = namespace + label  # The mangled namespace.labelname label
    else:
        if namespace is None:
            namespace = GLOBAL_NAMESPACE  # Global namespace
        ex_label = label

    return ex_label, namespace
python
{ "resource": "" }
q3506
Memory.__set_byte
train
def __set_byte(self, byte, lineno):
    """ Sets a byte at the current location, and increments org by one.
    Raises an error if org > MAX_MEMORY
    """
    if byte < 0 or byte > 255:
        error(lineno, 'Invalid byte value %i' % byte)

    self.memory_bytes[self.org] = byte
    self.index += 1
python
{ "resource": "" }
q3507
Memory.exit_proc
train
def exit_proc(self, lineno): """ Exits current procedure. Local labels are transferred to global scope unless they have been marked as local ones. Raises an error if no current local context (stack underflow) """ __DEBUG__('Exiting current scope from lineno %i' % lineno) if len(self.local_labels) <= 1: error(lineno, 'ENDP in global scope (with no PROC)') return for label in self.local_labels[-1].values(): if label.local: if not label.defined: error(lineno, "Undefined LOCAL label '%s'" % label.name) return continue name = label.name _lineno = label.lineno value = label.value if name not in self.global_labels.keys(): self.global_labels[name] = label else: self.global_labels[name].define(value, _lineno) self.local_labels.pop() # Removes current context self.scopes.pop()
python
{ "resource": "" }
q3508
Memory.dump
train
def dump(self): """ Returns a tuple containing code ORG, and a list of OUTPUT """ org = min(self.memory_bytes.keys()) # Org is the lowest one OUTPUT = [] align = [] for i in range(org, max(self.memory_bytes.keys()) + 1): if gl.has_errors: return org, OUTPUT try: try: a = [x for x in self.orgs[i] if isinstance(x, Asm)] # search for asm instructions if not a: align.append(0) # Fill with ZEROes not used memory regions continue OUTPUT += align align = [] a = a[0] if a.pending: a.arg = a.argval() a.pending = False tmp = a.bytes() for r in range(len(tmp)): self.memory_bytes[i + r] = tmp[r] except KeyError: pass OUTPUT.append(self.memory_bytes[i]) except KeyError: OUTPUT.append(0) # Fill with ZEROes not used memory regions return org, OUTPUT
python
{ "resource": "" }
q3509
Memory.get_label
train
def get_label(self, label, lineno):
    """ Returns a label in the current context or in the global one.
    If the label does not exist, creates a new one and returns it.
    """
    global NAMESPACE

    ex_label, namespace = Memory.id_name(label)

    for i in range(len(self.local_labels) - 1, -1, -1):  # Downstep
        result = self.local_labels[i].get(ex_label, None)
        if result is not None:
            return result

    result = Label(ex_label, lineno, namespace=namespace)
    self.local_labels[-1][ex_label] = result  # HINT: no namespace

    return result
python
{ "resource": "" }
q3510
_addr
train
def _addr(value): ''' Common subroutine for emitting array address ''' output = [] try: indirect = False if value[0] == '*': indirect = True value = value[1:] value = int(value) & 0xFFFF if indirect: output.append('ld hl, (%s)' % str(value)) else: output.append('ld hl, %s' % str(value)) except ValueError: if value[0] == '_': output.append('ld hl, %s' % str(value)) if indirect: output.append('ld c, (hl)') output.append('inc hl') output.append('ld h, (hl)') output.append('ld l, c') else: output.append('pop hl') if indirect: output.append('ld c, (hl)') output.append('inc hl') output.append('ld h, (hl)') output.append('ld l, c') output.append('call __ARRAY') REQUIRES.add('array.asm') return output
python
{ "resource": "" }
q3511
SymbolARGLIST.make_node
train
def make_node(cls, node, *args):
    """ This will return a node with an argument_list.
    """
    if node is None:
        node = cls()

    assert isinstance(node, SymbolARGUMENT) or isinstance(node, cls)

    if not isinstance(node, cls):
        return cls.make_node(None, node, *args)

    for arg in args:
        assert isinstance(arg, SymbolARGUMENT)
        node.appendChild(arg)

    return node
python
{ "resource": "" }
q3512
_mul32
train
def _mul32(ins):
    """ Multiplies the two last 32bit values on top of the stack and
    returns the value on top of the stack

    Optimizations done:
        * If any operand is 1, do nothing
        * If any operand is 0, push 0
    """
    op1, op2 = tuple(ins.quad[2:])

    if _int_ops(op1, op2):
        op1, op2 = _int_ops(op1, op2)

        output = _32bit_oper(op1)
        if op2 == 1:
            output.append('push de')
            output.append('push hl')
            return output  # A * 1 = Nothing

        if op2 == 0:
            output.append('ld hl, 0')
            output.append('push hl')
            output.append('push hl')
            return output

    output = _32bit_oper(op1, op2)
    output.append('call __MUL32')  # Immediate
    output.append('push de')
    output.append('push hl')
    REQUIRES.add('mul32.asm')
    return output
python
{ "resource": "" }
q3513
_shl32
train
def _shl32(ins): """ Logical Left shift 32bit unsigned integers. The result is pushed onto the stack. Optimizations: * If 2nd operand is 0, do nothing """ op1, op2 = tuple(ins.quad[2:]) if is_int(op2): output = _32bit_oper(op1) if int(op2) == 0: output.append('push de') output.append('push hl') return output if int(op2) > 1: label = tmp_label() output.append('ld b, %s' % op2) output.append('%s:' % label) output.append('call __SHL32') output.append('djnz %s' % label) else: output.append('call __SHL32') output.append('push de') output.append('push hl') REQUIRES.add('shl32.asm') return output output = _8bit_oper(op2) output.append('ld b, a') output.extend(_32bit_oper(op1)) label = tmp_label() output.append('%s:' % label) output.append('call __SHL32') output.append('djnz %s' % label) output.append('push de') output.append('push hl') REQUIRES.add('shl32.asm') return output
python
{ "resource": "" }
q3514
_float_oper
train
def _float_oper(op1, op2=None): ''' Returns pop sequence for floating point operands 1st operand in A DE BC, 2nd operand remains in the stack Unlike 8bit and 16bit version, this does not supports operands inversion. Since many of the instructions are implemented as functions, they must support this. However, if 1st operand is a number (immediate) or indirect, the stack will be rearranged, so it contains a 48 bit pushed parameter value for the subroutine to be called. ''' output = [] op = op2 if op2 is not None else op1 indirect = (op[0] == '*') if indirect: op = op[1:] if is_float(op): op = float(op) if indirect: op = int(op) & 0xFFFF output.append('ld hl, (%i)' % op) output.append('call __ILOADF') REQUIRES.add('iloadf.asm') else: A, DE, BC = _float(op) output.append('ld a, %s' % A) output.append('ld de, %s' % DE) output.append('ld bc, %s' % BC) else: if indirect: if op[0] == '_': output.append('ld hl, (%s)' % op) else: output.append('pop hl') output.append('call __ILOADF') REQUIRES.add('iloadf.asm') else: if op[0] == '_': output.append('ld a, (%s)' % op) output.append('ld de, (%s + 1)' % op) output.append('ld bc, (%s + 3)' % op) else: output.extend(_fpop()) if op2 is not None: op = op1 if is_float(op): # An float must be in the stack. Let's pushit A, DE, BC = _float(op) output.append('ld hl, %s' % BC) output.append('push hl') output.append('ld hl, %s' % DE) output.append('push hl') output.append('ld h, %s' % A) output.append('push hl') elif op[0] == '*': # Indirect op = op[1:] output.append('exx') # uses alternate set to put it on the stack output.append("ex af, af'") if is_int(op): # noqa TODO: it will fail op = int(op) output.append('ld hl, %i' % op) elif op[0] == '_': output.append('ld hl, (%s)' % op) else: output.append('pop hl') output.append('call __ILOADF') output.extend(_fpush()) output.append("ex af, af'") output.append('exx') REQUIRES.add('iloadf.asm') elif op[0] == '_': if is_float(op2): tmp = output output = [] output.append('ld hl, %s + 4' % op) ''' output.append('ld hl, (%s + 3)' % op) output.append('push hl') output.append('ld hl, (%s + 1)' % op) output.append('push hl') output.append('ld a, (%s)' % op) output.append('push af') ''' output.append('call __FP_PUSH_REV') output.extend(tmp) REQUIRES.add('pushf.asm') else: ''' output.append('ld hl, (%s + 3)' % op) output.append('push hl') output.append('ld hl, (%s + 1)' % op) output.append('push hl') output.append('ld hl, (%s - 1)' % op) output.append('push hl') ''' output.append('ld hl, %s + 4' % op) output.append('call __FP_PUSH_REV') REQUIRES.add('pushf.asm') else: pass # Else do nothing, and leave the op onto the stack return output
python
{ "resource": "" }
q3515
_addf
train
def _addf(ins):
    ''' Adds 2 float values. The result is pushed onto the stack.
    '''
    op1, op2 = tuple(ins.quad[2:])

    if _f_ops(op1, op2) is not None:
        opa, opb = _f_ops(op1, op2)
        if opb == 0:  # A + 0 => A
            output = _float_oper(opa)
            output.extend(_fpush())
            return output

    output = _float_oper(op1, op2)
    output.append('call __ADDF')
    output.extend(_fpush())
    REQUIRES.add('addf.asm')
    return output
python
{ "resource": "" }
q3516
_divf
train
def _divf(ins):
    ''' Divides 2 float values. The result is pushed onto the stack.
    '''
    op1, op2 = tuple(ins.quad[2:])

    if is_float(op2) and float(op2) == 1:  # Nothing to do. A / 1 = A
        output = _float_oper(op1)
        output.extend(_fpush())
        return output

    output = _float_oper(op1, op2)
    output.append('call __DIVF')
    output.extend(_fpush())
    REQUIRES.add('divf.asm')
    return output
python
{ "resource": "" }
q3517
_modf
train
def _modf(ins):
    ''' Remainder of the division of 2 float values.
    The result is pushed onto the stack.
    '''
    op1, op2 = tuple(ins.quad[2:])
    output = _float_oper(op1, op2)
    output.append('call __MODF')
    output.extend(_fpush())
    REQUIRES.add('modf.asm')
    return output
python
{ "resource": "" }
q3518
get_uniques
train
def get_uniques(l):
    """ Returns a list with no repeated elements.
    """
    result = []

    for i in l:
        if i not in result:
            result.append(i)

    return result
python
{ "resource": "" }
q3519
SymbolARGUMENT.typecast
train
def typecast(self, type_):
    """ Test type casting to the argument expression.
    On success changes the node value to the new typecast, and returns True.
    On failure, returns False, and the node value is set to None.
    """
    self.value = SymbolTYPECAST.make_node(type_, self.value, self.lineno)
    return self.value is not None
python
{ "resource": "" }
q3520
syntax_error
train
def syntax_error(lineno, msg):
    """ Generic syntax error routine
    """
    if global_.has_errors > OPTIONS.max_syntax_errors.value:
        msg = 'Too many errors. Giving up!'

    msg = "%s:%i: %s" % (global_.FILENAME, lineno, msg)
    msg_output(msg)

    if global_.has_errors > OPTIONS.max_syntax_errors.value:
        sys.exit(1)

    global_.has_errors += 1
python
{ "resource": "" }
q3521
warning
train
def warning(lineno, msg):
    """ Generic warning routine
    """
    msg = "%s:%i: warning: %s" % (global_.FILENAME, lineno, msg)
    msg_output(msg)
    global_.has_warnings += 1
python
{ "resource": "" }
q3522
oper
train
def oper(inst): """ Returns operands of an ASM instruction. Even "indirect" operands, like SP if RET or CALL is used. """ i = inst.strip(' \t\n').split(' ') I = i[0].lower() # Instruction i = ''.join(i[1:]) op = i.split(',') if I in {'call', 'jp', 'jr'} and len(op) > 1: op = op[1:] + ['f'] elif I == 'djnz': op.append('b') elif I in {'push', 'pop', 'call'}: op.append('sp') # Sp is also affected by push, pop and call elif I in {'or', 'and', 'xor', 'neg', 'cpl', 'rrca', 'rlca'}: op.append('a') elif I in {'rra', 'rla'}: op.extend(['a', 'f']) elif I in ('rr', 'rl'): op.append('f') elif I in {'adc', 'sbc'}: if len(op) == 1: op = ['a', 'f'] + op elif I in {'add', 'sub'}: if len(op) == 1: op = ['a'] + op elif I in {'ldd', 'ldi', 'lddr', 'ldir'}: op = ['hl', 'de', 'bc'] elif I in {'cpd', 'cpi', 'cpdr', 'cpir'}: op = ['a', 'hl', 'bc'] elif I == 'exx': op = ['*', 'bc', 'de', 'hl', 'b', 'c', 'd', 'e', 'h', 'l'] elif I in {'ret', 'reti', 'retn'}: op += ['sp'] elif I == 'out': if len(op) and RE_OUTC.match(op[0]): op[0] = 'c' else: op.pop(0) elif I == 'in': if len(op) > 1 and RE_OUTC.match(op[1]): op[1] = 'c' else: op.pop(1) for i in range(len(op)): tmp = RE_INDIR16.match(op[i]) if tmp is not None: op[i] = '(' + op[i].strip()[1:-1].strip().lower() + ')' # ' ( dE ) ' => '(de)' return op
python
{ "resource": "" }
q3523
result
train
def result(i): """ Returns which 8-bit registers are used by an asm instruction to return a result. """ ins = inst(i) op = oper(i) if ins in ('or', 'and') and op == ['a']: return ['f'] if ins in {'xor', 'or', 'and', 'neg', 'cpl', 'daa', 'rld', 'rrd', 'rra', 'rla', 'rrca', 'rlca'}: return ['a', 'f'] if ins in {'bit', 'cp', 'scf', 'ccf'}: return ['f'] if ins in {'sub', 'add', 'sbc', 'adc'}: if len(op) == 1: return ['a', 'f'] else: return single_registers(op[0]) + ['f'] if ins == 'djnz': return ['b', 'f'] if ins in {'ldir', 'ldi', 'lddr', 'ldd'}: return ['f', 'b', 'c', 'd', 'e', 'h', 'l'] if ins in {'cpi', 'cpir', 'cpd', 'cpdr'}: return ['f', 'b', 'c', 'h', 'l'] if ins in ('pop', 'ld'): return single_registers(op[0]) if ins in {'inc', 'dec', 'sbc', 'rr', 'rl', 'rrc', 'rlc'}: return ['f'] + single_registers(op[0]) if ins in ('set', 'res'): return single_registers(op[1]) return []
python
{ "resource": "" }
q3524
block_partition
train
def block_partition(block, i): """ Returns two blocks, as a result of partitioning the given one at i-th instruction. """ i += 1 new_block = BasicBlock(block.asm[i:]) block.mem = block.mem[:i] block.asm = block.asm[:i] block.update_labels() new_block.update_labels() new_block.goes_to = block.goes_to block.goes_to = IdentitySet() new_block.label_goes = block.label_goes block.label_goes = [] new_block.next = new_block.original_next = block.original_next new_block.prev = block new_block.add_comes_from(block) if new_block.next is not None: new_block.next.prev = new_block new_block.next.add_comes_from(new_block) new_block.next.delete_from(block) block.next = block.original_next = new_block block.update_next_block() block.add_goes_to(new_block) return block, new_block
python
{ "resource": "" }
q3525
partition_block
train
def partition_block(block): """ If a block is not partitionable, returns a list with the same block. Otherwise, returns a list with the resulting blocks, recursively. """ result = [block] if not block.is_partitionable: return result EDP = END_PROGRAM_LABEL + ':' for i in range(len(block) - 1): if i and block.asm[i] == EDP: # END_PROGRAM label always starts a basic block block, new_block = block_partition(block, i - 1) LABELS[END_PROGRAM_LABEL].basic_block = new_block result.extend(partition_block(new_block)) return result if block.mem[i].is_ender: block, new_block = block_partition(block, i) result.extend(partition_block(new_block)) op = block.mem[i].opers for l in op: if l in LABELS.keys(): JUMP_LABELS.add(l) block.label_goes += [l] return result if block.asm[i] in arch.zx48k.backend.ASMS: if i > 0: block, new_block = block_partition(block, i - 1) result.extend(partition_block(new_block)) return result block, new_block = block_partition(block, i) result.extend(partition_block(new_block)) return result for label in JUMP_LABELS: must_partition = False if LABELS[label].basic_block is block: for i in range(len(block)): cell = block.mem[i] if cell.inst == label: break if cell.is_label: continue if cell.is_ender: continue must_partition = True if must_partition: block, new_block = block_partition(block, i - 1) LABELS[label].basic_block = new_block result.extend(partition_block(new_block)) return result return result
python
{ "resource": "" }
q3526
get_labels
train
def get_labels(MEMORY, basic_block):
    """ Traverses memory, to annotate all the labels in the global
    LABELS table
    """
    for cell in MEMORY:
        if cell.is_label:
            label = cell.inst
            LABELS[label] = LabelInfo(label, cell.addr, basic_block)
python
{ "resource": "" }
q3527
initialize_memory
train
def initialize_memory(basic_block):
    """ Initializes global memory array with the given one
    """
    global MEMORY

    MEMORY = basic_block.mem
    get_labels(MEMORY, basic_block)
    basic_block.mem = MEMORY
python
{ "resource": "" }
q3528
cleanupmem
train
def cleanupmem(initial_memory):
    """ Cleans up initial memory. Each label must be ALONE.
    Each instruction must have a space, etc...
    """
    i = 0
    while i < len(initial_memory):
        tmp = initial_memory[i]
        match = RE_LABEL.match(tmp)
        if not match:
            i += 1
            continue

        if tmp.rstrip() == match.group():
            i += 1
            continue

        initial_memory[i] = tmp[match.end():]
        initial_memory.insert(i, match.group())
        i += 1
python
{ "resource": "" }
q3529
optimize
train
def optimize(initial_memory): """ This will remove useless instructions """ global BLOCKS global PROC_COUNTER LABELS.clear() JUMP_LABELS.clear() del MEMORY[:] PROC_COUNTER = 0 cleanupmem(initial_memory) if OPTIONS.optimization.value <= 2: return '\n'.join(x for x in initial_memory if not RE_PRAGMA.match(x)) optimize_init() bb = BasicBlock(initial_memory) cleanup_local_labels(bb) initialize_memory(bb) BLOCKS = basic_blocks = get_basic_blocks(bb) # 1st partition the Basic Blocks for x in basic_blocks: x.clean_up_comes_from() x.clean_up_goes_to() for x in basic_blocks: x.update_goes_and_comes() LABELS['*START*'].basic_block.add_goes_to(basic_blocks[0]) LABELS['*START*'].basic_block.next = basic_blocks[0] basic_blocks[0].prev = LABELS['*START*'].basic_block LABELS[END_PROGRAM_LABEL].basic_block.add_goes_to(LABELS['*__END_PROGRAM*'].basic_block) for x in basic_blocks: x.optimize() for x in basic_blocks: if x.comes_from == [] and len([y for y in JUMP_LABELS if x is LABELS[y].basic_block]): x.ignored = True return '\n'.join([y for y in flatten_list([x.asm for x in basic_blocks if not x.ignored]) if not RE_PRAGMA.match(y)])
python
{ "resource": "" }
q3530
Registers.reset_flags
train
def reset_flags(self):
    """ Resets flags to an "unknown state"
    """
    self.C = None
    self.Z = None
    self.P = None
    self.S = None
python
{ "resource": "" }
q3531
Registers.get
train
def get(self, r):
    """ Returns precomputed value of the given expression
    """
    if r is None:
        return None

    if r.lower() == '(sp)' and self.stack:
        return self.stack[-1]

    if r[:1] == '(':
        return self.mem[r[1:-1]]

    r = r.lower()
    if is_number(r):
        return str(valnum(r))

    if not is_register(r):
        return None

    return self.regs[r]
python
{ "resource": "" }
q3532
Registers.eq
train
def eq(self, r1, r2):
    """ True if values of r1 and r2 registers are equal
    """
    if not is_register(r1) or not is_register(r2):
        return False

    if self.regs[r1] is None or self.regs[r2] is None:  # HINT: This's been never USED??
        return False

    return self.regs[r1] == self.regs[r2]
python
{ "resource": "" }
q3533
Registers.inc
train
def inc(self, r):
    """ Does inc on the register and precomputes flags
    """
    self.set_flag(None)

    if not is_register(r):
        if r[0] == '(':  # a memory position, basically: inc(hl)
            r_ = r[1:-1].strip()
            v_ = self.getv(self.mem.get(r_, None))
            if v_ is not None:
                v_ = (v_ + 1) & 0xFF
                self.mem[r_] = str(v_)
                self.Z = int(v_ == 0)  # HINT: This might be improved
            else:
                self.mem[r_] = new_tmp_val()
        return

    if self.getv(r) is not None:
        self.set(r, self.getv(r) + 1)
    else:
        self.set(r, None)
python
{ "resource": "" }
q3534
Registers.rrc
train
def rrc(self, r):
    """ Does a ROTATION to the RIGHT |>>
    """
    if not is_number(self.regs[r]):
        self.set(r, None)
        self.set_flag(None)
        return

    v_ = self.getv(self.regs[r]) & 0xFF
    self.regs[r] = str((v_ >> 1) | ((v_ & 1) << 7))
python
{ "resource": "" }
q3535
Registers.rlc
train
def rlc(self, r):
    """ Does a ROTATION to the LEFT <<|
    """
    if not is_number(self.regs[r]):
        self.set(r, None)
        self.set_flag(None)
        return

    v_ = self.getv(self.regs[r]) & 0xFF
    self.set(r, ((v_ << 1) & 0xFF) | (v_ >> 7))
python
{ "resource": "" }
q3536
Registers._is
train
def _is(self, r, val):
    """ True if value of r is val.
    """
    if not is_register(r) or val is None:
        return False

    r = r.lower()
    if is_register(val):
        return self.eq(r, val)

    if is_number(val):
        val = str(valnum(val))
    else:
        val = str(val)

    if val[0] == '(':
        val = self.mem[val[1:-1]]

    return self.regs[r] == val
python
{ "resource": "" }
q3537
MemCell.opers
train
def opers(self):
    """ Returns a list of operands this mnemonic uses
    """
    i = [x for x in self.asm.strip(' \t\n').split(' ') if x != '']

    if len(i) == 1:
        return []

    i = ''.join(i[1:]).split(',')
    if self.condition_flag is not None:
        i = i[1:]
    else:
        i = i[0:]

    op = [x.lower() if is_register(x) else x for x in i]

    return op
python
{ "resource": "" }
q3538
MemCell.affects
train
def affects(self, reglist):
    """ Returns if this instruction affects any of the registers
    in reglist.
    """
    if isinstance(reglist, str):
        reglist = [reglist]

    reglist = single_registers(reglist)
    return len([x for x in self.destroys if x in reglist]) > 0
python
{ "resource": "" }
q3539
MemCell.needs
train
def needs(self, reglist):
    """ Returns if this instruction needs any of the registers
    in reglist.
    """
    if isinstance(reglist, str):
        reglist = [reglist]

    reglist = single_registers(reglist)
    return len([x for x in self.requires if x in reglist]) > 0
python
{ "resource": "" }
q3540
MemCell.used_labels
train
def used_labels(self):
    """ Returns a list of required labels for this instruction
    """
    result = []
    tmp = self.asm.strip(' \n\r\t')

    if not len(tmp) or tmp[0] in ('#', ';'):
        return result

    try:
        tmpLexer = asmlex.lex.lex(object=asmlex.Lexer(), lextab='zxbasmlextab')
        tmpLexer.input(tmp)

        while True:
            token = tmpLexer.token()
            if not token:
                break

            if token.type == 'ID':
                result += [token.value]
    except:
        pass

    return result
python
{ "resource": "" }
q3541
MemCell.replace_label
train
def replace_label(self, oldLabel, newLabel):
    """ Replaces old label with a new one
    """
    if oldLabel == newLabel:
        return

    tmp = re.compile(r'\b' + oldLabel + r'\b')
    last = 0
    l = len(newLabel)
    while True:
        match = tmp.search(self.asm[last:])
        if not match:
            break

        txt = self.asm
        self.asm = txt[:last + match.start()] + newLabel + txt[last + match.end():]
        last += match.start() + l
python
{ "resource": "" }
q3542
BasicBlock.is_partitionable
train
def is_partitionable(self):
    """ Returns whether this block can be partitioned in 2 or more blocks,
    because it contains enders.
    """
    if len(self.mem) < 2:
        return False  # An atomic block

    if any(x.is_ender or x.asm in arch.zx48k.backend.ASMS for x in self.mem):
        return True

    for label in JUMP_LABELS:
        if LABELS[label].basic_block == self and (not self.mem[0].is_label or self.mem[0].inst != label):
            return True

    return False
python
{ "resource": "" }
q3543
BasicBlock.delete_from
train
def delete_from(self, basic_block):
    """ Removes the basic_block ptr from the list for "comes_from"
    if it exists. It also sets self.prev to None if it is basic_block.
    """
    if basic_block is None:
        return

    if self.lock:
        return

    self.lock = True

    if self.prev is basic_block:
        if self.prev.next is self:
            self.prev.next = None
        self.prev = None

    for i in range(len(self.comes_from)):
        if self.comes_from[i] is basic_block:
            self.comes_from.pop(i)
            break

    self.lock = False
python
{ "resource": "" }
q3544
BasicBlock.add_comes_from
train
def add_comes_from(self, basic_block):
    """ This simulates a set. Adds the basic_block to the comes_from
    list if not done already.
    """
    if basic_block is None:
        return

    if self.lock:
        return

    # Return if already added
    if basic_block in self.comes_from:
        return

    self.lock = True
    self.comes_from.add(basic_block)
    basic_block.add_goes_to(self)
    self.lock = False
python
{ "resource": "" }
q3545
BasicBlock.is_used
train
def is_used(self, regs, i, top=None): """ Checks whether any of the given regs are required from the given point to the end or not. """ if i < 0: i = 0 if self.lock: return True regs = list(regs) # make a copy if top is None: top = len(self) else: top -= 1 for ii in range(i, top): for r in self.mem[ii].requires: if r in regs: return True for r in self.mem[ii].destroys: if r in regs: regs.remove(r) if not regs: return False self.lock = True result = self.goes_requires(regs) self.lock = False return result
python
{ "resource": "" }
q3546
BasicBlock.swap
train
def swap(self, a, b):
    """ Swaps mem positions a and b
    """
    self.mem[a], self.mem[b] = self.mem[b], self.mem[a]
    self.asm[a], self.asm[b] = self.asm[b], self.asm[a]
python
{ "resource": "" }
q3547
BasicBlock.goes_requires
train
def goes_requires(self, regs):
    """ Returns whether any of the goes_to blocks requires any of
    the given registers.
    """
    if len(self) and self.mem[-1].inst == 'call' and self.mem[-1].condition_flag is None:
        for block in self.calls:
            if block.is_used(regs, 0):
                return True

            d = block.destroys()
            if not len([x for x in regs if x not in d]):
                return False  # If all registers are destroyed then they're not used

    for block in self.goes_to:
        if block.is_used(regs, 0):
            return True

    return False
python
{ "resource": "" }
q3548
BasicBlock.get_label_idx
train
def get_label_idx(self, label):
    """ Returns the index of a label.
    Returns None if not found.
    """
    for i in range(len(self)):
        if self.mem[i].is_label and self.mem[i].inst == label:
            return i

    return None
python
{ "resource": "" }
q3549
BasicBlock.get_first_non_label_instruction
train
def get_first_non_label_instruction(self):
    """ Returns the first memcell of this block which is not a LABEL.
    """
    for i in range(len(self)):
        if not self.mem[i].is_label:
            return self.mem[i]

    return None
python
{ "resource": "" }
q3550
_free_sequence
train
def _free_sequence(tmp1, tmp2=False):
    ''' Outputs a FREEMEM sequence for 1 or 2 ops
    '''
    if not tmp1 and not tmp2:
        return []

    output = []
    if tmp1 and tmp2:
        output.append('pop de')
        output.append('ex (sp), hl')
        output.append('push de')
        output.append('call __MEM_FREE')
        output.append('pop hl')
        output.append('call __MEM_FREE')
    else:
        output.append('ex (sp), hl')
        output.append('call __MEM_FREE')
        output.append('pop hl')

    REQUIRES.add('alloc.asm')
    return output
python
{ "resource": "" }
q3551
_lenstr
train
def _lenstr(ins):
    ''' Returns string length
    '''
    (tmp1, output) = _str_oper(ins.quad[2], no_exaf=True)

    if tmp1:
        output.append('push hl')

    output.append('call __STRLEN')
    output.extend(_free_sequence(tmp1))
    output.append('push hl')
    REQUIRES.add('strlen.asm')
    return output
python
{ "resource": "" }
q3552
_mul8
train
def _mul8(ins):
    """ Multiplies 2 last values from the stack.

    Optimizations:
        * If any of the ops is ZERO, then do A = 0 ==> XOR A, cause A * 0 = 0 * A = 0
        * If any of the ops is ONE, do NOTHING: A * 1 = 1 * A = A
    """
    op1, op2 = tuple(ins.quad[2:])

    if _int_ops(op1, op2) is not None:
        op1, op2 = _int_ops(op1, op2)

        output = _8bit_oper(op1)

        if op2 == 1:  # A * 1 = 1 * A = A
            output.append('push af')
            return output

        if op2 == 0:
            output.append('xor a')
            output.append('push af')
            return output

        if op2 == 2:  # A * 2 == A SLA 1
            output.append('add a, a')
            output.append('push af')
            return output

        if op2 == 4:  # A * 4 == A SLA 2
            output.append('add a, a')
            output.append('add a, a')
            output.append('push af')
            return output

        output.append('ld h, %i' % int8(op2))
    else:
        if op2[0] == '_':  # stack optimization
            op1, op2 = op2, op1

        output = _8bit_oper(op1, op2)

    output.append('call __MUL8_FAST')  # Immediate
    output.append('push af')
    REQUIRES.add('mul8.asm')
    return output
python
{ "resource": "" }
q3553
_divu8
train
def _divu8(ins): """ Divides 2 8bit unsigned integers. The result is pushed onto the stack. Optimizations: * If 2nd op is 1 then do nothing * If 2nd op is 2 then Shift Right Logical """ op1, op2 = tuple(ins.quad[2:]) if is_int(op2): op2 = int8(op2) output = _8bit_oper(op1) if op2 == 1: output.append('push af') return output if op2 == 2: output.append('srl a') output.append('push af') return output output.append('ld h, %i' % int8(op2)) else: if op2[0] == '_': # Optimization when 2nd operand is an id if is_int(op1) and int(op1) == 0: output = list() # Optimization: Discard previous op if not from the stack output.append('xor a') output.append('push af') return output rev = True op1, op2 = op2, op1 else: rev = False output = _8bit_oper(op1, op2, rev) output.append('call __DIVU8_FAST') output.append('push af') REQUIRES.add('div8.asm') return output
python
{ "resource": "" }
q3554
_shru8
train
def _shru8(ins): """ Shift 8bit unsigned integer to the right. The result is pushed onto the stack. Optimizations: * If 1nd or 2nd op is 0 then do nothing * If 2nd op is < 4 then unroll loop """ op1, op2 = tuple(ins.quad[2:]) if is_int(op2): op2 = int8(op2) output = _8bit_oper(op1) if op2 == 0: output.append('push af') return output if op2 < 4: output.extend(['srl a'] * op2) output.append('push af') return output label = tmp_label() output.append('ld b, %i' % int8(op2)) output.append('%s:' % label) output.append('srl a') output.append('djnz %s' % label) output.append('push af') return output if is_int(op1) and int(op1) == 0: output = _8bit_oper(op2) output.append('xor a') output.append('push af') return output output = _8bit_oper(op1, op2, True) label = tmp_label() label2 = tmp_label() output.append('or a') output.append('ld b, a') output.append('ld a, h') output.append('jr z, %s' % label2) output.append('%s:' % label) output.append('srl a') output.append('djnz %s' % label) output.append('%s:' % label2) output.append('push af') return output
python
{ "resource": "" }
q3555
MacroCall.is_defined
train
def is_defined(self, symbolTable=None):
    """ True if this macro has been defined
    """
    if symbolTable is None:
        symbolTable = self.table

    return symbolTable.defined(self.id_)
python
{ "resource": "" }
q3556
SymbolBOUNDLIST.make_node
train
def make_node(cls, node, *args):
    ''' Creates an array BOUND LIST.
    '''
    if node is None:
        return cls.make_node(SymbolBOUNDLIST(), *args)

    if node.token != 'BOUNDLIST':
        return cls.make_node(None, node, *args)

    for arg in args:
        if arg is None:
            continue
        node.appendChild(arg)

    return node
python
{ "resource": "" }
q3557
init
train
def init(): """ Initializes parser state """ global LABELS global LET_ASSIGNMENT global PRINT_IS_USED global SYMBOL_TABLE global ast global data_ast global optemps global OPTIONS global last_brk_linenum LABELS = {} LET_ASSIGNMENT = False PRINT_IS_USED = False last_brk_linenum = 0 ast = None data_ast = None # Global Variables AST optemps = OpcodesTemps() gl.INITS.clear() del gl.FUNCTION_CALLS[:] del gl.FUNCTION_LEVEL[:] del gl.FUNCTIONS[:] SYMBOL_TABLE = gl.SYMBOL_TABLE = api.symboltable.SymbolTable() OPTIONS = api.config.OPTIONS # DATAs info gl.DATA_LABELS.clear() gl.DATA_IS_USED = False del gl.DATAS[:] gl.DATA_PTR_CURRENT = api.utils.current_data_label() gl.DATA_FUNCTIONS = [] gl.error_msg_cache.clear()
python
{ "resource": "" }
q3558
make_func_call
train
def make_func_call(id_, lineno, params):
    """ This will return an AST node for a function call.
    """
    return symbols.FUNCCALL.make_node(id_, params, lineno)
python
{ "resource": "" }
q3559
make_label
train
def make_label(id_, lineno):
    """ Creates a label entry. Returns None on error.
    """
    entry = SYMBOL_TABLE.declare_label(id_, lineno)
    if entry:
        gl.DATA_LABELS[id_] = gl.DATA_PTR_CURRENT  # This label points to the current DATA block index

    return entry
python
{ "resource": "" }
q3560
SymbolBOUND.make_node
train
def make_node(lower, upper, lineno): """ Creates an array bound """ if not is_static(lower, upper): syntax_error(lineno, 'Array bounds must be constants') return None if isinstance(lower, SymbolVAR): lower = lower.value if isinstance(upper, SymbolVAR): upper = upper.value lower.value = int(lower.value) upper.value = int(upper.value) if lower.value < 0: syntax_error(lineno, 'Array bounds must be greater than 0') return None if lower.value > upper.value: syntax_error(lineno, 'Lower array bound must be less or equal to upper one') return None return SymbolBOUND(lower.value, upper.value)
python
{ "resource": "" }
q3561
Basic.sentence_bytes
train
def sentence_bytes(self, sentence):
    """ Return bytes of a sentence. This is a very simple parser.
    Sentence is a list of strings and numbers.
    1st element of sentence MUST match a token.
    """
    result = [TOKENS[sentence[0]]]

    for i in sentence[1:]:  # Remaining bytes
        if isinstance(i, str):
            result.extend(self.literal(i))
        elif isinstance(i, float) or isinstance(i, int):  # A number?
            result.extend(self.number(i))
        else:
            result.extend(i)  # Must be another thing

    return result
python
{ "resource": "" }
q3562
Basic.line
train
def line(self, sentences, line_number=None):
    """ Return the bytes for a basic line.
    If no line number is given, current one + 10 will be used.
    Sentences is a list of sentences.
    """
    if line_number is None:
        line_number = self.current_line + 10
    self.current_line = line_number

    sep = []
    result = []
    for sentence in sentences:
        result.extend(sep)
        result.extend(self.sentence_bytes(sentence))
        sep = [ord(':')]

    result.extend([ENTER])
    result = self.line_number(line_number) + self.numberLH(len(result)) + result

    return result
python
{ "resource": "" }
q3563
Lexer.include
train
def include(self, filename): """ Changes FILENAME and line count """ if filename != STDIN and filename in [x[0] for x in self.filestack]: # Already included? self.warning(' Recursive inclusion') self.filestack.append([filename, 1, self.lex, self.input_data]) self.lex = lex.lex(object=self) result = self.put_current_line() # First #line start with \n (EOL) try: if filename == STDIN: self.input_data = sys.stdin.read() else: self.input_data = api.utils.read_txt_file(filename) if len(self.input_data) and self.input_data[-1] != EOL: self.input_data += EOL except IOError: self.input_data = EOL self.lex.input(self.input_data) return result
python
{ "resource": "" }
q3564
SymbolVAR.add_alias
train
def add_alias(self, entry):
    """ Adds id to the current list 'aliased_by'
    """
    assert isinstance(entry, SymbolVAR)
    self.aliased_by.append(entry)
python
{ "resource": "" }
q3565
SymbolVAR.make_alias
train
def make_alias(self, entry):
    """ Make this variable an alias of another one
    """
    entry.add_alias(self)
    self.alias = entry
    self.scope = entry.scope  # Local aliases can be "global" (static)
    self.byref = entry.byref
    self.offset = entry.offset
    self.addr = entry.addr
python
{ "resource": "" }
q3566
SymbolVAR.to_label
train
def to_label(var_instance):
    """ Converts a var_instance to a label one
    """
    # This can be done 'cause LABEL is just a dummy descent of VAR
    assert isinstance(var_instance, SymbolVAR)
    from symbols import LABEL

    var_instance.__class__ = LABEL
    var_instance.class_ = CLASS.label
    var_instance._scope_owner = []
    return var_instance
python
{ "resource": "" }
q3567
SymbolVAR.to_function
train
def to_function(var_instance, lineno=None):
    """ Converts a var_instance to a function one
    """
    assert isinstance(var_instance, SymbolVAR)
    from symbols import FUNCTION

    var_instance.__class__ = FUNCTION
    var_instance.class_ = CLASS.function
    var_instance.reset(lineno=lineno)
    return var_instance
python
{ "resource": "" }
q3568
SymbolVAR.to_vararray
train
def to_vararray(var_instance, bounds):
    """ Converts a var_instance to a var array one
    """
    assert isinstance(var_instance, SymbolVAR)
    from symbols import BOUNDLIST
    from symbols import VARARRAY

    assert isinstance(bounds, BOUNDLIST)
    var_instance.__class__ = VARARRAY
    var_instance.class_ = CLASS.array
    var_instance.bounds = bounds
    return var_instance
python
{ "resource": "" }
q3569
TYPE.to_type
train
def to_type(cls, typename):
    """ Converts a type name to its type ID. On error returns None
    """
    NAME_TYPES = {cls.TYPE_NAMES[x]: x for x in cls.TYPE_NAMES}
    return NAME_TYPES.get(typename, None)
python
{ "resource": "" }
q3570
f16
train
def f16(op): """ Returns a floating point operand converted to 32 bits unsigned int. Negative numbers are returned in 2 complement. The result is returned in a tuple (DE, HL) => High16 (Int part), Low16 (Decimal part) """ op = float(op) negative = op < 0 if negative: op = -op DE = int(op) HL = int((op - DE) * 2**16) & 0xFFFF DE &= 0xFFFF if negative: # Do C2 DE ^= 0xFFFF HL ^= 0xFFFF DEHL = ((DE << 16) | HL) + 1 HL = DEHL & 0xFFFF DE = (DEHL >> 16) & 0xFFFF return (DE, HL)
python
{ "resource": "" }
q3571
throw_invalid_quad_params
train
def throw_invalid_quad_params(quad, QUADS, nparams):
    """ Exception raised when an invalid number of params in the
    quad code has been emitted.
    """
    raise InvalidICError(str(quad),
                         "Invalid quad code params for '%s' (expected %i, but got %i)" %
                         (quad, QUADS[quad][0], nparams)
                         )
python
{ "resource": "" }
q3572
fp
train
def fp(x): """ Returns a floating point number as EXP+128, Mantissa """ def bin32(f): """ Returns ASCII representation for a 32 bit integer value """ result = '' a = int(f) & 0xFFFFFFFF # ensures int 32 for i in range(32): result = str(a % 2) + result a = a >> 1 return result def bindec32(f): """ Returns binary representation of a mantissa x (x is float) """ result = '0' a = f if f >= 1: result = bin32(f) result += '.' c = int(a) for i in range(32): a -= c a *= 2 c = int(a) result += str(c) return result e = 0 # exponent s = 1 if x < 0 else 0 # sign m = abs(x) # mantissa while m >= 1: m /= 2.0 e += 1 while 0 < m < 0.5: m *= 2.0 e -= 1 M = bindec32(m)[3:] M = str(s) + M E = bin32(e + 128)[-8:] if x != 0 else bin32(0)[-8:] return M, E
python
{ "resource": "" }
q3573
immediate_float
train
def immediate_float(x):
    """ Returns C DE HL as values for loading an immediate floating point.
    """
    def bin2hex(y):
        return "%02X" % int(y, 2)

    M, E = fp(x)

    C = '0' + bin2hex(E) + 'h'
    ED = '0' + bin2hex(M[8:16]) + bin2hex(M[:8]) + 'h'
    LH = '0' + bin2hex(M[24:]) + bin2hex(M[16:24]) + 'h'

    return C, ED, LH
python
{ "resource": "" }
q3574
Tree.inorder
train
def inorder(self, funct, stopOn=None):
    """ Iterates in order, calling the function with the current node.
    If stopOn is set to True or False, it will stop on true or false.
    """
    if stopOn is None:
        for i in self.children:
            i.inorder(funct)
    else:
        for i in self.children:
            if i.inorder(funct) == stopOn:
                return stopOn

    return funct(self)
python
{ "resource": "" }
q3575
Tree.preorder
train
def preorder(self, funct, stopOn=None):
    """ Iterates in preorder, calling the function with the current node.
    If stopOn is set to True or False, it will stop on true or false.
    """
    if funct(self.symbol) == stopOn and stopOn is not None:
        return stopOn

    if stopOn is None:
        for i in self.children:
            i.preorder(funct)
    else:
        for i in self.children:
            if i.preorder(funct) == stopOn:
                return stopOn
python
{ "resource": "" }
q3576
Tree.postorder
train
def postorder(self, funct, stopOn=None):
    """ Iterates in postorder, calling the function with the current node.
    If stopOn is set to True or False, it will stop on true or false.
    """
    if stopOn is None:
        for i in range(len(self.children) - 1, -1, -1):
            self.children[i].postorder(funct)
    else:
        for i in range(len(self.children) - 1, -1, -1):
            if self.children[i].postorder(funct) == stopOn:
                return stopOn

    return funct(self.symbol)
python
{ "resource": "" }
q3577
Tree.makenode
train
def makenode(clss, symbol, *nexts):
    """ Stores the symbol in an AST instance,
    and left and right to the given ones
    """
    result = clss(symbol)
    for i in nexts:
        if i is None:
            continue
        if not isinstance(i, clss):
            raise NotAnAstError(i)
        result.appendChild(i)

    return result
python
{ "resource": "" }
q3578
get_include_path
train
def get_include_path():
    """ Default include path, computed using some tricky sys calls.
    """
    f1 = os.path.basename(sys.argv[0]).lower()  # script filename
    f2 = os.path.basename(sys.executable).lower()  # Executable filename

    # If executable filename and script name are the same, we are
    if f1 == f2 or f2 == f1 + '.exe':  # under a "compiled" python binary
        result = os.path.dirname(os.path.realpath(sys.executable))
    else:
        result = os.path.dirname(os.path.realpath(__file__))

    return result
python
{ "resource": "" }
q3579
search_filename
train
def search_filename(fname, lineno, local_first):
    """ Searches for a filename in the list of include paths.
    If local_first is True, it will try first the current directory of
    the file being analyzed.
    """
    fname = api.utils.sanitize_filename(fname)
    i_path = [CURRENT_DIR] + INCLUDEPATH if local_first else list(INCLUDEPATH)
    i_path.extend(OPTIONS.include_path.value.split(':') if OPTIONS.include_path.value else [])

    if os.path.isabs(fname):
        if os.path.isfile(fname):
            return fname
    else:
        for dir_ in i_path:
            path = api.utils.sanitize_filename(os.path.join(dir_, fname))
            if os.path.exists(path):
                return path

    error(lineno, "file '%s' not found" % fname)
    return ''
python
{ "resource": "" }
q3580
filter_
train
def filter_(input_, filename='<internal>', state='INITIAL'):
    """ Filter the input string through the preprocessor.
    The result is appended to the OUTPUT global str.
    """
    global CURRENT_DIR

    prev_dir = CURRENT_DIR
    CURRENT_FILE.append(filename)
    CURRENT_DIR = os.path.dirname(CURRENT_FILE[-1])

    LEXER.input(input_, filename)
    LEXER.lex.begin(state)
    parser.parse(lexer=LEXER, debug=OPTIONS.Debug.value > 2)

    CURRENT_FILE.pop()
    CURRENT_DIR = prev_dir
python
{ "resource": "" }
q3581
_set_pyqtgraph_title
train
def _set_pyqtgraph_title(layout):
    """ Private function to add a title to the first row of the window.
    Returns True if a Title is set. Else, returns False.
    """
    if 'title_size' in pytplot.tplot_opt_glob:
        size = pytplot.tplot_opt_glob['title_size']

    if 'title_text' in pytplot.tplot_opt_glob:
        if pytplot.tplot_opt_glob['title_text'] != '':
            layout.addItem(LabelItem(pytplot.tplot_opt_glob['title_text'],
                                     size=size, color='k'), row=0, col=0)
            return True

    return False
python
{ "resource": "" }
q3582
TVar._check_spec_bins_ordering
train
def _check_spec_bins_ordering(self): """ This is a private function of the TVar object, this is run during object creation to check if spec_bins are ascending or descending """ if self.spec_bins is None: return if len(self.spec_bins) == len(self.data.index): self.spec_bins_time_varying = True break_top_loop = False for index, row in self.spec_bins.iterrows(): if row.isnull().values.all(): continue else: for i in row.index: if np.isfinite(row[i]) and np.isfinite(row[i + 1]): ascending = row[i] < row[i + 1] break_top_loop = True break else: continue if break_top_loop: break else: ascending = self.spec_bins[0].iloc[0] < self.spec_bins[1].iloc[0] return ascending
python
{ "resource": "" }
q3583
UpdatingImage.paint
train
def paint(self, p, *args):
    ''' I have no idea why, but we need to generate the picture after
    painting, otherwise it draws incorrectly.
    '''
    if self.picturenotgened:
        self.generatePicture(self.getBoundingParents()[0].rect())
        self.picturenotgened = False

    pg.ImageItem.paint(self, p, *args)
    self.generatePicture(self.getBoundingParents()[0].rect())
python
{ "resource": "" }
q3584
UpdatingImage.setImage
train
def setImage(self, image=None, autoLevels=None, **kargs): """ Same this as ImageItem.setImage, but we don't update the drawing """ profile = debug.Profiler() gotNewData = False if image is None: if self.image is None: return else: gotNewData = True shapeChanged = (self.image is None or image.shape != self.image.shape) image = image.view(np.ndarray) if self.image is None or image.dtype != self.image.dtype: self._effectiveLut = None self.image = image if self.image.shape[0] > 2**15-1 or self.image.shape[1] > 2**15-1: if 'autoDownsample' not in kargs: kargs['autoDownsample'] = True if shapeChanged: self.prepareGeometryChange() self.informViewBoundsChanged() profile() if autoLevels is None: if 'levels' in kargs: autoLevels = False else: autoLevels = True if autoLevels: img = self.image while img.size > 2**16: img = img[::2, ::2] mn, mx = img.min(), img.max() if mn == mx: mn = 0 mx = 255 kargs['levels'] = [mn,mx] profile() self.setOpts(update=False, **kargs) profile() self.qimage = None self.update() profile() if gotNewData: self.sigImageChanged.emit()
python
{ "resource": "" }
q3585
PytplotExporter.getPaintItems
train
def getPaintItems(self, root=None): """Return a list of all items that should be painted in the correct order.""" if root is None: root = self.item preItems = [] postItems = [] if isinstance(root, QtGui.QGraphicsScene): childs = [i for i in root.items() if i.parentItem() is None] rootItem = [] else: # CHANGE: For GraphicsLayouts, there is no function for childItems(), so I just # replaced it with .items() try: childs = root.childItems() except: childs = root.items() rootItem = [root] childs.sort(key=lambda a: a.zValue()) while len(childs) > 0: ch = childs.pop(0) tree = self.getPaintItems(ch) if int(ch.flags() & ch.ItemStacksBehindParent) > 0 or ( ch.zValue() < 0 and int(ch.flags() & ch.ItemNegativeZStacksBehindParent) > 0): preItems.extend(tree) else: postItems.extend(tree) return preItems + rootItem + postItems
python
{ "resource": "" }
q3586
qn_df
train
def qn_df(df, axis='row', keep_orig=False): ''' do quantile normalization of a dataframe dictionary, does not write to net ''' df_qn = {} for mat_type in df: inst_df = df[mat_type] # using transpose to do row qn if axis == 'row': inst_df = inst_df.transpose() missing_values = inst_df.isnull().values.any() # make mask of missing values if missing_values: # get nan mask missing_mask = pd.isnull(inst_df) # tmp fill in na with zero, will not affect qn inst_df = inst_df.fillna(value=0) # calc common distribution common_dist = calc_common_dist(inst_df) # swap in common distribution inst_df = swap_in_common_dist(inst_df, common_dist) # swap back in missing values if missing_values: inst_df = inst_df.mask(missing_mask, other=np.nan) # using transpose to do row qn if axis == 'row': inst_df = inst_df.transpose() df_qn[mat_type] = inst_df return df_qn
python
{ "resource": "" }
q3587
main
train
def main(net): ''' calculate pvalue of category closeness ''' # calculate the distance between the data points within the same category and # compare to null distribution for inst_rc in ['row', 'col']: inst_nodes = deepcopy(net.dat['nodes'][inst_rc]) inst_index = deepcopy(net.dat['node_info'][inst_rc]['clust']) # reorder based on clustered order inst_nodes = [ inst_nodes[i] for i in inst_index] # make distance matrix dataframe dm = dist_matrix_lattice(inst_nodes) node_infos = list(net.dat['node_info'][inst_rc].keys()) all_cats = [] for inst_info in node_infos: if 'dict_cat_' in inst_info: all_cats.append(inst_info) for cat_dict in all_cats: tmp_dict = net.dat['node_info'][inst_rc][cat_dict] pval_name = cat_dict.replace('dict_','pval_') net.dat['node_info'][inst_rc][pval_name] = {} for cat_name in tmp_dict: subset = tmp_dict[cat_name] inst_median = calc_median_dist_subset(dm, subset) hist = calc_hist_distances(dm, subset, inst_nodes) pval = 0 for i in range(len(hist['prob'])): if i == 0: pval = hist['prob'][i] if i >= 1: if inst_median >= hist['bins'][i]: pval = pval + hist['prob'][i] net.dat['node_info'][inst_rc][pval_name][cat_name] = pval
python
{ "resource": "" }
q3588
df_to_dat
train
def df_to_dat(net, df, define_cat_colors=False): ''' This is always run when data is loaded. ''' from . import categories # check if df has unique values df['mat'] = make_unique_labels.main(net, df['mat']) net.dat['mat'] = df['mat'].values net.dat['nodes']['row'] = df['mat'].index.tolist() net.dat['nodes']['col'] = df['mat'].columns.tolist() for inst_rc in ['row', 'col']: if type(net.dat['nodes'][inst_rc][0]) is tuple: # get the number of categories from the length of the tuple # subtract 1 because the name is the first element of the tuple num_cat = len(net.dat['nodes'][inst_rc][0]) - 1 net.dat['node_info'][inst_rc]['full_names'] = net.dat['nodes']\ [inst_rc] for inst_rcat in range(num_cat): net.dat['node_info'][inst_rc]['cat-' + str(inst_rcat)] = \ [i[inst_rcat + 1] for i in net.dat['nodes'][inst_rc]] net.dat['nodes'][inst_rc] = [i[0] for i in net.dat['nodes'][inst_rc]] if 'mat_up' in df: net.dat['mat_up'] = df['mat_up'].values net.dat['mat_dn'] = df['mat_dn'].values if 'mat_orig' in df: net.dat['mat_orig'] = df['mat_orig'].values categories.dict_cat(net, define_cat_colors=define_cat_colors)
python
{ "resource": "" }
q3589
mat_to_numpy_arr
train
def mat_to_numpy_arr(self):
    ''' convert list to numpy array - numpy arrays can not be saved as json
    '''
    import numpy as np

    self.dat['mat'] = np.asarray(self.dat['mat'])
python
{ "resource": "" }
q3590
cluster_row_and_col
train
def cluster_row_and_col(net, dist_type='cosine', linkage_type='average', dendro=True, run_clustering=True, run_rank=True, ignore_cat=False, calc_cat_pval=False, links=False): ''' cluster net.dat and make visualization json, net.viz. optionally leave out dendrogram colorbar groups with dendro argument ''' import scipy from copy import deepcopy from scipy.spatial.distance import pdist from . import categories, make_viz, cat_pval dm = {} for inst_rc in ['row', 'col']: tmp_mat = deepcopy(net.dat['mat']) dm[inst_rc] = calc_distance_matrix(tmp_mat, inst_rc, dist_type) # save directly to dat structure node_info = net.dat['node_info'][inst_rc] node_info['ini'] = list(range( len(net.dat['nodes'][inst_rc]), -1, -1)) # cluster if run_clustering is True: node_info['clust'], node_info['group'] = \ clust_and_group(net, dm[inst_rc], linkage_type=linkage_type) else: dendro = False node_info['clust'] = node_info['ini'] # sorting if run_rank is True: node_info['rank'] = sort_rank_nodes(net, inst_rc, 'sum') node_info['rankvar'] = sort_rank_nodes(net, inst_rc, 'var') else: node_info['rank'] = node_info['ini'] node_info['rankvar'] = node_info['ini'] ################################## if ignore_cat is False: categories.calc_cat_clust_order(net, inst_rc) if calc_cat_pval is True: cat_pval.main(net) # make the visualization json make_viz.viz_json(net, dendro, links) return dm
python
{ "resource": "" }
q3591
check_categories
train
def check_categories(lines): ''' find out how many row and col categories are available ''' # count the number of row categories rcat_line = lines[0].split('\t') # calc the number of row names and categories num_rc = 0 found_end = False # skip first tab for inst_string in rcat_line[1:]: if inst_string == '': if found_end is False: num_rc = num_rc + 1 else: found_end = True max_rcat = 15 if max_rcat > len(lines): max_rcat = len(lines) - 1 num_cc = 0 for i in range(max_rcat): ccat_line = lines[i + 1].split('\t') # make sure that line has length greater than one to prevent false cats from # trailing new lines at end of matrix if ccat_line[0] == '' and len(ccat_line) > 1: num_cc = num_cc + 1 num_labels = {} num_labels['row'] = num_rc + 1 num_labels['col'] = num_cc + 1 return num_labels
python
{ "resource": "" }
q3592
dict_cat
train
def dict_cat(net, define_cat_colors=False): ''' make a dictionary of node-category associations ''' # print('---------------------------------') # print('---- dict_cat: before setting cat colors') # print('---------------------------------\n') # print(define_cat_colors) # print(net.viz['cat_colors']) net.persistent_cat = True for inst_rc in ['row', 'col']: inst_keys = list(net.dat['node_info'][inst_rc].keys()) all_cats = [x for x in inst_keys if 'cat-' in x] for inst_name_cat in all_cats: dict_cat = {} tmp_cats = net.dat['node_info'][inst_rc][inst_name_cat] tmp_nodes = net.dat['nodes'][inst_rc] for i in range(len(tmp_cats)): inst_cat = tmp_cats[i] inst_node = tmp_nodes[i] if inst_cat not in dict_cat: dict_cat[inst_cat] = [] dict_cat[inst_cat].append(inst_node) tmp_name = 'dict_' + inst_name_cat.replace('-', '_') net.dat['node_info'][inst_rc][tmp_name] = dict_cat # merge with old cat_colors by default cat_colors = net.viz['cat_colors'] if define_cat_colors == True: cat_number = 0 for inst_rc in ['row', 'col']: inst_keys = list(net.dat['node_info'][inst_rc].keys()) all_cats = [x for x in inst_keys if 'cat-' in x] for cat_index in all_cats: if cat_index not in cat_colors[inst_rc]: cat_colors[inst_rc][cat_index] = {} cat_names = sorted(list(set(net.dat['node_info'][inst_rc][cat_index]))) # loop through each category name and assign a color for tmp_name in cat_names: # using the same rules as the front-end to define cat_colors inst_color = get_cat_color(cat_number + cat_names.index(tmp_name)) check_name = tmp_name # check if category is string type and non-numeric try: float(check_name) is_string_cat = False except: is_string_cat = True if is_string_cat == True: # check for default non-color if ': ' in check_name: check_name = check_name.split(': ')[1] # if check_name == 'False' or check_name == 'false': if 'False' in check_name or 'false' in check_name: inst_color = '#eee' if 'Not ' in check_name: inst_color = '#eee' # print('cat_colors') # print('----------') # print(cat_colors[inst_rc][cat_index]) # do not overwrite old colors if tmp_name not in cat_colors[inst_rc][cat_index] and is_string_cat: cat_colors[inst_rc][cat_index][tmp_name] = inst_color # print('overwrite: ' + tmp_name + ' -> ' + str(inst_color)) cat_number = cat_number + 1 net.viz['cat_colors'] = cat_colors
python
{ "resource": "" }
q3593
calc_cat_clust_order
train
def calc_cat_clust_order(net, inst_rc): ''' cluster category subset of data ''' from .__init__ import Network from copy import deepcopy from . import calc_clust, run_filter inst_keys = list(net.dat['node_info'][inst_rc].keys()) all_cats = [x for x in inst_keys if 'cat-' in x] if len(all_cats) > 0: for inst_name_cat in all_cats: tmp_name = 'dict_' + inst_name_cat.replace('-', '_') dict_cat = net.dat['node_info'][inst_rc][tmp_name] unordered_cats = dict_cat.keys() ordered_cats = order_categories(unordered_cats) # this is the ordering of the columns based on their category, not # including their clustering ordering within category all_cat_orders = [] tmp_names_list = [] for inst_cat in ordered_cats: inst_nodes = dict_cat[inst_cat] tmp_names_list.extend(inst_nodes) # cat_net = deepcopy(Network()) # cat_net.dat['mat'] = deepcopy(net.dat['mat']) # cat_net.dat['nodes'] = deepcopy(net.dat['nodes']) # cat_df = cat_net.dat_to_df() # sub_df = {} # if inst_rc == 'col': # sub_df['mat'] = cat_df['mat'][inst_nodes] # elif inst_rc == 'row': # # need to transpose df # cat_df['mat'] = cat_df['mat'].transpose() # sub_df['mat'] = cat_df['mat'][inst_nodes] # sub_df['mat'] = sub_df['mat'].transpose() # # filter matrix before clustering # ################################### # threshold = 0.0001 # sub_df = run_filter.df_filter_row_sum(sub_df, threshold) # sub_df = run_filter.df_filter_col_sum(sub_df, threshold) # # load back to dat # cat_net.df_to_dat(sub_df) # cat_mat_shape = cat_net.dat['mat'].shape # print('***************') # try: # if cat_mat_shape[0]>1 and cat_mat_shape[1] > 1 and all_are_numbers == False: # calc_clust.cluster_row_and_col(cat_net, 'cos') # inst_cat_order = cat_net.dat['node_info'][inst_rc]['clust'] # else: # inst_cat_order = list(range(len(cat_net.dat['nodes'][inst_rc]))) # except: # inst_cat_order = list(range(len(cat_net.dat['nodes'][inst_rc]))) # prev_order_len = len(all_cat_orders) # # add prev order length to the current order number # inst_cat_order = [i + prev_order_len for i in inst_cat_order] # all_cat_orders.extend(inst_cat_order) # # generate ordered list of row/col names, which will be used to # # assign the order to specific nodes # names_clust_list = [x for (y, x) in sorted(zip(all_cat_orders, # tmp_names_list))] names_clust_list = tmp_names_list # calc category-cluster order final_order = [] for i in range(len(net.dat['nodes'][inst_rc])): inst_node_name = net.dat['nodes'][inst_rc][i] inst_node_num = names_clust_list.index(inst_node_name) final_order.append(inst_node_num) inst_index_cat = inst_name_cat.replace('-', '_') + '_index' net.dat['node_info'][inst_rc][inst_index_cat] = final_order
python
{ "resource": "" }
q3594
Network.load_file_as_string
train
def load_file_as_string(self, file_string, filename=''):
    ''' Load file as a string.
    '''
    load_data.load_file_as_string(self, file_string, filename=filename)
python
{ "resource": "" }
q3595
Network.load_df
train
def load_df(self, df):
    ''' Load Pandas DataFrame.
    '''
    # self.__init__()
    self.reset()

    df_dict = {}
    df_dict['mat'] = deepcopy(df)

    # always define category colors if applicable when loading a df
    data_formats.df_to_dat(self, df_dict, define_cat_colors=True)
python
{ "resource": "" }
q3596
Network.widget_df
train
def widget_df(self): ''' Export a DataFrame from the front-end visualization. For instance, a user can filter to show only a single cluster using the dendrogram and then get a dataframe of this cluster using the widget_df method. ''' if hasattr(self, 'widget_instance') == True: if self.widget_instance.mat_string != '': tmp_net = deepcopy(Network()) df_string = self.widget_instance.mat_string tmp_net.load_file_as_string(df_string) df = tmp_net.export_df() return df else: return self.export_df() else: if hasattr(self, 'widget_class') == True: print('Please make the widget before exporting the widget DataFrame.') print('Do this using the widget method: net.widget()') else: print('Can not make widget because Network has no attribute widget_class') print('Please instantiate Network with clustergrammer_widget using: Network(clustergrammer_widget)')
python
{ "resource": "" }
q3597
Network.write_json_to_file
train
def write_json_to_file(self, net_type, filename, indent='no-indent'):
    ''' Save dat or viz as a JSON to file.
    '''
    export_data.write_json_to_file(self, net_type, filename, indent)
python
{ "resource": "" }
q3598
Network.filter_sum
train
def filter_sum(self, inst_rc, threshold, take_abs=True):
    ''' Filter a network's rows or columns based on the sum across rows or
    columns.
    '''
    inst_df = self.dat_to_df()

    if inst_rc == 'row':
        inst_df = run_filter.df_filter_row_sum(inst_df, threshold, take_abs)
    elif inst_rc == 'col':
        inst_df = run_filter.df_filter_col_sum(inst_df, threshold, take_abs)

    self.df_to_dat(inst_df)
python
{ "resource": "" }
q3599
Network.filter_cat
train
def filter_cat(self, axis, cat_index, cat_name):
    ''' Filter the matrix based on their category. cat_index is the index of
    the category, the first category has index=1.
    '''
    run_filter.filter_cat(self, axis, cat_index, cat_name)
python
{ "resource": "" }