_id stringlengths 2-7 | title stringlengths 1-88 | partition stringclasses 3 values | text stringlengths 31-13.1k | language stringclasses 1 value | meta_information dict |
---|---|---|---|---|---|
q3500
|
Asm.argval
|
train
|
def argval(self):
""" Solve args values or raise errors if not
defined yet
"""
if gl.has_errors:
return [None]
if self.asm in ('DEFB', 'DEFS', 'DEFW'):
return tuple([x.eval() if isinstance(x, Expr) else x for x in self.arg])
|
python
|
{
"resource": ""
}
|
q3501
|
Expr.eval
|
train
|
def eval(self):
""" Recursively evals the node. Exits with an
error if not resolved.
"""
|
python
|
{
"resource": ""
}
|
q3502
|
Expr.try_eval
|
train
|
def try_eval(self):
""" Recursively evals the node. Returns None
if it is still unresolved.
"""
item = self.symbol.item
if isinstance(item, int):
return item
if isinstance(item, Label):
if item.defined:
if isinstance(item.value, Expr):
return item.value.try_eval()
else:
return item.value
else:
if Expr.ignore:
return None
# Try to resolve into the global namespace
error(self.symbol.lineno, "Undefined label '%s'" % item.name)
return None
try:
if isinstance(item, tuple):
return tuple([x.try_eval() for x in item])
if isinstance(item, list):
return [x.try_eval() for x in item]
|
python
|
{
"resource": ""
}
|
q3503
|
Label.define
|
train
|
def define(self, value, lineno, namespace=None):
""" Defines label value. It can be anything. Even an AST
"""
if self.defined:
|
python
|
{
"resource": ""
}
|
q3504
|
Memory.set_org
|
train
|
def set_org(self, value, lineno):
""" Sets a new ORG value
"""
if value < 0 or value > MAX_MEM:
error(lineno, "Memory ORG
|
python
|
{
"resource": ""
}
|
q3505
|
Memory.id_name
|
train
|
def id_name(label, namespace=None):
""" Given a name and a namespace, resolves
returns the name as namespace + '.' + name. If namespace
is none, the current NAMESPACE is used
"""
if not label.startswith(DOT):
if namespace is None:
namespace = NAMESPACE
|
python
|
{
"resource": ""
}
|
q3506
|
Memory.__set_byte
|
train
|
def __set_byte(self, byte, lineno):
""" Sets a byte at the current location,
and increments org by one. Raises an error if org > MAX_MEMORY
"""
if byte < 0 or byte > 255:
|
python
|
{
"resource": ""
}
|
q3507
|
Memory.exit_proc
|
train
|
def exit_proc(self, lineno):
""" Exits current procedure. Local labels are transferred to global
scope unless they have been marked as local ones.
Raises an error if no current local context (stack underflow)
"""
__DEBUG__('Exiting current scope from lineno %i' % lineno)
if len(self.local_labels) <= 1:
error(lineno, 'ENDP in global scope (with no PROC)')
return
for label in self.local_labels[-1].values():
if label.local:
if not label.defined:
error(lineno, "Undefined LOCAL label '%s'" % label.name)
return
continue
name = label.name
|
python
|
{
"resource": ""
}
|
q3508
|
Memory.dump
|
train
|
def dump(self):
""" Returns a tuple containing code ORG, and a list of OUTPUT
"""
org = min(self.memory_bytes.keys()) # Org is the lowest one
OUTPUT = []
align = []
for i in range(org, max(self.memory_bytes.keys()) + 1):
if gl.has_errors:
return org, OUTPUT
try:
try:
a = [x for x in self.orgs[i] if isinstance(x, Asm)] # search for asm instructions
if not a:
align.append(0) # Fill with ZEROes not used memory regions
continue
OUTPUT += align
align = []
a = a[0]
|
python
|
{
"resource": ""
}
|
q3509
|
Memory.get_label
|
train
|
def get_label(self, label, lineno):
""" Returns a label in the current context or in the global one.
If the label does not exist, creates a new one and returns it.
"""
global NAMESPACE
ex_label, namespace = Memory.id_name(label)
for i in range(len(self.local_labels) - 1, -1, -1): # Downstep
result = self.local_labels[i].get(ex_label, None)
|
python
|
{
"resource": ""
}
|
q3510
|
_addr
|
train
|
def _addr(value):
''' Common subroutine for emitting array address
'''
output = []
try:
indirect = False
if value[0] == '*':
indirect = True
value = value[1:]
value = int(value) & 0xFFFF
if indirect:
output.append('ld hl, (%s)' % str(value))
else:
output.append('ld hl, %s' % str(value))
except ValueError:
|
python
|
{
"resource": ""
}
|
q3511
|
SymbolARGLIST.make_node
|
train
|
def make_node(cls, node, *args):
""" This will return a node with an argument_list.
"""
if node is None:
node = cls()
assert isinstance(node, SymbolARGUMENT) or
|
python
|
{
"resource": ""
}
|
q3512
|
_mul32
|
train
|
def _mul32(ins):
""" Multiplies two last 32bit values on top of the stack and
and returns the value on top of the stack
Optimizations done:
* If any operand is 1, do nothing
* If any operand is 0, push 0
"""
op1, op2 = tuple(ins.quad[2:])
if _int_ops(op1, op2):
op1, op2 = _int_ops(op1, op2)
output = _32bit_oper(op1)
if op2 == 1:
output.append('push de')
output.append('push hl')
return output # A * 1 = Nothing
|
python
|
{
"resource": ""
}
|
q3513
|
_shl32
|
train
|
def _shl32(ins):
""" Logical Left shift 32bit unsigned integers.
The result is pushed onto the stack.
Optimizations:
* If 2nd operand is 0, do nothing
"""
op1, op2 = tuple(ins.quad[2:])
if is_int(op2):
output = _32bit_oper(op1)
if int(op2) == 0:
output.append('push de')
output.append('push hl')
return output
if int(op2) > 1:
label = tmp_label()
output.append('ld b, %s' % op2)
output.append('%s:' % label)
output.append('call __SHL32')
output.append('djnz %s' % label)
else:
|
python
|
{
"resource": ""
}
|
q3514
|
_float_oper
|
train
|
def _float_oper(op1, op2=None):
''' Returns pop sequence for floating point operands
1st operand in A DE BC, 2nd operand remains in the stack
Unlike the 8bit and 16bit versions, this does not support
operand inversion. Since many of the instructions are implemented
as functions, they must support this.
However, if 1st operand is a number (immediate) or indirect, the stack
will be rearranged, so it contains a 48 bit pushed parameter value for the
subroutine to be called.
'''
output = []
op = op2 if op2 is not None else op1
indirect = (op[0] == '*')
if indirect:
op = op[1:]
if is_float(op):
op = float(op)
if indirect:
op = int(op) & 0xFFFF
output.append('ld hl, (%i)' % op)
output.append('call __ILOADF')
REQUIRES.add('iloadf.asm')
else:
A, DE, BC = _float(op)
output.append('ld a, %s' % A)
output.append('ld de, %s' % DE)
output.append('ld bc, %s' % BC)
else:
if indirect:
if op[0] == '_':
output.append('ld hl, (%s)' % op)
else:
output.append('pop hl')
output.append('call __ILOADF')
REQUIRES.add('iloadf.asm')
else:
if op[0] == '_':
output.append('ld a, (%s)' % op)
output.append('ld de, (%s + 1)' % op)
output.append('ld bc, (%s + 3)' % op)
else:
output.extend(_fpop())
if op2 is not None:
op = op1
if is_float(op): # A float must be on the stack. Let's push it
A, DE, BC = _float(op)
output.append('ld hl, %s' % BC)
output.append('push hl')
output.append('ld hl, %s' % DE)
output.append('push hl')
output.append('ld h, %s' % A)
output.append('push hl')
elif op[0] == '*': # Indirect
op = op[1:]
output.append('exx') # uses alternate set to put it on the stack
output.append("ex af, af'")
if is_int(op): # noqa TODO: it will fail
op = int(op)
|
python
|
{
"resource": ""
}
|
q3515
|
_addf
|
train
|
def _addf(ins):
''' Adds 2 float values. The result is pushed onto the stack.
'''
op1, op2 = tuple(ins.quad[2:])
if _f_ops(op1, op2) is not None:
opa, opb = _f_ops(op1, op2)
if opb == 0: # A + 0 => A
output = _float_oper(opa)
|
python
|
{
"resource": ""
}
|
q3516
|
_divf
|
train
|
def _divf(ins):
''' Divides 2 float values. The result is pushed onto the stack.
'''
op1, op2 = tuple(ins.quad[2:])
if is_float(op2) and float(op2) == 1: # Nothing to do. A / 1 = A
output = _float_oper(op1)
output.extend(_fpush())
|
python
|
{
"resource": ""
}
|
q3517
|
_modf
|
train
|
def _modf(ins):
''' Remainder of the division of 2 float values. The result is pushed onto the stack.
'''
op1, op2 =
|
python
|
{
"resource": ""
}
|
q3518
|
get_uniques
|
train
|
def get_uniques(l):
""" Returns a list with no repeated elements.
"""
result = []
for i in l:
|
python
|
{
"resource": ""
}
|
q3519
|
SymbolARGUMENT.typecast
|
train
|
def typecast(self, type_):
""" Test type casting to the argument expression.
On success changes the node value to the new typecast, and returns
True. On failure, returns False, and the node value is set to None.
"""
|
python
|
{
"resource": ""
}
|
q3520
|
syntax_error
|
train
|
def syntax_error(lineno, msg):
""" Generic syntax error routine
"""
if global_.has_errors > OPTIONS.max_syntax_errors.value:
msg = 'Too many errors. Giving up!'
msg = "%s:%i: %s" % (global_.FILENAME, lineno,
|
python
|
{
"resource": ""
}
|
q3521
|
warning
|
train
|
def warning(lineno, msg):
""" Generic warning error routine
"""
|
python
|
{
"resource": ""
}
|
q3522
|
oper
|
train
|
def oper(inst):
""" Returns operands of an ASM instruction.
Even "indirect" operands, like SP if RET or CALL is used.
"""
i = inst.strip(' \t\n').split(' ')
I = i[0].lower() # Instruction
i = ''.join(i[1:])
op = i.split(',')
if I in {'call', 'jp', 'jr'} and len(op) > 1:
op = op[1:] + ['f']
elif I == 'djnz':
op.append('b')
elif I in {'push', 'pop', 'call'}:
op.append('sp') # Sp is also affected by push, pop and call
elif I in {'or', 'and', 'xor', 'neg', 'cpl', 'rrca', 'rlca'}:
op.append('a')
elif I in {'rra', 'rla'}:
op.extend(['a', 'f'])
elif I in ('rr', 'rl'):
op.append('f')
elif I in {'adc', 'sbc'}:
if len(op) == 1:
op = ['a', 'f'] + op
elif I in {'add', 'sub'}:
if len(op) == 1:
op = ['a'] + op
elif I in {'ldd', 'ldi', 'lddr', 'ldir'}:
op = ['hl', 'de', 'bc']
elif I in {'cpd', 'cpi', 'cpdr', 'cpir'}:
op = ['a', 'hl', 'bc']
elif
|
python
|
{
"resource": ""
}
|
q3523
|
result
|
train
|
def result(i):
""" Returns which 8-bit registers are used by an asm
instruction to return a result.
"""
ins = inst(i)
op = oper(i)
if ins in ('or', 'and') and op == ['a']:
return ['f']
if ins in {'xor', 'or', 'and', 'neg', 'cpl', 'daa', 'rld', 'rrd', 'rra', 'rla', 'rrca', 'rlca'}:
return ['a', 'f']
if ins in {'bit', 'cp', 'scf', 'ccf'}:
return ['f']
if ins in {'sub', 'add', 'sbc', 'adc'}:
if len(op) == 1:
return ['a', 'f']
else:
return single_registers(op[0]) + ['f']
if ins == 'djnz':
return
|
python
|
{
"resource": ""
}
|
q3524
|
block_partition
|
train
|
def block_partition(block, i):
""" Returns two blocks, as a result of partitioning the given one at
i-th instruction.
"""
i += 1
new_block = BasicBlock(block.asm[i:])
block.mem = block.mem[:i]
block.asm = block.asm[:i]
block.update_labels()
new_block.update_labels()
new_block.goes_to = block.goes_to
block.goes_to = IdentitySet()
new_block.label_goes = block.label_goes
block.label_goes = []
new_block.next = new_block.original_next = block.original_next
new_block.prev = block
|
python
|
{
"resource": ""
}
|
q3525
|
partition_block
|
train
|
def partition_block(block):
""" If a block is not partitionable, returns a list with the same block.
Otherwise, returns a list with the resulting blocks, recursively.
"""
result = [block]
if not block.is_partitionable:
return result
EDP = END_PROGRAM_LABEL + ':'
for i in range(len(block) - 1):
if i and block.asm[i] == EDP: # END_PROGRAM label always starts a basic block
block, new_block = block_partition(block, i - 1)
LABELS[END_PROGRAM_LABEL].basic_block = new_block
result.extend(partition_block(new_block))
return result
if block.mem[i].is_ender:
block, new_block = block_partition(block, i)
result.extend(partition_block(new_block))
op = block.mem[i].opers
for l in op:
if l in LABELS.keys():
JUMP_LABELS.add(l)
block.label_goes += [l]
return result
if block.asm[i] in arch.zx48k.backend.ASMS:
if i > 0:
block, new_block = block_partition(block, i - 1)
|
python
|
{
"resource": ""
}
|
q3526
|
get_labels
|
train
|
def get_labels(MEMORY, basic_block):
""" Traverses memory, to annotate all the labels in the global
LABELS table
"""
for cell in MEMORY:
if cell.is_label:
|
python
|
{
"resource": ""
}
|
q3527
|
initialize_memory
|
train
|
def initialize_memory(basic_block):
""" Initializes global memory array with the given one
"""
global MEMORY
MEMORY
|
python
|
{
"resource": ""
}
|
q3528
|
cleanupmem
|
train
|
def cleanupmem(initial_memory):
""" Cleans up initial memory. Each label must be
ALONE. Each instruction must have an space, etc...
"""
i = 0
while i < len(initial_memory):
tmp = initial_memory[i]
|
python
|
{
"resource": ""
}
|
q3529
|
optimize
|
train
|
def optimize(initial_memory):
""" This will remove useless instructions
"""
global BLOCKS
global PROC_COUNTER
LABELS.clear()
JUMP_LABELS.clear()
del MEMORY[:]
PROC_COUNTER = 0
cleanupmem(initial_memory)
if OPTIONS.optimization.value <= 2:
return '\n'.join(x for x in initial_memory if not RE_PRAGMA.match(x))
optimize_init()
bb = BasicBlock(initial_memory)
cleanup_local_labels(bb)
initialize_memory(bb)
BLOCKS = basic_blocks = get_basic_blocks(bb) # 1st partition the Basic Blocks
for x in basic_blocks:
x.clean_up_comes_from()
x.clean_up_goes_to()
for x in basic_blocks:
x.update_goes_and_comes()
LABELS['*START*'].basic_block.add_goes_to(basic_blocks[0])
LABELS['*START*'].basic_block.next = basic_blocks[0]
basic_blocks[0].prev = LABELS['*START*'].basic_block
|
python
|
{
"resource": ""
}
|
q3530
|
Registers.reset_flags
|
train
|
def reset_flags(self):
""" Resets flags to an "unknown state"
"""
self.C
|
python
|
{
"resource": ""
}
|
q3531
|
Registers.get
|
train
|
def get(self, r):
""" Returns precomputed value of the given expression
"""
if r is None:
return None
if r.lower() == '(sp)' and self.stack:
return self.stack[-1]
if r[:1] == '(':
|
python
|
{
"resource": ""
}
|
q3532
|
Registers.eq
|
train
|
def eq(self, r1, r2):
""" True if values of r1 and r2 registers are equal
"""
if not is_register(r1) or not is_register(r2):
|
python
|
{
"resource": ""
}
|
q3533
|
Registers.inc
|
train
|
def inc(self, r):
""" Does inc on the register and precomputes flags
"""
self.set_flag(None)
if not is_register(r):
if r[0] == '(': # a memory position, basically: inc(hl)
r_ = r[1:-1].strip()
v_ = self.getv(self.mem.get(r_, None))
if v_ is not None:
v_ = (v_ + 1) & 0xFF
self.mem[r_] = str(v_)
|
python
|
{
"resource": ""
}
|
q3534
|
Registers.rrc
|
train
|
def rrc(self, r):
""" Does a ROTATION to the RIGHT |>>
"""
if not is_number(self.regs[r]):
self.set(r, None)
self.set_flag(None)
return
|
python
|
{
"resource": ""
}
|
q3535
|
Registers.rlc
|
train
|
def rlc(self, r):
""" Does a ROTATION to the LEFT <<|
"""
if not is_number(self.regs[r]):
self.set(r, None)
self.set_flag(None)
|
python
|
{
"resource": ""
}
|
q3536
|
Registers._is
|
train
|
def _is(self, r, val):
""" True if value of r is val.
"""
if not is_register(r) or val is None:
return False
r = r.lower()
if is_register(val):
return self.eq(r, val)
if is_number(val):
|
python
|
{
"resource": ""
}
|
q3537
|
MemCell.opers
|
train
|
def opers(self):
""" Returns a list of operators this mnemonic uses
"""
i = [x for x in self.asm.strip(' \t\n').split(' ') if x != '']
if len(i) == 1:
return []
i = ''.join(i[1:]).split(',')
|
python
|
{
"resource": ""
}
|
q3538
|
MemCell.affects
|
train
|
def affects(self, reglist):
""" Returns if this instruction affects any of the registers
in reglist.
"""
if isinstance(reglist, str):
reglist = [reglist]
|
python
|
{
"resource": ""
}
|
q3539
|
MemCell.needs
|
train
|
def needs(self, reglist):
""" Returns if this instruction need any of the registers
in reglist.
"""
if isinstance(reglist, str):
reglist = [reglist]
reglist
|
python
|
{
"resource": ""
}
|
q3540
|
MemCell.used_labels
|
train
|
def used_labels(self):
""" Returns a list of required labels for this instruction
"""
result = []
tmp = self.asm.strip(' \n\r\t')
if not len(tmp) or tmp[0] in ('#', ';'):
return result
try:
|
python
|
{
"resource": ""
}
|
q3541
|
MemCell.replace_label
|
train
|
def replace_label(self, oldLabel, newLabel):
""" Replaces old label with a new one
"""
if oldLabel == newLabel:
return
tmp = re.compile(r'\b' + oldLabel + r'\b')
last = 0
l = len(newLabel)
while True:
match = tmp.search(self.asm[last:])
if not match:
|
python
|
{
"resource": ""
}
|
q3542
|
BasicBlock.is_partitionable
|
train
|
def is_partitionable(self):
""" Returns if this block can be partitiones in 2 or more blocks,
because if contains enders.
"""
if len(self.mem) < 2:
return False # An atomic block
if any(x.is_ender or x.asm in arch.zx48k.backend.ASMS for x in self.mem):
return True
|
python
|
{
"resource": ""
}
|
q3543
|
BasicBlock.delete_from
|
train
|
def delete_from(self, basic_block):
""" Removes the basic_block ptr from the list for "comes_from"
if it exists. It also sets self.prev to None if it is basic_block.
"""
if basic_block is None:
return
if self.lock:
return
self.lock = True
if self.prev is basic_block:
if self.prev.next is self:
self.prev.next =
|
python
|
{
"resource": ""
}
|
q3544
|
BasicBlock.add_comes_from
|
train
|
def add_comes_from(self, basic_block):
""" This simulates a set. Adds the basic_block to the comes_from
list if not done already.
"""
if basic_block is None:
|
python
|
{
"resource": ""
}
|
q3545
|
BasicBlock.is_used
|
train
|
def is_used(self, regs, i, top=None):
""" Checks whether any of the given regs are required from the given point
to the end or not.
"""
if i < 0:
i = 0
if self.lock:
return True
regs = list(regs) # make a copy
if top is None:
top = len(self)
else:
top -= 1
for ii in range(i, top):
for r in self.mem[ii].requires:
|
python
|
{
"resource": ""
}
|
q3546
|
BasicBlock.swap
|
train
|
def swap(self, a, b):
""" Swaps mem positions a and b
"""
self.mem[a], self.mem[b] = self.mem[b], self.mem[a]
|
python
|
{
"resource": ""
}
|
q3547
|
BasicBlock.goes_requires
|
train
|
def goes_requires(self, regs):
""" Returns whether any of the goes_to block requires any of
the given registers.
"""
if len(self) and self.mem[-1].inst == 'call' and self.mem[-1].condition_flag is None:
for block in self.calls:
if block.is_used(regs, 0):
return True
d = block.destroys()
|
python
|
{
"resource": ""
}
|
q3548
|
BasicBlock.get_label_idx
|
train
|
def get_label_idx(self, label):
""" Returns the index of a label.
Returns None if not found.
|
python
|
{
"resource": ""
}
|
q3549
|
BasicBlock.get_first_non_label_instruction
|
train
|
def get_first_non_label_instruction(self):
""" Returns the memcell of the given block, which is
not a LABEL.
"""
for i in range(len(self)):
|
python
|
{
"resource": ""
}
|
q3550
|
_free_sequence
|
train
|
def _free_sequence(tmp1, tmp2=False):
''' Outputs a FREEMEM sequence for 1 or 2 ops
'''
if not tmp1 and not tmp2:
return []
output = []
if tmp1 and tmp2:
output.append('pop de')
output.append('ex (sp), hl')
output.append('push de')
output.append('call __MEM_FREE')
output.append('pop hl')
|
python
|
{
"resource": ""
}
|
q3551
|
_lenstr
|
train
|
def _lenstr(ins):
''' Returns string length
'''
(tmp1, output) = _str_oper(ins.quad[2], no_exaf=True)
if tmp1:
output.append('push hl')
output.append('call __STRLEN')
|
python
|
{
"resource": ""
}
|
q3552
|
_mul8
|
train
|
def _mul8(ins):
""" Multiplies 2 las values from the stack.
Optimizations:
* If any of the ops is ZERO,
then do A = 0 ==> XOR A, cause A * 0 = 0 * A = 0
* If any of the ops is ONE, do NOTHING
A * 1 = 1 * A = A
"""
op1, op2 = tuple(ins.quad[2:])
if _int_ops(op1, op2) is not None:
op1, op2 = _int_ops(op1, op2)
output = _8bit_oper(op1)
if op2 == 1: # A * 1 = 1 * A = A
output.append('push af')
return output
if op2 == 0:
output.append('xor a')
output.append('push af')
return output
if op2 == 2: # A * 2 == A SLA 1
output.append('add a, a')
output.append('push af')
return output
if op2 == 4: # A * 4 == A SLA 2
|
python
|
{
"resource": ""
}
|
q3553
|
_divu8
|
train
|
def _divu8(ins):
""" Divides 2 8bit unsigned integers. The result is pushed onto the stack.
Optimizations:
* If 2nd op is 1 then
do nothing
* If 2nd op is 2 then
Shift Right Logical
"""
op1, op2 = tuple(ins.quad[2:])
if is_int(op2):
op2 = int8(op2)
output = _8bit_oper(op1)
if op2 == 1:
output.append('push af')
return output
if op2 == 2:
output.append('srl a')
|
python
|
{
"resource": ""
}
|
q3554
|
_shru8
|
train
|
def _shru8(ins):
""" Shift 8bit unsigned integer to the right. The result is pushed onto the stack.
Optimizations:
* If 1st or 2nd op is 0 then
do nothing
* If 2nd op is < 4 then
unroll loop
"""
op1, op2 = tuple(ins.quad[2:])
if is_int(op2):
op2 = int8(op2)
output = _8bit_oper(op1)
if op2 == 0:
output.append('push af')
return output
if op2 < 4:
output.extend(['srl a'] * op2)
output.append('push af')
|
python
|
{
"resource": ""
}
|
q3555
|
MacroCall.is_defined
|
train
|
def is_defined(self, symbolTable=None):
""" True if this macro has been defined
|
python
|
{
"resource": ""
}
|
q3556
|
SymbolBOUNDLIST.make_node
|
train
|
def make_node(cls, node, *args):
''' Creates an array BOUND LIST.
'''
if node is None:
return cls.make_node(SymbolBOUNDLIST(), *args)
if node.token != 'BOUNDLIST':
return cls.make_node(None, node, *args)
|
python
|
{
"resource": ""
}
|
q3557
|
init
|
train
|
def init():
""" Initializes parser state
"""
global LABELS
global LET_ASSIGNMENT
global PRINT_IS_USED
global SYMBOL_TABLE
global ast
global data_ast
global optemps
global OPTIONS
global last_brk_linenum
LABELS = {}
LET_ASSIGNMENT = False
PRINT_IS_USED = False
last_brk_linenum = 0
ast = None
data_ast = None # Global Variables AST
optemps = OpcodesTemps()
gl.INITS.clear()
|
python
|
{
"resource": ""
}
|
q3558
|
make_func_call
|
train
|
def make_func_call(id_, lineno, params):
""" This will return an AST
|
python
|
{
"resource": ""
}
|
q3559
|
make_label
|
train
|
def make_label(id_, lineno):
""" Creates a label entry. Returns None on error.
"""
entry = SYMBOL_TABLE.declare_label(id_, lineno)
|
python
|
{
"resource": ""
}
|
q3560
|
SymbolBOUND.make_node
|
train
|
def make_node(lower, upper, lineno):
""" Creates an array bound
"""
if not is_static(lower, upper):
syntax_error(lineno, 'Array bounds must be constants')
return None
if isinstance(lower, SymbolVAR):
lower = lower.value
if isinstance(upper, SymbolVAR):
|
python
|
{
"resource": ""
}
|
q3561
|
Basic.sentence_bytes
|
train
|
def sentence_bytes(self, sentence):
""" Return bytes of a sentence.
This is a very simple parser. Sentence is a list of strings and numbers.
1st element of sentence MUST match a token.
"""
result = [TOKENS[sentence[0]]]
|
python
|
{
"resource": ""
}
|
q3562
|
Basic.line
|
train
|
def line(self, sentences, line_number=None):
""" Return the bytes for a basic line.
If no line number is given, current one + 10 will be used
Sentences is a list of sentences
"""
if line_number is None:
line_number = self.current_line + 10
self.current_line = line_number
sep = []
result = []
for sentence in sentences:
result.extend(sep)
|
python
|
{
"resource": ""
}
|
q3563
|
Lexer.include
|
train
|
def include(self, filename):
""" Changes FILENAME and line count
"""
if filename != STDIN and filename in [x[0] for x in self.filestack]: # Already included?
self.warning(' Recursive inclusion')
self.filestack.append([filename, 1, self.lex, self.input_data])
self.lex = lex.lex(object=self)
result = self.put_current_line() # First #line start with \n (EOL)
try:
if filename == STDIN:
self.input_data = sys.stdin.read()
|
python
|
{
"resource": ""
}
|
q3564
|
SymbolVAR.add_alias
|
train
|
def add_alias(self, entry):
""" Adds id to the current list 'aliased_by'
"""
assert
|
python
|
{
"resource": ""
}
|
q3565
|
SymbolVAR.make_alias
|
train
|
def make_alias(self, entry):
""" Make this variable an alias of another one
"""
entry.add_alias(self)
self.alias = entry
self.scope = entry.scope
|
python
|
{
"resource": ""
}
|
q3566
|
SymbolVAR.to_label
|
train
|
def to_label(var_instance):
""" Converts a var_instance to a label one
"""
# This can be done 'cause LABEL is just a dummy descent of VAR
|
python
|
{
"resource": ""
}
|
q3567
|
SymbolVAR.to_function
|
train
|
def to_function(var_instance, lineno=None):
""" Converts a var_instance to a function one
"""
assert isinstance(var_instance, SymbolVAR)
|
python
|
{
"resource": ""
}
|
q3568
|
SymbolVAR.to_vararray
|
train
|
def to_vararray(var_instance, bounds):
""" Converts a var_instance to a var array one
"""
assert isinstance(var_instance, SymbolVAR)
from symbols import BOUNDLIST
|
python
|
{
"resource": ""
}
|
q3569
|
TYPE.to_type
|
train
|
def to_type(cls, typename):
""" Converts a type ID to name. On error returns None
"""
NAME_TYPES
|
python
|
{
"resource": ""
}
|
q3570
|
f16
|
train
|
def f16(op):
""" Returns a floating point operand converted to 32 bits unsigned int.
Negative numbers are returned in 2 complement.
The result is returned in a tuple (DE, HL) => High16 (Int part), Low16 (Decimal part)
"""
|
python
|
{
"resource": ""
}
|
q3571
|
throw_invalid_quad_params
|
train
|
def throw_invalid_quad_params(quad, QUADS, nparams):
""" Exception raised when an invalid number of params in the
quad code has been emitted.
"""
raise InvalidICError(str(quad),
|
python
|
{
"resource": ""
}
|
q3572
|
fp
|
train
|
def fp(x):
""" Returns a floating point number as EXP+128, Mantissa
"""
def bin32(f):
""" Returns ASCII representation for a 32 bit integer value
"""
result = ''
a = int(f) & 0xFFFFFFFF # ensures int 32
for i in range(32):
result = str(a % 2) + result
a = a >> 1
return result
def bindec32(f):
""" Returns binary representation of a mantissa x (x is float)
"""
result = '0'
a = f
if f >= 1:
result = bin32(f)
result += '.'
c = int(a)
for i in range(32):
a -= c
a *= 2
c =
|
python
|
{
"resource": ""
}
|
q3573
|
immediate_float
|
train
|
def immediate_float(x):
""" Returns C DE HL as values for loading
an immediate floating point.
"""
def bin2hex(y):
return "%02X"
|
python
|
{
"resource": ""
}
|
q3574
|
Tree.inorder
|
train
|
def inorder(self, funct, stopOn=None):
""" Iterates in order, calling the function with the current node.
If stopOn is set to True or False, it will stop on true or false.
|
python
|
{
"resource": ""
}
|
q3575
|
Tree.preorder
|
train
|
def preorder(self, funct, stopOn=None):
""" Iterates in preorder, calling the function with the current node.
If stopOn is set to True or False, it will stop on true or false.
"""
|
python
|
{
"resource": ""
}
|
q3576
|
Tree.postorder
|
train
|
def postorder(self, funct, stopOn=None):
""" Iterates in postorder, calling the function with the current node.
If stopOn is set to True or False, it will stop on true or false.
"""
if stopOn is None:
|
python
|
{
"resource": ""
}
|
q3577
|
Tree.makenode
|
train
|
def makenode(clss, symbol, *nexts):
""" Stores the symbol in an AST instance,
and sets left and right to the given ones
"""
result = clss(symbol)
for i in nexts:
if i is None:
|
python
|
{
"resource": ""
}
|
q3578
|
get_include_path
|
train
|
def get_include_path():
""" Default include path using a tricky sys
calls.
"""
f1 = os.path.basename(sys.argv[0]).lower() # script filename
f2 = os.path.basename(sys.executable).lower() # Executable filename
# If executable filename and script name are the same, we are
if f1 == f2 or f2 == f1 + '.exe': # under a
|
python
|
{
"resource": ""
}
|
q3579
|
search_filename
|
train
|
def search_filename(fname, lineno, local_first):
""" Search a filename into the list of the include path.
If local_first is true, it will try first in the current directory of
the file being analyzed.
"""
fname = api.utils.sanitize_filename(fname)
i_path = [CURRENT_DIR] + INCLUDEPATH if local_first else list(INCLUDEPATH)
i_path.extend(OPTIONS.include_path.value.split(':') if OPTIONS.include_path.value else [])
if os.path.isabs(fname):
if os.path.isfile(fname):
|
python
|
{
"resource": ""
}
|
q3580
|
filter_
|
train
|
def filter_(input_, filename='<internal>', state='INITIAL'):
""" Filter the input string thought the preprocessor.
result is appended to OUTPUT global str
"""
global CURRENT_DIR
prev_dir = CURRENT_DIR
CURRENT_FILE.append(filename)
CURRENT_DIR
|
python
|
{
"resource": ""
}
|
q3581
|
_set_pyqtgraph_title
|
train
|
def _set_pyqtgraph_title(layout):
"""
Private function to add a title to the first row of the window.
Returns True if a Title is set. Else, returns False.
"""
if 'title_size' in pytplot.tplot_opt_glob:
size = pytplot.tplot_opt_glob['title_size']
if 'title_text' in
|
python
|
{
"resource": ""
}
|
q3582
|
TVar._check_spec_bins_ordering
|
train
|
def _check_spec_bins_ordering(self):
"""
This is a private function of the TVar object; it is run during
object creation to check if spec_bins are ascending or descending
"""
if self.spec_bins is None:
return
if len(self.spec_bins) == len(self.data.index):
self.spec_bins_time_varying = True
break_top_loop = False
for index, row in self.spec_bins.iterrows():
if row.isnull().values.all():
continue
else:
for i in row.index:
if np.isfinite(row[i]) and np.isfinite(row[i + 1]):
ascending = row[i] < row[i +
|
python
|
{
"resource": ""
}
|
q3583
|
UpdatingImage.paint
|
train
|
def paint(self, p, *args):
'''
I have no idea why, but we need to generate the picture after painting otherwise
it draws incorrectly.
'''
|
python
|
{
"resource": ""
}
|
q3584
|
UpdatingImage.setImage
|
train
|
def setImage(self, image=None, autoLevels=None, **kargs):
"""
Same thing as ImageItem.setImage, but we don't update the drawing
"""
profile = debug.Profiler()
gotNewData = False
if image is None:
if self.image is None:
return
else:
gotNewData = True
shapeChanged = (self.image is None or image.shape != self.image.shape)
image = image.view(np.ndarray)
if self.image is None or image.dtype != self.image.dtype:
self._effectiveLut = None
self.image = image
if self.image.shape[0] > 2**15-1 or self.image.shape[1] > 2**15-1:
if 'autoDownsample' not in kargs:
kargs['autoDownsample'] = True
if shapeChanged:
self.prepareGeometryChange()
self.informViewBoundsChanged()
profile()
if autoLevels is None:
if 'levels' in kargs:
|
python
|
{
"resource": ""
}
|
q3585
|
PytplotExporter.getPaintItems
|
train
|
def getPaintItems(self, root=None):
"""Return a list of all items that should be painted in the correct order."""
if root is None:
root = self.item
preItems = []
postItems = []
if isinstance(root, QtGui.QGraphicsScene):
childs = [i for i in root.items() if i.parentItem() is None]
rootItem = []
else:
# CHANGE: For GraphicsLayouts, there is no function for childItems(), so I just
# replaced it with .items()
try:
childs = root.childItems()
except:
childs = root.items()
rootItem = [root]
childs.sort(key=lambda a: a.zValue())
while len(childs) > 0:
ch = childs.pop(0)
|
python
|
{
"resource": ""
}
|
q3586
|
qn_df
|
train
|
def qn_df(df, axis='row', keep_orig=False):
'''
do quantile normalization of a dataframe dictionary, does not write to net
'''
df_qn = {}
for mat_type in df:
inst_df = df[mat_type]
# using transpose to do row qn
if axis == 'row':
inst_df = inst_df.transpose()
missing_values = inst_df.isnull().values.any()
# make mask of missing values
if missing_values:
# get nan mask
missing_mask = pd.isnull(inst_df)
# tmp fill in na with zero, will not affect qn
inst_df = inst_df.fillna(value=0)
# calc common distribution
common_dist = calc_common_dist(inst_df)
# swap in common distribution
|
python
|
{
"resource": ""
}
|
q3587
|
main
|
train
|
def main(net):
'''
calculate pvalue of category closeness
'''
# calculate the distance between the data points within the same category and
# compare to null distribution
for inst_rc in ['row', 'col']:
inst_nodes = deepcopy(net.dat['nodes'][inst_rc])
inst_index = deepcopy(net.dat['node_info'][inst_rc]['clust'])
# reorder based on clustered order
inst_nodes = [ inst_nodes[i] for i in inst_index]
# make distance matrix dataframe
dm = dist_matrix_lattice(inst_nodes)
node_infos = list(net.dat['node_info'][inst_rc].keys())
all_cats = []
for inst_info in node_infos:
if 'dict_cat_' in inst_info:
all_cats.append(inst_info)
for cat_dict in all_cats:
tmp_dict = net.dat['node_info'][inst_rc][cat_dict]
pval_name = cat_dict.replace('dict_','pval_')
net.dat['node_info'][inst_rc][pval_name] = {}
|
python
|
{
"resource": ""
}
|
q3588
|
df_to_dat
|
train
|
def df_to_dat(net, df, define_cat_colors=False):
'''
This is always run when data is loaded.
'''
from . import categories
# check if df has unique values
df['mat'] = make_unique_labels.main(net, df['mat'])
net.dat['mat'] = df['mat'].values
net.dat['nodes']['row'] = df['mat'].index.tolist()
net.dat['nodes']['col'] = df['mat'].columns.tolist()
for inst_rc in ['row', 'col']:
if type(net.dat['nodes'][inst_rc][0]) is tuple:
# get the number of categories from the length of the tuple
# subtract 1 because the name is the first element of the tuple
num_cat = len(net.dat['nodes'][inst_rc][0]) - 1
net.dat['node_info'][inst_rc]['full_names'] = net.dat['nodes']\
[inst_rc]
for inst_rcat in range(num_cat):
|
python
|
{
"resource": ""
}
|
q3589
|
mat_to_numpy_arr
|
train
|
def mat_to_numpy_arr(self):
''' convert list to numpy array - numpy arrays can not be saved as json '''
|
python
|
{
"resource": ""
}
|
q3590
|
cluster_row_and_col
|
train
|
def cluster_row_and_col(net, dist_type='cosine', linkage_type='average',
dendro=True, run_clustering=True, run_rank=True,
ignore_cat=False, calc_cat_pval=False, links=False):
''' cluster net.dat and make visualization json, net.viz.
optionally leave out dendrogram colorbar groups with dendro argument '''
import scipy
from copy import deepcopy
from scipy.spatial.distance import pdist
from . import categories, make_viz, cat_pval
dm = {}
for inst_rc in ['row', 'col']:
tmp_mat = deepcopy(net.dat['mat'])
dm[inst_rc] = calc_distance_matrix(tmp_mat, inst_rc, dist_type)
# save directly to dat structure
node_info = net.dat['node_info'][inst_rc]
node_info['ini'] = list(range( len(net.dat['nodes'][inst_rc]), -1, -1))
# cluster
if run_clustering is True:
node_info['clust'], node_info['group'] = \
clust_and_group(net, dm[inst_rc], linkage_type=linkage_type)
else:
|
python
|
{
"resource": ""
}
|
q3591
|
check_categories
|
train
|
def check_categories(lines):
'''
find out how many row and col categories are available
'''
# count the number of row categories
rcat_line = lines[0].split('\t')
# calc the number of row names and categories
num_rc = 0
found_end = False
# skip first tab
for inst_string in rcat_line[1:]:
if inst_string == '':
if found_end is False:
num_rc = num_rc + 1
else:
found_end = True
max_rcat = 15
if max_rcat > len(lines):
max_rcat = len(lines) - 1
num_cc = 0
for i in range(max_rcat):
ccat_line = lines[i + 1].split('\t')
|
python
|
{
"resource": ""
}
|
q3592
|
dict_cat
|
train
|
def dict_cat(net, define_cat_colors=False):
'''
make a dictionary of node-category associations
'''
# print('---------------------------------')
# print('---- dict_cat: before setting cat colors')
# print('---------------------------------\n')
# print(define_cat_colors)
# print(net.viz['cat_colors'])
net.persistent_cat = True
for inst_rc in ['row', 'col']:
inst_keys = list(net.dat['node_info'][inst_rc].keys())
all_cats = [x for x in inst_keys if 'cat-' in x]
for inst_name_cat in all_cats:
dict_cat = {}
tmp_cats = net.dat['node_info'][inst_rc][inst_name_cat]
tmp_nodes = net.dat['nodes'][inst_rc]
for i in range(len(tmp_cats)):
inst_cat = tmp_cats[i]
inst_node = tmp_nodes[i]
if inst_cat not in dict_cat:
dict_cat[inst_cat] = []
dict_cat[inst_cat].append(inst_node)
tmp_name = 'dict_' + inst_name_cat.replace('-', '_')
net.dat['node_info'][inst_rc][tmp_name] = dict_cat
# merge with old cat_colors by default
cat_colors = net.viz['cat_colors']
if define_cat_colors == True:
cat_number = 0
for inst_rc in ['row', 'col']:
inst_keys
|
python
|
{
"resource": ""
}
|
q3593
|
calc_cat_clust_order
|
train
|
def calc_cat_clust_order(net, inst_rc):
'''
cluster category subset of data
'''
from .__init__ import Network
from copy import deepcopy
from . import calc_clust, run_filter
inst_keys = list(net.dat['node_info'][inst_rc].keys())
all_cats = [x for x in inst_keys if 'cat-' in x]
if len(all_cats) > 0:
for inst_name_cat in all_cats:
tmp_name = 'dict_' + inst_name_cat.replace('-', '_')
dict_cat = net.dat['node_info'][inst_rc][tmp_name]
unordered_cats = dict_cat.keys()
ordered_cats = order_categories(unordered_cats)
# this is the ordering of the columns based on their category, not
# including their clustering ordering within category
all_cat_orders = []
tmp_names_list = []
for inst_cat in ordered_cats:
inst_nodes = dict_cat[inst_cat]
tmp_names_list.extend(inst_nodes)
# cat_net = deepcopy(Network())
# cat_net.dat['mat'] = deepcopy(net.dat['mat'])
# cat_net.dat['nodes'] = deepcopy(net.dat['nodes'])
# cat_df = cat_net.dat_to_df()
# sub_df = {}
# if inst_rc == 'col':
# sub_df['mat'] = cat_df['mat'][inst_nodes]
# elif inst_rc == 'row':
# # need to transpose df
# cat_df['mat'] = cat_df['mat'].transpose()
# sub_df['mat'] = cat_df['mat'][inst_nodes]
|
python
|
{
"resource": ""
}
|
q3594
|
Network.load_file_as_string
|
train
|
def load_file_as_string(self, file_string, filename=''):
'''
Load file as a string.
|
python
|
{
"resource": ""
}
|
q3595
|
Network.load_df
|
train
|
def load_df(self, df):
'''
Load Pandas DataFrame.
'''
# self.__init__()
self.reset()
df_dict = {}
df_dict['mat'] = deepcopy(df)
# always define category colors
|
python
|
{
"resource": ""
}
|
q3596
|
Network.widget_df
|
train
|
def widget_df(self):
'''
Export a DataFrame from the front-end visualization. For instance, a user
can filter to show only a single cluster using the dendrogram and then
get a dataframe of this cluster using the widget_df method.
'''
if hasattr(self, 'widget_instance') == True:
if self.widget_instance.mat_string != '':
tmp_net = deepcopy(Network())
df_string = self.widget_instance.mat_string
tmp_net.load_file_as_string(df_string)
df = tmp_net.export_df()
return df
else:
return self.export_df()
else:
if hasattr(self, 'widget_class') == True:
|
python
|
{
"resource": ""
}
|
q3597
|
Network.write_json_to_file
|
train
|
def write_json_to_file(self, net_type, filename, indent='no-indent'):
'''
Save dat or viz as a JSON to file.
'''
|
python
|
{
"resource": ""
}
|
q3598
|
Network.filter_sum
|
train
|
def filter_sum(self, inst_rc, threshold, take_abs=True):
'''
Filter a network's rows or columns based on the sum across rows or columns.
'''
inst_df = self.dat_to_df()
if inst_rc == 'row':
inst_df = run_filter.df_filter_row_sum(inst_df,
|
python
|
{
"resource": ""
}
|
q3599
|
Network.filter_cat
|
train
|
def filter_cat(self, axis, cat_index, cat_name):
'''
Filter the matrix based on their category. cat_index is the index of the category, the first
|
python
|
{
"resource": ""
}
|
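Every row above follows the six-column schema given in the header at the top (_id, title, partition, text, language, meta_information). As a rough illustration only, the sketch below shows one way such records could be read and summarized if they were exported to a JSON Lines file; the `samples.jsonl` path, the export format, and the `iter_records` helper are assumptions for this example, not something specified by the dataset page.

```python
# Minimal sketch, assuming the rows above were exported to a hypothetical
# "samples.jsonl" file with one JSON object per line and the six fields
# from the header (_id, title, partition, text, language, meta_information).
import json
from collections import Counter


def iter_records(path):
    """Yield one record dict per non-empty JSON line."""
    with open(path, encoding="utf-8") as fh:
        for line in fh:
            line = line.strip()
            if line:
                yield json.loads(line)


if __name__ == "__main__":
    records = list(iter_records("samples.jsonl"))  # hypothetical export path
    # Samples per partition (the header reports 3 partition classes).
    print(Counter(r["partition"] for r in records))
    # Peek at the first record: id, symbol name, language, and a text excerpt.
    first = records[0]
    print(first["_id"], first["title"], first["language"])
    print(first["text"][:200])
```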