text_prompt
stringlengths 100
17.7k
⌀ | code_prompt
stringlengths 7
9.86k
⌀ |
---|---|
<SYSTEM_TASK:>
Given a transaction dict returned by btc_tx_deserialize, convert it back into a
<END_TASK>
<USER_TASK:>
Description:
def btc_tx_serialize(_txobj):
    """
    Given a transaction dict returned by btc_tx_deserialize, convert it back into a
    hex-encoded byte string.

    Derived from code written by Vitalik Buterin in pybitcointools (https://github.com/vbuterin/pybitcointools)
    """
    # output buffer
    o = []
    txobj = None
    if encoding.json_is_base(_txobj, 16):
        # txobj is built from hex strings already.  deserialize them
        txobj = encoding.json_changebase(_txobj, lambda x: binascii.unhexlify(x))
    else:
        txobj = copy.deepcopy(_txobj)

    # version (little-endian 4 bytes)
    o.append(encoding.encode(txobj["version"], 256, 4)[::-1])

    # do we have any witness scripts?
    # (py3 compat: use `in` instead of the removed dict.has_key())
    have_witness = False
    for inp in txobj['ins']:
        if 'witness_script' in inp and len(inp['witness_script']) > 0:
            have_witness = True
            break

    if have_witness:
        # add segwit marker and flag bytes
        o.append('\x00\x01')

    # number of inputs
    o.append(encoding.num_to_var_int(len(txobj["ins"])))

    # all inputs
    for inp in txobj["ins"]:
        # input tx hash (byte-reversed on the wire)
        o.append(inp["outpoint"]["hash"][::-1])

        # input tx outpoint index
        o.append(encoding.encode(inp["outpoint"]["index"], 256, 4)[::-1])

        # input scriptsig, as a var-int-prefixed byte string
        script = inp.get('script')
        if not script:
            script = bytes()

        scriptsig = encoding.num_to_var_int(len(script)) + script
        o.append(scriptsig)

        # sequence (defaults to UINT_MAX - 1)
        o.append(encoding.encode(inp.get("sequence", UINT_MAX - 1), 256, 4)[::-1])

    # number of outputs
    o.append(encoding.num_to_var_int(len(txobj["outs"])))

    # all outputs
    for out in txobj["outs"]:
        # value, as a little-endian 8-byte integer
        o.append(encoding.encode(out["value"], 256, 8)[::-1])

        # scriptPubKey, as a var-int-prefixed byte string
        scriptpubkey = encoding.num_to_var_int(len(out['script'])) + out['script']
        o.append(scriptpubkey)

    # add witnesses
    if have_witness:
        for inp in txobj['ins']:
            witness_script = inp.get('witness_script')
            if not witness_script:
                # every input must carry a witness field; empty is encoded as 0x00
                witness_script = '\x00'

            o.append(witness_script)

    # locktime
    o.append(encoding.encode(txobj["locktime"], 256, 4)[::-1])

    # full string
    ret = ''.join( encoding.json_changebase(o, lambda x: encoding.safe_hexlify(x)) )
    return ret
<SYSTEM_TASK:>
Strip the witness information from a serialized transaction
<END_TASK>
<USER_TASK:>
Description:
def btc_tx_witness_strip( tx_serialized ):
    """
    Strip the witness information from a serialized transaction.

    @tx_serialized: hex-encoded transaction string

    Returns the hex-encoded transaction without witness data.
    """
    if not btc_tx_is_segwit(tx_serialized):
        # already stripped
        return tx_serialized

    tx = btc_tx_deserialize(tx_serialized)
    for inp in tx['ins']:
        # BUG FIX: `del inp['witness_script']` raised KeyError for any input
        # that lacked the key; pop() tolerates inputs without witness data.
        inp.pop('witness_script', None)

    tx_stripped = btc_tx_serialize(tx)
    return tx_stripped
<SYSTEM_TASK:>
Given an unsigned serialized transaction, add more inputs and outputs to it.
<END_TASK>
<USER_TASK:>
Description:
def btc_tx_extend(partial_tx_hex, new_inputs, new_outputs, **blockchain_opts):
    """
    Given an unsigned serialized transaction, add more inputs and outputs to it.
    @new_inputs and @new_outputs will be virtualchain-formatted:
    * new_inputs[i] will have {'outpoint': {'index':..., 'hash':...}, 'script':..., 'witness_script': ...}
    * new_outputs[i] will have {'script':..., 'value':... (in fundamental units, e.g. satoshis!)}
    """
    # parse the partial transaction and append the extra inputs/outputs
    txobj = btc_tx_deserialize(partial_tx_hex)

    extended = {
        'ins': txobj['ins'] + new_inputs,
        'outs': txobj['outs'] + new_outputs,
        'locktime': txobj['locktime'],
        'version': txobj['version'],
    }

    # re-serialize with the combined input/output sets
    return btc_tx_serialize(extended)
<SYSTEM_TASK:>
Return a DER-encoded length field
<END_TASK>
<USER_TASK:>
Description:
def btc_tx_der_encode_length(l):
    """
    Return a DER-encoded length field
    Based on code from python-ecdsa (https://github.com/warner/python-ecdsa)
    by Brian Warner.  Subject to the MIT license.
    """
    if l < 0:
        raise ValueError("length cannot be negative")

    # short form: lengths under 0x80 fit directly in a single byte
    if l < 0x80:
        return int2byte(l)

    # long form: hex-encode the length, padded to a whole number of bytes
    hexlen = ("%x" % l).encode()
    if len(hexlen) % 2:
        hexlen = b("0") + hexlen

    length_bytes = binascii.unhexlify(hexlen)

    # prefix byte 0x80 | (number of length bytes), then the length itself
    return int2byte(0x80 | len(length_bytes)) + length_bytes
<SYSTEM_TASK:>
Return a DER-encoded sequence
<END_TASK>
<USER_TASK:>
Description:
def btc_tx_der_encode_sequence(*encoded_pieces):
    """
    Return a DER-encoded sequence
    Based on code from python-ecdsa (https://github.com/warner/python-ecdsa)
    by Brian Warner.  Subject to the MIT license.
    """
    # a DER SEQUENCE is tag 0x30, a length field, then the concatenated members
    body = b('').join(encoded_pieces)
    return b('\x30') + btc_tx_der_encode_length(len(body)) + body
<SYSTEM_TASK:>
Calculate the sighash of a non-segwit transaction.
<END_TASK>
<USER_TASK:>
Description:
def btc_tx_sighash( tx, idx, script, hashcode=SIGHASH_ALL):
    """
    Calculate the sighash of a non-segwit transaction.
    If it's SIGHASH_NONE, then digest the inputs but no outputs
    If it's SIGHASH_SINGLE, then digest all inputs and all outputs up to i (excluding values and scripts), and fully digest the ith input and output
    If it's (something) | SIGHASH_ANYONECANPAY, then only digest the ith input.
    Return the double-sha256 digest of the relevant fields.

    THIS DOES NOT WORK WITH SEGWIT OUTPUTS

    Adapted from https://github.com/vbuterin/pybitcointools, by Vitalik Buterin
    """
    txobj = btc_tx_deserialize(tx)
    idx = int(idx)
    hashcode = int(hashcode)

    newtx = copy.deepcopy(txobj)

    # remove all scriptsigs in all inputs, except for the ith input's scriptsig.
    # the other inputs will be 'partially signed', except for SIGHASH_ANYONECANPAY mode.
    # (py3 compat: use `in` instead of the removed dict.has_key())
    for i in range(0, len(newtx['ins'])):
        newtx['ins'][i]["script"] = ''
        if i == idx:
            if 'witness_script' in newtx['ins'][i] and newtx['ins'][i]['witness_script']:
                raise ValueError('this method does not handle segwit inputs')

        if 'witness_script' in newtx['ins'][i]:
            del newtx['ins'][i]['witness_script']

    newtx["ins"][idx]["script"] = script

    if (hashcode & 0x1f) == SIGHASH_NONE:
        # don't care about the outputs with this signature
        newtx["outs"] = []
        for inp in newtx['ins']:
            inp['sequence'] = 0

    elif (hashcode & 0x1f) == SIGHASH_SINGLE:
        # only signing for this input.
        # all outputs after this input will not be signed.
        # all outputs before this input will be partially signed (but not their values or scripts)
        if len(newtx['ins']) > len(newtx['outs']):
            raise ValueError('invalid hash code: {} inputs but {} outputs'.format(len(newtx['ins']), len(newtx['outs'])))

        # NOTE(review): truncation/blanking is keyed off the input count rather than
        # idx+1 as in upstream pybitcointools -- verify against the SIGHASH_SINGLE
        # spec if inputs and outputs are not in 1-to-1 correspondence.
        newtx["outs"] = newtx["outs"][:len(newtx["ins"])]
        for out in newtx["outs"][:len(newtx["ins"]) - 1]:
            out['value'] = 2**64 - 1
            out['script'] = ""

    elif (hashcode & SIGHASH_ANYONECANPAY) != 0:
        # only going to sign this specific input, and nothing else
        newtx["ins"] = [newtx["ins"][idx]]

    signing_tx = btc_tx_serialize(newtx)
    sighash = btc_tx_get_hash( signing_tx, hashcode )
    return sighash
<SYSTEM_TASK:>
Sign a native p2wsh or p2sh-p2wsh multisig input.
<END_TASK>
<USER_TASK:>
Description:
def btc_tx_sign_multisig_segwit(tx, idx, prevout_amount, witness_script, private_keys, hashcode=SIGHASH_ALL, hashcodes=None, native=False):
    """
    Sign a native p2wsh or p2sh-p2wsh multisig input.
    @tx must be a hex-encoded tx
    Return the signed transaction
    """
    from .multisig import parse_multisig_redeemscript

    if hashcodes is None:
        hashcodes = [hashcode] * len(private_keys)

    txobj = btc_tx_deserialize(str(tx))

    # map compressed public keys to the private keys that own them
    privkey_by_pubkey = {}
    for privkey in private_keys:
        pubkey_hex = ecdsalib.ecdsa_private_key(privkey).public_key().to_hex()
        privkey_by_pubkey[keylib.key_formatting.compress(pubkey_hex)] = privkey

    m, public_keys = parse_multisig_redeemscript(witness_script)

    # produce up to m signatures, walking keys in witness-script order
    used_keys = []
    sigs = []
    for i, public_key in enumerate(public_keys):
        if public_key not in privkey_by_pubkey:
            continue

        if len(used_keys) == m:
            break

        if public_key in used_keys:
            raise ValueError('Tried to reuse key in witness script: {}'.format(public_key))

        used_keys.append(public_key)
        sigs.append(btc_tx_make_input_signature_segwit(
            tx, idx, prevout_amount, witness_script, privkey_by_pubkey[public_key], hashcodes[i]))

    if len(used_keys) != m:
        raise ValueError('Missing private keys (used {}, required {})'.format(len(used_keys), m))

    # witness stack: CHECKMULTISIG's off-by-one dummy, the signatures, the witness script
    witness_stack = btc_witness_script_serialize([None] + sigs + [witness_script])

    if native:
        # native p2wsh: only the witness carries the data
        txobj['ins'][idx]['witness_script'] = witness_stack
    else:
        # p2sh-p2wsh: the scriptSig carries the p2wsh redeem script as well
        txobj['ins'][idx]['script'] = btc_make_p2sh_p2wsh_redeem_script(witness_script)
        txobj['ins'][idx]['witness_script'] = witness_stack

    return btc_tx_serialize(txobj)
<SYSTEM_TASK:>
What kind of scriptsig can this private key make?
<END_TASK>
<USER_TASK:>
Description:
def btc_privkey_scriptsig_classify(private_key_info):
    """
    What kind of scriptsig can this private key make?

    Returns one of 'p2pkh', 'p2sh', 'p2sh-p2wpkh', 'p2sh-p2wsh',
    or None if the key bundle format is not recognized.
    """
    # probe each supported key-bundle format in order of precedence
    classifiers = [
        (btc_is_singlesig, 'p2pkh'),
        (btc_is_multisig, 'p2sh'),
        (btc_is_singlesig_segwit, 'p2sh-p2wpkh'),
        (btc_is_multisig_segwit, 'p2sh-p2wsh'),
    ]
    for predicate, scriptsig_type in classifiers:
        if predicate(private_key_info):
            return scriptsig_type

    return None
<SYSTEM_TASK:>
Sign all unsigned inputs with a given key.
<END_TASK>
<USER_TASK:>
Description:
def btc_tx_sign_all_unsigned_inputs(private_key_info, prev_outputs, unsigned_tx_hex, scriptsig_type=None, segwit=None, **blockchain_opts):
    """
    Sign all unsigned inputs with a given key.
    Use the given outputs to fund them.

    @private_key_info: either a hex private key, or a dict with 'private_keys' and 'redeem_script'
    defined as keys.
    @prev_outputs: a list of {'out_script': xxx, 'value': xxx} that are in 1-to-1 correspondence with the unsigned inputs in the tx ('value' is in satoshis)
    @unsigned_hex_tx: hex transaction with unsigned inputs

    Returns: signed hex transaction
    """
    if segwit is None:
        segwit = get_features('segwit')

    txobj = btc_tx_deserialize(unsigned_tx_hex)
    inputs = txobj['ins']

    if scriptsig_type is None:
        scriptsig_type = btc_privkey_scriptsig_classify(private_key_info)

    tx_hex = unsigned_tx_hex
    prevout_index = 0

    for i, inp in enumerate(inputs):
        do_witness_script = segwit
        # (py3 compat: use `in` instead of the removed dict.has_key())
        if 'witness_script' in inp:
            do_witness_script = True
        elif segwit:
            # all inputs must receive a witness script, even if it's empty
            inp['witness_script'] = ''

        # skip inputs that are already signed (non-empty scriptsig or witness)
        if (inp['script'] and len(inp['script']) > 0) or ('witness_script' in inp and len(inp['witness_script']) > 0):
            continue

        if prevout_index >= len(prev_outputs):
            raise ValueError("Not enough prev_outputs ({} given, {} more prev-outputs needed)".format(len(prev_outputs), len(inputs) - prevout_index))

        # tx with index i signed with privkey
        tx_hex = btc_tx_sign_input(str(unsigned_tx_hex), i, prev_outputs[prevout_index]['out_script'], prev_outputs[prevout_index]['value'], private_key_info, segwit=do_witness_script, scriptsig_type=scriptsig_type)
        unsigned_tx_hex = tx_hex
        prevout_index += 1

    return tx_hex
<SYSTEM_TASK:>
Given block header information, serialize it and return the hex hash.
<END_TASK>
<USER_TASK:>
Description:
def block_header_serialize( inp ):
    """
    Given block header information, serialize it and return the hex hash.
    inp has:
    * version (int)
    * prevhash (str)
    * merkle_root (str)
    * timestamp (int)
    * bits (int)
    * nonce (int)
    Based on code from pybitcointools (https://github.com/vbuterin/pybitcointools)
    by Vitalik Buterin
    """
    # concatenate the six header fields, each byte-reversed for the wire format
    fields = [
        encoding.encode(inp['version'], 256, 4)[::-1],
        inp['prevhash'].decode('hex')[::-1],
        inp['merkle_root'].decode('hex')[::-1],
        encoding.encode(inp['timestamp'], 256, 4)[::-1],
        encoding.encode(inp['bits'], 256, 4)[::-1],
        encoding.encode(inp['nonce'], 256, 4)[::-1],
    ]
    header_bin = ''.join(fields)

    # sanity check: the (reversed) double-sha256 must match the expected hash
    h = hashing.bin_sha256(hashing.bin_sha256(header_bin))[::-1].encode('hex')
    assert h == inp['hash'], (hashing.bin_sha256(header_bin).encode('hex'), inp['hash'])

    return header_bin.encode('hex')
<SYSTEM_TASK:>
Calculate the hex form of a block's header, given its getblock information from bitcoind.
<END_TASK>
<USER_TASK:>
Description:
def block_header_to_hex( block_data, prev_hash ):
    """
    Calculate the hex form of a block's header, given its getblock information from bitcoind.
    """
    # repackage bitcoind's getblock fields into the serializer's schema
    return block_header_serialize({
        "version": block_data['version'],
        "prevhash": prev_hash,
        "merkle_root": block_data['merkleroot'],
        "timestamp": block_data['time'],
        "bits": int(block_data['bits'], 16),   # bitcoind reports bits as a hex string
        "nonce": block_data['nonce'],
        "hash": block_data['hash'],
    })
<SYSTEM_TASK:>
Verify whether or not bitcoind's block header matches the hash we expect.
<END_TASK>
<USER_TASK:>
Description:
def block_header_verify( block_data, prev_hash, block_hash ):
    """
    Verify whether or not bitcoind's block header matches the hash we expect.
    """
    serialized_header = block_header_to_hex( block_data, prev_hash )

    # block hashes are displayed byte-reversed relative to the raw digest
    digest = hashing.bin_double_sha256(binascii.unhexlify(serialized_header))
    return block_hash == binascii.hexlify(digest[::-1])
<SYSTEM_TASK:>
Saves the configuration to a JSON, in the standard config location.
<END_TASK>
<USER_TASK:>
Description:
def save(self, force=False):
    """
    Saves the configuration to a JSON, in the standard config location.

    Args:
        force (Optional[:obj:`bool`]): Continue writing, even if the original
            config file was not loaded properly. This is dangerous, because
            it could cause the previous configuration options to be lost.
            Defaults to :obj:`False`.

    Raises:
        :obj:`ConfigError`: if the configuration file was not successfully
            loaded on initialization of the class, and
            :obj:`force` is :obj:`False`.
    """
    # refuse to overwrite a config we could not read, unless forced
    if not (self._success or force):
        raise ConfigError((
            'The config file appears to be corrupted:\n\n'
            ' {fname}\n\n'
            'Before attempting to save the configuration, please either '
            'fix the config file manually, or overwrite it with a blank '
            'configuration as follows:\n\n'
            ' from dustmaps.config import config\n'
            ' config.reset()\n\n'
        ).format(fname=self.fname))

    with open(self.fname, 'w') as f:
        json.dump(self._options, f, indent=2)
<SYSTEM_TASK:>
Resets the configuration, and overwrites the existing configuration
<END_TASK>
<USER_TASK:>
Description:
def reset(self):
    """
    Resets the configuration, and overwrites the existing configuration
    file.
    """
    # blank out all options, persist them, and mark the config as healthy
    self._options = {}
    self.save(force=True)
    self._success = True
<SYSTEM_TASK:>
Interact with the blockchain peer,
<END_TASK>
<USER_TASK:>
Description:
def run( self ):
    """
    Interact with the blockchain peer,
    until we get a socket error or we
    exit the loop explicitly.

    The order of operations is:
    * send version
    * receive version
    * send verack
    * send getdata
    * receive blocks
    * for each block:
      * for each transaction with nulldata:
        * for each input:
          * get the transaction that produced the consumed input

    Return True on success
    Return False on error
    """
    log.debug("Segwit support: {}".format(get_features('segwit')))
    self.begin()

    try:
        self.loop()
    # py3 compat fix: `except socket.error, se` is invalid Python 3 syntax
    except socket.error as se:
        if not self.finished:
            # unexpected disconnect
            log.exception(se)
            return False

    # fetch remaining sender transactions
    try:
        self.fetch_sender_txs()
    except Exception as e:
        log.exception(e)
        return False

    # should be done now
    try:
        self.block_data_sanity_checks()
    except AssertionError as ae:
        log.exception(ae)
        return False

    return True
<SYSTEM_TASK:>
Have we received all block data?
<END_TASK>
<USER_TASK:>
Description:
def have_all_block_data(self):
    """
    Have we received all block data?

    Returns True when the number of blocks received equals
    the number requested; False (with a debug log) otherwise.
    """
    if self.num_blocks_received != self.num_blocks_requested:
        log.debug("num blocks received = %s, num requested = %s" % (self.num_blocks_received, self.num_blocks_requested))
        return False

    return True
<SYSTEM_TASK:>
Fetch all sender txs via JSON-RPC,
<END_TASK>
<USER_TASK:>
Description:
def fetch_sender_txs(self):
    """
    Fetch all sender txs via JSON-RPC,
    and merge them into our block data.

    Try backing off (up to 5 times) if we fail
    to fetch transactions via JSONRPC

    Return True on success
    Raise on error
    """
    # fetch remaining sender transactions
    if len(self.sender_info) > 0:
        # py3 compat fix: dict.keys() is a view, not a list -- the original
        # `self.sender_info.keys()[:]` slice breaks on Python 3.
        sender_txids = list(self.sender_info.keys())
        batch_size = 20

        # batch the txids so each JSON-RPC payload stays small
        sender_txid_batches = []
        for i in range(0, len(sender_txids), batch_size):
            sender_txid_batches.append(sender_txids[i:i+batch_size])

        for i in range(0, len(sender_txid_batches)):
            sender_txid_batch = sender_txid_batches[i]
            log.debug("Fetch %s TXs via JSON-RPC (%s-%s of %s)" % (len(sender_txid_batch), i * batch_size, i * batch_size + len(sender_txid_batch), len(sender_txids)))

            sender_txs = None

            # linear backoff, up to 5 attempts
            for j in range(0, 5):
                sender_txs = self.fetch_txs_rpc(self.bitcoind_opts, sender_txid_batch)
                if sender_txs is None:
                    log.error("Failed to fetch transactions; trying again (%s of %s)" % (j+1, 5))
                    time.sleep(j+1)
                    continue

                break

            if sender_txs is None:
                raise Exception("Failed to fetch transactions")

            # pair back up with nulldata transactions
            for sender_txid, sender_tx in sender_txs.items():
                assert sender_txid in self.sender_info.keys(), "Unsolicited sender tx %s" % sender_txid

                # match sender outputs to the nulldata tx's inputs
                for nulldata_input_vout_index in self.sender_info[sender_txid].keys():
                    if sender_txid != "0000000000000000000000000000000000000000000000000000000000000000":
                        # regular tx, not coinbase
                        assert nulldata_input_vout_index < len(sender_tx['outs']), 'Output index {} is out of bounds for {}'.format(nulldata_input_vout_index, sender_txid)

                        # save sender info
                        self.add_sender_info(sender_txid, nulldata_input_vout_index, sender_tx['outs'][nulldata_input_vout_index])
                    else:
                        # coinbase
                        self.add_sender_info(sender_txid, nulldata_input_vout_index, sender_tx['outs'][0])

                # update accounting
                self.num_txs_received += 1

    return True
<SYSTEM_TASK:>
Verify that the data we received makes sense.
<END_TASK>
<USER_TASK:>
Description:
def block_data_sanity_checks(self):
    """
    Verify that the data we received makes sense.
    Return True on success
    Raise on error
    """
    assert self.have_all_block_data(), "Still missing block data"
    assert self.num_txs_received == len(self.sender_info.keys()), "Num TXs received: %s; num TXs requested: %s" % (self.num_txs_received, len(self.sender_info.keys()))

    for (block_hash, block_info) in self.block_info.items():
        for tx in block_info['txns']:
            assert None not in tx['senders'], "Missing one or more senders in %s; dump follows\n%s" % (tx['txid'], simplejson.dumps(tx, indent=4, sort_keys=True))
            for i in range(0, len(tx['ins'])):
                inp = tx['ins'][i]
                sinfo = tx['senders'][i]

                assert sinfo['txid'] in self.sender_info, 'Surreptitious sender tx {}'.format(sinfo['txid'])
                assert inp['outpoint']['index'] == sinfo['nulldata_vin_outpoint'], 'Mismatched sender/input index ({}: {} != {}); dump follows\n{}'.format(
                        sinfo['txid'], inp['outpoint']['index'], sinfo['nulldata_vin_outpoint'], simplejson.dumps(tx, indent=4, sort_keys=True))

                # BUG FIX: the failure message formatted inp['txid'], but inputs have
                # no 'txid' key (see the comparison: the input's txid lives at
                # inp['outpoint']['hash']).  Building the message would raise KeyError
                # and mask the intended AssertionError.
                assert inp['outpoint']['hash'] == sinfo['txid'], 'Mismatched sender/input txid ({} != {}); dump follows\n{}'.format(inp['outpoint']['hash'], sinfo['txid'], simplejson.dumps(tx, indent=4, sort_keys=True))

    return True
<SYSTEM_TASK:>
This method will implement the handshake of the
<END_TASK>
<USER_TASK:>
Description:
def begin(self):
    """
    This method will implement the handshake of the
    Bitcoin protocol. It will send the Version message,
    and block until it receives a VerAck.
    Once we receive the version, we'll send the verack,
    and begin downloading.
    """
    log.debug("handshake (version %s)" % PROTOCOL_VERSION)

    # advertise no services: we cannot serve blocks ourselves
    version_msg = Version()
    version_msg.services = 0

    log.debug("send Version")
    self.send_message(version_msg)
<SYSTEM_TASK:>
Record sender information in our block info.
<END_TASK>
<USER_TASK:>
Description:
def add_sender_info( self, sender_txhash, nulldata_vin_outpoint, sender_out_data ):
    """
    Record sender information in our block info.
    @sender_txhash: txid of the sender
    @nulldata_vin_outpoint: the 'vout' index from the nulldata tx input that this transaction funded
    """
    assert sender_txhash in self.sender_info.keys(), "Missing sender info for %s" % sender_txhash
    assert nulldata_vin_outpoint in self.sender_info[sender_txhash], "Missing outpoint %s for sender %s" % (nulldata_vin_outpoint, sender_txhash)

    # locate where in our block data this sender belongs
    outpoint_info = self.sender_info[sender_txhash][nulldata_vin_outpoint]
    block_hash = outpoint_info['block_hash']
    relindex = outpoint_info['relindex']
    relinput_index = outpoint_info['relinput']

    value_in_satoshis = sender_out_data['value']
    script_pubkey = sender_out_data['script']

    # classify the funding output's script
    script_info = bits.btc_tx_output_parse_script(script_pubkey)

    sender_info = {
        "value": value_in_satoshis,
        "script_pubkey": script_pubkey,
        "script_type": script_info['type'],
        "addresses": script_info.get('addresses', []),
        "nulldata_vin_outpoint": nulldata_vin_outpoint,
        "txid": sender_txhash,
    }

    # debit this tx's total value
    self.block_info[block_hash]['txns'][relindex]['fee'] += value_in_satoshis

    # remember this sender, but put it in the right place.
    # senders[i] must correspond to tx['vin'][i]
    self.block_info[block_hash]['txns'][relindex]['senders'][relinput_index] = sender_info
    self.block_info[block_hash]['num_senders'] += 1
    return True
<SYSTEM_TASK:>
Make sender information bundle for a particular input of
<END_TASK>
<USER_TASK:>
Description:
def make_sender_info( self, block_hash, txn, i, block_height ):
    """
    Make sender information bundle for a particular input of
    a nulldata transaction.

    We'll use it to go find the transaction output that
    funded the ith input of the given tx.
    """
    outpoint_index = txn['ins'][i]['outpoint']['index']
    return {
        # to be filled in...
        'scriptPubKey': None,
        'addresses': None,

        # for matching the input and sender funded
        "txindex": txn['txindex'],
        "relindex": txn['relindex'],
        "output_index": outpoint_index,
        "block_hash": block_hash,
        "relinput": i,
        "block_height": block_height,
    }
<SYSTEM_TASK:>
Fetch the given list of transactions
<END_TASK>
<USER_TASK:>
Description:
def fetch_txs_rpc( self, bitcoind_opts, txids ):
    """
    Fetch the given list of transactions
    via the JSON-RPC interface.

    Return a dict of parsed transactions on success,
    keyed by txid.

    Return None on error
    """
    headers = {'content-type': 'application/json'}
    reqs = []
    ret = {}
    for i in range(0, len(txids)):
        txid = txids[i]
        if txid == "0000000000000000000000000000000000000000000000000000000000000000":
            # coinbase; we never send these
            ret[txid] = {
                'version': 1,
                'locktime': 0,
                'ins': [],
                'outs': [
                    {
                        'script': '',
                        'value': 0 # not really 0, but we don't care about coinbases anyway
                    }
                ],
            }
            continue

        req = {'method': 'getrawtransaction', 'params': [txid, 0], 'jsonrpc': '2.0', 'id': i}
        reqs.append( req )

    proto = "http"
    # py3 compat fix: dict.has_key() was removed; .get() preserves the
    # original "key present and truthy" semantics
    if bitcoind_opts.get('bitcoind_use_https'):
        proto = "https"

    server_url = "%s://%s:%s@%s:%s" % (proto, bitcoind_opts['bitcoind_user'], bitcoind_opts['bitcoind_passwd'], bitcoind_opts['bitcoind_server'], bitcoind_opts['bitcoind_port'])

    try:
        resp = requests.post( server_url, headers=headers, data=simplejson.dumps(reqs), verify=False )
    # py3 compat fix: `except Exception, e` is invalid Python 3 syntax
    except Exception as e:
        log.exception(e)
        log.error("Failed to fetch %s transactions" % len(txids))
        return None

    # get responses
    try:
        resp_json = resp.json()
        assert type(resp_json) in [list]
    except Exception as e:
        log.exception(e)
        log.error("Failed to parse transactions")
        return None

    try:
        for resp in resp_json:
            assert 'result' in resp, "Missing result"

            txhex = resp['result']
            assert txhex is not None, "Invalid RPC response '%s' (for %s)" % (simplejson.dumps(resp), txids[resp['id']])

            if bits.btc_tx_is_segwit(txhex) and not get_features('segwit'):
                # no segwit support yet
                log.error("FATAL: SegWit transaction detected! Support for SegWit-formatted transactions is not yet activated")
                log.error("Please ensure your bitcoind node has `rpcserialversion=0` set.")
                log.error("Aborting...")
                os.abort()

            try:
                # NOTE: str.decode('hex')/.encode('hex') are Python 2 only;
                # the txid is the byte-reversed double-sha256 of the raw tx
                tx_bin = txhex.decode('hex')
                assert tx_bin is not None

                tx_hash_bin = hashing.bin_double_sha256(tx_bin)[::-1]
                assert tx_hash_bin is not None

                tx_hash = tx_hash_bin.encode('hex')
                assert tx_hash is not None
            except Exception as e:
                log.error("Failed to calculate txid of %s" % txhex)
                raise

            # solicited transaction?
            assert tx_hash in txids, "Unsolicited transaction %s" % tx_hash

            # unique?
            if tx_hash in ret:
                continue

            # parse from hex string
            txn_serializer = TxSerializer()
            txn = txn_serializer.deserialize( StringIO( binascii.unhexlify(txhex) ) )

            ret[tx_hash] = self.parse_tx( txn, {}, "", -1 )

    except Exception as e:
        log.exception(e)
        log.error("Failed to receive transactions")
        return None

    return ret
<SYSTEM_TASK:>
Complete with additional information from original LTI POST data, as available.
<END_TASK>
<USER_TASK:>
Description:
def get_user_details(self, response):
    """ Complete with additional information from original LTI POST data, as available. """
    # None of them is mandatory
    data = {
        'id': response.get('user_id', None),
        # prefer the custom username, falling back to the extension field
        'username': response.get('custom_username', None) or response.get('ext_user_username', None),
        'last_name': response.get('lis_person_name_family', None),
        'email': response.get('lis_person_contact_email_primary', None),
        'first_name': response.get('lis_person_name_given', None),
        'fullname': response.get('lis_person_name_full', None),
    }
    logger.debug("User details being used: " + str(data))
    return data
<SYSTEM_TASK:>
Constructs a MIME message from message and dispatch models.
<END_TASK>
<USER_TASK:>
Description:
def _build_message(self, to, text, subject=None, mtype=None, unsubscribe_url=None):
    """Constructs a MIME message from message and dispatch models."""
    # TODO Maybe file attachments handling through `files` message_model context var.

    if subject is None:
        subject = u'%s' % _('No Subject')

    if mtype == 'html':
        # multipart: attach a plain-text rendering alongside the HTML body
        msg = self.mime_multipart()
        text_part = self.mime_multipart('alternative')
        text_part.attach(self.mime_text(strip_tags(text), _charset='utf-8'))
        text_part.attach(self.mime_text(text, 'html', _charset='utf-8'))
        msg.attach(text_part)
    else:
        msg = self.mime_text(text, _charset='utf-8')

    # common headers, in a fixed order
    header_items = [('From', self.from_email), ('To', to), ('Subject', subject)]
    if unsubscribe_url:
        header_items.append(('List-Unsubscribe', '<%s>' % unsubscribe_url))

    for header_name, header_value in header_items:
        msg[header_name] = header_value

    return msg
<SYSTEM_TASK:>
Schedules an email message for delivery.
<END_TASK>
<USER_TASK:>
Description:
def schedule_email(message, to, subject=None, sender=None, priority=None):
    """Schedules an email message for delivery.

    :param dict, str message: str or dict: use str for simple text email;
        dict - to compile email from a template (default: `sitemessage/messages/email_html__smtp.html`).
    :param list|str|unicode to: recipients addresses or Django User model heir instances
    :param str subject: email subject
    :param User sender: User model heir instance
    :param int priority: number describing message priority. If set overrides priority provided with message type.
    """
    # an explicitly configured shortcut type wins; otherwise pick by payload shape
    if SHORTCUT_EMAIL_MESSAGE_TYPE:
        message_cls = get_registered_message_type(SHORTCUT_EMAIL_MESSAGE_TYPE)
    elif isinstance(message, dict):
        message_cls = EmailHtmlMessage
    else:
        message_cls = EmailTextMessage

    schedule_messages(
        message_cls(subject, message),
        recipients(SHORTCUT_EMAIL_MESSENGER_TYPE, to),
        sender=sender, priority=priority
    )
<SYSTEM_TASK:>
Schedules Jabber XMPP message for delivery.
<END_TASK>
<USER_TASK:>
Description:
def schedule_jabber_message(message, to, sender=None, priority=None):
    """Schedules Jabber XMPP message for delivery.

    :param str message: text to send.
    :param list|str|unicode to: recipients addresses or Django User model heir instances with `email` attributes.
    :param User sender: User model heir instance
    :param int priority: number describing message priority. If set overrides priority provided with message type.
    """
    # route through the SleekXMPP messenger
    xmpp_recipients = recipients('xmppsleek', to)
    schedule_messages(message, xmpp_recipients, sender=sender, priority=priority)
<SYSTEM_TASK:>
Schedules a Tweet for delivery.
<END_TASK>
<USER_TASK:>
Description:
def schedule_tweet(message, to='', sender=None, priority=None):
    """Schedules a Tweet for delivery.

    :param str message: text to send.
    :param list|str|unicode to: recipients addresses or Django User model heir instances with `telegram` attributes.
        If supplied tweets will be @-replies.
    :param User sender: User model heir instance
    :param int priority: number describing message priority. If set overrides priority provided with message type.
    """
    # route through the Twitter messenger
    twitter_recipients = recipients('twitter', to)
    schedule_messages(message, twitter_recipients, sender=sender, priority=priority)
<SYSTEM_TASK:>
Schedules Telegram message for delivery.
<END_TASK>
<USER_TASK:>
Description:
def schedule_telegram_message(message, to, sender=None, priority=None):
    """Schedules Telegram message for delivery.

    :param str message: text to send.
    :param list|str|unicode to: recipients addresses or Django User model heir instances with `telegram` attributes.
    :param User sender: User model heir instance
    :param int priority: number describing message priority. If set overrides priority provided with message type.
    """
    # route through the Telegram messenger
    telegram_recipients = recipients('telegram', to)
    schedule_messages(message, telegram_recipients, sender=sender, priority=priority)
<SYSTEM_TASK:>
Schedules Facebook wall message for delivery.
<END_TASK>
<USER_TASK:>
Description:
def schedule_facebook_message(message, sender=None, priority=None):
    """Schedules Facebook wall message for delivery.

    :param str message: text or URL to publish.
    :param User sender: User model heir instance
    :param int priority: number describing message priority. If set overrides priority provided with message type.
    """
    # Facebook wall posts have no per-user recipient; the address is empty
    fb_recipients = recipients('fb', '')
    schedule_messages(message, fb_recipients, sender=sender, priority=priority)
<SYSTEM_TASK:>
Schedules VKontakte message for delivery.
<END_TASK>
<USER_TASK:>
Description:
def schedule_vkontakte_message(message, to, sender=None, priority=None):
    """Schedules VKontakte message for delivery.

    :param str message: text or URL to publish on wall.
    :param list|str|unicode to: recipients addresses or Django User model heir instances with `vk` attributes.
    :param User sender: User model heir instance
    :param int priority: number describing message priority. If set overrides priority provided with message type.
    """
    # route through the VKontakte messenger
    vk_recipients = recipients('vk', to)
    schedule_messages(message, vk_recipients, sender=sender, priority=priority)
<SYSTEM_TASK:>
Converts recipients data into a list of Recipient objects.
<END_TASK>
<USER_TASK:>
Description:
def _structure_recipients_data(cls, recipients):
    """Converts recipients data into a list of Recipient objects.

    :param list recipients: list of objects
    :return: list of Recipient
    :rtype: list
    """
    try:  # That's all due Django 1.7 apps loading.
        from django.contrib.auth import get_user_model
        USER_MODEL = get_user_model()
    except ImportError:
        # Django 1.4 fallback.
        from django.contrib.auth.models import User as USER_MODEL

    # normalize a single recipient into an iterable
    if not is_iterable(recipients):
        recipients = (recipients,)

    structured = []
    for item in recipients:
        # attach the user object only when the recipient is a User instance
        user = item if isinstance(item, USER_MODEL) else None
        address = cls.get_address(item)  # todo maybe raise an exception of not a string?
        structured.append(Recipient(cls.get_alias(), user, address))
    return structured
<SYSTEM_TASK:>
Marks a dispatch as having error or consequently as failed
<END_TASK>
<USER_TASK:>
Description:
def mark_error(self, dispatch, error_log, message_cls):
    """Marks a dispatch as having error or consequently as failed
    if send retry limit for that message type is exhausted.

    Should be used within send().

    :param Dispatch dispatch: a Dispatch
    :param str error_log: error message
    :param MessageBase message_cls: MessageBase heir
    """
    retry_limit = message_cls.send_retry_limit
    retries_exhausted = retry_limit is not None and (dispatch.retry_count + 1) >= retry_limit

    if retries_exhausted:
        # no attempts left for this message type -- give up on the dispatch
        self.mark_failed(dispatch, error_log)
    else:
        dispatch.error_log = error_log
        self._st['error'].append(dispatch)
<SYSTEM_TASK:>
Marks a dispatch as failed.
<END_TASK>
<USER_TASK:>
Description:
def mark_failed(self, dispatch, error_log):
    """Marks a dispatch as failed.

    Sitemessage won't try to deliver already failed messages.
    Should be used within send().

    :param Dispatch dispatch: a Dispatch
    :param str error_log: error message explaining the failure
    """
    dispatch.error_log = error_log
    # Collected dispatches are flushed/persisted by the send machinery.
    self._st['failed'].append(dispatch)
<SYSTEM_TASK:>
Performs message processing.
<END_TASK>
<USER_TASK:>
Description:
def _process_messages(self, messages, ignore_unknown_message_types=False):
    """Performs message processing: compiles message texts for dispatches
    and hands them over to ``send()``.

    :param dict messages: indexed by message id dict with messages data;
        each value is a ``(message_model, dispatch_models)`` pair
    :param bool ignore_unknown_message_types: whether to silence exceptions
    :raises UnknownMessageTypeError: when a message type is not registered
        and ``ignore_unknown_message_types`` is not set
    """
    with self.before_after_send_handling():
        for message_id, message_data in messages.items():
            message_model, dispatch_models = message_data
            try:
                message_cls = get_registered_message_type(message_model.cls)
            except UnknownMessageTypeError:
                if ignore_unknown_message_types:
                    continue
                raise
            message_type_cache = None
            for dispatch in dispatch_models:
                if not dispatch.message_cache:  # Create actual message text for further usage.
                    try:
                        if message_type_cache is None and not message_cls.has_dynamic_context:
                            # If a message class doesn't depend upon a dispatch data for message compilation,
                            # we'd compile a message just once.
                            message_type_cache = message_cls.compile(message_model, self, dispatch=dispatch)
                        dispatch.message_cache = message_type_cache or message_cls.compile(
                            message_model, self, dispatch=dispatch)
                    except Exception as e:
                        # Record the compilation failure on this dispatch and
                        # keep processing the remaining dispatches.
                        self.mark_error(dispatch, e, message_cls)
            self.send(message_cls, message_model, dispatch_models)
<SYSTEM_TASK:>
Runs the 'configure' program in the working directory.
<END_TASK>
<USER_TASK:>
Description:
def run_configure(self, mandatory=True):
    """Run the 'configure' program in the working directory.

    Args:
        mandatory (bool): Raise an exception if 'configure' fails or a
            'configure' file is missing; otherwise the step is best-effort.
    """
    if not has_file(self.working_dir, 'configure'):
        if not mandatory:
            return
        raise FileNotFoundError(
            "Could not find a configure script for execution.")
    try:
        RunningProgram(self, 'configure').expect_exit_status(0)
    except Exception:
        # Non-mandatory runs swallow failures on purpose.
        if mandatory:
            raise
<SYSTEM_TASK:>
Runs a compiler in the working directory.
<END_TASK>
<USER_TASK:>
Description:
def run_compiler(self, compiler=GCC, inputs=None, output=None):
    """Run a compiler in the working directory.

    Args:
        compiler (tuple): The compiler program and its command-line arguments,
                          including placeholders for output and input files.
        inputs (tuple): The list of input files for the compiler.
        output (str): The name of the output file.

    Raises exceptions from the underlying program run untouched.
    """
    cmdline = compiler_cmdline(compiler=compiler, inputs=inputs, output=output)
    prog = RunningProgram(self, *cmdline)
    prog.expect_exit_status(0)
<SYSTEM_TASK:>
Combined call of 'configure', 'make' and the compiler.
<END_TASK>
<USER_TASK:>
Description:
def run_build(self, compiler=GCC, inputs=None, output=None):
    """Combined call of 'configure', 'make' and the compiler.

    The success of 'configure' and 'make' is optional (they run with
    ``mandatory=False``); only the final compiler call may raise.
    The arguments are the same as for run_compiler.
    """
    logger.info("Running build steps ...")
    self.run_configure(mandatory=False)
    self.run_make(mandatory=False)
    self.run_compiler(compiler=compiler,
                      inputs=inputs,
                      output=output)
<SYSTEM_TASK:>
Spawns a program in the working directory.
<END_TASK>
<USER_TASK:>
Description:
def spawn_program(self, name, arguments=None, timeout=30, exclusive=False):
    """Spawn a program in the working directory for interaction.

    The returned RunningProgram object allows interaction with the
    running program.

    Args:
        name (str): The name of the program to be executed.
        arguments (tuple): Command-line arguments for the program.
        timeout (int): The timeout for execution.
        exclusive (bool): Prevent parallel validation runs on the
                          test machines, e.g. when doing performance
                          measurements for submitted code.

    Returns:
        RunningProgram: An object representing the running program.
    """
    # `None` default instead of a mutable `[]`, which would be shared
    # between calls; normalize to a fresh list per invocation.
    if arguments is None:
        arguments = []
    logger.debug("Spawning program for interaction ...")
    if exclusive:
        kill_longrunning(self.config)
    return RunningProgram(self, name, arguments, timeout)
<SYSTEM_TASK:>
Runs a program in the working directory to completion.
<END_TASK>
<USER_TASK:>
Description:
def run_program(self, name, arguments=None, timeout=30, exclusive=False):
    """Run a program in the working directory to completion.

    Args:
        name (str): The name of the program to be executed.
        arguments (tuple): Command-line arguments for the program.
        timeout (int): The timeout for execution.
        exclusive (bool): Prevent parallel validation runs on the
                          test machines, e.g. when doing performance
                          measurements for submitted code.

    Returns:
        tuple: A tuple of the exit code, as reported by the operating system,
        and the output produced during the execution.
    """
    # `None` default instead of a mutable `[]`, which would be shared
    # between calls; normalize to a fresh list per invocation.
    if arguments is None:
        arguments = []
    logger.debug("Running program ...")
    if exclusive:
        kill_longrunning(self.config)
    prog = RunningProgram(self, name, arguments, timeout)
    return prog.expect_end()
<SYSTEM_TASK:>
Scans the student files for text patterns.
<END_TASK>
<USER_TASK:>
Description:
def grep(self, regex):
    """Scan the student files for a text pattern.

    Args:
        regex (str): Regular expression used for scanning inside the files.

    Returns:
        list: Names of the matching files in the working directory
        (each matching file reported once).
    """
    matches = []
    logger.debug("Searching student files for '{0}'".format(regex))
    # Compile once; files are read as bytes, so match against encoded pattern.
    pattern = re.compile(regex.encode())
    for fname in self.student_files:
        path = os.path.join(self.working_dir, fname)
        if not os.path.isfile(path):
            continue
        # Context manager closes the handle deterministically
        # (the original leaked open file objects).
        with open(path, 'br') as f:
            for line in f:
                if pattern.search(line):
                    logger.debug("{0} contains '{1}'".format(fname, regex))
                    matches.append(fname)
                    # One hit per file is enough; the original appended the
                    # file name once per matching line, producing duplicates.
                    break
    return matches
<SYSTEM_TASK:>
Checks the student submission for specific files.
<END_TASK>
<USER_TASK:>
Description:
def ensure_files(self, filenames):
    """Check the student submission for specific files.

    Args:
        filenames (tuple): The list of file names to be checked for.

    Returns:
        bool: True if all files are found in the working directory.
    """
    logger.debug("Testing {0} for the following files: {1}".format(
        self.working_dir, filenames))
    dircontent = os.listdir(self.working_dir)
    return all(fname in dircontent for fname in filenames)
<SYSTEM_TASK:>
Display any available live chats as advertisements.
<END_TASK>
<USER_TASK:>
Description:
def live_chat_banner(context):
    """Display any available live chats as advertisements."""
    context = copy(context)
    # Find any upcoming or current live chat. The chat date must be less
    # than 5 days away, or currently in progress.
    previous = LiveChat.chat_finder.get_last_live_chat()
    if previous:
        context['last_live_chat'] = {
            'title': previous.title,
            'chat_ends_at': previous.chat_ends_at,
            'expert': previous.expert,
            'url': reverse('livechat:show_archived_livechat'),
        }
    upcoming = LiveChat.chat_finder.upcoming_live_chat()
    if upcoming is not None:
        context['live_chat_advert'] = {
            'title': upcoming.title,
            'description': upcoming.description,
            'expert': upcoming.expert,
            'commenting_closed': upcoming.comments_closed,
            'cancelled': upcoming.is_cancelled,
            'archived': upcoming.is_archived,
            'in_progress': upcoming.is_in_progress(),
            'url': reverse('livechat:show_livechat', kwargs={'slug': upcoming.slug}),
            'archive_url': reverse('livechat:show_archived_livechat'),
            'datetime': {
                'time': upcoming.chat_starts_at.time,
                'date': upcoming.chat_starts_at.date,
            },
        }
    return context
<SYSTEM_TASK:>
Get the exit status of the program execution.
<END_TASK>
<USER_TASK:>
Description:
def get_exitstatus(self):
    """Get the exit status of the program execution.

    Returns:
        int: Exit status as reported by the operating system,
        or None if it is not available.
    """
    # NOTE(review): pexpect keeps exitstatus as None when the process was
    # terminated by a signal or is still running -- confirm against pexpect docs.
    logger.debug("Exit status is {0}".format(self._spawn.exitstatus))
    return self._spawn.exitstatus
<SYSTEM_TASK:>
Wait until the running program performs some given output, or terminates.
<END_TASK>
<USER_TASK:>
Description:
def expect_output(self, pattern, timeout=-1):
    """Wait until the running program performs some given output, or terminates.

    Args:
        pattern: The pattern the output should be checked for.
        timeout (int): How many seconds should be waited for the output.

    The pattern argument may be a string, a compiled regular expression,
    or a list of any of those types. Strings will be compiled into regular expressions.

    Returns:
        int: The index into the pattern list. If the pattern was not a list, it returns 0 on a successful match.

    Raises:
        TimeoutException: The output did not match within the given time frame.
        TerminationException: The program terminated before producing the output.
        NestedException: An internal problem occured while waiting for the output.
    """
    logger.debug("Expecting output '{0}' from '{1}'".format(pattern, self.name))
    # Translate low-level pexpect exceptions into the validator's own
    # hierarchy, attaching the output gathered so far. The order matters:
    # EOF and TIMEOUT must be caught before the generic Exception fallback.
    try:
        return self._spawn.expect(pattern, timeout)
    except pexpect.exceptions.EOF as e:
        logger.debug("Raising termination exception.")
        raise TerminationException(instance=self, real_exception=e, output=self.get_output())
    except pexpect.exceptions.TIMEOUT as e:
        logger.debug("Raising timeout exception.")
        raise TimeoutException(instance=self, real_exception=e, output=self.get_output())
    except Exception as e:
        logger.debug("Expecting output failed: " + str(e))
        raise NestedException(instance=self, real_exception=e, output=self.get_output())
<SYSTEM_TASK:>
Sends an input line to the running program, including os.linesep.
<END_TASK>
<USER_TASK:>
Description:
def sendline(self, text):
    """Sends an input line to the running program, including os.linesep.

    Args:
        text (str): The input text to be send.

    Raises:
        TimeoutException: Sending the input timed out.
        TerminationException: The program terminated before / while / after sending the input.
        NestedException: An internal problem occured while sending the input.
    """
    logger.debug("Sending input '{0}' to '{1}'".format(text, self.name))
    # Same exception translation scheme as expect_output(): specific
    # pexpect errors first, generic fallback last.
    try:
        return self._spawn.sendline(text)
    except pexpect.exceptions.EOF as e:
        logger.debug("Raising termination exception.")
        raise TerminationException(instance=self, real_exception=e, output=self.get_output())
    except pexpect.exceptions.TIMEOUT as e:
        logger.debug("Raising timeout exception.")
        raise TimeoutException(instance=self, real_exception=e, output=self.get_output())
    except Exception as e:
        logger.debug("Sending input failed: " + str(e))
        raise NestedException(instance=self, real_exception=e, output=self.get_output())
<SYSTEM_TASK:>
Wait for the running program to finish.
<END_TASK>
<USER_TASK:>
Description:
def expect_end(self):
    """Wait for the running program to finish.

    Returns:
        tuple: The exit code, as reported by the operating system,
        and the output produced.

    Raises:
        TerminationException / TimeoutException / NestedException:
            translated from the underlying pexpect errors, with the
            collected output attached.
    """
    logger.debug("Waiting for termination of '{0}'".format(self.name))
    try:
        # Make sure we fetch the last output bytes.
        # Recommendation from the pexpect docs.
        self._spawn.expect(pexpect.EOF)
        self._spawn.wait()
        dircontent = str(os.listdir(self.job.working_dir))
        logger.debug("Working directory after execution: " + dircontent)
        return self.get_exitstatus(), self.get_output()
    except pexpect.exceptions.EOF as e:
        logger.debug("Raising termination exception.")
        raise TerminationException(instance=self, real_exception=e, output=self.get_output())
    except pexpect.exceptions.TIMEOUT as e:
        logger.debug("Raising timeout exception.")
        raise TimeoutException(instance=self, real_exception=e, output=self.get_output())
    except Exception as e:
        logger.debug("Waiting for expected program end failed.")
        raise NestedException(instance=self, real_exception=e, output=self.get_output())
<SYSTEM_TASK:>
Wait for the running program to finish and expect some exit status.
<END_TASK>
<USER_TASK:>
Description:
def expect_exitstatus(self, exit_status):
    """Wait for the running program to finish and check its exit status.

    Args:
        exit_status (int): The expected exit status.

    Raises:
        WrongExitStatusException: The produced exit status is not the
            expected one, or no exit status is available at all.
    """
    self.expect_end()
    logger.debug("Checking exit status of '{0}', output so far: {1}".format(
        self.name, self.get_output()))
    if self._spawn.exitstatus is None:
        raise WrongExitStatusException(
            instance=self, expected=exit_status, output=self.get_output())
    # Compare by value, not identity: `is` on integers only works by
    # accident for small values thanks to CPython's small-int caching.
    if self._spawn.exitstatus != exit_status:
        raise WrongExitStatusException(
            instance=self,
            expected=exit_status,
            got=self._spawn.exitstatus,
            output=self.get_output())
<SYSTEM_TASK:>
Handles unsubscribe request.
<END_TASK>
<USER_TASK:>
Description:
def unsubscribe(request, message_id, dispatch_id, hashed, redirect_to=None):
    """Handles unsubscribe request.

    Delegates to the generic view machinery, which validates ``hashed``
    and fires ``sig_unsubscribe_failed`` on failure.

    :param Request request:
    :param int message_id:
    :param int dispatch_id:
    :param str hashed: request signature to validate
    :param str redirect_to: URL to redirect to after handling
    :return: redirect response
    """
    return _generic_view(
        'handle_unsubscribe_request', sig_unsubscribe_failed,
        request, message_id, dispatch_id, hashed, redirect_to=redirect_to
    )
<SYSTEM_TASK:>
Handles mark message as read request.
<END_TASK>
<USER_TASK:>
Description:
def mark_read(request, message_id, dispatch_id, hashed, redirect_to=None):
    """Handles mark message as read request.

    :param Request request:
    :param int message_id:
    :param int dispatch_id:
    :param str hashed: request signature to validate
    :param str redirect_to: URL to redirect to after handling
    :return: redirect response
    """
    if redirect_to is None:
        # Default to a blank image, presumably so the URL can serve as a
        # read-tracking pixel in HTML mail -- TODO confirm against templates.
        redirect_to = get_static_url('img/sitemessage/blank.png')
    return _generic_view(
        'handle_mark_read_request', sig_mark_read_failed,
        request, message_id, dispatch_id, hashed, redirect_to=redirect_to
    )
<SYSTEM_TASK:>
Schedules a message or messages.
<END_TASK>
<USER_TASK:>
Description:
def schedule_messages(messages, recipients=None, sender=None, priority=None):
    """Schedule a message or several messages.

    :param MessageBase|str|list messages: str or MessageBase heir or list - use str to create PlainTextMessage.
    :param list|None recipients: recipients addresses or Django User model heir instances.
        If `None` Dispatches should be created before send using `prepare_dispatches()`.
    :param User|None sender: User model heir instance
    :param int priority: number describing message priority. If set overrides priority provided with message type.
    :return: list of tuples - (message_model, dispatches_models)
    :rtype: list
    """
    if not is_iterable(messages):
        messages = (messages,)

    results = []
    for message in messages:
        if isinstance(message, six.string_types):
            # Bare strings become plain text messages.
            message = PlainTextMessage(message)
        effective_priority = message.priority if priority is None else priority
        results.append(
            message.schedule(sender=sender, recipients=recipients, priority=effective_priority))
    return results
<SYSTEM_TASK:>
Sends scheduled messages.
<END_TASK>
<USER_TASK:>
Description:
def send_scheduled_messages(priority=None, ignore_unknown_messengers=False, ignore_unknown_message_types=False):
    """Send scheduled messages.

    :param int, None priority: number to limit sending message by this priority.
    :param bool ignore_unknown_messengers: to silence UnknownMessengerError
    :param bool ignore_unknown_message_types: to silence UnknownMessageTypeError
    :raises UnknownMessengerError:
    :raises UnknownMessageTypeError:
    """
    unsent = Dispatch.get_unsent(priority=priority)
    for messenger_id, dispatches in Dispatch.group_by_messengers(unsent).items():
        try:
            messenger = get_registered_messenger_object(messenger_id)
            messenger._process_messages(
                dispatches, ignore_unknown_message_types=ignore_unknown_message_types)
        except UnknownMessengerError:
            if not ignore_unknown_messengers:
                raise
<SYSTEM_TASK:>
Sends a notification email if any undelivered dispatches.
<END_TASK>
<USER_TASK:>
Description:
def check_undelivered(to=None):
    """Send a notification email if there are undelivered dispatches.

    Returns undelivered (failed) dispatches count.

    :param str|unicode to: Recipient address. If not set Django ADMINS setting is used.
    :rtype: int
    """
    failed_count = Dispatch.objects.filter(dispatch_status=Dispatch.DISPATCH_STATUS_FAILED).count()
    if not failed_count:
        return failed_count

    from sitemessage.shortcuts import schedule_email
    from sitemessage.messages.email import EmailTextMessage

    if to is None:
        # Fall back to the addresses from the Django ADMINS setting.
        admins = settings.ADMINS
        if admins:
            to = list(dict(admins).values())

    if to:
        priority = 999
        register_message_types(EmailTextMessage)
        schedule_email(
            'You have %s undelivered dispatch(es) at %s' % (failed_count, get_site_url()),
            subject='[SITEMESSAGE] Undelivered dispatches',
            to=to, priority=priority)
        send_scheduled_messages(priority=priority)

    return failed_count
<SYSTEM_TASK:>
Automatically creates dispatches for messages without them.
<END_TASK>
<USER_TASK:>
Description:
def prepare_dispatches():
    """Automatically create dispatches for messages without them.

    :return: list of Dispatch
    :rtype: list
    """
    dispatches = []
    cache = {}
    for message_model in Message.get_without_dispatches():
        entry = cache.get(message_model.cls)
        if entry is None:
            message_cls = get_registered_message_type(message_model.cls)
            # Subscribers are resolved once per message type.
            entry = (message_cls, message_cls.get_subscribers())
            cache[message_model.cls] = entry
        message_cls, _subscribers = entry
        dispatches.extend(message_cls.prepare_dispatches(message_model))
    return dispatches
<SYSTEM_TASK:>
Returns a two element tuple with user subscription preferences to render in UI.
<END_TASK>
<USER_TASK:>
Description:
def get_user_preferences_for_ui(user, message_filter=None, messenger_filter=None, new_messengers_titles=None):
    """Returns a two element tuple with user subscription preferences to render in UI.

    Message types with the same titles are merged into one row.

    First element:
        A list of messengers titles.

    Second element:
        User preferences dictionary indexed by message type titles.
        Preferences (dictionary values) are lists of tuples:
        (preference_alias, is_supported_by_messenger_flag, user_subscribed_flag)

        Example:
        {'My message type': [('test_message|smtp', True, False), ...]}

    :param User user:
    :param callable|None message_filter: A callable accepting a message object to filter out message types
    :param callable|None messenger_filter: A callable accepting a messenger object to filter out messengers
    :param dict|None new_messengers_titles: Messenger alias -> title mapping overriding default titles
    :rtype: tuple
    """
    if new_messengers_titles is None:
        new_messengers_titles = {}

    # messenger alias -> set of message aliases it can deliver
    msgr_to_msg = defaultdict(set)
    # message title -> list of message aliases sharing that title
    msg_titles = OrderedDict()
    # messenger title -> messenger alias
    msgr_titles = OrderedDict()

    for msgr in get_registered_messenger_objects().values():
        if not (messenger_filter is None or messenger_filter(msgr)) or not msgr.allow_user_subscription:
            continue
        msgr_alias = msgr.alias
        msgr_title = new_messengers_titles.get(msgr.alias) or msgr.title
        for msg in get_registered_message_types().values():
            if not (message_filter is None or message_filter(msg)) or not msg.allow_user_subscription:
                continue
            msgr_supported = msg.supported_messengers
            # An empty supported_messengers list means "any messenger".
            is_supported = (not msgr_supported or msgr.alias in msgr_supported)
            if not is_supported:
                continue
            msg_alias = msg.alias
            msg_titles.setdefault('%s' % msg.title, []).append(msg_alias)
            msgr_to_msg[msgr_alias].update((msg_alias,))
            msgr_titles[msgr_title] = msgr_alias

    def sort_titles(titles):
        # Alphabetical ordering by title for stable UI rendering.
        return OrderedDict(sorted([(k, v) for k, v in titles.items()], key=itemgetter(0)))

    msgr_titles = sort_titles(msgr_titles)

    user_prefs = OrderedDict()
    # Existing subscriptions as "message|messenger" alias strings.
    user_subscriptions = ['%s%s%s' % (pref.message_cls, _ALIAS_SEP, pref.messenger_cls)
                          for pref in Subscription.get_for_user(user)]

    for msg_title, msg_aliases in sort_titles(msg_titles).items():
        for __, msgr_alias in msgr_titles.items():
            msg_candidates = msgr_to_msg[msgr_alias].intersection(msg_aliases)
            alias = ''
            msg_supported = False
            subscribed = False
            if msg_candidates:
                alias = '%s%s%s' % (msg_candidates.pop(), _ALIAS_SEP, msgr_alias)
                msg_supported = True
                subscribed = alias in user_subscriptions
            user_prefs.setdefault(msg_title, []).append((alias, msg_supported, subscribed))

    return msgr_titles.keys(), user_prefs
<SYSTEM_TASK:>
Sets user subscription preferences using data from a request.
<END_TASK>
<USER_TASK:>
Description:
def set_user_preferences_from_request(request):
    """Set user subscription preferences using data from a request.

    Expects data sent by form built with `sitemessage_prefs_table` template tag.

    :param request:
    :rtype: bool
    :return: Flag, whether prefs were found in the request.
    """
    prefs = []
    for pref in request.POST.getlist(_PREF_POST_KEY):
        message_alias, messenger_alias = pref.split(_ALIAS_SEP)
        try:
            # Silently skip aliases that are no longer registered.
            get_registered_message_type(message_alias)
            get_registered_messenger_object(messenger_alias)
        except (UnknownMessengerError, UnknownMessageTypeError):
            continue
        prefs.append((message_alias, messenger_alias))

    Subscription.replace_for_user(request.user, prefs)
    return bool(prefs)
<SYSTEM_TASK:>
Checks if a live chat is currently on the go, and add it to the request
<END_TASK>
<USER_TASK:>
Description:
def current_livechat(request):
    """
    Check whether a live chat is currently on the go, and add it to the
    request context.

    This is to allow the AskMAMA URL in the top-navigation to be redirected
    to the live chat object view consistently, and to make it available to
    the views and tags that depend on it.
    """
    livechat = LiveChat.chat_finder.get_current_live_chat()
    if not livechat:
        return {}
    can_comment, reason_code = livechat.can_comment(request)
    return {
        'live_chat': {
            'current_live_chat': livechat,
            'can_render_comment_form': can_comment,
            'can_comment_code': reason_code,
        },
    }
<SYSTEM_TASK:>
Complete with additional information from session, as available.
<END_TASK>
<USER_TASK:>
Description:
def get_user_details(self, response):
    """Complete with additional information from session, as available."""
    details = {
        'id': response['id'],
        'username': response.get('username', None),
        'email': response.get('email', None),
        'first_name': response.get('first_name', None),
        'last_name': response.get('last_name', None),
    }
    first, last = details['first_name'], details['last_name']
    # Only synthesize a full name when both parts are present.
    if first and last:
        details['fullname'] = first + ' ' + last
    return details
<SYSTEM_TASK:>
Create a profile for the user, when missing.
<END_TASK>
<USER_TASK:>
Description:
def post_user_login(sender, request, user, **kwargs):
    """
    Create a profile for the user, when missing.

    Make sure that all neccessary user groups exist and have the right permissions.
    We need that automatism for people not calling the configure tool,
    admin rights for admins after the first login, and similar cases.

    Signal handler signature: (sender, request, user, **kwargs).
    """
    logger.debug("Running post-processing for user login.")
    # Users created by social login or admins have no profile.
    # We fix that during their first login.
    try:
        with transaction.atomic():
            profile, created = UserProfile.objects.get_or_create(user=user)
            if created:
                logger.info("Created missing profile for user " + str(user.pk))
    except Exception as e:
        # Best-effort: a profile creation failure must not block the login.
        logger.error("Error while creating user profile: " + str(e))
    check_permission_system()
<SYSTEM_TASK:>
Returns a list of Recipient objects subscribed for this message type.
<END_TASK>
<USER_TASK:>
Description:
def get_subscribers(cls, active_only=True):
    """Return a list of Recipient objects subscribed for this message type.

    :param bool active_only: Skip subscriptions whose recipient user is inactive.
    :return: list of Recipient
    """
    subscribers = []
    for subscription in Subscription.get_for_message_cls(cls.alias):
        messenger_cls = subscription.messenger_cls
        address = subscription.address
        recipient = subscription.recipient

        # Do not send messages to inactive users.
        if active_only and recipient and not getattr(recipient, 'is_active', False):
            continue

        if address is None:
            # No explicit address: try to derive one from the messenger.
            try:
                address = get_registered_messenger_object(messenger_cls).get_address(recipient)
            except UnknownMessengerError:
                pass

        if address and isinstance(address, string_types):
            subscribers.append(Recipient(messenger_cls, recipient, address))
    return subscribers
<SYSTEM_TASK:>
Returns a common pattern sitemessage URL.
<END_TASK>
<USER_TASK:>
Description:
def _get_url(cls, name, message_model, dispatch_model):
    """Returns a common pattern sitemessage URL (absolute, prefixed with the site URL).

    :param str name: URL name
    :param Message message_model:
    :param Dispatch|None dispatch_model: without a dispatch '' is returned
    :return: URL string, or '' when it cannot be built
    :rtype: str
    """
    # APP_URLS_ATTACHED is a tri-state module global:
    #   None  - not yet known whether sitemessage URLs are attached,
    #   False - known to be not attached (a reverse already failed),
    #   other - attached.
    global APP_URLS_ATTACHED
    url = ''
    if dispatch_model is None:
        return url
    if APP_URLS_ATTACHED != False:  # sic! `!= False` lets both None (unknown) and True through.
        hashed = cls.get_dispatch_hash(dispatch_model.id, message_model.id)
        try:
            url = reverse(name, args=[message_model.id, dispatch_model.id, hashed])
            url = '%s%s' % (get_site_url(), url)
        except NoReverseMatch:
            if APP_URLS_ATTACHED is None:
                # First failed reverse: remember that the app URLs are not attached.
                APP_URLS_ATTACHED = False
    return url
<SYSTEM_TASK:>
Handles user subscription cancelling request.
<END_TASK>
<USER_TASK:>
Description:
def handle_unsubscribe_request(cls, request, message, dispatch, hash_is_valid, redirect_to):
    """Handle a user subscription cancelling request.

    :param Request request: Request instance
    :param Message message: Message model instance
    :param Dispatch dispatch: Dispatch model instance
    :param bool hash_is_valid: Flag indicating that user supplied request signature is correct
    :param str redirect_to: Redirection URL
    :return: redirect response
    """
    signal = sig_unsubscribe_failed
    if hash_is_valid:
        Subscription.cancel(
            dispatch.recipient_id or dispatch.address, cls.alias, dispatch.messenger
        )
        signal = sig_unsubscribe_success
    signal.send(cls, request=request, message=message, dispatch=dispatch)
    return redirect(redirect_to)
<SYSTEM_TASK:>
Handles a request to mark a message as read.
<END_TASK>
<USER_TASK:>
Description:
def handle_mark_read_request(cls, request, message, dispatch, hash_is_valid, redirect_to):
    """Handle a request to mark a message as read.

    :param Request request: Request instance
    :param Message message: Message model instance
    :param Dispatch dispatch: Dispatch model instance
    :param bool hash_is_valid: Flag indicating that user supplied request signature is correct
    :param str redirect_to: Redirection URL
    :return: redirect response
    """
    signal = sig_mark_read_failed
    if hash_is_valid:
        dispatch.mark_read()
        dispatch.save()
        signal = sig_mark_read_success
    signal.send(cls, request=request, message=message, dispatch=dispatch)
    return redirect(redirect_to)
<SYSTEM_TASK:>
Get a template path to compile a message.
<END_TASK>
<USER_TASK:>
Description:
def get_template(cls, message, messenger):
    """Get a template path to compile a message.

    Lookup order:

    1. `tpl` field of message context;
    2. `template` field of message class;
    3. deduced from message, messenger data and `template_ext` message type field
       (e.g. `sitemessage/messages/plain__smtp.txt` for `plain` message type).

    :param Message message: Message model
    :param MessengerBase messenger: a MessengerBase heir
    :return: template path
    :rtype: str
    """
    template = message.context.get('tpl', None)
    if template:  # Template name is taken from message context.
        return template

    if cls.template is not None:
        return cls.template

    # Deduce the template name WITHOUT caching it on the class: the deduced
    # name depends on the messenger alias, so caching the first result
    # would wrongly reuse one messenger's template for all the others.
    return 'sitemessage/messages/%s__%s.%s' % (
        cls.get_alias(), messenger.get_alias(), cls.template_ext
    )
<SYSTEM_TASK:>
Compiles and returns a message text.
<END_TASK>
<USER_TASK:>
Description:
def compile(cls, message, messenger, dispatch=None):
    """Compiles and returns a message text.

    Considers `use_tpl` field from message context to decide whether
    template compilation is used.

    Otherwise a SIMPLE_TEXT_ID field from message context is used as message contents.

    NOTE: in the template branch, the message model's context dict is
    updated in place with rendering helpers (SITE_URL, directives, models).

    :param Message message: model instance
    :param MessengerBase messenger: MessengerBase heir instance
    :param Dispatch dispatch: model instance to consider context from
    :return: compiled message text
    :rtype: str
    """
    if message.context.get('use_tpl', False):
        context = message.context  # NB: this aliases (and below mutates) the model's dict.
        context.update({
            'SITE_URL': get_site_url(),
            'directive_unsubscribe': cls.get_unsubscribe_directive(message, dispatch),
            'directive_mark_read': cls.get_mark_read_directive(message, dispatch),
            'message_model': message,
            'dispatch_model': dispatch
        })
        context = cls.get_template_context(context)
        return render_to_string(cls.get_template(message, messenger), context)
    return message.context[cls.SIMPLE_TEXT_ID]
<SYSTEM_TASK:>
Helper method to structure initial message context data.
<END_TASK>
<USER_TASK:>
Description:
def update_context(cls, base_context, str_or_dict, template_path=None):
    """Helper method to structure initial message context data.

    NOTE: updates `base_context` inplace.

    :param dict base_context: context dict to update
    :param dict|str str_or_dict: text representing a message, or a dict to be
        placed into message context.
    :param str template_path: template path to be used for message rendering
    """
    if isinstance(str_or_dict, dict):
        base_context.update(str_or_dict)
        # Render via template unless the dict explicitly carries a
        # simple-text payload under SIMPLE_TEXT_ID.
        base_context['use_tpl'] = cls.SIMPLE_TEXT_ID not in str_or_dict
    else:
        # Plain text message. The membership test above applies only to the
        # dict branch now: the original also ran it on strings, accidentally
        # performing a substring check on the message text.
        base_context[cls.SIMPLE_TEXT_ID] = str_or_dict

    base_context['tpl'] = template_path
<SYSTEM_TASK:>
Creates Dispatch models for a given message and return them.
<END_TASK>
<USER_TASK:>
Description:
def prepare_dispatches(cls, message, recipients=None):
    """Create Dispatch models for a given message and return them.

    :param Message message: Message model instance
    :param list|None recipients: A list of Recipient objects; falls back
        to this message type's subscribers when empty or not given.
    :return: list of created Dispatch models
    :rtype: list
    """
    target_recipients = recipients or cls.get_subscribers()
    return Dispatch.create(message, target_recipients)
<SYSTEM_TASK:>
Returns a dictionary of never expired page token indexed by page names.
<END_TASK>
<USER_TASK:>
Description:
def get_page_access_token(self, app_id, app_secret, user_token):
    """Return a dictionary of never expired page tokens indexed by page names.

    :param str app_id: Application ID
    :param str app_secret: Application secret
    :param str user_token: User short-lived token
    :rtype: dict
    """
    url_extend = (
        self._url_base + '/oauth/access_token?grant_type=fb_exchange_token&'
        'client_id=%(app_id)s&client_secret=%(app_secret)s&fb_exchange_token=%(user_token)s')
    response = self.lib.get(url_extend % {
        'app_id': app_id, 'app_secret': app_secret, 'user_token': user_token})
    # NOTE(review): assumes the token endpoint answers `access_token=...`
    # as plain text; newer Graph API versions return JSON -- verify.
    long_lived_token = response.text.split('=')[-1]

    response = self.lib.get(
        self._url_versioned + '/me/accounts?access_token=%s' % long_lived_token)
    return {
        item['name']: item['access_token']
        for item in response.json()['data'] if item.get('access_token')
    }
<SYSTEM_TASK:>
Remove leading and trailing slashes from a URL
<END_TASK>
<USER_TASK:>
Description:
def normalize_url(url: str) -> str:
    """
    Remove a single leading and a single trailing slash from a URL.

    :param url: URL
    :return: URL with no leading and trailing slashes
    :private:
    """
    # Slice-based checks are safe on the empty string.
    if url[:1] == '/':
        url = url[1:]
    if url[-1:] == '/':
        url = url[:-1]
    return url
<SYSTEM_TASK:>
Yield URL parts. The given parts are usually in reverse order.
<END_TASK>
<USER_TASK:>
Description:
def _unwrap(variable_parts: VariablePartsType):
    """
    Yield URL parts. The given parts are usually in reverse order.

    ``variable_parts`` is a linked list built from nested 2-tuples:
    ``(rest, (var_type, part))``.  While walking it, consecutive
    VAR_ANY parts are collected and emitted together as one tuple
    (re-reversed back into URL order); ordinary parts are emitted as-is.
    """
    curr_parts = variable_parts
    var_any = []

    while curr_parts:
        curr_parts, (var_type, part) = curr_parts

        if var_type == Routes._VAR_ANY_NODE:
            # Accumulate: a :*var may span several URL parts.
            var_any.append(part)
            continue

        if var_type == Routes._VAR_ANY_BREAK:
            # Boundary between two :*var runs (walking in reverse):
            # flush the collected run, then start a new one with this part.
            if var_any:
                yield tuple(reversed(var_any))
                var_any.clear()

            var_any.append(part)
            continue

        # Regular single variable part: flush any pending :*var run first.
        if var_any:
            yield tuple(reversed(var_any))
            var_any.clear()

            yield part
            continue

        yield part

    # Flush a :*var run left over at the end of the walk.
    if var_any:
        yield tuple(reversed(var_any))
<SYSTEM_TASK:>
Map keys to variables. This map\
<END_TASK>
<USER_TASK:>
Description:
def make_params(
        key_parts: Sequence[str],
        variable_parts: VariablePartsType) -> Dict[str, Union[str, Tuple[str]]]:
    """
    Map keys to variables. This maps URL-pattern variables to
    the related parts of a concrete URL.

    :param key_parts: A list of URL parts
    :param variable_parts: A linked-list (ala nested tuples) of URL parts
    :return: The param dict with the values assigned to the keys
    :private:
    """
    # The unwrapped variable parts come out in reverse order, so the key
    # parts are reversed instead -- avoiding the O(n) space a second
    # reversal of the variables would need.
    values = _unwrap(variable_parts)
    return {key: value for key, value in zip(reversed(key_parts), values)}
<SYSTEM_TASK:>
Split a regular URL into parts
<END_TASK>
<USER_TASK:>
Description:
def _deconstruct_url(self, url: str) -> List[str]:
    """
    Split a regular URL into parts.

    :param url: A normalized URL
    :return: Parts of the URL
    :raises kua.routes.RouteError: If the depth of the URL exceeds
        the max depth of the deepest registered pattern
    :private:
    """
    # Splitting at most max_depth+1 times is enough: anything deeper
    # cannot match any registered pattern anyway.
    segments = url.split('/', self._max_depth + 1)
    if depth_of(segments) <= self._max_depth:
        return segments
    raise RouteError('No match')
<SYSTEM_TASK:>
Match URL parts to a registered pattern.
<END_TASK>
<USER_TASK:>
Description:
def _match(self, parts: Sequence[str]) -> RouteResolved:
    """
    Match URL parts to a registered pattern.
    This function is basically where all\
    the CPU-heavy work is done.
    :param parts: URL parts
    :return: Matched route
    :raises kua.routes.RouteError: If there is no match
    :private:
    """ |
    route_match = None # type: RouteResolved
    route_variable_parts = tuple() # type: VariablePartsType
    # (route_partial, variable_parts, depth)
    to_visit = [(self._routes, tuple(), 0)] # type: List[Tuple[dict, tuple, int]]
    # Walk through the graph,
    # keep track of all possible
    # matching branches and do
    # backtracking if needed
    while to_visit:
        curr, curr_variable_parts, depth = to_visit.pop()
        try:
            part = parts[depth]
        except IndexError:
            # Ran out of URL parts: this branch matches only if a route
            # terminates exactly at this node.
            if self._ROUTE_NODE in curr:
                route_match = curr[self._ROUTE_NODE]
                route_variable_parts = curr_variable_parts
                break
            else:
                continue
        if self._VAR_ANY_NODE in curr:
            # :*var may keep consuming this part (stay on the same node)...
            to_visit.append((
                {self._VAR_ANY_NODE: curr[self._VAR_ANY_NODE]},
                (curr_variable_parts,
                 (self._VAR_ANY_NODE, part)),
                depth + 1))
            # ...or stop here and let the rest of the pattern match the
            # following parts (marked with the "break" tag).
            to_visit.append((
                curr[self._VAR_ANY_NODE],
                (curr_variable_parts,
                 (self._VAR_ANY_BREAK, part)),
                depth + 1))
        if self._VAR_NODE in curr:
            # Single-part variable (:var) consumes exactly this part.
            to_visit.append((
                curr[self._VAR_NODE],
                (curr_variable_parts,
                 (self._VAR_NODE, part)),
                depth + 1))
        if part in curr:
            # Literal match; pushed last so it is popped (tried) first.
            to_visit.append((
                curr[part],
                curr_variable_parts,
                depth + 1))
    if not route_match:
        raise RouteError('No match')
    return RouteResolved(
        params=make_params(
            key_parts=route_match.key_parts,
            variable_parts=route_variable_parts),
        anything=route_match.anything) |
<SYSTEM_TASK:>
Match a URL to a registered pattern.
<END_TASK>
<USER_TASK:>
Description:
def match(self, url: str) -> RouteResolved:
    """
    Match a URL to a registered pattern.
    :param url: URL
    :return: Matched route
    :raises kua.RouteError: If there is no match
    """
    # Normalize so leading/trailing slashes don't influence matching.
    normalized = normalize_url(url)
    url_parts = self._deconstruct_url(normalized)
    return self._match(url_parts)
<SYSTEM_TASK:>
Register a URL pattern into\
<END_TASK>
<USER_TASK:>
Description:
def add(self, url: str, anything: Any) -> None:
    """
    Register a URL pattern into\
    the routes for later matching.
    It's possible to attach any kind of\
    object to the pattern for later\
    retrieving. A dict with methods and callbacks,\
    for example. Anything really.
    Registration order does not matter.\
    Adding a URL first or last makes no difference.
    :param url: URL
    :param anything: Literally anything.
    """ |
    url = normalize_url(url)
    parts = url.split('/')
    curr_partial_routes = self._routes
    curr_key_parts = []
    for part in parts:
        if part.startswith(':*'):
            # :*var matches an arbitrary number of parts, so the max
            # depth becomes the configured custom (unbounded) depth.
            curr_key_parts.append(part[2:])
            part = self._VAR_ANY_NODE
            self._max_depth = self._max_depth_custom
        elif part.startswith(':'):
            # Single-part variable (:var).
            curr_key_parts.append(part[1:])
            part = self._VAR_NODE
        # Descend into (creating if necessary) the child node.
        curr_partial_routes = (curr_partial_routes
                               .setdefault(part, {}))
    # Terminal marker: attach the route payload at the leaf node.
    curr_partial_routes[self._ROUTE_NODE] = _Route(
        key_parts=curr_key_parts,
        anything=anything)
    self._max_depth = max(self._max_depth, depth_of(parts)) |
<SYSTEM_TASK:>
Returns a registered message type object for a given application.
<END_TASK>
<USER_TASK:>
Description:
def get_message_type_for_app(app_name, default_message_type_alias):
    """Returns a registered message type object for a given application.
    Supposed to be used by reusable applications authors,
    to get message type objects which may be overridden by project authors
    using `override_message_type_for_app`.
    :param str|unicode app_name:
    :param str|unicode default_message_type_alias:
    :return: a message type object overridden is so, or the default
    :rtype: MessageBase
    """
    # Use the project-level override for this app when one is registered;
    # otherwise fall back to the default alias.
    overrides = _MESSAGES_FOR_APPS.get(app_name, {})
    alias = overrides.get(default_message_type_alias, default_message_type_alias)
    return get_registered_message_type(alias)
<SYSTEM_TASK:>
Structures recipients data.
<END_TASK>
<USER_TASK:>
Description:
def recipients(messenger, addresses):
    """Structures recipients data.
    :param str|unicode, MessageBase messenger: MessengerBase heir
    :param list[str|unicode]|str|unicode addresses: recipients addresses or Django User
        model heir instances (NOTE: if supported by a messenger)
    :return: list of Recipient
    :rtype: list[Recipient]
    """
    # A messenger alias is resolved into its registered messenger object.
    messenger_obj = (
        get_registered_messenger_object(messenger)
        if isinstance(messenger, six.string_types)
        else messenger)
    return messenger_obj._structure_recipients_data(addresses)
<SYSTEM_TASK:>
Find any upcoming or current live chat to advertise on the home page or
<END_TASK>
<USER_TASK:>
Description:
def upcoming_live_chat(self):
    """
    Find any upcoming or current live chat to advertise on the home page or
    live chat page.
    These are LiveChat's with primary category of 'ask-mama' and category
    of 'live-chat'. The Chat date must be less than 5 days away, or
    happening now.
    """ |
    chat = None
    now = datetime.now()
    # Only consider chats that have not ended yet.
    # NOTE(review): get_query_set() is the pre-Django-1.6 manager API;
    # presumably this project targets an old Django — confirm.
    lcqs = self.get_query_set()
    lcqs = lcqs.filter(
        chat_ends_at__gte=now).order_by('-chat_starts_at')
    # Optionally narrow by the configured primary category slug
    # (settings may legitimately not define these attributes).
    try:
        if settings.LIVECHAT_PRIMARY_CATEGORY:
            lcqs = lcqs.filter(
                primary_category__slug=settings.LIVECHAT_PRIMARY_CATEGORY)
    except AttributeError:
        pass
    # Optionally narrow by the configured category slugs.
    try:
        if settings.LIVECHAT_CATEGORIES:
            lcqs = lcqs.filter(
                categories__slug__in=settings.LIVECHAT_CATEGORIES)
    except AttributeError:
        pass
    if lcqs.exists():
        # Pick the chat with the latest start among the remaining ones.
        chat = lcqs.latest('chat_starts_at')
    return chat
<SYSTEM_TASK:>
Check if there is a live chat on the go, so that we should take
<END_TASK>
<USER_TASK:>
Description:
def get_current_live_chat(self):
    """Return the live chat that is in progress right now, if any.

    Uses :meth:`upcoming_live_chat` to find the closest advertised chat
    and returns it only when it is currently running, so the AskMAMA
    page can be taken over by the live chat.

    :return: the in-progress live chat, or ``None``
    """
    # Fix: removed an unused `now = datetime.now()` local — the in-progress
    # check is fully delegated to chat.is_in_progress().
    chat = self.upcoming_live_chat()
    if chat and chat.is_in_progress():
        return chat
    return None
<SYSTEM_TASK:>
Check if there is a live chat that ended in the last 3 days, and
<END_TASK>
<USER_TASK:>
Description:
def get_last_live_chat(self):
    """ Check if there is a live chat that ended in the last 3 days, and
    return it. We will display a link to it on the articles page.
    """
    reference = datetime.now()
    window = timedelta(days=3)
    # Chats that already ended, most recently ended first.
    ended_chats = self.get_query_set().filter(
        chat_ends_at__lte=reference,
    ).order_by('-chat_ends_at')
    for chat in ended_chats:
        # chat_ends_at + 3 days > now  <=>  ended within the window.
        if chat.chat_ends_at + window > reference:
            return chat
    return None
<SYSTEM_TASK:>
Get the comments that have been submitted for the chat
<END_TASK>
<USER_TASK:>
Description:
def comment_set(self):
    """ Get the comments that have been submitted for the chat
    """
    # Comments are attached generically via (content_type, object_pk).
    content_type = ContentType.objects.get_for_model(self.__class__)
    comments = Comment.objects.filter(
        content_type=content_type,
        object_pk=self.pk,
    ).exclude(is_removed=True).order_by('-submit_date')
    return comments
<SYSTEM_TASK:>
Simplified version. Not distributed friendly.
<END_TASK>
<USER_TASK:>
Description:
def _get_dispatches(filter_kwargs):
    """Simplified version. Not distributed friendly."""
    # Newest messages first; prefetch the related message rows.
    queryset = Dispatch.objects.prefetch_related('message')
    queryset = queryset.filter(**filter_kwargs)
    return list(queryset.order_by('-message__time_created'))
<SYSTEM_TASK:>
Distributed friendly version using ``select for update``.
<END_TASK>
<USER_TASK:>
Description:
def _get_dispatches_for_update(filter_kwargs):
    """Distributed friendly version using ``select for update``.""" |
    dispatches = Dispatch.objects.prefetch_related('message').filter(
        **filter_kwargs
    ).select_for_update(
        **GET_DISPATCHES_ARGS[1]
    ).order_by('-message__time_created')
    try:
        # The query executes lazily; errors surface on materialization.
        dispatches = list(dispatches)
    except NotSupportedError:
        # Backend cannot do SELECT ... FOR UPDATE with these arguments;
        # None tells the caller to fall back to the simple implementation.
        # NOTE: must be caught before DatabaseError (it is a subclass).
        return None
    except DatabaseError: # Probably locked. That's fine.
        return []
    return dispatches
<SYSTEM_TASK:>
Determines whether the submission can be modified.
<END_TASK>
<USER_TASK:>
Description:
def can_modify(self, user=None):
    """Determines whether the submission can be modified.
    Returns a boolean value.
    The 'user' parameter is optional and additionally checks whether
    the given user is authorized to perform these actions.
    This function checks the submission states and assignment deadlines.""" |
    # The user must be authorized to commit these actions.
    if user and not self.user_can_modify(user):
        #self.log('DEBUG', "Submission cannot be modified, user is not an authorized user ({!r} not in {!r})", user, self.authorized_users)
        return False
    # Modification of submissions, that are withdrawn, graded or currently being graded, is prohibited.
    if self.state in [self.WITHDRAWN, self.GRADED, self.GRADING_IN_PROGRESS, ]:
        #self.log('DEBUG', "Submission cannot be modified, is in state '{}'", self.state)
        return False
    # Modification of closed submissions is prohibited.
    if self.is_closed():
        if self.assignment.is_graded():
            # There is a grading procedure, so taking it back would invalidate the tutors work
            #self.log('DEBUG', "Submission cannot be modified, it is closed and graded")
            return False
        else:
            #self.log('DEBUG', "Closed submission can be modified, since it has no grading scheme.")
            return True
    # Submissions, that are executed right now, cannot be modified
    if self.state in [self.TEST_VALIDITY_PENDING, self.TEST_FULL_PENDING]:
        if not self.file_upload:
            # Pending test state without an upload is an invariant violation.
            self.log(
                'CRITICAL', "Submission is in invalid state! State is '{}', but there is no file uploaded!", self.state)
            raise AssertionError()
            # NOTE(review): unreachable after raise — dead code.
            return False
        if self.file_upload.is_executed():
            # The above call informs that the uploaded file is being executed, or execution has been completed.
            # Since the current state is 'PENDING', the execution cannot yet be completed.
            # Thus, the submitted file is being executed right now.
            return False
    # Submissions must belong to an assignment.
    if not self.assignment:
        self.log('CRITICAL', "Submission does not belong to an assignment!")
        raise AssertionError()
    # Submissions, that belong to an assignment where the hard deadline has passed,
    # cannot be modified.
    if self.assignment.hard_deadline and timezone.now() > self.assignment.hard_deadline:
        #self.log('DEBUG', "Submission cannot be modified - assignment's hard deadline has passed (hard deadline is: {})", self.assignment.hard_deadline)
        return False
    # The soft deadline has no effect (yet).
    if self.assignment.soft_deadline:
        if timezone.now() > self.assignment.soft_deadline:
            # The soft deadline has passed
            pass # do nothing.
    #self.log('DEBUG', "Submission can be modified.")
    return True
<SYSTEM_TASK:>
Determines whether a submission can be re-uploaded.
<END_TASK>
<USER_TASK:>
Description:
def can_reupload(self, user=None):
    """Determines whether a submission can be re-uploaded.
    Returns a boolean value.
    Requires: can_modify.
    Re-uploads are allowed only when test executions have failed."""
    failed_states = (self.TEST_VALIDITY_FAILED, self.TEST_FULL_FAILED)
    # Only failed test runs may be re-uploaded, and only while the
    # submission is still modifiable by this user.
    return self.state in failed_states and bool(self.can_modify(user=user))
<SYSTEM_TASK:>
Sends a command to API.
<END_TASK>
<USER_TASK:>
Description:
def _send_command(self, method_name, data=None):
    """Sends a command to API.
    :param str method_name:
    :param dict data:
    :return:
    """ |
    try:
        # URL template is filled with the bot token and API method name.
        response = self.lib.post(self._tpl_url % {'token': self.auth_token, 'method': method_name}, data=data)
        json = response.json()
        if not json['ok']:
            # The API signals errors with ok=False plus a description.
            raise TelegramMessengerException(json['description'])
        return json['result']
    except self.lib.exceptions.RequestException as e:
        # Network-level failure; re-raise as a messenger-specific error.
        # (Only RequestException is caught, so the API-error raise above
        # propagates untouched.)
        raise TelegramMessengerException(e)
<SYSTEM_TASK:>
Registers the built-in message types.
<END_TASK>
<USER_TASK:>
Description:
def register_builtin_message_types():
    """Registers the built-in message types.""" |
    # Function-level imports — presumably to avoid import cycles at
    # module load time; confirm before hoisting.
    from .plain import PlainTextMessage
    from .email import EmailTextMessage, EmailHtmlMessage
    register_message_types(PlainTextMessage, EmailTextMessage, EmailHtmlMessage)
<SYSTEM_TASK:>
Complete with additional information from environment, as available.
<END_TASK>
<USER_TASK:>
Description:
def get_user_details(self, response):
    """ Complete with additional information from environment, as available. """
    # Username is mandatory; the remaining fields default to None.
    details = {
        'username': response[self.ENV_USERNAME],
        'email': response.get(self.ENV_EMAIL, None),
        'first_name': response.get(self.ENV_FIRST_NAME, None),
        'last_name': response.get(self.ENV_LAST_NAME, None),
    }
    first = details['first_name']
    last = details['last_name']
    if first and last:
        details['fullname'] = first + ' ' + last
    logger.debug("Returning user details: " + str(details))
    return details
<SYSTEM_TASK:>
Load TripleOrbitPopulation from saved .h5 file.
<END_TASK>
<USER_TASK:>
Description:
def load_hdf(cls, filename, path=''):
    """
    Load TripleOrbitPopulation from saved .h5 file.
    :param filename:
        HDF file name.
    :param path:
        Path within HDF file where data is stored.
    """
    # Long and short orbits are stored under separate sub-paths.
    long_key = '{}/long/df'.format(path)
    short_key = '{}/short/df'.format(path)
    df_long = pd.read_hdf(filename, long_key)
    df_short = pd.read_hdf(filename, short_key)
    return cls.from_df(df_long, df_short)
<SYSTEM_TASK:>
Radial Velocity time series for star 1 at given times ts.
<END_TASK>
<USER_TASK:>
Description:
def RV_timeseries(self,ts,recalc=False):
    """
    Radial Velocity time series for star 1 at given times ts.
    :param ts:
        Times. If not ``Quantity``, assumed to be in days.
    :type ts:
        array-like or ``Quantity``
    :param recalc: (optional)
        If ``False``, then if called with the exact same ``ts``
        as last call, it will return cached calculation.
    """ |
    if type(ts) != Quantity:
        ts *= u.day
    # NOTE(review): the cache is stored below as `_RV_measurements` but
    # probed here as `RV_measurements` (no underscore) — presumably a
    # property elsewhere exposes it; confirm, otherwise this cache check
    # never succeeds.
    if not recalc and hasattr(self,'RV_measurements'):
        if (ts == self.ts).all():
            return self._RV_measurements
        else:
            pass
    # Recompute: one center-of-mass-corrected dRV evaluation per time.
    RVs = Quantity(np.zeros((len(ts),self.N)),unit='km/s')
    for i,t in enumerate(ts):
        RVs[i,:] = self.dRV(t,com=True)
    # Cache result and the times it corresponds to.
    self._RV_measurements = RVs
    self.ts = ts
    return RVs
<SYSTEM_TASK:>
Creates an OrbitPopulation from a DataFrame.
<END_TASK>
<USER_TASK:>
Description:
def from_df(cls, df):
    """Creates an OrbitPopulation from a DataFrame.
    :param df:
        :class:`pandas.DataFrame` object. Must contain the following
        columns: ``['M1','M2','P','ecc','mean_anomaly','obsx','obsy','obsz']``,
        i.e., as what is accessed via :attr:`OrbitPopulation.dataframe`.
    :return:
        :class:`OrbitPopulation`.
    """
    # Masses and period are positional; the rest go in as keywords.
    positional = [df[name] for name in ('M1', 'M2', 'P')]
    keyword = {name: df[name]
               for name in ('ecc', 'mean_anomaly', 'obsx', 'obsy', 'obsz')}
    return cls(*positional, **keyword)
<SYSTEM_TASK:>
Draw random periods and eccentricities according to empirical survey data.
<END_TASK>
<USER_TASK:>
Description:
def draw_pers_eccs(n,**kwargs):
    """
    Draw random periods and eccentricities according to empirical survey data.
    """
    # Draw periods first, then eccentricities conditioned on them.
    periods = draw_raghavan_periods(n)
    eccentricities = draw_eccs(n, periods, **kwargs)
    return periods, eccentricities
<SYSTEM_TASK:>
draws eccentricities appropriate to given periods, generated according to empirical data from Multiple Star Catalog
<END_TASK>
<USER_TASK:>
Description:
def draw_eccs(n,per=10,binsize=0.1,fuzz=0.05,maxecc=0.97):
    """draws eccentricities appropriate to given periods, generated according to empirical data from Multiple Star Catalog
    """ |
    if np.size(per) == 1 or np.std(np.atleast_1d(per))==0:
        # All periods identical: sample from MSC eccentricities whose
        # periods fall in a log-period bin centered on `per`.
        if np.size(per)>1:
            per = per[0]
        if per==0:
            es = np.zeros(n)
        else:
            ne=0
            # Grow the bin until it holds at least 10 empirical samples.
            while ne<10:
                mask = np.absolute(np.log10(MSC_TRIPLEPERS)-np.log10(per))<binsize/2.
                es = MSC_TRIPDATA.e[mask]
                ne = len(es)
                if ne<10:
                    binsize*=1.1
            # Resample with replacement and jitter by `fuzz`.
            inds = rand.randint(ne,size=n)
            es = es[inds] * (1 + rand.normal(size=n)*fuzz)
    else:
        # Heterogeneous periods: split at P = 25 (days) and draw from the
        # corresponding empirical eccentricity pools.
        longmask = (per > 25)
        shortmask = (per <= 25)
        es = np.zeros(np.size(per))
        elongs = MSC_TRIPDATA.e[MSC_TRIPLEPERS > 25]
        eshorts = MSC_TRIPDATA.e[MSC_TRIPLEPERS <= 25]
        n = np.size(per)
        nlong = longmask.sum()
        nshort = shortmask.sum()
        nelongs = np.size(elongs)
        neshorts = np.size(eshorts)
        ilongs = rand.randint(nelongs,size=nlong)
        ishorts = rand.randint(neshorts,size=nshort)
        es[longmask] = elongs[ilongs]
        es[shortmask] = eshorts[ishorts]
        es = es * (1 + rand.normal(size=n)*fuzz)
    # Clip at the maximum allowed eccentricity; jitter can produce small
    # negative values, hence the absolute value.
    es[es>maxecc] = maxecc
    return np.absolute(es) |
<SYSTEM_TASK:>
Returns boolean array that is True where two stars are within Roche lobe
<END_TASK>
<USER_TASK:>
Description:
def withinroche(semimajors,M1,R1,M2,R2):
    """
    Returns boolean array that is True where two stars are within Roche lobe
    """
    # Compare the summed stellar radii (cm) against the Roche-lobe
    # radius for mass ratio q = M1/M2 at the given separation.
    mass_ratio = M1 / M2
    roche_radius = rochelobe(mass_ratio) * semimajors * AU
    return (R1 + R2) * RSUN > roche_radius
<SYSTEM_TASK:>
Returns semimajor axis in AU given P in days, mstar in solar masses.
<END_TASK>
<USER_TASK:>
Description:
def semimajor(P,mstar=1):
    """Returns semimajor axis in AU given P in days, mstar in solar masses.
    """
    # Kepler's third law: a^3 = G * M * (P / 2pi)^2, then cm -> AU.
    angular = P * DAY / (2 * np.pi)
    a_cm = (angular**2 * G * mstar * MSUN)**(1. / 3)
    return a_cm / AU
<SYSTEM_TASK:>
Returns fraction of total flux in first argument, assuming all are magnitudes.
<END_TASK>
<USER_TASK:>
Description:
def fluxfrac(*mags):
    """Returns fraction of total flux in first argument, assuming all are magnitudes.
    """
    # Convert each magnitude to a relative flux (F = 10^(-0.4 m)).
    fluxes = [10**(-0.4 * mag) for mag in mags]
    return fluxes[0] / sum(fluxes)
<SYSTEM_TASK:>
Returns a recursive list of all non-hidden files in and below the current
<END_TASK>
<USER_TASK:>
Description:
def get_files(path):
    """
    Returns a recursive list of all non-hidden files in and below the current
    directory.
    """
    collected = []
    for root, dirnames, filenames in os.walk(path):
        # Prune hidden directories in place so os.walk skips descending
        # into them, and drop hidden files.
        dirnames[:] = [d for d in dirnames if not d.startswith('.')]
        collected.extend(
            os.path.join(root, filename)
            for filename in filenames if not filename.startswith('.'))
    return collected
<SYSTEM_TASK:>
Makes a simple plot of signal
<END_TASK>
<USER_TASK:>
Description:
def plot(self, fig=None, plot_trap=False, name=False, trap_color='g',
         trap_kwargs=None, **kwargs):
    """
    Makes a simple plot of signal
    :param fig: (optional)
        Argument for :func:`plotutils.setfig`.
    :param plot_trap: (optional)
        Whether to plot the (best-fit least-sq) trapezoid fit.
    :param name: (optional)
        Whether to annotate plot with the name of the signal;
        can be ``True`` (in which case ``self.name`` will be
        used), or any arbitrary string.
    :param trap_color: (optional)
        Color of trapezoid fit line.
    :param trap_kwargs: (optional)
        Keyword arguments to pass to trapezoid fit line.
    :param **kwargs: (optional)
        Additional keyword arguments passed to ``plt.plot``.
    """ |
    setfig(fig)
    # Raw signal as points.
    plt.plot(self.ts,self.fs,'.',**kwargs)
    if plot_trap and hasattr(self,'trapfit'):
        if trap_kwargs is None:
            trap_kwargs = {}
        plt.plot(self.ts, traptransit(self.ts,self.trapfit),
                 color=trap_color, **trap_kwargs)
    # NOTE(review): the default name=False still satisfies
    # `name is not None`, so the annotation (with self.name) is drawn
    # unless name=None is passed explicitly — confirm this is intended.
    if name is not None:
        if type(name)==type(''):
            text = name
        else:
            text = self.name
        plt.annotate(text,xy=(0.1,0.1),xycoords='axes fraction',fontsize=22)
    # y-limits from the fitted depth when available ...
    if hasattr(self,'depthfit') and not np.isnan(self.depthfit[0]):
        lo = 1 - 3*self.depthfit[0]
        hi = 1 + 2*self.depthfit[0]
    else:
        lo = 1
        hi = 1
    # ... widened to at least +/- 7 robust (quantile-trimmed) sigmas.
    sig = qstd(self.fs,0.005)
    hi = max(hi,self.fs.mean() + 7*sig)
    lo = min(lo,self.fs.mean() - 7*sig)
    logging.debug('lo={}, hi={}'.format(lo,hi))
    plt.ylim((lo,hi))
    plt.xlabel('time [days]')
    plt.ylabel('Relative flux')
<SYSTEM_TASK:>
Plots a 2d density histogram of provided data
<END_TASK>
<USER_TASK:>
Description:
def plot2dhist(xdata,ydata,cmap='binary',interpolation='nearest',
               fig=None,logscale=True,xbins=None,ybins=None,
               nbins=50,pts_only=False,**kwargs):
    """Plots a 2d density histogram of provided data
    :param xdata,ydata: (array-like)
        Data to plot.
    :param cmap: (optional)
        Colormap to use for density plot.
    :param interpolation: (optional)
        Interpolation scheme for display (passed to ``plt.imshow``).
    :param fig: (optional)
        Argument passed to :func:`setfig`.
    :param logscale: (optional)
        If ``True`` then the colormap will be based on a logarithmic
        scale, rather than linear.
    :param xbins,ybins: (optional)
        Bin edges to use (if ``None``, then use ``np.histogram2d`` to
        find bins automatically).
    :param nbins: (optional)
        Number of bins to use (if ``None``, then use ``np.histogram2d`` to
        find bins automatically).
    :param pts_only: (optional)
        If ``True``, then just a scatter plot of the points is made,
        rather than the density plot.
    :param **kwargs:
        Keyword arguments passed either to ``plt.plot`` or ``plt.imshow``
        depending upon whether ``pts_only`` is set to ``True`` or not.
    """
    setfig(fig)
    if pts_only:
        plt.plot(xdata,ydata,**kwargs)
        return

    # Keep only finite points for the histogram.
    ok = (~np.isnan(xdata) & ~np.isnan(ydata) &
          ~np.isinf(xdata) & ~np.isinf(ydata))
    # BUGFIX: was `if ~ok.sum() > 0:`, which bitwise-negates the *count*
    # of good points (never positive for a non-negative count), so the
    # warnings below could never fire. `(~ok).sum()` counts bad points.
    if (~ok).sum() > 0:
        logging.warning('{} x values and {} y values are nan'.format(np.isnan(xdata).sum(),
                                                                     np.isnan(ydata).sum()))
        logging.warning('{} x values and {} y values are inf'.format(np.isinf(xdata).sum(),
                                                                     np.isinf(ydata).sum()))

    if xbins is not None and ybins is not None:
        H,xs,ys = np.histogram2d(xdata[ok],ydata[ok],bins=(xbins,ybins))
    else:
        H,xs,ys = np.histogram2d(xdata[ok],ydata[ok],bins=nbins)
    # Transpose so rows correspond to y, as imshow expects.
    H = H.T

    if logscale:
        H = np.log(H)

    extent = [xs[0],xs[-1],ys[0],ys[-1]]
    plt.imshow(H,extent=extent,interpolation=interpolation,
               aspect='auto',cmap=cmap,origin='lower',**kwargs)
<SYSTEM_TASK:>
a in AU, R in Rsun, inc & w in radians
<END_TASK>
<USER_TASK:>
Description:
def impact_parameter(a, R, inc, ecc=0, w=0, return_occ=False):
    """Compute the transit impact parameter (and optionally the occultation one).

    a in AU, R in Rsun, inc & w in radians

    :param a: semimajor axis [AU]
    :param R: stellar radius [Rsun]
    :param inc: orbital inclination [radians]
    :param ecc: (optional) orbital eccentricity
    :param w: (optional) argument of periastron [radians]
    :param return_occ: (optional) if ``True``, also return the
        occultation impact parameter.
    :return: ``b_tra``, or ``(b_tra, b_occ)`` if ``return_occ`` is True.
    """
    b_tra = a*AU*np.cos(inc)/(R*RSUN) * (1-ecc**2)/(1 + ecc*np.sin(w))
    if return_occ:
        # BUGFIX: this branch previously overwrote b_tra with the
        # occultation formula and then returned an undefined name
        # `b_occ`, raising NameError. Assign b_occ (sign of the
        # ecc*sin(w) term flipped for occultation) and keep b_tra.
        b_occ = a*AU*np.cos(inc)/(R*RSUN) * (1-ecc**2)/(1 - ecc*np.sin(w))
        return b_tra, b_occ
    else:
        return b_tra
<SYSTEM_TASK:>
Returns the minimum inclination at which two bodies from two given sets eclipse
<END_TASK>
<USER_TASK:>
Description:
def minimum_inclination(P,M1,M2,R1,R2):
    """
    Returns the minimum inclination at which two bodies from two given sets eclipse
    Only counts systems not within each other's Roche radius
    :param P:
        Orbital periods.
    :param M1,M2,R1,R2:
        Masses and radii of primary and secondary stars.
    """ |
    P,M1,M2,R1,R2 = (np.atleast_1d(P),
                     np.atleast_1d(M1),
                     np.atleast_1d(M2),
                     np.atleast_1d(R1),
                     np.atleast_1d(R2))
    semimajors = semimajor(P,M1+M2)
    # (R1+R2)/a sets the cosine-of-inclination threshold for an eclipse.
    rads = ((R1+R2)*RSUN/(semimajors*AU))
    # Exclude NaNs and Roche-lobe-overflowing (contact) systems.
    ok = (~np.isnan(rads) & ~withinroche(semimajors,M1,R1,M2,R2))
    if ok.sum() == 0:
        # No valid system: dump the inputs for debugging, then raise a
        # specific error depending on why everything was excluded.
        logging.error('P: {}'.format(P))
        logging.error('M1: {}'.format(M1))
        logging.error('M2: {}'.format(M2))
        logging.error('R1: {}'.format(R1))
        logging.error('R2: {}'.format(R2))
        if np.all(withinroche(semimajors,M1,R1,M2,R2)):
            raise AllWithinRocheError('All simulated systems within Roche lobe')
        else:
            raise EmptyPopulationError('no valid systems! (see above)')
    # The largest (R1+R2)/a among valid systems gives the smallest
    # inclination at which any of them can eclipse.
    mininc = np.arccos(rads[ok].max())*180/np.pi
    return mininc |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.